chore: Remove Skia 🎨 (#1740)

* Revert "feat: Skia for Android (#1731)"

This reverts commit a7c137da07.

* Remove some skia

* Remove all the Skia stuff.

* Update useFrameProcessor.ts

* Update lockfiles

* fix: Use native Preview again

* Use `OpenGLTexture&` again

* Remove `PreviewOutput` (we use `SurfaceView` in parallel)

* fix: Log photo widths

* fix: Fix cpplint
Author: Marc Rousavy
Date: 2023-09-01 12:20:17 +02:00 (committed by GitHub)
Commit: 0a28454579 (parent: 22829083cd)
75 changed files with 404 additions and 2415 deletions

.github/workflows/build-android.yml

@@ -60,47 +60,6 @@ jobs:
       - name: Run Gradle Build for example/android/
         run: cd example/android && ./gradlew assembleDebug --build-cache && cd ../..
-  build-no-skia:
-    name: Build Android Example App (without Skia)
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Setup JDK 11
-        uses: actions/setup-java@v1
-        with:
-          java-version: 11
-      - name: Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "::set-output name=dir::$(yarn cache dir)"
-      - name: Restore node_modules from cache
-        uses: actions/cache@v2
-        id: yarn-cache
-        with:
-          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-yarn-
-      - name: Install node_modules
-        run: yarn install --frozen-lockfile
-      - name: Install node_modules for example/
-        run: yarn install --frozen-lockfile --cwd example
-      - name: Remove react-native-skia
-        run: yarn remove @shopify/react-native-skia --cwd example
-      - name: Restore Gradle cache
-        uses: actions/cache@v2
-        with:
-          path: |
-            ~/.gradle/caches
-            ~/.gradle/wrapper
-          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
-          restore-keys: |
-            ${{ runner.os }}-gradle-
-      - name: Run Gradle Build for example/android/
-        run: cd example/android && ./gradlew assembleDebug --build-cache && cd ../..
   build-no-frame-processors:
     name: Build Android Example App (without Frame Processors)
     runs-on: ubuntu-latest

.github/workflows/build-ios.yml

@@ -79,68 +79,6 @@ jobs:
           build \
           CODE_SIGNING_ALLOWED=NO | xcpretty"
-  build-no-skia:
-    name: Build iOS Example App without Skia
-    runs-on: macOS-latest
-    defaults:
-      run:
-        working-directory: example/ios
-    steps:
-      - uses: actions/checkout@v2
-      - name: Get yarn cache directory path
-        id: yarn-cache-dir-path
-        run: echo "::set-output name=dir::$(yarn cache dir)"
-      - name: Restore node_modules from cache
-        uses: actions/cache@v2
-        id: yarn-cache
-        with:
-          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-yarn-
-      - name: Install node_modules for example/
-        run: yarn install --frozen-lockfile --cwd ..
-      - name: Remove react-native-skia
-        run: yarn remove @shopify/react-native-skia --cwd ..
-      - name: Restore buildcache
-        uses: mikehardy/buildcache-action@v1
-        continue-on-error: true
-      - name: Setup Ruby (bundle)
-        uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: 2.6.10
-          bundler-cache: true
-          working-directory: example/ios
-      - name: Restore Pods cache
-        uses: actions/cache@v2
-        with:
-          path: |
-            example/ios/Pods
-            ~/Library/Caches/CocoaPods
-            ~/.cocoapods
-          key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-pods-
-      - name: Install Pods
-        run: bundle exec pod check || bundle exec pod install
-      - name: Install xcpretty
-        run: gem install xcpretty
-      - name: Build App
-        run: "set -o pipefail && xcodebuild \
-          CC=clang CPLUSPLUS=clang++ LD=clang LDPLUSPLUS=clang++ \
-          -derivedDataPath build -UseModernBuildSystem=YES \
-          -workspace VisionCameraExample.xcworkspace \
-          -scheme VisionCameraExample \
-          -sdk iphonesimulator \
-          -configuration Debug \
-          -destination 'platform=iOS Simulator,name=iPhone 11 Pro' \
-          build \
-          CODE_SIGNING_ALLOWED=NO | xcpretty"
   build-no-frame-processors:
     name: Build iOS Example App without Frame Processors
     runs-on: macOS-latest

VisionCamera.podspec

@@ -9,21 +9,12 @@ if defined?($VCDisableFrameProcessors)
   Pod::UI.puts "[VisionCamera] $VCDisableFrameProcesors is set to #{$VCDisableFrameProcessors}!"
   forceDisableFrameProcessors = $VCDisableFrameProcessors
 end
-forceDisableSkia = false
-if defined?($VCDisableSkia)
-  Pod::UI.puts "[VisionCamera] $VCDisableSkia is set to #{$VCDisableSkia}!"
-  forceDisableSkia = $VCDisableSkia
-end
 Pod::UI.puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
 workletsPath = File.join(nodeModules, "react-native-worklets-core")
 hasWorklets = File.exist?(workletsPath) && !forceDisableFrameProcessors
 Pod::UI.puts("[VisionCamera] react-native-worklets-core #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!")
-skiaPath = File.join(nodeModules, "@shopify", "react-native-skia")
-hasSkia = hasWorklets && File.exist?(skiaPath) && !forceDisableSkia
-Pod::UI.puts("[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!")
 Pod::Spec.new do |s|
   s.name = "VisionCamera"
   s.version = package["version"]
@@ -37,10 +28,10 @@ Pod::Spec.new do |s|
   s.source = { :git => "https://github.com/mrousavy/react-native-vision-camera.git", :tag => "#{s.version}" }
   s.pod_target_xcconfig = {
-    "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) SK_METAL=1 SK_GANESH=1 VISION_CAMERA_ENABLE_FRAME_PROCESSORS=#{hasWorklets} VISION_CAMERA_ENABLE_SKIA=#{hasSkia}",
-    "OTHER_SWIFT_FLAGS" => "$(inherited) #{hasWorklets ? "-D VISION_CAMERA_ENABLE_FRAME_PROCESSORS" : ""} #{hasSkia ? "-D VISION_CAMERA_ENABLE_SKIA" : ""}",
+    "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) SK_METAL=1 SK_GANESH=1 VISION_CAMERA_ENABLE_FRAME_PROCESSORS=#{hasWorklets}",
+    "OTHER_SWIFT_FLAGS" => "$(inherited) #{hasWorklets ? "-D VISION_CAMERA_ENABLE_FRAME_PROCESSORS" : ""}",
     "CLANG_CXX_LANGUAGE_STANDARD" => "c++17",
-    "HEADER_SEARCH_PATHS" => "\"$(PODS_TARGET_SRCROOT)/cpp/\"/** \"#{skiaPath}/cpp/skia/**\" "
+    "HEADER_SEARCH_PATHS" => "\"$(PODS_TARGET_SRCROOT)/cpp/\"/** "
   }
   s.requires_arc = true
@@ -63,10 +54,6 @@ Pod::Spec.new do |s|
     hasWorklets ? "ios/Frame Processor/FrameProcessorPluginRegistry.h" : "",
     hasWorklets ? "ios/Frame Processor/VisionCameraProxy.h" : "",
     hasWorklets ? "cpp/**/*.{cpp}" : "",
-    # Skia Frame Processors
-    hasSkia ? "ios/Skia Render Layer/*.{m,mm,swift}" : "",
-    hasSkia ? "ios/Skia Render Layer/SkiaRenderer.h" : "",
   ]
   # Any private headers that are not globally unique should be mentioned here.
   # Otherwise there will be a nameclash, since CocoaPods flattens out any header directories
@@ -82,8 +69,5 @@ Pod::Spec.new do |s|
   if hasWorklets
     s.dependency "react-native-worklets-core"
-    if hasSkia
-      s.dependency "react-native-skia"
-    end
   end
 end

android/CMakeLists.txt

@@ -12,7 +12,7 @@ find_package(ReactAndroid REQUIRED CONFIG)
 find_package(fbjni REQUIRED CONFIG)
 find_library(LOG_LIB log)
-add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS} -DVISION_CAMERA_ENABLE_SKIA=${ENABLE_SKIA} -DEGL_EGLEXT_PROTOTYPES=1)
+add_definitions(-DVISION_CAMERA_ENABLE_FRAME_PROCESSORS=${ENABLE_FRAME_PROCESSORS})
 # Add react-native-vision-camera sources
@@ -35,11 +35,6 @@ add_library(
         src/main/cpp/frameprocessor/java-bindings/JFrameProcessorPlugin.cpp
         src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp
         src/main/cpp/frameprocessor/java-bindings/JVisionCameraScheduler.cpp
-        # Skia Frame Processor
-        src/main/cpp/skia/SkiaRenderer.cpp
-        src/main/cpp/skia/JSkiaFrameProcessor.cpp
-        src/main/cpp/skia/DrawableFrameHostObject.cpp
-        src/main/cpp/skia/VisionCameraSkiaContext.cpp
 )
 # Header Search Paths (includes)
@@ -50,8 +45,6 @@ target_include_directories(
         "src/main/cpp"
         "src/main/cpp/frameprocessor"
        "src/main/cpp/frameprocessor/java-bindings"
-        "src/main/cpp/skia"
-        "src/main/cpp/skia/java-bindings"
         "${NODE_MODULES_DIR}/react-native/ReactCommon"
         "${NODE_MODULES_DIR}/react-native/ReactCommon/callinvoker"
         "${NODE_MODULES_DIR}/react-native/ReactAndroid/src/main/jni/react/turbomodule" # <-- CallInvokerHolder JNI wrapper
@@ -65,6 +58,8 @@ target_link_libraries(
         ReactAndroid::jsi              # <-- RN: JSI
         ReactAndroid::reactnativejni   # <-- RN: React Native JNI bindings
         fbjni::fbjni                   # <-- fbjni
+        GLESv2                         # <-- OpenGL (for VideoPipeline)
+        EGL                            # <-- OpenGL (EGL) (for VideoPipeline)
 )
 # Optionally also add Frame Processors here
@@ -75,57 +70,4 @@ if(ENABLE_FRAME_PROCESSORS)
         react-native-worklets-core::rnworklets
   )
   message("VisionCamera: Frame Processors enabled!")
-  # Optionally also add Skia Integration here
-  if(ENABLE_SKIA)
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSK_GL -DSK_GANESH -DSK_BUILD_FOR_ANDROID")
-    find_package(shopify_react-native-skia REQUIRED CONFIG)
-    set(SKIA_PACKAGE shopify_react-native-skia::rnskia)
-    set(RNSKIA_PATH ${NODE_MODULES_DIR}/@shopify/react-native-skia)
-    set (SKIA_LIBS_PATH "${RNSKIA_PATH}/libs/android/${ANDROID_ABI}")
-    add_library(skia STATIC IMPORTED)
-    set_property(TARGET skia PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libskia.a")
-    add_library(svg STATIC IMPORTED)
-    set_property(TARGET svg PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libsvg.a")
-    add_library(skshaper STATIC IMPORTED)
-    set_property(TARGET skshaper PROPERTY IMPORTED_LOCATION "${SKIA_LIBS_PATH}/libskshaper.a")
-    # We need to include the headers from skia
-    # (Note: rnskia includes all their files without any relative path
-    #  so for example "include/core/SkImage.h" becomes #include "SkImage.h".
-    #  That's why for the prefab of rnskia, we flatten all cpp files into
-    #  just one directory. HOWEVER, skia itself uses relative paths in
-    #  their include statements, and so we have to include the path to skia)
-    target_include_directories(
-        ${PACKAGE_NAME}
-        PRIVATE
-        "${RNSKIA_PATH}/cpp/api/"
-        "${RNSKIA_PATH}/cpp/jsi/"
-        "${RNSKIA_PATH}/cpp/rnskia/"
-        "${RNSKIA_PATH}/cpp/skia"
-        "${RNSKIA_PATH}/cpp/skia/include/"
-        "${RNSKIA_PATH}/cpp/skia/include/config/"
-        "${RNSKIA_PATH}/cpp/skia/include/core/"
-        "${RNSKIA_PATH}/cpp/skia/include/effects/"
-        "${RNSKIA_PATH}/cpp/skia/include/utils/"
-        "${RNSKIA_PATH}/cpp/skia/include/pathops/"
-        "${RNSKIA_PATH}/cpp/skia/modules/"
-        "${RNSKIA_PATH}/cpp/utils/"
-    )
-    target_link_libraries(
-        ${PACKAGE_NAME}
-        GLESv2          # <-- Optional: OpenGL (for Skia)
-        EGL             # <-- Optional: OpenGL (EGL) (for Skia)
-        ${SKIA_PACKAGE} # <-- Optional: RN Skia
-        jnigraphics
-        skia
-        svg
-        skshaper
-    )
-    message("VisionCamera: Skia enabled!")
-  endif()
 endif()

android/build.gradle

@@ -66,10 +66,7 @@ static def findNodeModules(baseDir) {
 def nodeModules = findNodeModules(projectDir)
 def hasWorklets = !safeExtGet("VisionCamera_disableFrameProcessors", false) && findProject(":react-native-worklets-core") != null
-def hasSkia = !safeExtGet("VisionCamera_disableSkia", false) && findProject(":shopify_react-native-skia") != null
 logger.warn("[VisionCamera] react-native-worklets-core ${hasWorklets ? "found" : "not found"}, Frame Processors ${hasWorklets ? "enabled" : "disabled"}!")
-logger.warn("[VisionCamera] react-native-skia ${hasSkia ? "found" : "not found"}, Skia Frame Processors ${hasSkia ? "enabled" : "disabled"}!")
 repositories {
   google()
@@ -105,8 +102,7 @@ android {
         cppFlags "-O2 -frtti -fexceptions -Wall -Wno-unused-variable -fstack-protector-all"
         arguments "-DANDROID_STL=c++_shared",
                   "-DNODE_MODULES_DIR=${nodeModules}",
-                  "-DENABLE_FRAME_PROCESSORS=${hasWorklets}",
-                  "-DENABLE_SKIA=${hasWorklets && hasSkia}"
+                  "-DENABLE_FRAME_PROCESSORS=${hasWorklets}"
         abiFilters (*reactNativeArchitectures())
       }
     }
@@ -152,11 +148,6 @@ dependencies {
   if (hasWorklets) {
     // Frame Processor integration (optional)
     implementation project(":react-native-worklets-core")
-    if (hasSkia) {
-      // Skia Frame Processor integration (optional)
-      implementation project(":shopify_react-native-skia")
-    }
   }
 }

android/src/main/cpp/OpenGLContext.cpp

@@ -10,7 +10,6 @@
 #include <android/native_window.h>
 #include <android/log.h>
-#include <chrono>
 #include "OpenGLError.h"
@@ -151,19 +150,4 @@ OpenGLTexture OpenGLContext::createTexture(OpenGLTexture::Type type, int width,
   };
 }
-void OpenGLContext::getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels) {
-  glActiveTexture(GL_TEXTURE0);
-  glBindTexture(texture.target, texture.id);
-  glReadPixels(0, 0, texture.width, texture.height, GL_RGBA, GL_UNSIGNED_BYTE, *outPixels);
-  // height * width * components per pixel (4 for RGBA) * size of one number (byte)
-  *outSize = texture.height * texture.width * 4 * sizeof(uint8_t);
-}
-long OpenGLContext::getCurrentPresentationTime() {
-  auto now = std::chrono::steady_clock::now();
-  auto duration = now.time_since_epoch();
-  long long milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();
-  return static_cast<long>(milliseconds);
-}
 } // namespace vision

android/src/main/cpp/OpenGLContext.h

@@ -6,13 +6,12 @@
 #include <EGL/egl.h>
 #include <GLES2/gl2.h>
-#include "OpenGLTexture.h"
 #include <memory>
 #include <functional>
-#include <chrono>
 #include "PassThroughShader.h"
+#include "OpenGLTexture.h"
 namespace vision {
@@ -53,16 +52,6 @@ class OpenGLContext {
    */
  OpenGLTexture createTexture(OpenGLTexture::Type type, int width, int height);
-  /**
-   * Gets the pixels as CPU accessible memory of the given input texture
-   */
-  void getPixelsOfTexture(const OpenGLTexture& texture, size_t* outSize, uint8_t** outPixels);
-  /**
-   * Gets the current presentation time for this OpenGL surface.
-   */
-  long getCurrentPresentationTime();
 public:
   EGLDisplay display = EGL_NO_DISPLAY;
   EGLContext context = EGL_NO_CONTEXT;
@@ -70,13 +59,12 @@ class OpenGLContext {
   EGLConfig config = nullptr;
 private:
-  explicit OpenGLContext() = default;
+  OpenGLContext() = default;
   void destroy();
   void ensureOpenGL();
 private:
   PassThroughShader _passThroughShader;
-  std::chrono::time_point<std::chrono::system_clock> _startTime;
 private:
   static constexpr auto TAG = "OpenGLContext";

android/src/main/cpp/OpenGLRenderer.cpp

@@ -43,35 +43,32 @@ void OpenGLRenderer::destroy() {
   }
 }
-EGLSurface OpenGLRenderer::getEGLSurface() {
+void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
   if (_surface == EGL_NO_SURFACE) {
     __android_log_print(ANDROID_LOG_INFO, TAG, "Creating Window Surface...");
     _context->use();
     _surface = eglCreateWindowSurface(_context->display, _context->config, _outputSurface, nullptr);
   }
-  return _surface;
-}
-void OpenGLRenderer::renderTextureToSurface(const OpenGLTexture& texture, float* transformMatrix) {
-  // 1. Get (or create) the OpenGL EGLSurface which is the window render target (Android Surface)
-  EGLSurface surface = getEGLSurface();
-  // 2. Activate the OpenGL context for this surface
-  _context->use(surface);
-  OpenGLError::checkIfError("Failed to use context!");
-  // 3. Set the viewport for rendering
+  // 1. Activate the OpenGL context for this surface
+  _context->use(_surface);
+  // 2. Set the viewport for rendering
   glViewport(0, 0, _width, _height);
   glDisable(GL_BLEND);
-  glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
-  glClear(GL_COLOR_BUFFER_BIT);
-  // 4. Draw it using the pass-through shader which binds the texture and applies transforms
+  // 3. Bind the input texture
+  glBindTexture(texture.target, texture.id);
+  glTexParameteri(texture.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+  glTexParameteri(texture.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameteri(texture.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameteri(texture.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  // 4. Draw it using the pass-through shader which also applies transforms
   _passThroughShader.draw(texture, transformMatrix);
-  // 5 Swap buffers to pass it to the window surface
-  _context->flush();
-  OpenGLError::checkIfError("Failed to render Frame to Surface!");
+  // 5. Swap buffers to pass it to the window surface
+  eglSwapBuffers(_context->display, _surface);
 }
 } // namespace vision
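Note: the transformMatrix passed into renderTextureToSurface() above comes from Android's SurfaceTexture#getTransformMatrix() and is applied to texture coordinates by the pass-through shader on the GPU. As a rough illustration of what that column-major 4x4 matrix does to a (u, v) coordinate (an illustrative sketch, not code from this commit; the helper name is hypothetical):

    #include <array>

    // Applies a SurfaceTexture-style 4x4 column-major transform to a (u, v)
    // texture coordinate -- the same mapping the vertex shader performs per vertex.
    std::array<float, 2> transformUV(const float m[16], float u, float v) {
      // matrix * (u, v, 0, 1); only the first two rows matter for 2D coordinates
      float tu = m[0] * u + m[4] * v + m[12];
      float tv = m[1] * u + m[5] * v + m[13];
      return {tu, tv};
    }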

android/src/main/cpp/OpenGLRenderer.h

@@ -11,6 +11,7 @@
 #include "PassThroughShader.h"
 #include "OpenGLContext.h"
+#include "OpenGLTexture.h"
 namespace vision {
@@ -39,11 +40,6 @@ class OpenGLRenderer {
    */
   void destroy();
-  /**
-   * Gets the EGLSurface (window surface) that this OpenGL renderer is configured to render to.
-   */
-  EGLSurface getEGLSurface();
 private:
   explicit OpenGLRenderer(std::shared_ptr<OpenGLContext> context, ANativeWindow* surface);
View File

@ -10,28 +10,24 @@
#include "OpenGLError.h" #include "OpenGLError.h"
#include <string> #include <string>
#include <android/log.h>
namespace vision { namespace vision {
PassThroughShader::~PassThroughShader() { PassThroughShader::~PassThroughShader() {
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
if (_programId != NO_SHADER) { if (_programId != NO_SHADER) {
glDeleteProgram(_programId); glDeleteProgram(_programId);
_programId = NO_SHADER; _programId = NO_SHADER;
} }
if (_vertexBuffer != NO_BUFFER) {
glDeleteBuffers(1, &_vertexBuffer);
_vertexBuffer = NO_BUFFER;
}
} }
void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) { void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatrix) {
// 1. Set up Shader Program // 1. Set up Shader Program
if (_programId == NO_SHADER || _shaderTarget != texture.target) { if (_programId == NO_SHADER) {
if (_programId != NO_SHADER) { _programId = createProgram();
glDeleteProgram(_programId);
}
_programId = createProgram(texture.target);
glUseProgram(_programId); glUseProgram(_programId);
_vertexParameters = { _vertexParameters = {
.aPosition = glGetAttribLocation(_programId, "aPosition"), .aPosition = glGetAttribLocation(_programId, "aPosition"),
@ -41,7 +37,6 @@ void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatri
_fragmentParameters = { _fragmentParameters = {
.uTexture = glGetUniformLocation(_programId, "uTexture"), .uTexture = glGetUniformLocation(_programId, "uTexture"),
}; };
_shaderTarget = texture.target;
} }
glUseProgram(_programId); glUseProgram(_programId);
@ -49,10 +44,9 @@ void PassThroughShader::draw(const OpenGLTexture& texture, float* transformMatri
// 2. Set up Vertices Buffer // 2. Set up Vertices Buffer
if (_vertexBuffer == NO_BUFFER) { if (_vertexBuffer == NO_BUFFER) {
glGenBuffers(1, &_vertexBuffer); glGenBuffers(1, &_vertexBuffer);
}
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW); glBufferData(GL_ARRAY_BUFFER, sizeof(VERTICES), VERTICES, GL_STATIC_DRAW);
}
// 3. Pass all uniforms/attributes for vertex shader // 3. Pass all uniforms/attributes for vertex shader
glEnableVertexAttribArray(_vertexParameters.aPosition); glEnableVertexAttribArray(_vertexParameters.aPosition);
@ -97,10 +91,9 @@ GLuint PassThroughShader::loadShader(GLenum shaderType, const char* shaderCode)
return shader; return shader;
} }
GLuint PassThroughShader::createProgram(GLenum textureTarget) { GLuint PassThroughShader::createProgram() {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER); GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER);
auto fragmentShaderCode = textureTarget == GL_TEXTURE_EXTERNAL_OES ? FRAGMENT_SHADER_EXTERNAL_TEXTURE : FRAGMENT_SHADER; GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
GLuint program = glCreateProgram(); GLuint program = glCreateProgram();
if (program == 0) throw OpenGLError("Failed to create pass-through program!"); if (program == 0) throw OpenGLError("Failed to create pass-through program!");
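Note: loadShader() itself is only partially visible in this diff (just its tail appears in the last hunk). A typical GLES2 implementation of the compile step it presumably performs looks roughly like this (a sketch under that assumption, with simplified error handling):

    #include <GLES2/gl2.h>
    #include <stdexcept>

    GLuint loadShader(GLenum shaderType, const char* shaderCode) {
      GLuint shader = glCreateShader(shaderType);       // vertex or fragment shader
      glShaderSource(shader, 1, &shaderCode, nullptr);  // attach the GLSL source
      glCompileShader(shader);
      GLint success = GL_FALSE;
      glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
      if (success != GL_TRUE) {
        glDeleteShader(shader);
        throw std::runtime_error("Failed to compile shader!");
      }
      return shader;
    }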

android/src/main/cpp/PassThroughShader.h

@@ -14,7 +14,6 @@ namespace vision {
 #define NO_SHADER 0
 #define NO_POSITION 0
 #define NO_BUFFER 0
-#define NO_SHADER_TARGET 0
 struct Vertex {
   GLfloat position[2];
@@ -28,17 +27,17 @@ class PassThroughShader {
   /**
    * Draw the texture using this shader.
+   * Note: At the moment, only EXTERNAL textures are supported by the Shader.
    */
   void draw(const OpenGLTexture& texture, float* transformMatrix);
 private:
   // Loading
   static GLuint loadShader(GLenum shaderType, const char* shaderCode);
-  static GLuint createProgram(GLenum textureTarget);
+  static GLuint createProgram();
 private:
-  // Shader program in memory
-  GLenum _shaderTarget = NO_SHADER_TARGET;
+  // Parameters
   GLuint _programId = NO_SHADER;
   GLuint _vertexBuffer = NO_BUFFER;
   struct VertexParameters {
@@ -71,17 +70,7 @@ class PassThroughShader {
     }
   )";
   static constexpr char FRAGMENT_SHADER[] = R"(
-    precision mediump float;
-    varying vec2 vTexCoord;
-    uniform sampler2D uTexture;
-    void main() {
-      gl_FragColor = texture2D(uTexture, vTexCoord);
-    }
-  )";
-  static constexpr char FRAGMENT_SHADER_EXTERNAL_TEXTURE[] = R"(
     #extension GL_OES_EGL_image_external : require
     precision mediump float;
     varying vec2 vTexCoord;
     uniform samplerExternalOES uTexture;
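Note: after this change only the external-texture fragment shader remains, i.e. the shader always samples a samplerExternalOES (the Camera's GL_TEXTURE_EXTERNAL_OES texture), which is why the #extension GL_OES_EGL_image_external directive is required. A minimal runtime check for that extension could look like this (illustrative sketch; the function is hypothetical and must run with a current GL context):

    #include <GLES2/gl2.h>
    #include <cstring>

    // Returns true if the current GLES context supports external (OES) textures.
    bool supportsExternalTextures() {
      auto* extensions = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
      return extensions != nullptr &&
             std::strstr(extensions, "GL_OES_EGL_image_external") != nullptr;
    }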

android/src/main/cpp/VideoPipeline.cpp

@@ -16,7 +16,6 @@
 #include "OpenGLTexture.h"
 #include "JFrameProcessor.h"
-#include "JSkiaFrameProcessor.h"
 namespace vision {
@@ -32,23 +31,29 @@ VideoPipeline::VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int
 VideoPipeline::~VideoPipeline() {
   // 1. Remove output surfaces
-  removeFrameProcessor();
+  removeFrameProcessorOutputSurface();
   removeRecordingSessionOutputSurface();
-  removePreviewOutputSurface();
   // 2. Delete the input textures
   if (_inputTexture != std::nullopt) {
     glDeleteTextures(1, &_inputTexture->id);
+    _inputTexture = std::nullopt;
   }
   // 3. Destroy the OpenGL context
   _context = nullptr;
 }
-void VideoPipeline::removeFrameProcessor() {
-  _frameProcessor = nullptr;
+void VideoPipeline::removeFrameProcessorOutputSurface() {
+  if (_frameProcessorOutput) _frameProcessorOutput->destroy();
+  _frameProcessorOutput = nullptr;
 }
-void VideoPipeline::setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor) {
-  _frameProcessor = jni::make_global(frameProcessor);
+void VideoPipeline::setFrameProcessorOutputSurface(jobject surface) {
+  // 1. Delete existing output surface
+  removeFrameProcessorOutputSurface();
+  // 2. Set new output surface if it is not null
+  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
+  _frameProcessorOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
 }
 void VideoPipeline::removeRecordingSessionOutputSurface() {
@@ -65,139 +70,45 @@ void VideoPipeline::setRecordingSessionOutputSurface(jobject surface) {
   _recordingSessionOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
 }
-void VideoPipeline::removePreviewOutputSurface() {
-  if (_previewOutput) _previewOutput->destroy();
-  _previewOutput = nullptr;
-}
-jni::local_ref<JFrame> VideoPipeline::createFrame() {
-  static const auto createFrameMethod = javaClassLocal()->getMethod<JFrame()>("createFrame");
-  return createFrameMethod(_javaPart);
-}
-void VideoPipeline::setPreviewOutputSurface(jobject surface) {
-  // 1. Delete existing output surface
-  removePreviewOutputSurface();
-  // 2. Set new output surface if it is not null
-  ANativeWindow* window = ANativeWindow_fromSurface(jni::Environment::current(), surface);
-  _previewOutput = OpenGLRenderer::CreateWithWindowSurface(_context, window);
-}
 int VideoPipeline::getInputTextureId() {
   if (_inputTexture == std::nullopt) {
     _inputTexture = _context->createTexture(OpenGLTexture::Type::ExternalOES, _width, _height);
   }
   return static_cast<int>(_inputTexture->id);
 }
 void VideoPipeline::onBeforeFrame() {
-  // 1. Activate the offscreen context
   _context->use();
-  // 2. Prepare the external texture so the Camera can render into it
-  OpenGLTexture& texture = _inputTexture.value();
-  glBindTexture(texture.target, texture.id);
+  glBindTexture(_inputTexture->target, _inputTexture->id);
 }
 void VideoPipeline::onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrixParam) {
-  // 1. Activate the offscreen context
-  _context->use();
-  // 2. Get the OpenGL transform Matrix (transforms, scales, rotations)
+  // Get the OpenGL transform Matrix (transforms, scales, rotations)
   float transformMatrix[16];
   transformMatrixParam->getRegion(0, 16, transformMatrix);
-  // 3. Prepare the texture we are going to render
   OpenGLTexture& texture = _inputTexture.value();
-  // 4. Render to all outputs!
-  auto isSkiaFrameProcessor = _frameProcessor != nullptr && _frameProcessor->isInstanceOf(JSkiaFrameProcessor::javaClassStatic());
-  if (isSkiaFrameProcessor) {
-    // 4.1. If we have a Skia Frame Processor, prepare to render to an offscreen surface using Skia
-    jni::global_ref<JSkiaFrameProcessor::javaobject> skiaFrameProcessor = jni::static_ref_cast<JSkiaFrameProcessor::javaobject>(_frameProcessor);
-    SkiaRenderer& skiaRenderer = skiaFrameProcessor->cthis()->getSkiaRenderer();
-    auto drawCallback = [=](SkCanvas* canvas) {
-      // Create a JFrame instance (this uses queues/recycling)
-      auto frame = JFrame::create(texture.width,
-                                  texture.height,
-                                  texture.width * 4,
-                                  _context->getCurrentPresentationTime(),
-                                  "portrait",
-                                  false);
-      // Fill the Frame with the contents of the GL surface
-      _context->getPixelsOfTexture(texture,
-                                   &frame->cthis()->pixelsSize,
-                                   &frame->cthis()->pixels);
-      // Call the Frame processor with the Frame
-      frame->cthis()->incrementRefCount();
-      skiaFrameProcessor->cthis()->call(frame, canvas);
-      frame->cthis()->decrementRefCount();
-    };
-    // 4.2. Render to the offscreen surface using Skia
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using Skia..");
-    OpenGLTexture offscreenTexture = skiaRenderer.renderTextureToOffscreenSurface(*_context,
-                                                                                  texture,
-                                                                                  transformMatrix,
-                                                                                  drawCallback);
-    // 4.3. Now render the result of the offscreen surface to all output surfaces!
-    if (_previewOutput) {
-      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
-      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _previewOutput->getEGLSurface());
-    }
-    if (_recordingSessionOutput) {
-      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
-      skiaRenderer.renderTextureToSurface(*_context, offscreenTexture, _recordingSessionOutput->getEGLSurface());
-    }
-  } else {
-    // 4.1. If we have a Frame Processor, call it
-    if (_frameProcessor != nullptr) {
-      // Create a JFrame instance (this uses queues/recycling)
-      auto frame = JFrame::create(texture.width,
-                                  texture.height,
-                                  texture.width * 4,
-                                  _context->getCurrentPresentationTime(),
-                                  "portrait",
-                                  false);
-      // Fill the Frame with the contents of the GL surface
-      _context->getPixelsOfTexture(texture,
-                                   &frame->cthis()->pixelsSize,
-                                   &frame->cthis()->pixels);
-      // Call the Frame processor with the Frame
-      frame->cthis()->incrementRefCount();
-      _frameProcessor->cthis()->call(frame);
-      frame->cthis()->decrementRefCount();
-    }
-    // 4.2. Simply pass-through shader to render the texture to all output EGLSurfaces
-    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering using pass-through OpenGL Shader..");
-    if (_previewOutput) {
-      __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to Preview..");
-      _previewOutput->renderTextureToSurface(texture, transformMatrix);
-    }
+  if (_frameProcessorOutput) {
+    __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to FrameProcessor..");
+    _frameProcessorOutput->renderTextureToSurface(texture, transformMatrix);
+  }
   if (_recordingSessionOutput) {
     __android_log_print(ANDROID_LOG_INFO, TAG, "Rendering to RecordingSession..");
     _recordingSessionOutput->renderTextureToSurface(texture, transformMatrix);
   }
-  }
 }
 void VideoPipeline::registerNatives() {
   registerHybrid({
       makeNativeMethod("initHybrid", VideoPipeline::initHybrid),
-      makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
-      makeNativeMethod("setFrameProcessor", VideoPipeline::setFrameProcessor),
-      makeNativeMethod("removeFrameProcessor", VideoPipeline::removeFrameProcessor),
-      makeNativeMethod("setPreviewOutputSurface", VideoPipeline::setPreviewOutputSurface),
-      makeNativeMethod("removePreviewOutputSurface", VideoPipeline::removePreviewOutputSurface),
+      makeNativeMethod("setFrameProcessorOutputSurface", VideoPipeline::setFrameProcessorOutputSurface),
+      makeNativeMethod("removeFrameProcessorOutputSurface", VideoPipeline::removeFrameProcessorOutputSurface),
       makeNativeMethod("setRecordingSessionOutputSurface", VideoPipeline::setRecordingSessionOutputSurface),
       makeNativeMethod("removeRecordingSessionOutputSurface", VideoPipeline::removeRecordingSessionOutputSurface),
+      makeNativeMethod("getInputTextureId", VideoPipeline::getInputTextureId),
      makeNativeMethod("onBeforeFrame", VideoPipeline::onBeforeFrame),
       makeNativeMethod("onFrame", VideoPipeline::onFrame),
   });

android/src/main/cpp/VideoPipeline.h

@@ -8,13 +8,11 @@
 #include <fbjni/fbjni.h>
 #include <EGL/egl.h>
 #include <android/native_window.h>
-#include <memory>
+#include "PassThroughShader.h"
 #include "OpenGLRenderer.h"
 #include "OpenGLContext.h"
-#include "OpenGLTexture.h"
-#include "JFrameProcessor.h"
+#include <memory>
+#include <optional>
 namespace vision {
@@ -33,17 +31,13 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
   int getInputTextureId();
   // <- Frame Processor output
-  void setFrameProcessor(jni::alias_ref<JFrameProcessor::javaobject> frameProcessor);
-  void removeFrameProcessor();
+  void setFrameProcessorOutputSurface(jobject surface);
+  void removeFrameProcessorOutputSurface();
   // <- MediaRecorder output
   void setRecordingSessionOutputSurface(jobject surface);
   void removeRecordingSessionOutputSurface();
-  // <- Preview output
-  void setPreviewOutputSurface(jobject surface);
-  void removePreviewOutputSurface();
   // Frame callbacks
   void onBeforeFrame();
   void onFrame(jni::alias_ref<jni::JArrayFloat> transformMatrix);
@@ -51,22 +45,17 @@ class VideoPipeline: public jni::HybridClass<VideoPipeline> {
 private:
   // Private constructor. Use `create(..)` to create new instances.
   explicit VideoPipeline(jni::alias_ref<jhybridobject> jThis, int width, int height);
-  // Creates a new Frame instance which should be filled with data.
-  jni::local_ref<JFrame> createFrame();
 private:
   // Input Surface Texture
-  std::optional<OpenGLTexture> _inputTexture;
+  std::optional<OpenGLTexture> _inputTexture = std::nullopt;
   int _width = 0;
   int _height = 0;
-  // (Optional) Frame Processor that processes frames before they go into output
-  jni::global_ref<JFrameProcessor::javaobject> _frameProcessor = nullptr;
   // Output Contexts
   std::shared_ptr<OpenGLContext> _context = nullptr;
+  std::unique_ptr<OpenGLRenderer> _frameProcessorOutput = nullptr;
   std::unique_ptr<OpenGLRenderer> _recordingSessionOutput = nullptr;
-  std::unique_ptr<OpenGLRenderer> _previewOutput = nullptr;
 private:
   friend HybridBase;

android/src/main/cpp/VisionCamera.cpp

@@ -4,7 +4,6 @@
 #include "JFrameProcessor.h"
 #include "JVisionCameraProxy.h"
 #include "VisionCameraProxy.h"
-#include "JSkiaFrameProcessor.h"
 #include "VideoPipeline.h"
 JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
@@ -15,9 +14,6 @@ JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *) {
     vision::VideoPipeline::registerNatives();
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
     vision::JFrameProcessor::registerNatives();
-#endif
-#if VISION_CAMERA_ENABLE_SKIA
-    vision::JSkiaFrameProcessor::registerNatives();
 #endif
   });
 }
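Note: this file uses fbjni's standard entry-point pattern: JNI_OnLoad runs once when the native library is loaded and registers the native methods of each HybridClass. Stripped down to the pattern itself (a sketch, not the literal file contents):

    #include <fbjni/fbjni.h>
    #include <jni.h>

    JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
      // initialize() attaches fbjni to the VM and runs the lambda on a JNI-safe thread.
      return facebook::jni::initialize(vm, [] {
        // Each HybridClass registers its native methods here, e.g.:
        // vision::VideoPipeline::registerNatives();
      });
    }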

android/src/main/cpp/frameprocessor/FrameHostObject.cpp

@@ -31,6 +31,7 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
   result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
@@ -54,7 +55,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                     const jsi::Value* args,
                                     size_t count) -> jsi::Value {
       // Increment retain count by one.
-      this->frame->cthis()->incrementRefCount();
+      this->frame->incrementRefCount();
       return jsi::Value::undefined();
     };
     return jsi::Function::createFromHostFunction(runtime,
@@ -68,7 +69,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                     const jsi::Value* args,
                                     size_t count) -> jsi::Value {
       // Decrement retain count by one. If the retain count is zero, the Frame gets closed.
-      this->frame->cthis()->decrementRefCount();
+      this->frame->decrementRefCount();
       return jsi::Value::undefined();
     };
     return jsi::Function::createFromHostFunction(runtime,
@@ -84,8 +85,8 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
       if (!this->frame) {
        return jsi::String::createFromUtf8(runtime, "[closed frame]");
       }
-      auto width = this->frame->cthis()->getWidth();
-      auto height = this->frame->cthis()->getHeight();
+      auto width = this->frame->getWidth();
+      auto height = this->frame->getHeight();
       auto str = std::to_string(width) + " x " + std::to_string(height) + " Frame";
       return jsi::String::createFromUtf8(runtime, str);
     };
@@ -96,8 +97,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
                                     const jsi::Value& thisArg,
                                     const jsi::Value* args,
                                     size_t count) -> jsi::Value {
-      size_t size = frame->cthis()->pixelsSize;
-      uint8_t* pixels = frame->cthis()->pixels;
+      auto buffer = this->frame->toByteBuffer();
+      if (!buffer->isDirect()) {
+        throw std::runtime_error("Failed to get byte content of Frame - array is not direct ByteBuffer!");
+      }
+      auto size = buffer->getDirectSize();
       static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
       if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
@@ -115,7 +119,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
       // directly write to C++ JSI ArrayBuffer
       auto destinationBuffer = arrayBuffer.data(runtime);
-      memcpy(destinationBuffer, pixels, sizeof(uint8_t) * size);
+      memcpy(destinationBuffer, buffer->getDirectAddress(), sizeof(uint8_t) * size);
       return arrayBuffer;
     };
@@ -123,30 +127,33 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
   }
   if (name == "isValid") {
-    return jsi::Value(this->frame && this->frame->cthis()->getIsValid());
+    return jsi::Value(this->frame && this->frame->getIsValid());
   }
   if (name == "width") {
-    return jsi::Value(this->frame->cthis()->getWidth());
+    return jsi::Value(this->frame->getWidth());
   }
   if (name == "height") {
-    return jsi::Value(this->frame->cthis()->getHeight());
+    return jsi::Value(this->frame->getHeight());
   }
   if (name == "isMirrored") {
-    return jsi::Value(this->frame->cthis()->getIsMirrored());
+    return jsi::Value(this->frame->getIsMirrored());
   }
   if (name == "orientation") {
-    auto string = this->frame->cthis()->getOrientation();
+    auto string = this->frame->getOrientation();
     return jsi::String::createFromUtf8(runtime, string->toStdString());
   }
   if (name == "pixelFormat") {
-    auto string = this->frame->cthis()->getPixelFormat();
+    auto string = this->frame->getPixelFormat();
     return jsi::String::createFromUtf8(runtime, string->toStdString());
   }
   if (name == "timestamp") {
-    return jsi::Value(static_cast<double>(this->frame->cthis()->getTimestamp()));
+    return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
   }
   if (name == "bytesPerRow") {
-    return jsi::Value(this->frame->cthis()->getBytesPerRow());
+    return jsi::Value(this->frame->getBytesPerRow());
   }
+  if (name == "planesCount") {
+    return jsi::Value(this->frame->getPlanesCount());
+  }
   // fallback to base implementation
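Note: the toArrayBuffer logic above copies the Frame's direct ByteBuffer into a jsi::ArrayBuffer that is cached on the JS global object, so the allocation is reused across frames. In JSI, such a buffer is typically backed by a jsi::MutableBuffer subclass; a sketch of one possible backing type (assuming a React Native version whose jsi.h provides the ArrayBuffer(Runtime&, std::shared_ptr<MutableBuffer>) constructor):

    #include <jsi/jsi.h>
    #include <vector>

    using namespace facebook;

    // Owns a byte array and exposes it to JS as the backing store of an ArrayBuffer.
    class VectorBuffer : public jsi::MutableBuffer {
     public:
      explicit VectorBuffer(size_t size) : _data(size) {}
      size_t size() const override { return _data.size(); }
      uint8_t* data() override { return _data.data(); }
     private:
      std::vector<uint8_t> _data;
    };

    // Usage (sketch): create once, cache it, then memcpy the pixel data into
    // arrayBuffer.data(runtime) on every frame, as the code above does.
    //   auto buffer = std::make_shared<VectorBuffer>(size);
    //   jsi::ArrayBuffer arrayBuffer(runtime, buffer);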

android/src/main/cpp/frameprocessor/FrameHostObject.h

@@ -26,7 +26,7 @@ class JSI_EXPORT FrameHostObject : public jsi::HostObject {
   std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override;
 public:
-  jni::global_ref<JFrame::javaobject> frame;
+  jni::global_ref<JFrame> frame;
 };
 } // namespace vision

android/src/main/cpp/frameprocessor/JSIJNIConversion.cpp

@@ -111,10 +111,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
     return jsi::String::createFromUtf8(runtime, object->toString());
-  } else if (object->isInstanceOf(jni::JList<jobject>::javaClassStatic())) {
+  } else if (object->isInstanceOf(JList<jobject>::javaClassStatic())) {
     // List<E>
-    auto arrayList = jni::static_ref_cast<jni::JList<jobject>>(object);
+    auto arrayList = static_ref_cast<JList<jobject>>(object);
     auto size = arrayList->size();
     auto result = jsi::Array(runtime, size);
@@ -125,10 +125,10 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
     }
     return result;
-  } else if (object->isInstanceOf(jni::JMap<jstring, jobject>::javaClassStatic())) {
+  } else if (object->isInstanceOf(JMap<jstring, jobject>::javaClassStatic())) {
     // Map<K, V>
-    auto map = jni::static_ref_cast<jni::JMap<jstring, jobject>>(object);
+    auto map = static_ref_cast<JMap<jstring, jobject>>(object);
     auto result = jsi::Object(runtime);
     for (const auto& entry : *map) {
@@ -140,7 +140,7 @@ jsi::Value JSIJNIConversion::convertJNIObjectToJSIValue(jsi::Runtime &runtime, c
     return result;
   } else if (object->isInstanceOf(JFrame::javaClassStatic())) {
     // Frame
-    auto frame = jni::static_ref_cast<JFrame::javaobject>(object);
+    auto frame = static_ref_cast<JFrame>(object);
     // box into HostObject
     auto hostObject = std::make_shared<FrameHostObject>(frame);

android/src/main/cpp/frameprocessor/VisionCameraProxy.cpp

@@ -39,7 +39,6 @@ std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& r
   result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
   result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
   result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
-  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
   return result;
 }
@@ -65,13 +64,6 @@ jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime,
 jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
   auto name = propName.utf8(runtime);
-  if (name == "isSkiaEnabled") {
-#ifdef VISION_CAMERA_ENABLE_SKIA
-    return jsi::Value(true);
-#else
-    return jsi::Value(false);
-#endif
-  }
   if (name == "setFrameProcessor") {
     return jsi::Function::createFromHostFunction(runtime,
                                                  jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),

android/src/main/cpp/frameprocessor/java-bindings/JFrame.cpp

@@ -11,85 +11,71 @@
 namespace vision {
 using namespace facebook;
+using namespace jni;
-void JFrame::registerNatives() {
-  registerHybrid({
-    makeNativeMethod("getWidth", JFrame::getWidth),
-    makeNativeMethod("getHeight", JFrame::getHeight),
-    makeNativeMethod("getBytesPerRow", JFrame::getBytesPerRow),
-    makeNativeMethod("getTimestamp", JFrame::getTimestamp),
-    makeNativeMethod("getOrientation", JFrame::getOrientation),
-    makeNativeMethod("getIsMirrored", JFrame::getIsMirrored),
-    makeNativeMethod("getPixelFormat", JFrame::getPixelFormat),
-    makeNativeMethod("getByteBuffer", JFrame::getByteBuffer),
-    makeNativeMethod("getIsValid", JFrame::getIsValid),
-  });
+int JFrame::getWidth() const {
+  static const auto getWidthMethod = getClass()->getMethod<jint()>("getWidth");
+  return getWidthMethod(self());
 }
-jni::local_ref<JFrame::javaobject> JFrame::create(int width,
-                                                  int height,
-                                                  int bytesPerRow,
-                                                  long timestamp,
-                                                  const std::string& orientation,
-                                                  bool isMirrored) {
-  return newObjectCxxArgs(width,
-                          height,
-                          bytesPerRow,
-                          timestamp,
-                          orientation,
-                          isMirrored);
+int JFrame::getHeight() const {
+  static const auto getWidthMethod = getClass()->getMethod<jint()>("getHeight");
+  return getWidthMethod(self());
 }
-JFrame::JFrame(int width,
-               int height,
-               int bytesPerRow,
-               long timestamp,
-               const std::string& orientation,
-               bool isMirrored) {
-  _width = width;
-  _height = height;
-  _bytesPerRow = bytesPerRow;
-  _timestamp = timestamp;
-  _orientation = orientation;
-  _isMirrored = isMirrored;
-  _refCount = 0;
-  pixelsSize = height * bytesPerRow;
-  pixels = (uint8_t*) malloc(pixelsSize);
+bool JFrame::getIsValid() const {
+  static const auto getIsValidMethod = getClass()->getMethod<jboolean()>("getIsValid");
+  return getIsValidMethod(self());
 }
-JFrame::~JFrame() noexcept {
-  close();
+bool JFrame::getIsMirrored() const {
+  static const auto getIsMirroredMethod = getClass()->getMethod<jboolean()>("getIsMirrored");
+  return getIsMirroredMethod(self());
 }
-bool JFrame::getIsValid() {
-  return _refCount > 0 && !_isClosed;
+jlong JFrame::getTimestamp() const {
+  static const auto getTimestampMethod = getClass()->getMethod<jlong()>("getTimestamp");
+  return getTimestampMethod(self());
 }
-jni::local_ref<jni::JByteBuffer> JFrame::getByteBuffer() {
-  if (!getIsValid()) {
-    [[unlikely]]
-    throw std::runtime_error("Frame is no longer valid, cannot access getByteBuffer!");
-  }
-  return jni::JByteBuffer::wrapBytes(pixels, pixelsSize);
+local_ref<JString> JFrame::getOrientation() const {
+  static const auto getOrientationMethod = getClass()->getMethod<JString()>("getOrientation");
+  return getOrientationMethod(self());
+}
+local_ref<JString> JFrame::getPixelFormat() const {
+  static const auto getPixelFormatMethod = getClass()->getMethod<JString()>("getPixelFormat");
+  return getPixelFormatMethod(self());
+}
+int JFrame::getPlanesCount() const {
+  static const auto getPlanesCountMethod = getClass()->getMethod<jint()>("getPlanesCount");
+  return getPlanesCountMethod(self());
+}
+int JFrame::getBytesPerRow() const {
+  static const auto getBytesPerRowMethod = getClass()->getMethod<jint()>("getBytesPerRow");
+  return getBytesPerRowMethod(self());
+}
+local_ref<JByteBuffer> JFrame::toByteBuffer() const {
+  static const auto toByteBufferMethod = getClass()->getMethod<JByteBuffer()>("toByteBuffer");
+  return toByteBufferMethod(self());
 }
 void JFrame::incrementRefCount() {
-  std::unique_lock lock(_mutex);
-  _refCount++;
+  static const auto incrementRefCountMethod = getClass()->getMethod<void()>("incrementRefCount");
+  incrementRefCountMethod(self());
 }
 void JFrame::decrementRefCount() {
-  std::unique_lock lock(_mutex);
-  _refCount--;
-  if (_refCount <= 0) {
-    this->close();
-  }
+  static const auto decrementRefCountMethod = getClass()->getMethod<void()>("decrementRefCount");
+  decrementRefCountMethod(self());
 }
 void JFrame::close() {
-  _isClosed = true;
-  free(pixels);
-  pixels = nullptr;
+  static const auto closeMethod = getClass()->getMethod<void()>("close");
+  closeMethod(self());
 }
 } // namespace vision
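Note: the new JFrame is no longer a HybridClass with C++-owned pixel data; it is a plain fbjni JavaClass wrapper that forwards every call to the Java-side Frame object. The `static const` locals cache the JNI method lookup so the (relatively expensive) getMethod() call runs only once. The same pattern in isolation (hypothetical example class, for illustration):

    #include <fbjni/fbjni.h>

    using namespace facebook::jni;

    // Hypothetical wrapper around a Java class com.example.Example.
    struct JExample : public JavaClass<JExample> {
      static constexpr auto kJavaDescriptor = "Lcom/example/Example;";

      int getValue() const {
        // The static local makes the method-ID lookup run exactly once.
        static const auto getValueMethod = getClass()->getMethod<jint()>("getValue");
        return getValueMethod(self());
      }
    };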

android/src/main/cpp/frameprocessor/java-bindings/JFrame.h

@@ -7,70 +7,29 @@
 #include <jni.h>
 #include <fbjni/fbjni.h>
 #include <fbjni/ByteBuffer.h>
-#include <android/hardware_buffer.h>
-#include <android/hardware_buffer_jni.h>
-#include <mutex>
 namespace vision {
 using namespace facebook;
+using namespace jni;
-class JFrame : public jni::HybridClass<JFrame> {
- public:
+struct JFrame : public JavaClass<JFrame> {
   static constexpr auto kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/Frame;";
-  static void registerNatives();
-  static jni::local_ref<JFrame::javaobject> create(int width,
-                                                   int height,
-                                                   int bytesPerRow,
-                                                   long timestamp,
-                                                   const std::string& orientation,
-                                                   bool isMirrored);
-  ~JFrame() noexcept;
- protected:
-  friend HybridBase;
-  explicit JFrame(int width,
-                  int height,
-                  int bytesPerRow,
-                  long timestamp,
-                  const std::string& orientation,
-                  bool isMirrored);
 public:
-  int getWidth() { return _width; }
-  int getHeight() { return _height; }
-  int getBytesPerRow() { return _bytesPerRow; }
-  jlong getTimestamp() { return _timestamp; }
-  jni::local_ref<jni::JString> getOrientation() { return jni::make_jstring(_orientation); }
-  bool getIsMirrored() { return _isMirrored; }
-  // TODO: Can this be something other than RGB?
-  jni::local_ref<jni::JString> getPixelFormat() { return jni::make_jstring("rgb"); }
-  bool getIsValid();
-  jni::local_ref<jni::JByteBuffer> getByteBuffer();
+  int getWidth() const;
+  int getHeight() const;
+  bool getIsValid() const;
+  bool getIsMirrored() const;
+  int getPlanesCount() const;
+  int getBytesPerRow() const;
+  jlong getTimestamp() const;
+  local_ref<JString> getOrientation() const;
+  local_ref<JString> getPixelFormat() const;
+  local_ref<JByteBuffer> toByteBuffer() const;
   void incrementRefCount();
   void decrementRefCount();
   void close();
-  // Backing byte data
-  uint8_t* pixels = nullptr;
-  size_t pixelsSize = 0;
- private:
-  // Frame info
-  int _width = 0;
-  int _height = 0;
-  int _bytesPerRow = 0;
-  long _timestamp = 0;
-  std::string _orientation = {};
-  bool _isMirrored = false;
-  // Ref-counting
-  int _refCount = 0;
-  bool _isClosed = false;
-  std::mutex _mutex;
 };
 } // namespace vision

android/src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.cpp

@@ -17,6 +17,9 @@ using namespace facebook;
 using namespace jni;
 void JFrameProcessor::registerNatives() {
+  registerHybrid({
+      makeNativeMethod("call", JFrameProcessor::call)
+  });
 }
 using TSelf = jni::local_ref<JFrameProcessor::javaobject>;

android/src/main/cpp/frameprocessor/java-bindings/JFrameProcessor.h

@@ -21,7 +21,7 @@ namespace vision {
 using namespace facebook;
-class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
+struct JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
 public:
   static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/frameprocessor/FrameProcessor;";
   static void registerNatives();
@@ -30,25 +30,20 @@ class JFrameProcessor : public jni::HybridClass<JFrameProcessor> {
 public:
   /**
-   * Wrap the Frame in a HostObject and call the Frame Processor.
+   * Call the JS Frame Processor.
    */
-  void call(jni::alias_ref<JFrame::javaobject> frame);
+  void call(alias_ref<JFrame::javaobject> frame);
-protected:
-  friend HybridBase;
-  // C++ only constructor. Use `create(..)` to create new instances.
+private:
+  // Private constructor. Use `create(..)` to create new instances.
   explicit JFrameProcessor(std::shared_ptr<RNWorklet::JsiWorklet> worklet,
                            std::shared_ptr<RNWorklet::JsiWorkletContext> context);
-  JFrameProcessor(const JFrameProcessor &) = delete;
-  JFrameProcessor &operator=(const JFrameProcessor &) = delete;
-protected:
-  /**
-   * Call the JS Frame Processor with the given Frame Host Object.
-   */
+private:
   void callWithFrameHostObject(const std::shared_ptr<FrameHostObject>& frameHostObject) const;
 private:
+  friend HybridBase;
   std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
   std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
 };

android/src/main/cpp/frameprocessor/java-bindings/JVisionCameraProxy.cpp

@@ -18,10 +18,6 @@
 #include <react-native-worklets-core/WKTJsiWorkletContext.h>
 #endif
-#if VISION_CAMERA_ENABLE_SKIA
-#include "JSkiaFrameProcessor.h"
-#endif
 namespace vision {
 using TSelf = local_ref<HybridClass<JVisionCameraProxy>::jhybriddata>;
@@ -35,7 +31,6 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
                                        const jni::global_ref<JVisionCameraScheduler::javaobject>& scheduler) {
   _javaPart = make_global(javaThis);
   _runtime = runtime;
-  _callInvoker = callInvoker;
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   __android_log_write(ANDROID_LOG_INFO, TAG, "Creating Worklet Context...");
@@ -58,12 +53,6 @@ JVisionCameraProxy::JVisionCameraProxy(const jni::alias_ref<JVisionCameraProxy::
 #else
   __android_log_write(ANDROID_LOG_INFO, TAG, "Frame Processors are disabled!");
 #endif
-#ifdef VISION_CAMERA_ENABLE_SKIA
-  __android_log_write(ANDROID_LOG_INFO, TAG, "Skia is enabled!");
-#else
-  __android_log_write(ANDROID_LOG_INFO, TAG, "Skia is disabled!");
-#endif
 }
 JVisionCameraProxy::~JVisionCameraProxy() {
@@ -87,12 +76,6 @@ void JVisionCameraProxy::setFrameProcessor(int viewTag,
   jni::local_ref<JFrameProcessor::javaobject> frameProcessor;
   if (frameProcessorType == "frame-processor") {
     frameProcessor = JFrameProcessor::create(worklet, _workletContext);
-  } else if (frameProcessorType == "skia-frame-processor") {
-#if VISION_CAMERA_ENABLE_SKIA
-    frameProcessor = JSkiaFrameProcessor::create(worklet, _workletContext, _callInvoker);
-#else
-    throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
-#endif
   } else {
     throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
   }


@@ -36,13 +36,11 @@ class JVisionCameraProxy : public jni::HybridClass<JVisionCameraProxy> {
                          jni::local_ref<JMap<jstring, jobject>> options);

   jsi::Runtime* getJSRuntime() { return _runtime; }
-  std::shared_ptr<react::CallInvoker> getCallInvoker() { return _callInvoker; }

  private:
   friend HybridBase;
   jni::global_ref<JVisionCameraProxy::javaobject> _javaPart;
   jsi::Runtime* _runtime;
-  std::shared_ptr<react::CallInvoker> _callInvoker;
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
 #endif


@@ -1,72 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#include "DrawableFrameHostObject.h"
#include <SkCanvas.h>
#include "FrameHostObject.h"
namespace vision {
std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);
// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
throw std::runtime_error("render() is not yet implemented!");
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}
if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}
// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}
void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}
} // namespace vision


@@ -1,33 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#include <jsi/jsi.h>
#include "FrameHostObject.h"
#include "JFrame.h"
#include <SkCanvas.h>
#include <JsiSkCanvas.h>
namespace vision {
using namespace facebook;
class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(const jni::alias_ref<JFrame::javaobject>& frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas): FrameHostObject(frame), _canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
void invalidateCanvas();
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};
} // namespace vision


@@ -1,61 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA
#include "JSkiaFrameProcessor.h"
#include <jni.h>
#include <fbjni/fbjni.h>
#include <utility>
#include "JFrame.h"
#include "DrawableFrameHostObject.h"
#include <RNSkPlatformContext.h>
#include "VisionCameraSkiaContext.h"
namespace vision {
using namespace facebook;
using namespace jni;
void JSkiaFrameProcessor::registerNatives() {
}
using TSelf = jni::local_ref<JSkiaFrameProcessor::javaobject>;
JSkiaFrameProcessor::JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker)
: JSkiaFrameProcessor::HybridBase(worklet, context) {
// TODO: Can I use the Android Platform Context from react-native-skia here?
auto skiaPlatformContext = std::make_shared<VisionCameraSkiaContext>(context->getJsRuntime(),
callInvoker,
1.0f);
_jsiCanvas = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
_skiaRenderer = std::make_shared<SkiaRenderer>();
}
TSelf JSkiaFrameProcessor::create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker) {
return JSkiaFrameProcessor::newObjectCxxArgs(worklet, context, callInvoker);
}
void JSkiaFrameProcessor::call(alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
_jsiCanvas->setCanvas(canvas);
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, _jsiCanvas);
// Call the base function in JFrameProcessor
callWithFrameHostObject(frameHostObject);
// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}
} // namespace vision
#endif


@@ -1,59 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS && VISION_CAMERA_ENABLE_SKIA
#include <string>
#include <memory>
#include <jni.h>
#include <fbjni/fbjni.h>
#include <react-native-worklets-core/WKTJsiWorklet.h>
#include <react-native-worklets-core/WKTJsiHostObject.h>
#include "JFrame.h"
#include "FrameHostObject.h"
#include "SkiaRenderer.h"
#include "JFrameProcessor.h"
#include <JsiSkCanvas.h>
#include <RNSkPlatformContext.h>
namespace vision {
using namespace facebook;
class JSkiaFrameProcessor : public jni::HybridClass<JSkiaFrameProcessor, JFrameProcessor> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/skia/SkiaFrameProcessor;";
static void registerNatives();
static jni::local_ref<JSkiaFrameProcessor::javaobject> create(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);
public:
/**
* Call the JS Frame Processor with the given valid Canvas to draw on.
*/
void call(jni::alias_ref<JFrame::javaobject> frame,
SkCanvas* canvas);
SkiaRenderer& getSkiaRenderer() { return *_skiaRenderer; }
protected:
friend HybridBase;
// Private constructor. Use `create(..)` to create new instances.
explicit JSkiaFrameProcessor(const std::shared_ptr<RNWorklet::JsiWorklet>& worklet,
const std::shared_ptr<RNWorklet::JsiWorkletContext>& context,
const std::shared_ptr<react::CallInvoker>& callInvoker);
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _jsiCanvas;
std::shared_ptr<SkiaRenderer> _skiaRenderer;
};
} // namespace vision
#endif


@@ -1,234 +0,0 @@
//
// Created by Marc Rousavy on 10.08.23.
//
#if VISION_CAMERA_ENABLE_SKIA
#include "SkiaRenderer.h"
#include <android/log.h>
#include "OpenGLError.h"
#include <GLES2/gl2ext.h>
#include <core/SkColorSpace.h>
#include <core/SkCanvas.h>
#include <core/SkYUVAPixmaps.h>
#include <gpu/gl/GrGLInterface.h>
#include <gpu/GrDirectContext.h>
#include <gpu/GrBackendSurface.h>
#include <gpu/ganesh/SkSurfaceGanesh.h>
#include <gpu/ganesh/SkImageGanesh.h>
#include <android/native_window_jni.h>
#include <android/surface_texture_jni.h>
// from <gpu/ganesh/gl/GrGLDefines.h>
#define GR_GL_RGBA8 0x8058
#define DEFAULT_FBO 0
namespace vision {
SkiaRenderer::~SkiaRenderer() {
_offscreenSurface = nullptr;
_offscreenSurfaceTextureId = NO_TEXTURE;
// 3. Delete the Skia context
if (_skiaContext != nullptr) {
_skiaContext->abandonContext();
_skiaContext = nullptr;
}
}
sk_sp<GrDirectContext> SkiaRenderer::getSkiaContext() {
if (_skiaContext == nullptr) {
_skiaContext = GrDirectContext::MakeGL();
}
return _skiaContext;
}
sk_sp<SkImage> SkiaRenderer::wrapTextureAsImage(OpenGLTexture &texture) {
GrGLTextureInfo textureInfo {
// OpenGL will automatically convert YUV -> RGB - if it's an EXTERNAL texture
.fTarget = texture.target,
.fID = texture.id,
.fFormat = GR_GL_RGBA8,
};
GrBackendTexture skiaTexture(texture.width,
texture.height,
GrMipMapped::kNo,
textureInfo);
sk_sp<SkImage> image = SkImages::BorrowTextureFrom(_skiaContext.get(),
skiaTexture,
kBottomLeft_GrSurfaceOrigin,
kN32_SkColorType,
kOpaque_SkAlphaType,
nullptr,
nullptr);
if (image == nullptr) {
[[unlikely]];
throw std::runtime_error("Failed to create Skia Image! Cannot wrap input texture (frame) using Skia.");
}
return image;
}
sk_sp<SkSurface> SkiaRenderer::wrapEglSurfaceAsSurface(EGLSurface eglSurface) {
GLint sampleCnt;
glGetIntegerv(GL_SAMPLES, &sampleCnt);
GLint stencilBits;
glGetIntegerv(GL_STENCIL_BITS, &stencilBits);
GrGLFramebufferInfo fboInfo {
// DEFAULT_FBO is FBO0, meaning the default on-screen FBO for that given surface
.fFBOID = DEFAULT_FBO,
.fFormat = GR_GL_RGBA8
};
EGLint width = 0, height = 0;
eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_WIDTH, &width);
eglQuerySurface(eglGetCurrentDisplay(), eglSurface, EGL_HEIGHT, &height);
GrBackendRenderTarget renderTarget(width,
height,
sampleCnt,
stencilBits,
fboInfo);
SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
sk_sp<SkSurface> surface = SkSurfaces::WrapBackendRenderTarget(_skiaContext.get(),
renderTarget,
kBottomLeft_GrSurfaceOrigin,
kN32_SkColorType,
nullptr,
&props,
nullptr,
nullptr);
if (surface == nullptr) {
[[unlikely]];
throw std::runtime_error("Failed to create Skia Surface! Cannot wrap EGLSurface/FrameBuffer using Skia.");
}
return surface;
}
sk_sp<SkSurface> SkiaRenderer::getOffscreenSurface(int width, int height) {
if (_offscreenSurface == nullptr || _offscreenSurface->width() != width || _offscreenSurface->height() != height) {
// 1. Get Skia Context
sk_sp<GrDirectContext> skiaContext = getSkiaContext();
// 2. Create a backend texture (TEXTURE_2D + Frame Buffer)
GrBackendTexture backendTexture = skiaContext->createBackendTexture(width,
height,
SkColorType::kN32_SkColorType,
GrMipMapped::kNo,
GrRenderable::kYes);
// 3. Get it's Texture ID
GrGLTextureInfo info;
backendTexture.getGLTextureInfo(&info);
_offscreenSurfaceTextureId = info.fID;
struct ReleaseContext {
GrDirectContext* context;
GrBackendTexture texture;
};
auto releaseCtx = new ReleaseContext(
{skiaContext.get(), backendTexture});
SkSurfaces::TextureReleaseProc releaseProc = [] (void* address) {
// 5. Once done using, delete the backend OpenGL texture.
auto releaseCtx = reinterpret_cast<ReleaseContext*>(address);
releaseCtx->context->deleteBackendTexture(releaseCtx->texture);
};
// 4. Wrap the newly created texture as an SkSurface
SkSurfaceProps props(0, kUnknown_SkPixelGeometry);
_offscreenSurface = SkSurfaces::WrapBackendTexture(skiaContext.get(),
backendTexture,
kBottomLeft_GrSurfaceOrigin,
0,
SkColorType::kN32_SkColorType,
nullptr,
&props,
releaseProc,
releaseCtx);
if (_offscreenSurface == nullptr) {
[[unlikely]];
throw std::runtime_error("Failed to create offscreen Skia Surface!");
}
}
return _offscreenSurface;
}
OpenGLTexture SkiaRenderer::renderTextureToOffscreenSurface(OpenGLContext& glContext,
OpenGLTexture& texture,
float* transformMatrix,
const DrawCallback& drawCallback) {
// 1. Activate the OpenGL context (eglMakeCurrent)
glContext.use();
// 2. Initialize Skia
sk_sp<GrDirectContext> skiaContext = getSkiaContext();
// 3. Create the offscreen Skia Surface
sk_sp<SkSurface> surface = getOffscreenSurface(texture.width, texture.height);
// 4. Wrap the input texture as an image so we can draw it to the surface
sk_sp<SkImage> frame = wrapTextureAsImage(texture);
// 5. Prepare the Canvas
SkCanvas* canvas = _offscreenSurface->getCanvas();
if (canvas == nullptr) {
[[unlikely]];
throw std::runtime_error("Failed to get Skia Canvas!");
}
// TODO: Apply Matrix. No idea how though.
SkM44 matrix = SkM44::ColMajor(transformMatrix);
// 6. Render it!
canvas->clear(SkColors::kBlack);
canvas->drawImage(frame, 0, 0);
drawCallback(canvas);
// 8. Flush all Skia operations to OpenGL
_offscreenSurface->flushAndSubmit();
return OpenGLTexture {
.id = _offscreenSurfaceTextureId,
.target = GL_TEXTURE_2D,
.width = texture.width,
.height = texture.height,
};
}
void SkiaRenderer::renderTextureToSurface(OpenGLContext &glContext, OpenGLTexture &texture, EGLSurface surface) {
// 1. Activate the OpenGL context (eglMakeCurrent)
glContext.use(surface);
// 2. Initialize Skia
sk_sp<GrDirectContext> skiaContext = getSkiaContext();
// 3. Wrap the output EGLSurface in a Skia SkSurface
sk_sp<SkSurface> skSurface = wrapEglSurfaceAsSurface(surface);
// 4. Wrap the input texture in a Skia SkImage
sk_sp<SkImage> frame = wrapTextureAsImage(texture);
// 5. Prepare the Canvas!
SkCanvas* canvas = skSurface->getCanvas();
if (canvas == nullptr) {
[[unlikely]];
throw std::runtime_error("Failed to get Skia Canvas!");
}
// 6. Render it!
canvas->clear(SkColors::kBlack);
canvas->drawImage(frame, 0, 0);
// 7. Flush all Skia operations to OpenGL
skSurface->flushAndSubmit();
// 8. Swap the buffers so the onscreen surface gets updated.
glContext.flush();
}
} // namespace vision
#endif


@@ -1,72 +0,0 @@
//
// Created by Marc Rousavy on 10.08.23.
//
#pragma once
#if VISION_CAMERA_ENABLE_SKIA
#include <GLES2/gl2.h>
#include <EGL/egl.h>
#include <android/native_window.h>
#include <include/core/SkSurface.h>
#include <include/gpu/GrDirectContext.h>
#include "OpenGLContext.h"
#include "OpenGLTexture.h"
namespace vision {
#define NO_TEXTURE 0
using DrawCallback = std::function<void(SkCanvas*)>;
class SkiaRenderer {
public:
/**
* Create a new Skia renderer. You need to use OpenGL outside of this context to make sure the
* Skia renderer can use the global OpenGL context.
*/
explicit SkiaRenderer() {};
~SkiaRenderer();
/**
* Renders the given Texture (might be a Camera Frame) to a cached offscreen Texture using Skia.
*
* @returns The texture that was rendered to.
*/
OpenGLTexture renderTextureToOffscreenSurface(OpenGLContext& glContext,
OpenGLTexture& texture,
float* transformMatrix,
const DrawCallback& drawCallback);
/**
* Renders the given texture to the target output surface using Skia.
*/
void renderTextureToSurface(OpenGLContext& glContext,
OpenGLTexture& texture,
EGLSurface surface);
private:
// Gets or creates the Skia context.
sk_sp<GrDirectContext> getSkiaContext();
// Wraps a Texture as an SkImage allowing you to draw it
sk_sp<SkImage> wrapTextureAsImage(OpenGLTexture& texture);
// Wraps an EGLSurface as an SkSurface allowing you to draw into it
sk_sp<SkSurface> wrapEglSurfaceAsSurface(EGLSurface eglSurface);
// Gets or creates an off-screen surface that you can draw into
sk_sp<SkSurface> getOffscreenSurface(int width, int height);
private:
// Skia Context
sk_sp<GrDirectContext> _skiaContext = nullptr;
sk_sp<SkSurface> _offscreenSurface = nullptr;
GLuint _offscreenSurfaceTextureId = NO_TEXTURE;
static auto constexpr TAG = "SkiaRenderer";
};
} // namespace vision
#endif


@@ -1,8 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#include "VisionCameraSkiaContext.h"
namespace vision {
} // vision


@@ -1,52 +0,0 @@
//
// Created by Marc Rousavy on 31.08.23.
//
#pragma once
#include <jsi/jsi.h>
#include <RNSkPlatformContext.h>
namespace vision {
using namespace facebook;
class VisionCameraSkiaContext: public RNSkia::RNSkPlatformContext {
public:
VisionCameraSkiaContext(jsi::Runtime* runtime,
std::shared_ptr<react::CallInvoker> callInvoker,
float pixelDensity)
: RNSkia::RNSkPlatformContext(runtime, callInvoker, pixelDensity) { }
void raiseError(const std::exception &err) override {
throw std::runtime_error("VisionCameraSkiaContext Error: " + std::string(err.what()));
}
void performStreamOperation(
const std::string &sourceUri,
const std::function<void(std::unique_ptr<SkStreamAsset>)> &op) override {
throw std::runtime_error("VisionCameraSkiaContext::performStreamOperation is not yet implemented!");
}
sk_sp<SkSurface> makeOffscreenSurface(int width, int height) override {
throw std::runtime_error("VisionCameraSkiaContext::makeOffscreenSurface is not yet implemented!");
}
void runOnMainThread(std::function<void()> task) override {
throw std::runtime_error("VisionCameraSkiaContext::runOnMainThread is not yet implemented!");
}
sk_sp<SkImage> takeScreenshotFromViewTag(size_t tag) override {
throw std::runtime_error("VisionCameraSkiaContext::takeScreenshotFromViewTag is not yet implemented!");
}
void startDrawLoop() override {
throw std::runtime_error("VisionCameraSkiaContext::startDrawLoop is not yet implemented!");
}
void stopDrawLoop() override {
throw std::runtime_error("VisionCameraSkiaContext::stopDrawLoop is not yet implemented!");
}
};
} // namespace vision


@@ -200,9 +200,8 @@ class CameraSession(private val context: Context,
   private fun updateVideoOutputs() {
     val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
     val previewOutput = outputs?.previewOutput
-    videoPipeline.setRecordingSessionOutput(recording)
-    videoPipeline.setFrameProcessorOutput(frameProcessor)
-    videoPipeline.setPreviewOutput(previewOutput?.surface)
+    videoPipeline.setRecordingSessionOutput(this.recording)
+    videoPipeline.setFrameProcessorOutput(this.frameProcessor)
   }

   suspend fun takePhoto(qualityPrioritization: QualityPrioritization,

@@ -216,6 +215,8 @@ class CameraSession(private val context: Context,
     val photoOutput = outputs.photoOutput ?: throw PhotoNotEnabledError()

+    Log.i(TAG, "Photo capture 0/3 - preparing capture request (${photoOutput.size.width}x${photoOutput.size.height})...")
+
     val cameraCharacteristics = cameraManager.getCameraCharacteristics(captureSession.device.id)
     val orientation = outputOrientation.toSensorRelativeOrientation(cameraCharacteristics)
     val captureRequest = captureSession.device.createPhotoCaptureRequest(cameraManager,

@@ -226,16 +227,16 @@
       enableRedEyeReduction,
       enableAutoStabilization,
       orientation)
-    Log.i(TAG, "Photo capture 0/2 - starting capture...")
+    Log.i(TAG, "Photo capture 1/3 - starting capture...")
     val result = captureSession.capture(captureRequest, enableShutterSound)
     val timestamp = result[CaptureResult.SENSOR_TIMESTAMP]!!
-    Log.i(TAG, "Photo capture 1/2 complete - received metadata with timestamp $timestamp")
+    Log.i(TAG, "Photo capture 2/3 complete - received metadata with timestamp $timestamp")
     try {
       val image = photoOutputSynchronizer.await(timestamp)

       val isMirrored = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT
-      Log.i(TAG, "Photo capture 2/2 complete - received ${image.width} x ${image.height} image.")
+      Log.i(TAG, "Photo capture 3/3 complete - received ${image.width} x ${image.height} image.")
       return CapturedPhoto(image, result, orientation, isMirrored, image.format)
     } catch (e: CancellationException) {
       throw CaptureAbortedError(false)

@@ -501,8 +502,7 @@
     val captureRequest = camera.createCaptureRequest(template)
     outputs.previewOutput?.let { output ->
       Log.i(TAG, "Adding output surface ${output.outputType}..")
-      // TODO: Add here again?
-      // captureRequest.addTarget(output.surface)
+      captureRequest.addTarget(output.surface)
     }
     outputs.videoOutput?.let { output ->
       Log.i(TAG, "Adding output surface ${output.outputType}..")


@@ -25,6 +25,7 @@ import com.mrousavy.camera.utils.outputs.CameraOutputs
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
+import java.io.Closeable

 //
 // TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.

@@ -90,7 +91,7 @@ class CameraView(context: Context) : FrameLayout(context) {
   internal var frameProcessor: FrameProcessor? = null
     set(value) {
       field = value
-      cameraSession.frameProcessor = value
+      cameraSession.frameProcessor = frameProcessor
     }

   private val inputOrientation: Orientation


@@ -64,8 +64,7 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
   val outputConfigurations = arrayListOf<OutputConfiguration>()
   outputs.previewOutput?.let { output ->
-    // TODO: add here again?
-    // outputConfigurations.add(output.toOutputConfiguration(characteristics))
+    outputConfigurations.add(output.toOutputConfiguration(characteristics))
   }
   outputs.photoOutput?.let { output ->
     outputConfigurations.add(output.toOutputConfiguration(characteristics))
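
Note: the CameraSession and createCaptureSession hunks above re-attach the preview Surface at both levels of the Camera2 session, replacing the Skia-era TODOs. A minimal sketch of that wiring, with illustrative names (previewSurface and videoSurface are assumptions, not identifiers from this diff; session creation is elided):

import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.params.OutputConfiguration
import android.view.Surface

fun buildPreviewAndVideoRequest(camera: CameraDevice,
                                previewSurface: Surface,
                                videoSurface: Surface): CaptureRequest {
  // 1) At session creation, every output Surface becomes an OutputConfiguration.
  val outputConfigurations = arrayListOf(
    OutputConfiguration(previewSurface),
    OutputConfiguration(videoSurface))
  // (creating the CameraCaptureSession from outputConfigurations is elided here)

  // 2) On the repeating request, the preview Surface is a render target again.
  return camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD).apply {
    addTarget(previewSurface)
    addTarget(videoSurface)
  }.build()
}

With Skia gone, the preview no longer passes through an offscreen OpenGL/Skia stage; the camera renders straight into the preview Surface.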


@@ -1,66 +1,147 @@
 package com.mrousavy.camera.frameprocessor;

-import com.facebook.jni.HybridData;
+import android.graphics.ImageFormat;
+import android.media.Image;
+import com.facebook.proguard.annotations.DoNotStrip;
+import com.mrousavy.camera.parsers.PixelFormat;
+import com.mrousavy.camera.parsers.Orientation;
 import java.nio.ByteBuffer;

+/** @noinspection JavaJniMissingFunction*/
 public class Frame {
-    private final HybridData mHybridData;
+    private final Image image;
+    private final boolean isMirrored;
+    private final long timestamp;
+    private final Orientation orientation;
+    private int refCount = 0;

-    private Frame(HybridData hybridData) {
-        mHybridData = hybridData;
+    public Frame(Image image, long timestamp, Orientation orientation, boolean isMirrored) {
+        this.image = image;
+        this.timestamp = timestamp;
+        this.orientation = orientation;
+        this.isMirrored = isMirrored;
     }

-    @Override
-    protected void finalize() throws Throwable {
-        super.finalize();
-        mHybridData.resetNative();
+    public Image getImage() {
+        return image;
     }

-    /**
-     * Get the width of the Frame, in it's sensor orientation. (in pixels)
-     */
-    public native int getWidth();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public int getWidth() {
+        return image.getWidth();
+    }

-    /**
-     * Get the height of the Frame, in it's sensor orientation. (in pixels)
-     */
-    public native int getHeight();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public int getHeight() {
+        return image.getHeight();
+    }

-    /**
-     * Get whether this Frame is still valid.
-     * A Frame is valid as long as it hasn't been closed by the Frame Processor Runtime Manager
-     * (either because it ran out of Frames in it's queue and needs to close old ones, or because
-     * a Frame Processor finished executing and you're still trying to hold onto this Frame in native)
-     */
-    public native boolean getIsValid();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public boolean getIsValid() {
+        try {
+            // will throw an exception if the image is already closed
+            image.getCropRect();
+            // no exception thrown, image must still be valid.
+            return true;
+        } catch (Exception e) {
+            // exception thrown, image has already been closed.
+            return false;
+        }
+    }

-    /**
-     * Return whether this Frame is mirrored or not. Frames from the front-facing Camera are often mirrored.
-     */
-    public native boolean getIsMirrored();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public boolean getIsMirrored() {
+        return isMirrored;
+    }

-    /**
-     * Get the local timestamp of this Frame. This is always monotonically increasing for each Frame.
-     */
-    public native long getTimestamp();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public long getTimestamp() {
+        return timestamp;
+    }

-    /**
-     * Get the Orientation of this Frame. The return value is the result of `Orientation.toUnionValue()`.
-     */
-    public native String getOrientation();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public String getOrientation() {
+        return orientation.getUnionValue();
+    }

-    /**
-     * Get the pixel-format of this Frame. The return value is the result of `PixelFormat.toUnionValue()`.
-     */
-    public native String getPixelFormat();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public String getPixelFormat() {
+        PixelFormat format = PixelFormat.Companion.fromImageFormat(image.getFormat());
+        return format.getUnionValue();
+    }

+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public int getPlanesCount() {
+        return image.getPlanes().length;
+    }

-    /**
-     * Get the number of bytes per row.
-     * * To get the number of components per pixel you can divide this with the Frame's width.
-     * * To get the total size of the byte buffer you can multiply this with the Frame's height.
-     */
-    public native int getBytesPerRow();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public int getBytesPerRow() {
+        return image.getPlanes()[0].getRowStride();
+    }

-    /**
-     * Get the actual backing pixel data of this Frame using a zero-copy C++ ByteBuffer.
-     */
-    public native ByteBuffer getByteBuffer();
+    private static ByteBuffer byteArrayCache;

+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public ByteBuffer toByteBuffer() {
+        switch (image.getFormat()) {
+            case ImageFormat.YUV_420_888:
+                ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
+                ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
+                ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
+                int ySize = yBuffer.remaining();
+                int uSize = uBuffer.remaining();
+                int vSize = vBuffer.remaining();
+                int totalSize = ySize + uSize + vSize;
+                if (byteArrayCache != null) byteArrayCache.rewind();
+                if (byteArrayCache == null || byteArrayCache.remaining() != totalSize) {
+                    byteArrayCache = ByteBuffer.allocateDirect(totalSize);
+                }
+                byteArrayCache.put(yBuffer).put(uBuffer).put(vBuffer);
+                return byteArrayCache;
+            case ImageFormat.JPEG:
+                return image.getPlanes()[0].getBuffer();
+            default:
+                throw new RuntimeException("Cannot convert Frame with Format " + image.getFormat() + " to byte array!");
+        }
+    }

-    private native void incrementRefCount();
-    private native void decrementRefCount();
-    private native void close();
+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public void incrementRefCount() {
+        synchronized (this) {
+            refCount++;
+        }
+    }

+    @SuppressWarnings("unused")
+    @DoNotStrip
+    public void decrementRefCount() {
+        synchronized (this) {
+            refCount--;
+            if (refCount <= 0) {
+                // If no reference is held on this Image, close it.
+                image.close();
+            }
+        }
+    }

+    @SuppressWarnings("unused")
+    @DoNotStrip
+    private void close() {
+        image.close();
+    }
 }
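
Since Frame is now a plain Java object wrapping an android.media.Image, plugins can read pixel data without going through JNI. A hedged Kotlin sketch of a consumer (LuminancePlugin is hypothetical; the callback shape follows the ExampleFrameProcessorPlugin further down, a no-arg base constructor is assumed, and plugin registration is elided):

import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessorPlugin

// Hypothetical plugin: averages the Y (luminance) plane of a YUV_420_888 Frame.
class LuminancePlugin : FrameProcessorPlugin() {
  override fun callback(frame: Frame, params: Map<String, Any>?): Any? {
    // Bump the ref-count so a decrementRefCount() on another thread cannot
    // close the underlying Image while we are still reading it.
    frame.incrementRefCount()
    try {
      if (!frame.isValid) return null
      val y = frame.image.planes[0].buffer // plane 0 is luminance in YUV_420_888
      var sum = 0L
      var samples = 0
      var i = 0
      while (i < y.remaining()) {
        sum += y.get(i).toInt() and 0xFF
        samples++
        i += 4096 // sparse sampling keeps the Frame Processor cheap
      }
      return if (samples == 0) null else sum.toDouble() / samples
    } finally {
      frame.decrementRefCount()
    }
  }
}

One caveat visible in the diff itself: toByteBuffer() hands out a shared direct buffer (byteArrayCache) that is rewound and reused for the next Frame, so callers should finish reading, or copy, before returning.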


@@ -8,9 +8,15 @@ import com.facebook.jni.HybridData;
 import com.facebook.proguard.annotations.DoNotStrip;

 /**
- * Represents a JS Frame Processor. It's actual implementation is in NDK/C++.
+ * Represents a JS Frame Processor
  */
-public class FrameProcessor {
+@SuppressWarnings("JavaJniMissingFunction") // we're using fbjni.
+public final class FrameProcessor {
+    /**
+     * Call the JS Frame Processor function with the given Frame
+     */
+    public native void call(Frame frame);
+
     @DoNotStrip
     @Keep
     private final HybridData mHybridData;


@@ -1,18 +0,0 @@
package com.mrousavy.camera.parsers
enum class PreviewType(override val unionValue: String): JSUnionValue {
NONE("none"),
NATIVE("native"),
SKIA("skia");
companion object: JSUnionValue.Companion<PreviewType> {
override fun fromUnionValue(unionValue: String?): PreviewType {
return when (unionValue) {
"none" -> NONE
"native" -> NATIVE
"skia" -> SKIA
else -> NONE
}
}
}
}


@@ -1,11 +0,0 @@
package com.mrousavy.camera.skia;
import com.facebook.jni.HybridData;
import com.mrousavy.camera.frameprocessor.FrameProcessor;
public class SkiaFrameProcessor extends FrameProcessor {
// Implementation is in JSkiaFrameProcessor.cpp
public SkiaFrameProcessor(HybridData hybridData) {
super(hybridData);
}
}


@@ -1,32 +0,0 @@
package com.mrousavy.camera.utils
import android.graphics.ImageFormat
import android.media.Image
import android.media.ImageReader
import android.media.ImageWriter
import java.io.Closeable
class ImageCreator(private val width: Int,
private val height: Int,
private val format: Int = ImageFormat.PRIVATE,
private val maxImages: Int = 3): Closeable {
private var imageReader: ImageReader? = null
private var imageWriter: ImageWriter? = null
override fun close() {
imageWriter?.close()
imageReader?.close()
}
fun createImage(): Image {
if (imageReader == null || imageWriter == null) {
imageWriter?.close()
imageReader?.close()
imageReader = ImageReader.newInstance(width, height, format, maxImages)
imageWriter = ImageWriter.newInstance(imageReader!!.surface, maxImages)
}
return imageWriter!!.dequeueInputImage()
}
}


@@ -41,6 +41,7 @@ class RecordingSession(context: Context,
   val surface: Surface = MediaCodec.createPersistentInputSurface()

   init {
+
     outputFile = File.createTempFile("mrousavy", fileType.toExtension(), context.cacheDir)

     Log.i(TAG, "Creating RecordingSession for ${outputFile.absolutePath}")

@@ -53,7 +54,7 @@
     recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
     recorder.setOutputFile(outputFile.absolutePath)
     recorder.setVideoEncodingBitRate(VIDEO_BIT_RATE)
-    recorder.setVideoSize(size.width, size.height)
+    recorder.setVideoSize(size.height, size.width)
     if (fps != null) recorder.setVideoFrameRate(fps)

     Log.i(TAG, "Using $codec Video Codec..")

@@ -66,7 +67,7 @@
       recorder.setAudioChannels(AUDIO_CHANNELS)
     }
     recorder.setInputSurface(surface)
-    recorder.setOrientationHint(orientation.toDegrees())
+    //recorder.setOrientationHint(orientation.toDegrees())

     recorder.setOnErrorListener { _, what, extra ->
       Log.e(TAG, "MediaRecorder Error: $what ($extra)")
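
A note on the two RecordingSession changes above: Camera2 reports stream sizes in sensor (landscape) coordinates, so passing (size.height, size.width) configures the encoder for portrait-shaped output directly, which appears to be why the setOrientationHint() metadata call is commented out. A minimal sketch under that assumption (the 1920x1080 size is illustrative; real code reads it from the stream configuration map):

import android.media.MediaRecorder
import android.util.Size

fun main() {
  val sensorSize = Size(1920, 1080) // assumed sensor-reported (landscape) size
  val recorder = MediaRecorder()
  // Swapped arguments: record 1080x1920 (portrait) up front instead of recording
  // 1920x1080 and rotating at playback time via setOrientationHint() metadata.
  recorder.setVideoSize(sensorSize.height, sensorSize.width)
}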


@@ -2,12 +2,15 @@ package com.mrousavy.camera.utils

 import android.graphics.ImageFormat
 import android.graphics.SurfaceTexture
+import android.media.ImageReader
 import android.media.ImageWriter
 import android.media.MediaRecorder
 import android.util.Log
 import android.view.Surface
 import com.facebook.jni.HybridData
+import com.mrousavy.camera.frameprocessor.Frame
 import com.mrousavy.camera.frameprocessor.FrameProcessor
+import com.mrousavy.camera.parsers.Orientation
 import java.io.Closeable

 /**

@@ -23,25 +26,21 @@ class VideoPipeline(val width: Int,
                     val height: Int,
                     val format: Int = ImageFormat.PRIVATE): SurfaceTexture.OnFrameAvailableListener, Closeable {
   companion object {
-    private const val MAX_IMAGES = 3
+    private const val MAX_IMAGES = 5
     private const val TAG = "VideoPipeline"
   }

   private val mHybridData: HybridData
-  private var isActive = true
-
-  // Input Texture
   private var openGLTextureId: Int? = null
   private var transformMatrix = FloatArray(16)
+  private var isActive = true

-  // Processing input texture
-  private var frameProcessor: FrameProcessor? = null
-
   // Output 1
-  private var recordingSession: RecordingSession? = null
+  private var frameProcessor: FrameProcessor? = null
+  private var imageReader: ImageReader? = null

   // Output 2
-  private var previewSurface: Surface? = null
+  private var recordingSession: RecordingSession? = null

   // Input
   private val surfaceTexture: SurfaceTexture

@@ -58,6 +57,8 @@ class VideoPipeline(val width: Int,
   override fun close() {
     synchronized(this) {
       isActive = false
+      imageReader?.close()
+      imageReader = null
       frameProcessor = null
       recordingSession = null
       surfaceTexture.release()

@@ -90,6 +91,21 @@
     }
   }

+  private fun getImageReader(): ImageReader {
+    val imageReader = ImageReader.newInstance(width, height, format, MAX_IMAGES)
+    imageReader.setOnImageAvailableListener({ reader ->
+      Log.i("VideoPipeline", "ImageReader::onImageAvailable!")
+      val image = reader.acquireLatestImage() ?: return@setOnImageAvailableListener
+
+      // TODO: Get correct orientation and isMirrored
+      val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
+      frame.incrementRefCount()
+      frameProcessor?.call(frame)
+      frame.decrementRefCount()
+    }, null)
+    return imageReader
+  }
+
   /**
    * Configures the Pipeline to also call the given [FrameProcessor].
    * * If the [frameProcessor] is `null`, this output channel will be removed.

@@ -102,11 +118,20 @@
       this.frameProcessor = frameProcessor

       if (frameProcessor != null) {
-        // Configure OpenGL pipeline to stream Frames into the Frame Processor (CPU pixel access)
-        setFrameProcessor(frameProcessor)
+        if (this.imageReader == null) {
+          // 1. Create new ImageReader that just calls the Frame Processor
+          this.imageReader = getImageReader()
+        }
+
+        // 2. Configure OpenGL pipeline to stream Frames into the ImageReader's surface
+        setFrameProcessorOutputSurface(imageReader!!.surface)
       } else {
-        // Configure OpenGL pipeline to stop streaming Frames into a Frame Processor
-        removeFrameProcessor()
+        // 1. Configure OpenGL pipeline to stop streaming Frames into the ImageReader's surface
+        removeFrameProcessorOutputSurface()
+
+        // 2. Close the ImageReader
+        this.imageReader?.close()
+        this.imageReader = null
       }
     }
   }

@@ -131,27 +156,12 @@
     }
   }

-  fun setPreviewOutput(surface: Surface?) {
-    synchronized(this) {
-      Log.i(TAG, "Setting Preview Output...")
-      if (surface != null) {
-        setPreviewOutputSurface(surface)
-        this.previewSurface = surface
-      } else {
-        removePreviewOutputSurface()
-        this.previewSurface = null
-      }
-    }
-  }
-
   private external fun getInputTextureId(): Int
   private external fun onBeforeFrame()
   private external fun onFrame(transformMatrix: FloatArray)
-  private external fun setFrameProcessor(frameProcessor: FrameProcessor)
-  private external fun removeFrameProcessor()
+  private external fun setFrameProcessorOutputSurface(surface: Any)
+  private external fun removeFrameProcessorOutputSurface()
   private external fun setRecordingSessionOutputSurface(surface: Any)
   private external fun removeRecordingSessionOutputSurface()
-  private external fun setPreviewOutputSurface(surface: Any)
-  private external fun removePreviewOutputSurface()
   private external fun initHybrid(width: Int, height: Int): HybridData
 }
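
For reference, the new Frame Processor output path in standalone form: an ImageReader whose Surface the OpenGL pipeline renders into, delivering CPU-accessible Images to a listener. A sketch assuming a CPU-readable YUV_420_888 format (the pipeline's default ImageFormat.PRIVATE cannot be read from the CPU) and reusing the Frame and Orientation types from this diff:

import android.graphics.ImageFormat
import android.media.ImageReader
import com.mrousavy.camera.frameprocessor.Frame
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Orientation

fun createFrameProcessorReader(width: Int, height: Int, frameProcessor: FrameProcessor): ImageReader {
  val reader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, /* maxImages */ 5)
  reader.setOnImageAvailableListener({ r ->
    // acquireLatestImage() drops stale frames under load and may return null.
    val image = r.acquireLatestImage() ?: return@setOnImageAvailableListener
    // Orientation and mirroring are hardcoded here, mirroring the TODO in the hunk above.
    val frame = Frame(image, image.timestamp, Orientation.PORTRAIT, false)
    frame.incrementRefCount()   // keep the Image alive while JS uses it
    try {
      frameProcessor.call(frame)
    } finally {
      frame.decrementRefCount() // closes the Image once the count reaches zero
    }
  }, /* handler */ null)
  return reader
}

Rendering into the reader's surface is what setFrameProcessorOutputSurface(imageReader!!.surface) wires up on the C++ side.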


@@ -38,8 +38,7 @@ Before opening an issue, make sure you try the following:
 3. Select **Swift File** and press **Next**
 4. Choose whatever name you want, e.g. `File.swift` and press **Create**
 5. Press **Create Bridging Header** when prompted.
-6. Try building without Skia. Set `$VCDisableSkia = true` in the top of your Podfile, and try rebuilding.
-7. Try building without Frame Processors. Set `$VCDisableFrameProcessors = true` in the top of your Podfile, and try rebuilding.
+6. Try building without Frame Processors. Set `$VCDisableFrameProcessors = true` in the top of your Podfile, and try rebuilding.

 ### Runtime Issues

@@ -85,8 +84,7 @@ Before opening an issue, make sure you try the following:
    ```
    distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip
    ```
-7. Try building without Skia. Set `VisionCamera_disableSkia = true` in your `gradle.properties`, and try rebuilding.
-8. Try building without Frame Processors. Set `VisionCamera_disableFrameProcessors = true` in your `gradle.properties`, and try rebuilding.
+7. Try building without Frame Processors. Set `VisionCamera_disableFrameProcessors = true` in your `gradle.properties`, and try rebuilding.

 ### Runtime Issues


@@ -17,8 +17,9 @@ public class ExampleFrameProcessorPlugin extends FrameProcessorPlugin {
   @Override
   public Object callback(@NotNull Frame frame, @Nullable Map<String, Object> params) {
     if (params == null) return null;
+    Image image = frame.getImage();

-    Log.d("ExamplePlugin", frame.getWidth() + " x " + frame.getHeight() + " Image with format #" + frame.getPixelFormat() + ". Logging " + params.size() + " parameters:");
+    Log.d("ExamplePlugin", image.getWidth() + " x " + image.getHeight() + " Image with format #" + image.getFormat() + ". Logging " + params.size() + " parameters:");

     for (String key : params.keySet()) {
       Object value = params.get(key);


@@ -41,4 +41,3 @@ hermesEnabled=true

 # Can be set to true to disable the build setup
 #VisionCamera_disableFrameProcessors=true
-#VisionCamera_disableSkia=true


@@ -333,10 +333,6 @@ PODS:
     - React-Core
   - react-native-safe-area-context (4.7.1):
     - React-Core
-  - react-native-skia (0.1.200):
-    - React
-    - React-callinvoker
-    - React-Core
   - react-native-video (5.2.1):
     - React-Core
     - react-native-video/Video (= 5.2.1)

@@ -505,11 +501,10 @@ PODS:
     - libwebp (~> 1.0)
     - SDWebImage/Core (~> 5.10)
   - SocketRocket (0.6.1)
-  - VisionCamera (3.0.0-rc.6):
+  - VisionCamera (3.0.0-rc.8):
     - React
     - React-callinvoker
     - React-Core
-    - react-native-skia
     - react-native-worklets-core
   - Yoga (1.14.0)

@@ -540,7 +535,6 @@ DEPENDENCIES:
   - "react-native-blur (from `../node_modules/@react-native-community/blur`)"
   - "react-native-cameraroll (from `../node_modules/@react-native-camera-roll/camera-roll`)"
   - react-native-safe-area-context (from `../node_modules/react-native-safe-area-context`)
-  - "react-native-skia (from `../node_modules/@shopify/react-native-skia`)"
   - react-native-video (from `../node_modules/react-native-video`)
   - react-native-worklets-core (from `../node_modules/react-native-worklets-core`)
   - React-NativeModulesApple (from `../node_modules/react-native/ReactCommon/react/nativemodule/core/platform/ios`)

@@ -628,8 +622,6 @@ EXTERNAL SOURCES:
     :path: "../node_modules/@react-native-camera-roll/camera-roll"
   react-native-safe-area-context:
     :path: "../node_modules/react-native-safe-area-context"
-  react-native-skia:
-    :path: "../node_modules/@shopify/react-native-skia"
   react-native-video:
     :path: "../node_modules/react-native-video"
   react-native-worklets-core:

@@ -713,7 +705,6 @@ SPEC CHECKSUMS:
   react-native-blur: cfdad7b3c01d725ab62a8a729f42ea463998afa2
   react-native-cameraroll: 134805127580aed23403b8c2cb1548920dd77b3a
   react-native-safe-area-context: 9697629f7b2cda43cf52169bb7e0767d330648c2
-  react-native-skia: d0b0aab6bb1f146eb6f379fb671b719deabd20fb
   react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253
   react-native-worklets-core: 7ad416a8965086b98b07964f7f6932560a54a14c
   React-NativeModulesApple: c57f3efe0df288a6532b726ad2d0322a9bf38472

@@ -742,7 +733,7 @@ SPEC CHECKSUMS:
   SDWebImage: a7f831e1a65eb5e285e3fb046a23fcfbf08e696d
   SDWebImageWebPCoder: 908b83b6adda48effe7667cd2b7f78c897e5111d
   SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
-  VisionCamera: 35a762f77816462a4d59a580ca197ffa29954112
+  VisionCamera: 5bd7961602a7db4de21fdc3588df6ce01d693d37
   Yoga: 8796b55dba14d7004f980b54bcc9833ee45b28ce

 PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c


@@ -18,7 +18,6 @@
     "@react-native-community/blur": "^4.3.2",
     "@react-navigation/native": "^6.1.7",
     "@react-navigation/native-stack": "^6.9.13",
-    "@shopify/react-native-skia": "^0.1.200",
     "react": "^18.2.0",
     "react-native": "^0.72.3",
     "react-native-fast-image": "^8.6.3",


@@ -9,7 +9,6 @@ import {
   sortFormats,
   useCameraDevices,
   useFrameProcessor,
-  useSkiaFrameProcessor,
   VideoFile,
 } from 'react-native-vision-camera';
 import { Camera } from 'react-native-vision-camera';

@@ -25,7 +24,6 @@ import IonIcon from 'react-native-vector-icons/Ionicons';
 import type { Routes } from './Routes';
 import type { NativeStackScreenProps } from '@react-navigation/native-stack';
 import { useIsFocused } from '@react-navigation/core';
-import { Skia } from '@shopify/react-native-skia';
 import { FACE_SHADER } from './Shaders';
 import { examplePlugin } from './frame-processors/ExamplePlugin';

@@ -199,35 +197,11 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
     console.log('re-rendering camera page without active camera');
   }

-  const radius = (format?.videoHeight ?? 1080) * 0.1;
-  const width = radius;
-  const height = radius;
-  const x = (format?.videoHeight ?? 1080) / 2 - radius / 2;
-  const y = (format?.videoWidth ?? 1920) / 2 - radius / 2;
-  const centerX = x + width / 2;
-  const centerY = y + height / 2;
-
-  const runtimeEffect = Skia.RuntimeEffect.Make(FACE_SHADER);
-  if (runtimeEffect == null) throw new Error('Shader failed to compile!');
-  const shaderBuilder = Skia.RuntimeShaderBuilder(runtimeEffect);
-  shaderBuilder.setUniform('r', [width]);
-  shaderBuilder.setUniform('x', [centerX]);
-  shaderBuilder.setUniform('y', [centerY]);
-  shaderBuilder.setUniform('resolution', [1920, 1080]);
-  const imageFilter = Skia.ImageFilter.MakeRuntimeShader(shaderBuilder, null, null);
-  const paint = Skia.Paint();
-  paint.setImageFilter(imageFilter);
-
-  const frameProcessor = useSkiaFrameProcessor((frame) => {
+  const frameProcessor = useFrameProcessor((frame) => {
     'worklet';
-    const rect = Skia.XYWHRect(150, 150, 300, 300);
-    const paint = Skia.Paint();
-    paint.setColor(Skia.Color('red'));
-    frame.drawRect(rect, paint);
     console.log(frame.timestamp, frame.toString(), frame.pixelFormat);
+    examplePlugin(frame);
   }, []);

   return (


@@ -2319,14 +2319,6 @@
   dependencies:
     nanoid "^3.1.23"

-"@shopify/react-native-skia@^0.1.200":
-  version "0.1.200"
-  resolved "https://registry.yarnpkg.com/@shopify/react-native-skia/-/react-native-skia-0.1.200.tgz#3ef86750106a3b7e02496133173b449bfce6abc2"
-  integrity sha512-wAauKsLgScLspJY4KzoV0lWoXFCbzsUDJ3uso0o81HQMKBjDvXG9aOq/xE0KFLQsrQVICRdbfvvoYLQvSh/Xmw==
-  dependencies:
-    canvaskit-wasm "0.38.0"
-    react-reconciler "^0.27.0"
-
 "@sideway/address@^4.1.3":
   version "4.1.4"
   resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"

@@ -3019,11 +3011,6 @@ caniuse-lite@^1.0.30001503:
   resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001517.tgz#90fabae294215c3495807eb24fc809e11dc2f0a8"
   integrity sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA==

-canvaskit-wasm@0.38.0:
-  version "0.38.0"
-  resolved "https://registry.yarnpkg.com/canvaskit-wasm/-/canvaskit-wasm-0.38.0.tgz#83e6c46f3015c2ff3f6503157f47453af76a7be7"
-  integrity sha512-ZEG6lucpbQ4Ld+mY8C1Ng+PMLVP+/AX02jS0Sdl28NyMxuKSa9uKB8oGd1BYp1XWPyO2Jgr7U8pdyjJ/F3xR5Q==
-
 chalk@^2.0.0, chalk@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"

@@ -5857,14 +5844,6 @@ react-native@^0.72.3:
     ws "^6.2.2"
     yargs "^17.6.2"

-react-reconciler@^0.27.0:
-  version "0.27.0"
-  resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.27.0.tgz#360124fdf2d76447c7491ee5f0e04503ed9acf5b"
-  integrity sha512-HmMDKciQjYmBRGuuhIaKA1ba/7a+UsM5FzOZsMO2JYHt9Jh8reCb7j1eDC95NOyUlKM9KRyvdx0flBuDvYSBoA==
-  dependencies:
-    loose-envify "^1.1.0"
-    scheduler "^0.21.0"
-
 react-refresh@^0.4.0:
   version "0.4.3"
   resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.4.3.tgz#966f1750c191672e76e16c2efa569150cc73ab53"

@@ -6115,13 +6094,6 @@ scheduler@0.24.0-canary-efb381bbf-20230505:
   dependencies:
     loose-envify "^1.1.0"

-scheduler@^0.21.0:
-  version "0.21.0"
-  resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.21.0.tgz#6fd2532ff5a6d877b6edb12f00d8ab7e8f308820"
-  integrity sha512-1r87x5fz9MXqswA2ERLo0EbOAU74DpIUO090gIasYTqlVoJeMcl+Z1Rg7WHz+qtPujhS/hGIt9kxZOYBV3faRQ==
-  dependencies:
-    loose-envify "^1.1.0"
-
 semver@^5.6.0:
   version "5.7.1"
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"


@@ -245,7 +245,6 @@ enum CaptureError {
 enum SystemError: String {
   case noManager = "no-camera-manager"
-  case skiaUnavailable = "skia-unavailable"
   case frameProcessorsUnavailable = "frame-processors-unavailable"

   var code: String {

@@ -256,8 +255,6 @@
     switch self {
     case .noManager:
       return "No Camera Manager was found."
-    case .skiaUnavailable:
-      return "Skia Integration is unavailable - is @shopify/react-native-skia installed?"
     case .frameProcessorsUnavailable:
       return "Frame Processors are unavailable - is react-native-worklets-core installed?"
     }


@@ -10,41 +10,10 @@ import AVFoundation
 import Foundation

 extension CameraView {
-  #if VISION_CAMERA_ENABLE_SKIA
-  @objc
-  func getSkiaRenderer() -> SkiaRenderer {
-    if skiaRenderer == nil {
-      skiaRenderer = SkiaRenderer()
-    }
-    return skiaRenderer!
-  }
-  #endif
-
   public func setupPreviewView() {
-    switch previewType {
-    case "none":
-      previewView?.removeFromSuperview()
-      previewView = nil
-    case "native":
-      // Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
-      if previewView is NativePreviewView { return }
     previewView?.removeFromSuperview()
     previewView = NativePreviewView(frame: frame, session: captureSession)
     addSubview(previewView!)
-    case "skia":
-      // Skia Preview View allows user to draw onto a Frame in a Frame Processor
-      #if VISION_CAMERA_ENABLE_SKIA
-      if previewView is SkiaPreviewView { return }
-      previewView?.removeFromSuperview()
-      previewView = SkiaPreviewView(frame: frame, skiaRenderer: getSkiaRenderer())
-      addSubview(previewView!)
-      #else
-      invokeOnError(.system(.skiaUnavailable))
-      return
-      #endif
-    default:
-      invokeOnError(.parameter(.invalid(unionName: "previewType", receivedValue: previewType as String)))
-    }
   }

   internal func setupFpsGraph() {


@@ -26,8 +26,7 @@ private let propsThatRequireReconfiguration = ["cameraId",
                                                "photo",
                                                "video",
                                                "enableFrameProcessor",
-                                               "pixelFormat",
-                                               "previewType"]
+                                               "pixelFormat"]
 private let propsThatRequireDeviceReconfiguration = ["fps",
                                                      "hdr",
                                                      "lowLightBoost"]

@@ -59,7 +58,6 @@ public final class CameraView: UIView {
   @objc var zoom: NSNumber = 1.0 // in "factor"
   @objc var enableFpsGraph = false
   @objc var videoStabilizationMode: NSString?
-  @objc var previewType: NSString = "none"
   // events
   @objc var onInitialized: RCTDirectEventBlock?
   @objc var onError: RCTDirectEventBlock?

@@ -93,9 +91,6 @@
 #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
   @objc public var frameProcessor: FrameProcessor?
 #endif
-#if VISION_CAMERA_ENABLE_SKIA
-  internal var skiaRenderer: SkiaRenderer?
-#endif
   // CameraView+Zoom
   internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
   internal var pinchScaleOffset: CGFloat = 1.0

@@ -188,11 +183,6 @@
     let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
     let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")

-    if changedProps.contains("previewType") {
-      DispatchQueue.main.async {
-        self.setupPreviewView()
-      }
-    }
     if changedProps.contains("enableFpsGraph") {
       DispatchQueue.main.async {
         self.setupFpsGraph()

View File

@@ -41,7 +41,6 @@ RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(pixelFormat, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
-RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFpsGraph, BOOL);

View File

@@ -18,10 +18,10 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
+  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
-  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isDrawable")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
  // Conversion
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
@@ -105,9 +105,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
    return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
  }
-  if (name == "isDrawable") {
-    return jsi::Value(false);
-  }
  if (name == "isValid") {
    auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
    return jsi::Value(isValid);
@@ -175,6 +172,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
    auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    return jsi::Value((double) bytesPerRow);
  }
+  if (name == "planesCount") {
+    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
+    auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
+    return jsi::Value((double) planesCount);
+  }
  // fallback to base implementation
  return HostObject::get(runtime, propName);
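
For reference, the re-added `planesCount` getter above surfaces on the JS `Frame` object next to `width`, `height` and `bytesPerRow`. A minimal sketch of reading it from a Frame Processor (TypeScript, using this library's `useFrameProcessor` hook; the log line is illustrative only):

    import { useFrameProcessor } from 'react-native-vision-camera';

    // inside a React component:
    const frameProcessor = useFrameProcessor((frame) => {
      'worklet';
      // planesCount maps to CVPixelBufferGetPlaneCount(..) on iOS, see the getter above
      if (frame.isValid) {
        console.log(`${frame.width}x${frame.height}, ${frame.bytesPerRow} bytes/row, ${frame.planesCount} plane(s)`);
      }
    }, []);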

View File

@@ -24,11 +24,6 @@
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
-#if VISION_CAMERA_ENABLE_SKIA
-#import "SkiaRenderer.h"
-#import "../Skia Render Layer/SkiaFrameProcessor.h"
-#endif
// Swift forward-declarations
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues: NSObject
@@ -38,9 +33,6 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView: UIView
@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
-#if VISION_CAMERA_ENABLE_SKIA
-- (SkiaRenderer* _Nonnull)getSkiaRenderer;
-#endif
@end
using namespace facebook;
@@ -80,7 +72,6 @@ std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& r
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
-  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("isSkiaEnabled")));
  return result;
}
@@ -96,15 +87,6 @@ void VisionCameraProxy::setFrameProcessor(jsi::Runtime& runtime, int viewTag, co
    view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:worklet
                                                          context:_workletContext];
-  } else if (frameProcessorType == "skia-frame-processor") {
-#if VISION_CAMERA_ENABLE_SKIA
-    SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
-    view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:worklet
-                                                              context:_workletContext
-                                                         skiaRenderer:skiaRenderer];
-#else
-    throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
-#endif
  } else {
    throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
  }
@@ -135,13 +117,6 @@ jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime, std
jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);
-  if (name == "isSkiaEnabled") {
-#ifdef VISION_CAMERA_ENABLE_SKIA
-    return jsi::Value(true);
-#else
-    return jsi::Value(false);
-#endif
-  }
  if (name == "setFrameProcessor") {
    return jsi::Function::createFromHostFunction(runtime,
                                                 jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"),

View File

@@ -1,35 +0,0 @@
//
// DrawableFrameHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <jsi/jsi.h>
#import "../Frame Processor/FrameHostObject.h"
#import "../Frame Processor/Frame.h"
#import <CoreMedia/CMSampleBuffer.h>
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
public:
explicit DrawableFrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
FrameHostObject(frame), _canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
void invalidateCanvas();
private:
std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
};

View File

@@ -1,83 +0,0 @@
//
// DrawableFrameHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import "DrawableFrameHostObject.h"
#import "SkCanvas.h"
#import "SkImageHelpers.h"
std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
auto result = FrameHostObject::getPropertyNames(rt);
// Skia - Render Frame
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (_canvas != nullptr) {
auto canvasPropNames = _canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
SkRect inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta, size.width(),
size.height());
}
jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (_canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = _canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
_canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
_canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "isDrawable") {
return jsi::Value(_canvas != nullptr);
}
if (_canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
auto result = _canvas->get(runtime, propName);
if (!result.isUndefined()) {
return result;
}
}
// fallback to base implementation
return FrameHostObject::get(runtime, propName);
}
void DrawableFrameHostObject::invalidateCanvas() {
_canvas = nullptr;
}

View File

@@ -1,42 +0,0 @@
//
// SkImageHelpers.h
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <MetalKit/MetalKit.h>
#import <include/gpu/GrRecordingContext.h>
#import "SkImage.h"
#import "SkSize.h"
#import "SkRect.h"
class SkImageHelpers {
public:
SkImageHelpers() = delete;
public:
/**
Convert a CMSampleBuffer to an SkImage. Format has to be RGB.
*/
static sk_sp<SkImage> convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer);
/**
Convert a MTLTexture to an SkImage. Format has to be RGB.
*/
static sk_sp<SkImage> convertMTLTextureToSkImage(GrRecordingContext* context, id<MTLTexture> mtlTexture);
/**
Creates a Center Crop Transformation Rect so that the source rect fills (aspectRatio: cover) the destination rect.
The return value should be passed as a sourceRect to a canvas->draw...Rect(..) function, destinationRect should stay the same.
*/
static SkRect createCenterCropRect(SkRect source, SkRect destination);
private:
static SkRect inscribe(SkSize size, SkRect rect);
};

View File

@@ -1,116 +0,0 @@
//
// CMSampleBuffer+toSkImage.m
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "SkImageHelpers.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkImage.h>
#import <include/gpu/ganesh/SkImageGanesh.h>
#import <include/gpu/mtl/GrMtlTypes.h>
#import <include/gpu/GrBackendSurface.h>
#include <TargetConditionals.h>
#if TARGET_RT_BIG_ENDIAN
# define FourCC2Str(fourcc) (const char[]){*((char*)&fourcc), *(((char*)&fourcc)+1), *(((char*)&fourcc)+2), *(((char*)&fourcc)+3),0}
#else
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
inline CVMetalTextureCacheRef getTextureCache() {
static CVMetalTextureCacheRef textureCache = nil;
if (textureCache == nil) {
// Create a new Texture Cache
auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
nil,
MTLCreateSystemDefaultDevice(),
nil,
&textureCache);
if (result != kCVReturnSuccess || textureCache == nil) {
throw std::runtime_error("Failed to create Metal Texture Cache!");
}
}
return textureCache;
}
sk_sp<SkImage> SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer) {
auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
double width = CVPixelBufferGetWidth(pixelBuffer);
double height = CVPixelBufferGetHeight(pixelBuffer);
// Make sure the format is RGB (BGRA_8888)
auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + FourCC2Str(format) + std::string(") - cannot convert to SkImage!");
throw std::runtime_error(error);
}
auto textureCache = getTextureCache();
// Convert CMSampleBuffer* -> CVMetalTexture*
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache,
pixelBuffer,
nil,
MTLPixelFormatBGRA8Unorm,
width,
height,
0, // plane index
&cvTexture);
auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
auto image = convertMTLTextureToSkImage(context, mtlTexture);
// Release the Texture wrapper (it will still be strong)
CFRelease(cvTexture);
return image;
}
sk_sp<SkImage> SkImageHelpers::convertMTLTextureToSkImage(GrRecordingContext* context, id<MTLTexture> texture) {
// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendTexture backendTexture((int)texture.width,
(int)texture.height,
GrMipmapped::kNo,
textureInfo);
// TODO: Adopt or Borrow?
auto image = SkImages::AdoptTextureFrom(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());
return image;
}
SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinationRect) {
SkSize src;
if (destinationRect.width() / destinationRect.height() > sourceRect.width() / sourceRect.height()) {
src = SkSize::Make(sourceRect.width(), (sourceRect.width() * destinationRect.height()) / destinationRect.width());
} else {
src = SkSize::Make((sourceRect.height() * destinationRect.width()) / destinationRect.height(), sourceRect.height());
}
return inscribe(src, sourceRect);
}
SkRect SkImageHelpers::inscribe(SkSize size, SkRect rect) {
auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
rect.y() + halfHeightDelta,
size.width(),
size.height());
}

View File

@@ -1,27 +0,0 @@
//
// SkiaFrameProcessor.h
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import "FrameProcessor.h"
#import "SkiaRenderer.h"
#ifdef __cplusplus
#import "WKTJsiWorklet.h"
#endif
@interface SkiaFrameProcessor: FrameProcessor
#ifdef __cplusplus
- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
#endif
@end

View File

@@ -1,56 +0,0 @@
//
// SkiaFrameProcessor.mm
// VisionCamera
//
// Created by Marc Rousavy on 14.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaFrameProcessor.h"
#import "SkiaRenderer.h"
#import <memory>
#import <jsi/jsi.h>
#import "DrawableFrameHostObject.h"
#import <react-native-skia/JsiSkCanvas.h>
#import <react-native-skia/RNSkiOSPlatformContext.h>
using namespace facebook;
@implementation SkiaFrameProcessor {
SkiaRenderer* _skiaRenderer;
std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
}
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
if (self = [super initWithWorklet:worklet
context:context]) {
_skiaRenderer = skiaRenderer;
auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
RCTBridge.currentBridge);
_skiaCanvas = std::make_shared<RNSkia::JsiSkCanvas>(platformContext);
}
return self;
}
- (void)call:(Frame*)frame {
[_skiaRenderer renderCameraFrameToOffscreenSurface:frame.buffer
withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
// Create the Frame Host Object wrapping the internal Frame and Skia Canvas
self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);
// Call JS Frame Processor
[self callWithFrameHostObject:frameHostObject];
// Remove Skia Canvas from Host Object because it is no longer valid
frameHostObject->invalidateCanvas();
}];
}
@end

View File

@@ -1,51 +0,0 @@
//
// SkiaPreviewDisplayLink.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
class SkiaPreviewDisplayLink {
private var displayLink: CADisplayLink?
private let callback: (_ timestamp: Double) -> Void
init(callback: @escaping (_ timestamp: Double) -> Void) {
self.callback = callback
}
deinit {
stop()
}
@objc
func update(_ displayLink: CADisplayLink) {
callback(displayLink.timestamp)
}
func start() {
if displayLink == nil {
let displayLink = CADisplayLink(target: self, selector: #selector(update))
let queue = DispatchQueue(label: "mrousavy/VisionCamera.preview",
qos: .userInteractive,
attributes: [],
autoreleaseFrequency: .inherit,
target: nil)
queue.async {
displayLink.add(to: .current, forMode: .common)
self.displayLink = displayLink
ReactLogger.log(level: .info, message: "Starting Skia Preview Display Link...")
RunLoop.current.run()
ReactLogger.log(level: .info, message: "Skia Preview Display Link stopped.")
}
}
}
func stop() {
displayLink?.invalidate()
displayLink = nil
}
}

View File

@@ -1,81 +0,0 @@
//
// SkiaPreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import Foundation
// MARK: - SkiaPreviewLayer
class SkiaPreviewLayer: CAMetalLayer {
private var pixelRatio: CGFloat {
return UIScreen.main.scale
}
init(device: MTLDevice) {
super.init()
framebufferOnly = true
self.device = device
isOpaque = false
pixelFormat = .bgra8Unorm
contentsScale = pixelRatio
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setSize(width: CGFloat, height: CGFloat) {
frame = CGRect(x: 0, y: 0, width: width, height: height)
drawableSize = CGSize(width: width * pixelRatio,
height: height * pixelRatio)
}
}
// MARK: - SkiaPreviewView
class SkiaPreviewView: PreviewView {
private let skiaRenderer: SkiaRenderer
private let previewLayer: SkiaPreviewLayer
private lazy var displayLink = SkiaPreviewDisplayLink(callback: { [weak self] _ in
// Called everytime to render the screen - e.g. 60 FPS
if let self = self {
self.skiaRenderer.renderLatestFrame(to: self.previewLayer)
}
})
init(frame: CGRect, skiaRenderer: SkiaRenderer) {
self.skiaRenderer = skiaRenderer
previewLayer = SkiaPreviewLayer(device: skiaRenderer.metalDevice)
super.init(frame: frame)
}
deinit {
self.displayLink.stop()
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func willMove(toSuperview newSuperview: UIView?) {
if newSuperview != nil {
layer.addSublayer(previewLayer)
displayLink.start()
} else {
previewLayer.removeFromSuperlayer()
displayLink.stop()
}
}
override func layoutSubviews() {
previewLayer.setSize(width: bounds.size.width,
height: bounds.size.height)
}
}

View File

@@ -1,26 +0,0 @@
//
// SkiaRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#pragma once
#import <MetalKit/MetalKit.h>
#import <include/gpu/GrDirectContext.h>
#import <include/gpu/mtl/GrMtlTypes.h>
struct RenderContext {
id<MTLDevice> device;
id<MTLCommandQueue> commandQueue;
sk_sp<GrDirectContext> skiaContext;
RenderContext() {
device = MTLCreateSystemDefaultDevice();
commandQueue = id<MTLCommandQueue>(CFRetain((GrMTLHandle)[device newCommandQueue]));
skiaContext = GrDirectContext::MakeMetal((__bridge void*)device,
(__bridge void*)commandQueue);
}
};

View File

@@ -1,45 +0,0 @@
//
// SkiaRenderer.h
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
typedef void* SkiaCanvas;
typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
/**
A Camera Frame Renderer powered by Skia.
It provides two Contexts, one offscreen and one onscreen.
- Offscreen Context: Allows you to render a Frame into a Skia Canvas and draw onto it using Skia commands
- Onscreen Context: Allows you to render a Frame from the offscreen context onto a Layer allowing it to be displayed for Preview.
The two contexts may run at different Frame Rates.
*/
@interface SkiaRenderer : NSObject
/**
Renders the given Camera Frame to the offscreen Skia Canvas.
The given callback will be executed with a reference to the Skia Canvas
for the user to perform draw operations on (in this case, through a JS Frame Processor)
*/
- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
/**
Renders the latest Frame to the onscreen Layer.
This should be called everytime you want the UI to update, e.g. for 60 FPS; every 16.66ms.
*/
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer;
/**
The Metal Device used for Rendering to the Layer
*/
@property (nonatomic, readonly) id<MTLDevice> _Nonnull metalDevice;
@end

View File

@@ -1,212 +0,0 @@
//
// SkiaRenderer.mm
// VisionCamera
//
// Created by Marc Rousavy on 19.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "SkiaRenderer.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import "SkiaRenderContext.h"
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkColorSpace.h>
#import <include/gpu/mtl/GrMtlTypes.h>
#import <include/gpu/GrBackendSurface.h>
#import <include/gpu/ganesh/SkSurfaceGanesh.h>
#import <include/gpu/ganesh/mtl/SkSurfaceMetal.h>
#import "SkImageHelpers.h"
#import <system_error>
#import <memory>
#import <mutex>
@implementation SkiaRenderer {
// The context we draw each Frame on
std::unique_ptr<RenderContext> _offscreenContext;
// The context the preview runs on
std::unique_ptr<RenderContext> _layerContext;
// The texture holding the drawn-to Frame
id<MTLTexture> _texture;
// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
std::atomic<bool> _hasNewFrame;
}
- (instancetype)init {
if (self = [super init]) {
_offscreenContext = std::make_unique<RenderContext>();
_layerContext = std::make_unique<RenderContext>();
_texture = nil;
_hasNewFrame = false;
}
return self;
}
- (id<MTLDevice>)metalDevice {
return _layerContext->device;
}
- (id<MTLTexture>)getTexture:(NSUInteger)width height:(NSUInteger)height {
if (_texture == nil
|| _texture.width != width
|| _texture.height != height) {
// Create new texture with the given width and height
MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:width
height:height
mipmapped:NO];
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
_texture = [_offscreenContext->device newTextureWithDescriptor:textureDescriptor];
}
return _texture;
}
- (void)renderCameraFrameToOffscreenSurface:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
// Wrap in auto release pool since we want the system to clean up after rendering
@autoreleasepool {
// Get the Frame's PixelBuffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
throw std::runtime_error("SkiaRenderer: Pixel Buffer is corrupt/empty.");
}
// Lock Mutex to block the runLoop from overwriting the _currentDrawable
std::unique_lock lock(_textureMutex);
// Get the Metal Texture we use for in-memory drawing
auto texture = [self getTexture:CVPixelBufferGetWidth(pixelBuffer)
height:CVPixelBufferGetHeight(pixelBuffer)];
// Get & Lock the writeable Texture from the Metal Drawable
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendRenderTarget backendRenderTarget((int)texture.width,
(int)texture.height,
1,
textureInfo);
auto context = _offscreenContext->skiaContext.get();
// Create a Skia Surface from the writable Texture
auto surface = SkSurfaces::WrapBackendRenderTarget(context,
backendRenderTarget,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
SkColorSpace::MakeSRGB(),
nullptr);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
// Converts the CMSampleBuffer to an SkImage - RGB.
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
auto canvas = surface->getCanvas();
// Clear everything so we keep it at a clean state
canvas->clear(SkColors::kBlack);
// Draw the Image into the Frame (aspectRatio: cover)
// The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
// but that'll just overwrite the existing one - no need to worry.
canvas->drawImage(image, 0, 0);
// Call the draw callback - probably a JS Frame Processor.
callback(static_cast<void*>(canvas));
// Flush all appended operations on the canvas and commit it to the SkSurface
surface->flushAndSubmit();
// Set dirty & free locks
_hasNewFrame = true;
lock.unlock();
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer {
if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
return;
}
@autoreleasepool {
auto context = _layerContext->skiaContext.get();
// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurfaces::WrapCAMetalLayer(context,
(__bridge GrMTLHandle)layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
nullptr,
nullptr,
&drawableHandle);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}
auto canvas = surface->getCanvas();
// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);
auto texture = _texture;
if (texture == nil) return;
// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
sourceRect = SkImageHelpers::createCenterCropRect(sourceRect, destinationRect);
auto offsetX = -sourceRect.left();
auto offsetY = -sourceRect.top();
// The Canvas is equal to the View size, where-as the Frame has a different size (e.g. 4k)
// We scale the Canvas to the exact dimensions of the Frame so that the user can use the Frame as a coordinate system
canvas->save();
auto scaleW = static_cast<double>(surface->width()) / texture.width;
auto scaleH = static_cast<double>(surface->height()) / texture.height;
auto scale = MAX(scaleW, scaleH);
canvas->scale(scale, scale);
canvas->translate(offsetX, offsetY);
// Convert the rendered MTLTexture to an SkImage
auto image = SkImageHelpers::convertMTLTextureToSkImage(context, texture);
// Draw the Texture (Frame) to the Canvas
canvas->drawImage(image, 0, 0);
// Restore the scale & transform
canvas->restore();
surface->flushAndSubmit();
// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
// Set flag back to false
_hasNewFrame = false;
lock.unlock();
}
}
@end
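
Note how `renderLatestFrameToLayer:` decouples the camera rate from the display rate: the capture side sets `_hasNewFrame`, and the display-link consumer returns early when nothing changed. A small TypeScript sketch of that producer/consumer handoff (illustrative only; the real code above additionally guards the shared Metal texture with a mutex):

    class LatestFrameBuffer<T> {
      private latest: T | null = null;
      private hasNewFrame = false;

      // Called at camera rate, e.g. 30/60 FPS.
      write(frame: T): void {
        this.latest = frame;
        this.hasNewFrame = true;
      }

      // Called at display rate by a display link; null means "skip this tick".
      consume(): T | null {
        if (!this.hasNewFrame || this.latest == null) return null;
        this.hasNewFrame = false;
        return this.latest;
      }
    }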

View File

@@ -12,7 +12,6 @@
		B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
		B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B82F3A0A2A6896E3002BB804 /* PreviewView.swift */; };
		B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* NativePreviewView.swift */; };
-		B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
		B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
		B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
		B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
@@ -79,27 +78,22 @@
/* Begin PBXFileReference section */
		134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
-		B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
		B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
		B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
		B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
		B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
		B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
		B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
-		B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
		B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
		B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
		B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
		B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
-		B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
-		B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
		B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
		B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
		B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
		B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
		B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
		B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
-		B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewDisplayLink.swift; sourceTree = "<group>"; };
		B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
		B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
		B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
@@ -140,17 +134,11 @@
		B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
		B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
		B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
-		B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
-		B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
		B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
-		B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
-		B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
		B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
		B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
		B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
		B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
-		B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
-		B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
		B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
		B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
		B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
@@ -202,7 +190,6 @@
				B82F3A0A2A6896E3002BB804 /* PreviewView.swift */,
				B83D5EE629377117000AFD2F /* NativePreviewView.swift */,
				B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
-				B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
				B887516125E0102000DB86D6 /* Extensions */,
				B887517225E0102000DB86D6 /* Parsers */,
				B887516D25E0102000DB86D6 /* React Utils */,
@@ -287,24 +274,6 @@
			path = "Frame Processor";
			sourceTree = "<group>";
		};
-		B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
-			isa = PBXGroup;
-			children = (
-				B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */,
-				B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */,
-				B8412630292E41AD001AB448 /* SkImageHelpers.h */,
-				B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
-				B80A319E293A5C10003EE681 /* SkiaRenderContext.h */,
-				B89A28752A68796A0092207F /* SkiaRenderer.h */,
-				B89A28742A68795E0092207F /* SkiaRenderer.mm */,
-				B8127E382A68871C00B06972 /* SkiaPreviewView.swift */,
-				B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */,
-				B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */,
-				B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */,
-			);
-			path = "Skia Render Layer";
-			sourceTree = "<group>";
-		};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@@ -417,7 +386,6 @@
				B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
				B887519625E0102000DB86D6 /* Promise.swift in Sources */,
				B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
-				B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */,
				B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
				B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
				B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,

View File

@@ -76,7 +76,6 @@
    "@react-native/eslint-config": "^0.72.2",
    "@react-native/typescript-config": "^0.73.0",
    "@release-it/conventional-changelog": "^7.0.0",
-    "@shopify/react-native-skia": "^0.1.200",
    "@types/react": "^18.2.19",
    "@types/react-native": "^0.72.2",
    "eslint": "^8.46.0",
@@ -91,15 +90,11 @@
    "typescript": "^5.1.6"
  },
  "peerDependencies": {
-    "@shopify/react-native-skia": "*",
    "react": "*",
    "react-native": "*",
    "react-native-worklets-core": "*"
  },
  "peerDependenciesMeta": {
-    "@shopify/react-native-skia": {
-      "optional": true
-    },
    "react-native-worklets-core": {
      "optional": true
    }

View File

@@ -22,7 +22,6 @@ interface OnErrorEvent {
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
  cameraId: string;
  enableFrameProcessor: boolean;
-  previewType: 'native' | 'skia' | 'none';
  onInitialized?: (event: NativeSyntheticEvent<void>) => void;
  onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
  onViewReady: () => void;
@@ -413,7 +412,6 @@ export class Camera extends React.PureComponent<CameraProps> {
        onInitialized={this.onInitialized}
        onError={this.onError}
        enableFrameProcessor={frameProcessor != null}
-        previewType={frameProcessor?.type === 'skia-frame-processor' ? 'skia' : 'native'}
      />
    );
  }
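
With `previewType` gone, the JS `<Camera>` derives everything the native view needs; `enableFrameProcessor` is simply `frameProcessor != null`. A minimal usage sketch (assuming the `CameraDevice` and `FrameProcessor` types are exported from the package root, like the other types in this diff):

    import * as React from 'react';
    import { StyleSheet } from 'react-native';
    import { Camera, CameraDevice, FrameProcessor } from 'react-native-vision-camera';

    function CameraScreen(props: { device: CameraDevice; frameProcessor?: FrameProcessor }): React.ReactElement {
      // enableFrameProcessor and the other native props are derived internally, see above
      return <Camera style={StyleSheet.absoluteFill} device={props.device} isActive={true} frameProcessor={props.frameProcessor} />;
    }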

View File

@@ -54,7 +54,6 @@ export type SystemError =
  | 'system/camera-module-not-found'
  | 'system/no-camera-manager'
  | 'system/frame-processors-unavailable'
-  | 'system/skia-unavailable'
  | 'system/view-not-found';
export type UnknownError = 'unknown/unknown';

View File

@@ -1,21 +1,16 @@
import type { ViewProps } from 'react-native';
import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from './CameraDevice';
import type { CameraRuntimeError } from './CameraError';
-import type { DrawableFrame, Frame } from './Frame';
+import type { Frame } from './Frame';
import type { Orientation } from './Orientation';
-export type FrameProcessor =
-  | {
+export type FrameProcessor = {
  frameProcessor: (frame: Frame) => void;
  type: 'frame-processor';
-    }
-  | {
-      frameProcessor: (frame: DrawableFrame) => void;
-      type: 'skia-frame-processor';
};
// TODO: Replace `enableHighQualityPhotos: boolean` in favor of `priorization: 'photo' | 'video'`
-// TODO: Use RCT_ENUM_PARSER for stuff like previewType, torch, videoStabilizationMode, and orientation
+// TODO: Use RCT_ENUM_PARSER for stuff like torch, videoStabilizationMode, and orientation
// TODO: Use Photo HostObject for stuff like depthData, portraitEffects, etc.
// TODO: Add RAW capture support
@@ -193,8 +188,6 @@ export interface CameraProps extends ViewProps {
  /**
   * A worklet which will be called for every frame the Camera "sees".
   *
-   * If {@linkcode previewType | previewType} is set to `"skia"`, you can draw content to the `Frame` using the react-native-skia API.
-   *
   * > See [the Frame Processors documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) for more information
   *
   * @example

View File

@@ -1,4 +1,3 @@
-import type { SkCanvas, SkPaint } from '@shopify/react-native-skia';
import type { Orientation } from './Orientation';
import { PixelFormat } from './PixelFormat';
@@ -22,6 +21,10 @@ export interface Frame {
   * Returns the amount of bytes per row.
   */
  bytesPerRow: number;
+  /**
+   * Returns the number of planes this frame contains.
+   */
+  planesCount: number;
  /**
   * Returns whether the Frame is mirrored (selfie camera) or not.
   */
@@ -56,46 +59,9 @@
   * ```
   */
  toString(): string;
-  /**
-   * Whether the Frame can be drawn onto using Skia.
-   * Always false for `useFrameProcessor`. Use `useSkiaFrameProcessor` instead.
-   */
-  isDrawable: boolean;
}
-export interface DrawableFrame extends Frame, SkCanvas {
-  /**
-   * Renders the Frame to the screen.
-   *
-   * By default a Frame has already been rendered to the screen once, so if you call this method again,
-   * previously drawn content will be overwritten.
-   *
-   * @param paint (Optional) A Paint object to use to draw the Frame with. For example, this can contain a Shader (ImageFilter)
-   * @example
-   * ```ts
-   * const INVERTED_COLORS_SHADER = `
-   *   uniform shader image;
-   *   half4 main(vec2 pos) {
-   *     vec4 color = image.eval(pos);
-   *     return vec4(1.0 - color.rgb, 1.0);
-   *   }`
-   * const runtimeEffect = Skia.RuntimeEffect.Make(INVERT_COLORS_SHADER)
-   * if (runtimeEffect == null) throw new Error('Shader failed to compile!')
-   * const shaderBuilder = Skia.RuntimeShaderBuilder(runtimeEffect)
-   * const imageFilter = Skia.ImageFilter.MakeRuntimeShader(shaderBuilder, null, null)
-   * const paint = Skia.Paint()
-   * paint.setImageFilter(imageFilter)
-   *
-   * const frameProcessor = useSkiaFrameProcessor((frame) => {
-   *   'worklet'
-   *   frame.render(paint) // <-- draws frame with inverted colors now
-   * }, [paint])
-   * ```
-   */
-  render: (paint?: SkPaint) => void;
-}
-export interface FrameInternal extends Frame, DrawableFrame {
+export interface FrameInternal extends Frame {
  /**
   * Increment the Frame Buffer ref-count by one.
   *

View File

@@ -28,7 +28,6 @@ interface TVisionCameraProxy {
   * The Plugin has to be registered on the native side, otherwise this returns `undefined`
   */
  getFrameProcessorPlugin: (name: string) => FrameProcessorPlugin | undefined;
-  isSkiaEnabled: boolean;
}
let hasWorklets = false;
@@ -66,7 +65,6 @@ try {
}
let proxy: TVisionCameraProxy = {
-  isSkiaEnabled: false,
  getFrameProcessorPlugin: () => {
    throw new CameraRuntimeError('system/frame-processors-unavailable', 'Frame Processors are not enabled!');
  },
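
After this change the proxy exposes exactly three members. A sketch of looking up a native plugin through it, assuming the proxy object is exported as `VisionCameraProxy` and that a plugin named 'detectObjects' was registered on the native side (both the name and the options shape here are hypothetical):

    import { VisionCameraProxy } from 'react-native-vision-camera';

    // Returns undefined if no native plugin with that name is registered.
    const plugin = VisionCameraProxy.getFrameProcessorPlugin('detectObjects');

    // Inside a frame processor worklet one would then call:
    //   const result = plugin?.call(frame, { /* hypothetical plugin options */ });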

View File

@@ -1,16 +1,23 @@
 import { DependencyList, useMemo } from 'react';
-import type { DrawableFrame, Frame, FrameInternal } from '../Frame';
+import type { Frame, FrameInternal } from '../Frame';
 import { FrameProcessor } from '../CameraProps';

+/**
+ * Create a new Frame Processor function which you can pass to the `<Camera>`.
+ * (See ["Frame Processors"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors))
+ *
+ * Make sure to add the `'worklet'` directive to the top of the Frame Processor function, otherwise it will not get compiled into a worklet.
+ *
+ * Also make sure to memoize the returned object, so that the Camera doesn't reset the Frame Processor Context each time.
+ */
 export function createFrameProcessor(frameProcessor: FrameProcessor['frameProcessor'], type: FrameProcessor['type']): FrameProcessor {
   return {
-    frameProcessor: (frame: Frame | DrawableFrame) => {
+    frameProcessor: (frame: Frame) => {
       'worklet';
       // Increment ref-count by one
       (frame as FrameInternal).incrementRefCount();
       try {
         // Call sync frame processor
-        // @ts-expect-error the frame type is ambiguous here
         frameProcessor(frame);
       } finally {
         // Potentially delete Frame if we were the last ref (no runAsync)
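The try/finally above pairs every `incrementRefCount` with a decrement once the synchronous processor returns; the "(no runAsync)" comment hints that asynchronous processing holds its own reference instead. A hedged sketch of that pattern from the caller's side, assuming the library's `runAsync` helper and a hypothetical `expensiveDetection` worklet:

```ts
import { runAsync, useFrameProcessor } from 'react-native-vision-camera'
import type { Frame } from 'react-native-vision-camera'

// Hypothetical long-running analysis worklet.
function expensiveDetection(frame: Frame): void {
  'worklet'
  // ... heavy per-frame work here ...
}

const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  // runAsync takes its own reference on the Frame, so the buffer stays
  // valid even after this synchronous callback has already returned.
  runAsync(frame, () => {
    'worklet'
    expensiveDetection(frame)
  })
}, [])
```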
@@ -43,28 +50,3 @@ export function useFrameProcessor(frameProcessor: (frame: Frame) => void, dependencies: DependencyList): FrameProcessor {
   // eslint-disable-next-line react-hooks/exhaustive-deps
   return useMemo(() => createFrameProcessor(frameProcessor, 'frame-processor'), dependencies);
 }
-
-/**
- * Returns a memoized Skia Frame Processor function which you can pass to the `<Camera>`.
- * The Skia Frame Processor allows you to draw anything onto the Frame using react-native-skia.
- * (See ["Frame Processors"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors))
- *
- * Make sure to add the `'worklet'` directive to the top of the Frame Processor function, otherwise it will not get compiled into a worklet.
- *
- * @param frameProcessor The Frame Processor
- * @param dependencies The React dependencies which will be copied into the VisionCamera JS-Runtime.
- * @returns The memoized Frame Processor.
- * @example
- * ```ts
- * const frameProcessor = useSkiaFrameProcessor((frame) => {
- *   'worklet'
- *   const qrCodes = scanQRCodes(frame)
- *   frame.drawRect(...)
- *   console.log(`QR Codes: ${qrCodes}`)
- * }, [])
- * ```
- */
-export function useSkiaFrameProcessor(frameProcessor: (frame: DrawableFrame) => void, dependencies: DependencyList): FrameProcessor {
-  // eslint-disable-next-line react-hooks/exhaustive-deps
-  return useMemo(() => createFrameProcessor(frameProcessor, 'skia-frame-processor'), dependencies);
-}
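With `useSkiaFrameProcessor` removed, `useFrameProcessor` is the only remaining hook, and drawing calls such as `frame.drawRect` no longer exist. A minimal sketch of the surviving usage, reusing the hypothetical `scanQRCodes` wrapper from the proxy example above:

```ts
import { useFrameProcessor } from 'react-native-vision-camera'

const frameProcessor = useFrameProcessor((frame) => {
  'worklet'
  const qrCodes = scanQRCodes(frame) // hypothetical plugin wrapper, see above
  console.log(`QR Codes: ${qrCodes}`)
}, [])
```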

View File

@@ -1984,14 +1984,6 @@
     conventional-recommended-bump "^7.0.1"
     semver "7.5.1"

-"@shopify/react-native-skia@^0.1.200":
-  version "0.1.200"
-  resolved "https://registry.yarnpkg.com/@shopify/react-native-skia/-/react-native-skia-0.1.200.tgz#3ef86750106a3b7e02496133173b449bfce6abc2"
-  integrity sha512-wAauKsLgScLspJY4KzoV0lWoXFCbzsUDJ3uso0o81HQMKBjDvXG9aOq/xE0KFLQsrQVICRdbfvvoYLQvSh/Xmw==
-  dependencies:
-    canvaskit-wasm "0.38.0"
-    react-reconciler "^0.27.0"
-
 "@sideway/address@^4.1.3":
   version "4.1.4"
   resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
@@ -2835,11 +2827,6 @@ caniuse-lite@^1.0.30001449:
   resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001451.tgz#2e197c698fc1373d63e1406d6607ea4617c613f1"
   integrity sha512-XY7UbUpGRatZzoRft//5xOa69/1iGJRBlrieH6QYrkKLIFn3m7OVEJ81dSrKoy2BnKsdbX5cLrOispZNYo9v2w==

-canvaskit-wasm@0.38.0:
-  version "0.38.0"
-  resolved "https://registry.yarnpkg.com/canvaskit-wasm/-/canvaskit-wasm-0.38.0.tgz#83e6c46f3015c2ff3f6503157f47453af76a7be7"
-  integrity sha512-ZEG6lucpbQ4Ld+mY8C1Ng+PMLVP+/AX02jS0Sdl28NyMxuKSa9uKB8oGd1BYp1XWPyO2Jgr7U8pdyjJ/F3xR5Q==
-
 chalk@5.3.0, chalk@^5.3.0:
   version "5.3.0"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
@@ -6838,14 +6825,6 @@ react-native@^0.72.3:
     ws "^6.2.2"
     yargs "^17.6.2"

-react-reconciler@^0.27.0:
-  version "0.27.0"
-  resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.27.0.tgz#360124fdf2d76447c7491ee5f0e04503ed9acf5b"
-  integrity sha512-HmMDKciQjYmBRGuuhIaKA1ba/7a+UsM5FzOZsMO2JYHt9Jh8reCb7j1eDC95NOyUlKM9KRyvdx0flBuDvYSBoA==
-  dependencies:
-    loose-envify "^1.1.0"
-    scheduler "^0.21.0"
-
 react-refresh@^0.4.0:
   version "0.4.3"
   resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.4.3.tgz#966f1750c191672e76e16c2efa569150cc73ab53"
@@ -7215,13 +7194,6 @@ scheduler@0.24.0-canary-efb381bbf-20230505:
   dependencies:
     loose-envify "^1.1.0"

-scheduler@^0.21.0:
-  version "0.21.0"
-  resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.21.0.tgz#6fd2532ff5a6d877b6edb12f00d8ab7e8f308820"
-  integrity sha512-1r87x5fz9MXqswA2ERLo0EbOAU74DpIUO090gIasYTqlVoJeMcl+Z1Rg7WHz+qtPujhS/hGIt9kxZOYBV3faRQ==
-  dependencies:
-    loose-envify "^1.1.0"
-
 semver-diff@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-4.0.0.tgz#3afcf5ed6d62259f5c72d0d5d50dffbdc9680df5"