feat: Draw onto Frame as if it was a Skia Canvas (#1479)

* Create Shaders.ts

* Add `previewType` and `enableFpsGraph`

* Add RN Skia native dependency

* Add Skia Preview View on iOS

* Pass 1

* Update FrameHostObject.mm

* Wrap Canvas

* Lockfiles

* fix: Fix stuff

* chore: Upgrade RNWorklets

* Add `previewType` to set the Preview

* feat: Add Example

* Update project.pbxproj

* `enableFpsGraph`

* Cache the `std::shared_ptr<FrameHostObject>`

* Update CameraView+RecordVideo.swift

* Update SkiaMetalCanvasProvider.mm

* Android: Integrate Skia Dependency

* fix: Use new Prefix

* Add example for rendering shader

* chore: Upgrade CameraX

* Remove KTX

* Enable `viewBinding`

* Revert "Enable `viewBinding`"

This reverts commit f2a603f53b33ea4311a296422ffd1a910ce03f9e.

* Revert "chore: Upgrade CameraX"

This reverts commit 8dc832cf8754490d31a6192e6c1a1f11cdcd94fe.

* Remove unneeded `ProcessCameraProvider.getInstance()` call

* fix: Add REA hotfix patch

* fix: Fix FrameHostObject dead in runAsync

* fix: Make `runAsync` run truly async by dropping new Frames while executing

* chore: Upgrade RN Worklets to latest

* chore: Upgrade RN Skia

* Revert "Remove KTX"

This reverts commit 253f586633f7af2da992d2279fc206dc62597129.

* Make Skia optional in CMake

* Fix import

* Update CMakeLists.txt

* Update build.gradle

* Update CameraView.kt

* Update CameraView.kt

* Update CameraView.kt

* Update Shaders.ts

* Center Blur

* chore: Upgrade RN Worklets

* feat: Add `toByteArray()`, `orientation`, `isMirrored` and `timestamp` to `Frame` (#1487)

* feat: Implement `orientation` and `isMirrored` on Frame

* feat: Add `toArrayBuffer()` func

* perf: Do faster buffer copy

* feat: Implement `toArrayBuffer()` on Android

* feat: Add `orientation` and `isMirrored` to Android

* feat: Add `timestamp` to Frame

* Update Frame.ts

* Update JImageProxy.h

* Update FrameHostObject.cpp

* Update FrameHostObject.cpp

* Update CameraPage.tsx

* fix: Format Swift
This commit is contained in:
Marc Rousavy 2023-02-21 15:00:48 +01:00 committed by GitHub
parent 1f7a2e07f2
commit 12f850c8e1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 2166 additions and 85 deletions

View File

@ -28,6 +28,7 @@ Pod::Spec.new do |s|
s.pod_target_xcconfig = {
"USE_HEADERMAP" => "YES",
"GCC_PREPROCESSOR_DEFINITIONS" => '$(inherited) SK_GL=1 SK_METAL=1',
"HEADER_SEARCH_PATHS" => "\"$(PODS_TARGET_SRCROOT)/ReactCommon\" \"$(PODS_TARGET_SRCROOT)\" \"$(PODS_ROOT)/RCT-Folly\" \"$(PODS_ROOT)/boost\" \"$(PODS_ROOT)/boost-for-react-native\" \"$(PODS_ROOT)/Headers/Private/React-Core\" "
}
s.compiler_flags = folly_compiler_flags + ' ' + boost_compiler_flags
@ -44,6 +45,7 @@ Pod::Spec.new do |s|
s.source_files = [
"ios/**/*.{m,mm,swift}",
"ios/CameraBridge.h",
"ios/Skia Render Layer/PreviewSkiaView.h",
"ios/Frame Processor/Frame.h",
"ios/Frame Processor/FrameProcessorCallback.h",
"ios/Frame Processor/FrameProcessorRuntimeManager.h",
@ -65,4 +67,5 @@ Pod::Spec.new do |s|
s.dependency "React"
s.dependency "React-Core"
s.dependency "react-native-worklets"
s.dependency "react-native-skia"
end

View File

@ -17,10 +17,16 @@ find_package(fbjni REQUIRED CONFIG)
find_package(react-native-worklets REQUIRED CONFIG)
find_library(LOG_LIB log)
# Skia is optional, if it's enabled we link it.
if (ENABLE_SKIA_INTEGRATION)
find_package(shopify_react-native-skia REQUIRED CONFIG)
endif()
# Add react-native-vision-camera sources
add_library(
${PACKAGE_NAME}
SHARED
../cpp/JSITypedArray.cpp
src/main/cpp/VisionCamera.cpp
src/main/cpp/JSIJNIConversion.cpp
src/main/cpp/FrameHostObject.cpp
@ -36,6 +42,7 @@ add_library(
target_include_directories(
${PACKAGE_NAME}
PRIVATE
"../cpp"
"src/main/cpp"
"${NODE_MODULES_DIR}/react-native/ReactCommon"
"${NODE_MODULES_DIR}/react-native/ReactCommon/callinvoker"
@ -53,3 +60,11 @@ target_link_libraries(
fbjni::fbjni # <-- fbjni
react-native-worklets::rnworklets # <-- RN Worklets
)
# Skia is optional. If it's enabled, we link it
if (ENABLE_SKIA_INTEGRATION)
target_link_libraries(
${PACKAGE_NAME}
shopify_react-native-skia::rnskia # <-- RN Skia
)
endif()

View File

@ -66,6 +66,8 @@ static def findNodeModules(baseDir) {
}
def nodeModules = findNodeModules(projectDir)
def isSkiaInstalled = findProject(":shopify_react-native-skia") != null
logger.warn("react-native-vision-camera: Skia integration is ${isSkiaInstalled ? "enabled" : "disabled"}!")
repositories {
google()
@ -99,7 +101,8 @@ android {
cmake {
cppFlags "-O2 -frtti -fexceptions -Wall -Wno-unused-variable -fstack-protector-all"
arguments "-DANDROID_STL=c++_shared",
"-DNODE_MODULES_DIR=${nodeModules}"
"-DNODE_MODULES_DIR=${nodeModules}",
"-DENABLE_SKIA_INTEGRATION=${isSkiaInstalled}"
abiFilters (*reactNativeArchitectures())
}
}
@ -145,6 +148,10 @@ dependencies {
implementation "androidx.exifinterface:exifinterface:1.3.3"
implementation project(":react-native-worklets")
if (isSkiaInstalled) {
implementation project(":shopify_react-native-skia")
}
}
// Resolves "LOCAL_SRC_FILES points to a missing file, Check that libfb.so exists or that its path is correct".

View File

@ -8,7 +8,8 @@
#include <jni.h>
#include <vector>
#include <string>
#include <JsiHostObject.h>
#include <WKTJsiHostObject.h>
#include "JSITypedArray.h"
namespace vision {
@ -30,8 +31,12 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
// Debugging
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
// Ref Management
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
@ -54,9 +59,38 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
}
if (name == "toArrayBuffer") {
auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
auto buffer = this->frame->toByteArray();
auto arraySize = buffer->size();
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
// Get from global JS cache
auto arrayBufferCache = runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache).get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
if (arrayBuffer.size(runtime) != arraySize) {
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
// directly write to C++ JSI ArrayBuffer
auto destinationBuffer = arrayBuffer.data(runtime);
buffer->getRegion(0,
static_cast<jint>(arraySize),
reinterpret_cast<jbyte*>(destinationBuffer));
return arrayBuffer;
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "incrementRefCount") {
auto incrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Increment retain count by one so ARC doesn't destroy the Frame Buffer.
// Increment retain count by one.
std::lock_guard lock(this->_refCountMutex);
this->_refCount++;
return jsi::Value::undefined();
@ -69,7 +103,7 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
if (name == "decrementRefCount") {
auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
// Decrement retain count by one. If the retain count is zero, we close the Frame.
std::lock_guard lock(this->_refCountMutex);
this->_refCount--;
if (_refCount < 1) {
@ -92,6 +126,16 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
if (name == "height") {
return jsi::Value(this->frame->getHeight());
}
if (name == "isMirrored") {
return jsi::Value(this->frame->getIsMirrored());
}
if (name == "orientation") {
auto string = this->frame->getOrientation();
return jsi::String::createFromUtf8(runtime, string->toStdString());
}
if (name == "timestamp") {
return jsi::Value(static_cast<double>(this->frame->getTimestamp()));
}
if (name == "bytesPerRow") {
return jsi::Value(this->frame->getBytesPerRow());
}

View File

@ -7,8 +7,8 @@
#include <jni.h>
#include <utility>
#include <string>
#include <JsiWorklet.h>
#include <JsiHostObject.h>
#include <WKTJsiWorklet.h>
#include <WKTJsiHostObject.h>
#include "CameraView.h"
#include "FrameHostObject.h"

View File

@ -9,7 +9,7 @@
#include <ReactCommon/CallInvokerHolder.h>
#include <memory>
#include <string>
#include <JsiWorkletContext.h>
#include <WKTJsiWorkletContext.h>
#include "CameraView.h"
#include "VisionCameraScheduler.h"

View File

@ -33,6 +33,24 @@ bool JImageProxy::getIsValid() const {
return isImageProxyValidMethod(utilsClass, self());
}
bool JImageProxy::getIsMirrored() const {
auto utilsClass = getUtilsClass();
static const auto isImageProxyMirroredMethod = utilsClass->getStaticMethod<jboolean(JImageProxy::javaobject)>("isImageProxyMirrored");
return isImageProxyMirroredMethod(utilsClass, self());
}
jlong JImageProxy::getTimestamp() const {
auto utilsClass = getUtilsClass();
static const auto getTimestampMethod = utilsClass->getStaticMethod<jlong(JImageProxy::javaobject)>("getTimestamp");
return getTimestampMethod(utilsClass, self());
}
local_ref<JString> JImageProxy::getOrientation() const {
auto utilsClass = getUtilsClass();
static const auto getOrientationMethod = utilsClass->getStaticMethod<JString(JImageProxy::javaobject)>("getOrientation");
return getOrientationMethod(utilsClass, self());
}
int JImageProxy::getPlanesCount() const {
auto utilsClass = getUtilsClass();
static const auto getPlanesCountMethod = utilsClass->getStaticMethod<jint(JImageProxy::javaobject)>("getPlanesCount");
@ -45,6 +63,13 @@ int JImageProxy::getBytesPerRow() const {
return getBytesPerRowMethod(utilsClass, self());
}
local_ref<JArrayByte> JImageProxy::toByteArray() const {
auto utilsClass = getUtilsClass();
static const auto toByteArrayMethod = utilsClass->getStaticMethod<JArrayByte(JImageProxy::javaobject)>("toByteArray");
return toByteArrayMethod(utilsClass, self());
}
void JImageProxy::close() {
static const auto closeMethod = getClass()->getMethod<void()>("close");
closeMethod(self());

View File

@ -19,8 +19,12 @@ struct JImageProxy : public JavaClass<JImageProxy> {
int getWidth() const;
int getHeight() const;
bool getIsValid() const;
bool getIsMirrored() const;
int getPlanesCount() const;
int getBytesPerRow() const;
jlong getTimestamp() const;
local_ref<JString> getOrientation() const;
local_ref<JArrayByte> toByteArray() const;
void close();
};

View File

@ -123,8 +123,6 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
internal var activeVideoRecording: Recording? = null
private var lastFrameProcessorCall = System.currentTimeMillis()
private var extensionsManager: ExtensionsManager? = null
private val scaleGestureListener: ScaleGestureDetector.SimpleOnScaleGestureListener
@ -326,7 +324,7 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
/**
* Configures the camera capture session. This should only be called when the camera device changes.
*/
@SuppressLint("RestrictedApi")
@SuppressLint("RestrictedApi", "UnsafeOptInUsageError")
private suspend fun configureSession() {
try {
val startTime = System.currentTimeMillis()
@ -461,10 +459,11 @@ class CameraView(context: Context, private val frameProcessorThread: ExecutorSer
if (enableFrameProcessor) {
Log.i(TAG, "Adding ImageAnalysis use-case...")
imageAnalysis = imageAnalysisBuilder.build().apply {
setAnalyzer(cameraExecutor, { image ->
setAnalyzer(cameraExecutor) { image ->
// Call JS Frame Processor
frameProcessorCallback(image)
})
// frame gets closed in FrameHostObject implementation (JS ref counting)
}
}
useCases.add(imageAnalysis!!)
}

View File

@ -172,7 +172,6 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
withPromise(promise) {
val cameraProvider = ProcessCameraProvider.getInstance(reactApplicationContext).await()
val extensionsManager = ExtensionsManager.getInstanceAsync(reactApplicationContext, cameraProvider).await()
ProcessCameraProvider.getInstance(reactApplicationContext).await()
val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
?: throw CameraManagerUnavailableError()

View File

@ -1,12 +1,16 @@
package com.mrousavy.camera.frameprocessor;
import android.annotation.SuppressLint;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.media.Image;
import androidx.annotation.Keep;
import androidx.camera.core.ImageProxy;
import com.facebook.proguard.annotations.DoNotStrip;
import java.nio.ByteBuffer;
@SuppressWarnings("unused") // used through JNI
@DoNotStrip
@Keep
@ -28,6 +32,33 @@ public class ImageProxyUtils {
}
}
/**
 * Whether the Frame is mirrored (e.g. front/selfie camera).
 * Currently always returns {@code false} — see TODO below.
 */
@DoNotStrip
@Keep
public static boolean isImageProxyMirrored(ImageProxy imageProxy) {
    Matrix matrix = imageProxy.getImageInfo().getSensorToBufferTransformMatrix();
    // TODO: Figure out how to get isMirrored from ImageProxy
    // NOTE(review): `matrix` is fetched but unused — presumably mirroring should be
    // derived from the sensor-to-buffer transform; confirm before relying on this value.
    return false;
}
/**
 * Maps the Frame's rotation (in degrees, from CameraX's ImageInfo) to a
 * VisionCamera orientation string.
 *
 * @param imageProxy the frame to inspect
 * @return one of "portrait", "landscapeRight", "portraitUpsideDown" or "landscapeLeft"
 */
@DoNotStrip
@Keep
public static String getOrientation(ImageProxy imageProxy) {
    // Normalize into [0, 360) so negative or >= 360 degree values still map to the
    // correct quadrant instead of silently falling through to "portrait".
    int rotation = ((imageProxy.getImageInfo().getRotationDegrees() % 360) + 360) % 360;
    if (rotation >= 45 && rotation < 135)
        return "landscapeRight";
    if (rotation >= 135 && rotation < 225)
        return "portraitUpsideDown";
    if (rotation >= 225 && rotation < 315)
        return "landscapeLeft";
    return "portrait";
}
/**
 * Returns the Frame's timestamp as reported by CameraX's ImageInfo.
 * NOTE(review): timebase/units are whatever ImageInfo#getTimestamp provides
 * (nanoseconds per CameraX docs) — confirm consumers agree on the timebase.
 */
@DoNotStrip
@Keep
public static long getTimestamp(ImageProxy imageProxy) {
    return imageProxy.getImageInfo().getTimestamp();
}
@DoNotStrip
@Keep
public static int getPlanesCount(ImageProxy imageProxy) {
@ -39,4 +70,29 @@ public class ImageProxyUtils {
public static int getBytesPerRow(ImageProxy imageProxy) {
return imageProxy.getPlanes()[0].getRowStride();
}
// Re-used output buffer to avoid a fresh allocation per frame.
// NOTE(review): shared static state — not thread-safe if frames are converted from
// multiple threads concurrently, and the returned array is overwritten by the next
// call; callers must copy the bytes out before then.
private static byte[] byteArrayCache;

/**
 * Copies the Frame's pixel data into a (re-used) byte array.
 * Only supports YUV_420_888; throws for any other format.
 */
@DoNotStrip
@Keep
public static byte[] toByteArray(ImageProxy imageProxy) {
    switch (imageProxy.getFormat()) {
        case ImageFormat.YUV_420_888:
            // Y plane first, then plane 2 (V) appended directly after it.
            // NOTE(review): this assumes the U/V planes are tightly interleaved
            // (NV21-style) so plane 2's buffer covers the chroma data — confirm on
            // devices that emit fully planar (I420) output.
            ByteBuffer yBuffer = imageProxy.getPlanes()[0].getBuffer();
            ByteBuffer vuBuffer = imageProxy.getPlanes()[2].getBuffer();
            int ySize = yBuffer.remaining();
            int vuSize = vuBuffer.remaining();
            // Re-allocate the cache only when the frame size changed.
            if (byteArrayCache == null || byteArrayCache.length != ySize + vuSize) {
                byteArrayCache = new byte[ySize + vuSize];
            }
            yBuffer.get(byteArrayCache, 0, ySize);
            vuBuffer.get(byteArrayCache, ySize, vuSize);
            return byteArrayCache;
        default:
            throw new RuntimeException("Cannot convert Frame with Format " + imageProxy.getFormat() + " to byte array!");
    }
}
}

331
cpp/JSITypedArray.cpp Normal file
View File

@ -0,0 +1,331 @@
//
// JSITypedArray.cpp
// VisionCamera
//
// Created by Marc Rousavy on 21.02.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
// Copied & Adapted from https://github.com/expo/expo/blob/main/packages/expo-gl/common/EXTypedArrayApi.cpp
// Credits to Expo
#include "JSITypedArray.h"

#include <cstdint>
#include <memory>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>
namespace vision {
// Element type for a given TypedArrayKind (e.g. uint8_t for Uint8Array).
template <TypedArrayKind T>
using ContentType = typename typedArrayTypeMap<T>::type;

// JS property / constructor names that are looked up repeatedly; each entry is
// lazily converted to a jsi::PropNameID and cached per runtime (see PropNameIDCache).
enum class Prop {
  Buffer, // "buffer"
  Constructor, // "constructor"
  Name, // "name"
  Proto, // "__proto__"
  Length, // "length"
  ByteLength, // "byteLength"
  ByteOffset, // "byteOffset"
  IsView, // "isView"
  ArrayBuffer, // "ArrayBuffer"
  Int8Array, // "Int8Array"
  Int16Array, // "Int16Array"
  Int32Array, // "Int32Array"
  Uint8Array, // "Uint8Array"
  Uint8ClampedArray, // "Uint8ClampedArray"
  Uint16Array, // "Uint16Array"
  Uint32Array, // "Uint32Array"
  Float32Array, // "Float32Array"
  Float64Array, // "Float64Array"
};
// Per-runtime cache of jsi::PropNameID objects. PropNameIDs are runtime-bound,
// so entries are keyed by the runtime's address; invalidate() must be called
// when a runtime is torn down (see InvalidateCacheOnDestroy).
class PropNameIDCache {
 public:
  // Returns the cached PropNameID for `prop` in `runtime`, creating it on first use.
  const jsi::PropNameID &get(jsi::Runtime &runtime, Prop prop) {
    auto key = reinterpret_cast<uintptr_t>(&runtime);
    // operator[] default-constructs the per-runtime map / the entry on first access.
    auto &runtimeProps = this->props[key];
    auto &entry = runtimeProps[prop];
    if (!entry) {
      entry = std::make_unique<jsi::PropNameID>(createProp(runtime, prop));
    }
    return *entry;
  }

  // Returns the PropNameID of the JS constructor name for a TypedArrayKind.
  const jsi::PropNameID &getConstructorNameProp(jsi::Runtime &runtime, TypedArrayKind kind);

  // Drops all cached PropNameIDs belonging to the runtime identified by `key`.
  void invalidate(uintptr_t key) {
    auto it = props.find(key);
    if (it != props.end()) {
      it->second.clear();
    }
  }

 private:
  std::unordered_map<uintptr_t, std::unordered_map<Prop, std::unique_ptr<jsi::PropNameID>>> props;
  jsi::PropNameID createProp(jsi::Runtime &runtime, Prop prop);
};
// Process-wide cache instance shared by all functions in this file.
PropNameIDCache propNameIDCache;

// Captures the identity (address) of the runtime so its cache entries can be
// dropped when this object is destroyed, i.e. when the JS runtime tears down.
InvalidateCacheOnDestroy::InvalidateCacheOnDestroy(jsi::Runtime &runtime) {
  key = reinterpret_cast<uintptr_t>(&runtime);
}

InvalidateCacheOnDestroy::~InvalidateCacheOnDestroy() {
  propNameIDCache.invalidate(key);
}
// Maps a JS constructor name (e.g. "Uint8Array") to its TypedArrayKind; defined below.
TypedArrayKind getTypedArrayKindForName(const std::string &name);

// Creates a brand-new JS TypedArray of `size` elements by invoking the global
// constructor for `kind` (equivalent to `new Uint8Array(size)` etc.).
TypedArrayBase::TypedArrayBase(jsi::Runtime &runtime, size_t size, TypedArrayKind kind)
    : TypedArrayBase(
          runtime,
          runtime.global()
              .getProperty(runtime, propNameIDCache.getConstructorNameProp(runtime, kind))
              .asObject(runtime)
              .asFunction(runtime)
              .callAsConstructor(runtime, {static_cast<double>(size)})
              .asObject(runtime)) {}

// Wraps an existing JS object, assumed (not verified) to be a TypedArray.
TypedArrayBase::TypedArrayBase(jsi::Runtime &runtime, const jsi::Object &obj)
    : jsi::Object(jsi::Value(runtime, obj).asObject(runtime)) {}
// Determines which concrete TypedArray kind this JS object is, by reading
// `this.constructor.name` and mapping the string back to a TypedArrayKind.
TypedArrayKind TypedArrayBase::getKind(jsi::Runtime &runtime) const {
  auto constructor =
      this->getProperty(runtime, propNameIDCache.get(runtime, Prop::Constructor)).asObject(runtime);
  auto constructorName = constructor.getProperty(runtime, propNameIDCache.get(runtime, Prop::Name))
                             .asString(runtime)
                             .utf8(runtime);
  return getTypedArrayKindForName(constructorName);
}

// Number of elements in the TypedArray (JS `length`).
size_t TypedArrayBase::size(jsi::Runtime &runtime) const {
  auto length = getProperty(runtime, propNameIDCache.get(runtime, Prop::Length));
  return length.asNumber();
}

// Alias of size() — also reads JS `length`.
size_t TypedArrayBase::length(jsi::Runtime &runtime) const {
  auto length = getProperty(runtime, propNameIDCache.get(runtime, Prop::Length));
  return length.asNumber();
}

// Total bytes occupied by the view's elements (JS `byteLength`).
size_t TypedArrayBase::byteLength(jsi::Runtime &runtime) const {
  auto byteLength = getProperty(runtime, propNameIDCache.get(runtime, Prop::ByteLength));
  return byteLength.asNumber();
}

// Byte offset of this view into its backing ArrayBuffer (JS `byteOffset`).
size_t TypedArrayBase::byteOffset(jsi::Runtime &runtime) const {
  auto byteOffset = getProperty(runtime, propNameIDCache.get(runtime, Prop::ByteOffset));
  return byteOffset.asNumber();
}
// True if this TypedArray has an attached ArrayBuffer (`buffer` property).
bool TypedArrayBase::hasBuffer(jsi::Runtime &runtime) const {
  auto buffer = getProperty(runtime, propNameIDCache.get(runtime, Prop::Buffer));
  if (!buffer.isObject()) {
    return false;
  }
  return buffer.asObject(runtime).isArrayBuffer(runtime);
}

// Copies the view's bytes (honoring byteOffset/byteLength) into a std::vector.
std::vector<uint8_t> TypedArrayBase::toVector(jsi::Runtime &runtime) {
  auto *begin = reinterpret_cast<uint8_t *>(getBuffer(runtime).data(runtime) + byteOffset(runtime));
  return std::vector<uint8_t>(begin, begin + byteLength(runtime));
}

// Returns the backing ArrayBuffer; throws if none is attached.
jsi::ArrayBuffer TypedArrayBase::getBuffer(jsi::Runtime &runtime) const {
  auto buffer = getProperty(runtime, propNameIDCache.get(runtime, Prop::Buffer));
  if (!buffer.isObject() || !buffer.asObject(runtime).isArrayBuffer(runtime)) {
    throw std::runtime_error("no ArrayBuffer attached");
  }
  return buffer.asObject(runtime).getArrayBuffer(runtime);
}
// Returns the result of calling the JS global `ArrayBuffer.isView(jsObj)`,
// i.e. whether the object is a TypedArray/DataView over an ArrayBuffer.
bool isTypedArray(jsi::Runtime &runtime, const jsi::Object &jsObj) {
  auto jsVal = runtime.global()
                   .getProperty(runtime, propNameIDCache.get(runtime, Prop::ArrayBuffer))
                   .asObject(runtime)
                   .getProperty(runtime, propNameIDCache.get(runtime, Prop::IsView))
                   .asObject(runtime)
                   .asFunction(runtime)
                   .callWithThis(runtime, runtime.global(), {jsi::Value(runtime, jsObj)});
  if (jsVal.isBool()) {
    return jsVal.getBool();
  } else {
    throw std::runtime_error("value is not a boolean");
  }
}

// Wraps `jsObj` as an untyped TypedArrayBase.
// NOTE(review): this only verifies that `ArrayBuffer.isView` returned *a*
// boolean — it does not require the boolean to be true, so a plain (non-view)
// object would be wrapped without complaint. Confirm whether callers are
// expected to pre-validate with isTypedArray() first.
TypedArrayBase getTypedArray(jsi::Runtime &runtime, const jsi::Object &jsObj) {
  auto jsVal = runtime.global()
                   .getProperty(runtime, propNameIDCache.get(runtime, Prop::ArrayBuffer))
                   .asObject(runtime)
                   .getProperty(runtime, propNameIDCache.get(runtime, Prop::IsView))
                   .asObject(runtime)
                   .asFunction(runtime)
                   .callWithThis(runtime, runtime.global(), {jsi::Value(runtime, jsObj)});
  if (jsVal.isBool()) {
    return TypedArrayBase(runtime, jsObj);
  } else {
    throw std::runtime_error("value is not a boolean");
  }
}
// Copies the entire contents of a JS ArrayBuffer into a std::vector.
// Throws if the given object is not an ArrayBuffer.
std::vector<uint8_t> arrayBufferToVector(jsi::Runtime &runtime, jsi::Object &jsObj) {
  if (!jsObj.isArrayBuffer(runtime)) {
    throw std::runtime_error("Object is not an ArrayBuffer");
  }
  auto arrayBuffer = jsObj.getArrayBuffer(runtime);
  auto *bytes = arrayBuffer.data(runtime);
  auto byteCount = static_cast<size_t>(
      arrayBuffer.getProperty(runtime, propNameIDCache.get(runtime, Prop::ByteLength)).asNumber());
  return std::vector<uint8_t>(bytes, bytes + byteCount);
}
// Writes `data` into `buffer` starting at byte `offset`.
// Throws a jsi::JSError if the write would run past the end of the buffer.
void arrayBufferUpdate(
    jsi::Runtime &runtime,
    jsi::ArrayBuffer &buffer,
    std::vector<uint8_t> data,
    size_t offset) {
  uint8_t *dataBlock = buffer.data(runtime);
  size_t blockSize = buffer.size(runtime);
  // Bounds check must account for `offset` — data that fits the buffer overall
  // can still overrun the tail when written at a non-zero offset. (The check is
  // written subtraction-style to avoid size_t overflow in `offset + data.size()`.)
  if (offset > blockSize || data.size() > blockSize - offset) {
    throw jsi::JSError(runtime, "ArrayBuffer is too small to fit data");
  }
  std::copy(data.begin(), data.end(), dataBlock + offset);
}
// Creates a new JS TypedArray<T> with `size` elements.
template <TypedArrayKind T>
TypedArray<T>::TypedArray(jsi::Runtime &runtime, size_t size) : TypedArrayBase(runtime, size, T){};

// Creates a new JS TypedArray<T> initialized with the contents of `data`.
template <TypedArrayKind T>
TypedArray<T>::TypedArray(jsi::Runtime &runtime, std::vector<ContentType<T>> data)
    : TypedArrayBase(runtime, data.size(), T) {
  update(runtime, data);
};

// Adopts an untyped base wrapper (no kind check here; see TypedArrayBase::as/get).
template <TypedArrayKind T>
TypedArray<T>::TypedArray(TypedArrayBase &&base) : TypedArrayBase(std::move(base)) {}

// Copies the array's elements into a std::vector<ContentType<T>>.
template <TypedArrayKind T>
std::vector<ContentType<T>> TypedArray<T>::toVector(jsi::Runtime &runtime) {
  auto start =
      reinterpret_cast<ContentType<T> *>(getBuffer(runtime).data(runtime) + byteOffset(runtime));
  auto end = start + size(runtime);
  return std::vector<ContentType<T>>(start, end);
}

// Overwrites the array's contents with `data`; element counts must match exactly.
template <TypedArrayKind T>
void TypedArray<T>::update(jsi::Runtime &runtime, const std::vector<ContentType<T>> &data) {
  if (data.size() != size(runtime)) {
    throw jsi::JSError(runtime, "TypedArray can only be updated with a vector of the same size");
  }
  uint8_t *rawData = getBuffer(runtime).data(runtime) + byteOffset(runtime);
  std::copy(data.begin(), data.end(), reinterpret_cast<ContentType<T> *>(rawData));
}

// Overwrites the array's contents from a raw pointer; `length` is the ELEMENT
// count and must equal size().
template <TypedArrayKind T>
void TypedArray<T>::updateUnsafe(jsi::Runtime &runtime, ContentType<T> *data, size_t length) {
  if (length != size(runtime)) {
    throw jsi::JSError(runtime, "TypedArray can only be updated with an array of the same size");
  }
  uint8_t *rawData = getBuffer(runtime).data(runtime) + byteOffset(runtime);
  // Fix: the guard checks elements, so the copy must be scaled to bytes —
  // copying only `length` bytes would truncate any multi-byte element type.
  memcpy(rawData, data, length * sizeof(ContentType<T>));
}

// Raw pointer to this view's first byte inside the backing ArrayBuffer.
template <TypedArrayKind T>
uint8_t* TypedArray<T>::data(jsi::Runtime &runtime) {
  return getBuffer(runtime).data(runtime) + byteOffset(runtime);
}
// Returns the cached PropNameID holding the JS constructor name
// ("Uint8Array", "Float32Array", ...) for the given TypedArrayKind.
const jsi::PropNameID &PropNameIDCache::getConstructorNameProp(
    jsi::Runtime &runtime,
    TypedArrayKind kind) {
  switch (kind) {
    case TypedArrayKind::Int8Array:
      return get(runtime, Prop::Int8Array);
    case TypedArrayKind::Int16Array:
      return get(runtime, Prop::Int16Array);
    case TypedArrayKind::Int32Array:
      return get(runtime, Prop::Int32Array);
    case TypedArrayKind::Uint8Array:
      return get(runtime, Prop::Uint8Array);
    case TypedArrayKind::Uint8ClampedArray:
      return get(runtime, Prop::Uint8ClampedArray);
    case TypedArrayKind::Uint16Array:
      return get(runtime, Prop::Uint16Array);
    case TypedArrayKind::Uint32Array:
      return get(runtime, Prop::Uint32Array);
    case TypedArrayKind::Float32Array:
      return get(runtime, Prop::Float32Array);
    case TypedArrayKind::Float64Array:
      return get(runtime, Prop::Float64Array);
  }
  // Unreachable for valid enum values — but without it, falling off the end of
  // a value-returning function is undefined behavior (and a compiler warning).
  throw std::runtime_error("Unknown TypedArrayKind");
}
// Creates the jsi::PropNameID for a given Prop. Called at most once per
// (runtime, prop) pair by PropNameIDCache::get(), which caches the result.
jsi::PropNameID PropNameIDCache::createProp(jsi::Runtime &runtime, Prop prop) {
  const char *propName = "";
  switch (prop) {
    case Prop::Buffer: propName = "buffer"; break;
    case Prop::Constructor: propName = "constructor"; break;
    case Prop::Name: propName = "name"; break;
    case Prop::Proto: propName = "__proto__"; break;
    case Prop::Length: propName = "length"; break;
    case Prop::ByteLength: propName = "byteLength"; break;
    case Prop::ByteOffset: propName = "byteOffset"; break;
    case Prop::IsView: propName = "isView"; break;
    case Prop::ArrayBuffer: propName = "ArrayBuffer"; break;
    case Prop::Int8Array: propName = "Int8Array"; break;
    case Prop::Int16Array: propName = "Int16Array"; break;
    case Prop::Int32Array: propName = "Int32Array"; break;
    case Prop::Uint8Array: propName = "Uint8Array"; break;
    case Prop::Uint8ClampedArray: propName = "Uint8ClampedArray"; break;
    case Prop::Uint16Array: propName = "Uint16Array"; break;
    case Prop::Uint32Array: propName = "Uint32Array"; break;
    case Prop::Float32Array: propName = "Float32Array"; break;
    case Prop::Float64Array: propName = "Float64Array"; break;
  }
  return jsi::PropNameID::forUtf8(runtime, std::string(propName));
}
// Reverse lookup table: JS constructor name -> TypedArrayKind.
std::unordered_map<std::string, TypedArrayKind> nameToKindMap = {
  {"Int8Array", TypedArrayKind::Int8Array},
  {"Int16Array", TypedArrayKind::Int16Array},
  {"Int32Array", TypedArrayKind::Int32Array},
  {"Uint8Array", TypedArrayKind::Uint8Array},
  {"Uint8ClampedArray", TypedArrayKind::Uint8ClampedArray},
  {"Uint16Array", TypedArrayKind::Uint16Array},
  {"Uint32Array", TypedArrayKind::Uint32Array},
  {"Float32Array", TypedArrayKind::Float32Array},
  {"Float64Array", TypedArrayKind::Float64Array},
};

// Maps a JS constructor name to its TypedArrayKind.
// NOTE: unordered_map::at throws std::out_of_range for unknown names
// (e.g. "DataView" or a plain object's constructor).
TypedArrayKind getTypedArrayKindForName(const std::string &name) {
  return nameToKindMap.at(name);
}

// Explicit template instantiations for every supported element type, so the
// member definitions above are emitted into this translation unit.
template class TypedArray<TypedArrayKind::Int8Array>;
template class TypedArray<TypedArrayKind::Int16Array>;
template class TypedArray<TypedArrayKind::Int32Array>;
template class TypedArray<TypedArrayKind::Uint8Array>;
template class TypedArray<TypedArrayKind::Uint8ClampedArray>;
template class TypedArray<TypedArrayKind::Uint16Array>;
template class TypedArray<TypedArrayKind::Uint32Array>;
template class TypedArray<TypedArrayKind::Float32Array>;
template class TypedArray<TypedArrayKind::Float64Array>;

} // namespace vision

183
cpp/JSITypedArray.h Normal file
View File

@ -0,0 +1,183 @@
//
// JSITypedArray.h
// VisionCamera
//
// Created by Marc Rousavy on 21.02.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
// Copied & Adapted from https://github.com/expo/expo/blob/main/packages/expo-gl/common/EXTypedArrayApi.h
// Credits to Expo
#pragma once
#include <jsi/jsi.h>
namespace jsi = facebook::jsi;
namespace vision {
// Every TypedArray flavor supported by this wrapper (mirrors the JS global constructors).
enum class TypedArrayKind {
  Int8Array,
  Int16Array,
  Int32Array,
  Uint8Array,
  Uint8ClampedArray,
  Uint16Array,
  Uint32Array,
  Float32Array,
  Float64Array,
};
template <TypedArrayKind T>
class TypedArray;

// Compile-time mapping from TypedArrayKind to its C++ element type, e.g.
// typedArrayTypeMap<TypedArrayKind::Float32Array>::type == float.
// Note: Uint8Array and Uint8ClampedArray both map to uint8_t.
template <TypedArrayKind T>
struct typedArrayTypeMap;
template <>
struct typedArrayTypeMap<TypedArrayKind::Int8Array> {
  typedef int8_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Int16Array> {
  typedef int16_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Int32Array> {
  typedef int32_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Uint8Array> {
  typedef uint8_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Uint8ClampedArray> {
  typedef uint8_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Uint16Array> {
  typedef uint16_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Uint32Array> {
  typedef uint32_t type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Float32Array> {
  typedef float type;
};
template <>
struct typedArrayTypeMap<TypedArrayKind::Float64Array> {
  typedef double type;
};
// Instance of this class will invalidate PropNameIDCache when destructor is called.
// Attach this object to global in specific jsi::Runtime to make sure lifecycle of
// the cache object is connected to the lifecycle of the js runtime
class InvalidateCacheOnDestroy : public jsi::HostObject {
 public:
  InvalidateCacheOnDestroy(jsi::Runtime &runtime);
  virtual ~InvalidateCacheOnDestroy();
  // HostObject interface — deliberate no-ops: this object exists purely for its
  // destructor side effect. `override` added so signature drift against
  // jsi::HostObject becomes a compile error instead of a silent shadow.
  jsi::Value get(jsi::Runtime &, const jsi::PropNameID &name) override {
    return jsi::Value::null();
  }
  void set(jsi::Runtime &, const jsi::PropNameID &name, const jsi::Value &value) override {}
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime &rt) override {
    return {};
  }

 private:
  // Identity key (runtime address) whose cache entries the destructor invalidates.
  uintptr_t key;
};
// Untyped wrapper around a JS TypedArray object. Move-only (extends jsi::Object).
class TypedArrayBase : public jsi::Object {
 public:
  template <TypedArrayKind T>
  using ContentType = typename typedArrayTypeMap<T>::type;

  // Creates a new JS TypedArray of the given kind and element count.
  TypedArrayBase(jsi::Runtime &, size_t, TypedArrayKind);
  // Wraps an existing JS object (no validation performed).
  TypedArrayBase(jsi::Runtime &, const jsi::Object &);
  TypedArrayBase(TypedArrayBase &&) = default;
  TypedArrayBase &operator=(TypedArrayBase &&) = default;

  // Determines the runtime kind by inspecting `constructor.name`.
  TypedArrayKind getKind(jsi::Runtime &runtime) const;

  // Typed accessors: get() asserts the kind (debug builds only),
  // as() throws jsi::JSError on a kind mismatch.
  template <TypedArrayKind T>
  TypedArray<T> get(jsi::Runtime &runtime) const &;
  template <TypedArrayKind T>
  TypedArray<T> get(jsi::Runtime &runtime) &&;
  template <TypedArrayKind T>
  TypedArray<T> as(jsi::Runtime &runtime) const &;
  template <TypedArrayKind T>
  TypedArray<T> as(jsi::Runtime &runtime) &&;

  size_t size(jsi::Runtime &runtime) const;       // element count (JS `length`)
  size_t length(jsi::Runtime &runtime) const;     // alias of size()
  size_t byteLength(jsi::Runtime &runtime) const; // bytes occupied by the view
  size_t byteOffset(jsi::Runtime &runtime) const; // byte offset into the buffer
  bool hasBuffer(jsi::Runtime &runtime) const;    // is an ArrayBuffer attached?

  std::vector<uint8_t> toVector(jsi::Runtime &runtime);    // copy raw bytes out
  jsi::ArrayBuffer getBuffer(jsi::Runtime &runtime) const; // throws if no buffer

 private:
  template <TypedArrayKind>
  friend class TypedArray;
};
// True iff `jsObj` is a TypedArray/DataView (via the JS global `ArrayBuffer.isView`).
bool isTypedArray(jsi::Runtime &runtime, const jsi::Object &jsObj);

// Wraps `jsObj` as an untyped TypedArrayBase.
TypedArrayBase getTypedArray(jsi::Runtime &runtime, const jsi::Object &jsObj);

// Copies the full contents of a JS ArrayBuffer into a byte vector.
std::vector<uint8_t> arrayBufferToVector(jsi::Runtime &runtime, jsi::Object &jsObj);

// Writes `data` into `buffer` starting at byte `offset`.
void arrayBufferUpdate(
    jsi::Runtime &runtime,
    jsi::ArrayBuffer &buffer,
    std::vector<uint8_t> data,
    size_t offset);
// Typed wrapper over a JS TypedArray whose element type is ContentType<T>.
template <TypedArrayKind T>
class TypedArray : public TypedArrayBase {
 public:
  // Creates a new JS TypedArray with `size` elements.
  TypedArray(jsi::Runtime &runtime, size_t size);
  // Creates a new JS TypedArray initialized from `data`.
  TypedArray(jsi::Runtime &runtime, std::vector<ContentType<T>> data);
  // Adopts an untyped base wrapper (no kind check performed here).
  TypedArray(TypedArrayBase &&base);
  TypedArray(TypedArray &&) = default;
  TypedArray &operator=(TypedArray &&) = default;

  // Copies elements out into a vector.
  std::vector<ContentType<T>> toVector(jsi::Runtime &runtime);
  // Overwrites contents; element counts must match (throws jsi::JSError otherwise).
  void update(jsi::Runtime &runtime, const std::vector<ContentType<T>> &data);
  // Overwrites contents from a raw pointer; `length` must equal size().
  void updateUnsafe(jsi::Runtime &runtime, ContentType<T> *data, size_t length);
  // Raw pointer to this view's first byte inside the backing ArrayBuffer.
  uint8_t* data(jsi::Runtime &runtime);
};
// Debug-asserting typed accessor (const lvalue overload): builds a new typed
// wrapper referencing the same underlying JS object.
template <TypedArrayKind T>
TypedArray<T> TypedArrayBase::get(jsi::Runtime &runtime) const & {
  assert(getKind(runtime) == T);
  (void)runtime; // when assert is disabled we need to mark this as used
  return TypedArray<T>(jsi::Value(runtime, jsi::Value(runtime, *this).asObject(runtime)));
}

// Debug-asserting typed accessor (rvalue overload): moves *this into the typed wrapper.
template <TypedArrayKind T>
TypedArray<T> TypedArrayBase::get(jsi::Runtime &runtime) && {
  assert(getKind(runtime) == T);
  (void)runtime; // when assert is disabled we need to mark this as used
  return TypedArray<T>(std::move(*this));
}

// Throwing typed accessor (const lvalue overload): verifies the kind at
// runtime even in release builds.
template <TypedArrayKind T>
TypedArray<T> TypedArrayBase::as(jsi::Runtime &runtime) const & {
  if (getKind(runtime) != T) {
    throw jsi::JSError(runtime, "Object is not a TypedArray");
  }
  return get<T>(runtime);
}

// Throwing typed accessor (rvalue overload).
template <TypedArrayKind T>
TypedArray<T> TypedArrayBase::as(jsi::Runtime &runtime) && {
  if (getKind(runtime) != T) {
    throw jsi::JSError(runtime, "Object is not a TypedArray");
  }
  return std::move(*this).get<T>(runtime);
}

} // namespace vision

View File

@ -276,6 +276,35 @@ PODS:
- RCTTypeSafety
- React-Core
- ReactCommon/turbomodule/core
- react-native-skia (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-skia/Api (= 0.1.175)
- react-native-skia/Jsi (= 0.1.175)
- react-native-skia/RNSkia (= 0.1.175)
- react-native-skia/SkiaHeaders (= 0.1.175)
- react-native-skia/Utils (= 0.1.175)
- react-native-skia/Api (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-skia/Jsi (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-skia/RNSkia (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-skia/SkiaHeaders (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-skia/Utils (0.1.175):
- React
- React-callinvoker
- React-Core
- react-native-slider (4.4.2):
- React-Core
- react-native-video (5.2.1):
@ -407,10 +436,11 @@ PODS:
- React-Core
- RNVectorIcons (9.2.0):
- React-Core
- VisionCamera (2.15.4):
- VisionCamera (3.0.0-rc.1):
- React
- React-callinvoker
- React-Core
- react-native-skia
- react-native-worklets
- Yoga (1.14.0)
@ -440,6 +470,7 @@ DEPENDENCIES:
- "react-native-blur (from `../node_modules/@react-native-community/blur`)"
- "react-native-cameraroll (from `../node_modules/@react-native-camera-roll/camera-roll`)"
- react-native-safe-area-context (from `../node_modules/react-native-safe-area-context`)
- "react-native-skia (from `../node_modules/@shopify/react-native-skia`)"
- "react-native-slider (from `../node_modules/@react-native-community/slider`)"
- react-native-video (from `../node_modules/react-native-video`)
- react-native-worklets (from `../node_modules/react-native-worklets`)
@ -516,6 +547,8 @@ EXTERNAL SOURCES:
:path: "../node_modules/@react-native-camera-roll/camera-roll"
react-native-safe-area-context:
:path: "../node_modules/react-native-safe-area-context"
react-native-skia:
:path: "../node_modules/@shopify/react-native-skia"
react-native-slider:
:path: "../node_modules/@react-native-community/slider"
react-native-video:
@ -589,6 +622,7 @@ SPEC CHECKSUMS:
react-native-blur: 50c9feabacbc5f49b61337ebc32192c6be7ec3c3
react-native-cameraroll: 5b25d0be40185d02e522bf2abf8a1ba4e8faa107
react-native-safe-area-context: 39c2d8be3328df5d437ac1700f4f3a4f75716acc
react-native-skia: 51f4a6586c362814f677df4ac4226f13c634bcfa
react-native-slider: 33b8d190b59d4f67a541061bb91775d53d617d9d
react-native-video: c26780b224543c62d5e1b2a7244a5cd1b50e8253
react-native-worklets: c7576ad4ad0f030ff41e8d74ad0077c96054a6c1
@ -610,7 +644,7 @@ SPEC CHECKSUMS:
RNScreens: 218801c16a2782546d30bd2026bb625c0302d70f
RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8
RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8
VisionCamera: 312151eb95370d1d764720de3b7dad33d8c7fb40
VisionCamera: 0d154cd0ab9043a3c8a4908fb57ad65c9e1f3baf
Yoga: 5ed1699acbba8863755998a4245daa200ff3817b
PODFILE CHECKSUM: d53724fe402c2547f1dd1cc571bbe77d9820e636

View File

@ -19,6 +19,7 @@
"@react-native-community/slider": "^4.4.2",
"@react-navigation/native": "^6.1.3",
"@react-navigation/native-stack": "^6.9.9",
"@shopify/react-native-skia": "^0.1.175",
"react": "^18.2.0",
"react-native": "^0.71.3",
"react-native-gesture-handler": "^2.9.0",
@ -29,7 +30,7 @@
"react-native-static-safe-area-insets": "^2.2.0",
"react-native-vector-icons": "^9.2.0",
"react-native-video": "^5.2.1",
"react-native-worklets": "https://github.com/chrfalch/react-native-worklets#15d52dd"
"react-native-worklets": "https://github.com/chrfalch/react-native-worklets#d62d76c"
},
"devDependencies": {
"@babel/core": "^7.20.12",

View File

@ -1,6 +1,6 @@
import * as React from 'react';
import { useRef, useState, useMemo, useCallback } from 'react';
import { StyleSheet, Text, View } from 'react-native';
import { Platform, StyleSheet, Text, View } from 'react-native';
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler';
import {
CameraDeviceFormat,
@ -25,6 +25,8 @@ import { examplePlugin } from './frame-processors/ExamplePlugin';
import type { Routes } from './Routes';
import type { NativeStackScreenProps } from '@react-navigation/native-stack';
import { useIsFocused } from '@react-navigation/core';
import { Skia } from '@shopify/react-native-skia';
import { FACE_SHADER } from './Shaders';
const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera);
Reanimated.addWhitelistedNativeProps({
@ -196,11 +198,37 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
console.log('re-rendering camera page without active camera');
}
const frameProcessor = useFrameProcessor((frame) => {
'worklet';
const values = examplePlugin(frame);
console.log(`Return Values: ${JSON.stringify(values)}`);
}, []);
const radius = (format?.videoHeight ?? 1080) * 0.1;
const width = radius;
const height = radius;
const x = (format?.videoHeight ?? 1080) / 2 - radius / 2;
const y = (format?.videoWidth ?? 1920) / 2 - radius / 2;
const centerX = x + width / 2;
const centerY = y + height / 2;
const runtimeEffect = Skia.RuntimeEffect.Make(FACE_SHADER);
if (runtimeEffect == null) throw new Error('Shader failed to compile!');
const shaderBuilder = Skia.RuntimeShaderBuilder(runtimeEffect);
shaderBuilder.setUniform('r', [width]);
shaderBuilder.setUniform('x', [centerX]);
shaderBuilder.setUniform('y', [centerY]);
shaderBuilder.setUniform('resolution', [1920, 1080]);
const imageFilter = Skia.ImageFilter.MakeRuntimeShader(shaderBuilder, null, null);
const paint = Skia.Paint();
paint.setImageFilter(imageFilter);
const isIOS = Platform.OS === 'ios';
const frameProcessor = useFrameProcessor(
(frame) => {
'worklet';
console.log(`Width: ${frame.width}`);
if (isIOS) frame.render(paint);
else console.log('Drawing to the Frame is not yet available on Android. WIP PR');
},
[isIOS, paint],
);
return (
<View style={styles.container}>
@ -224,6 +252,8 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
photo={true}
video={true}
audio={hasMicrophonePermission}
enableFpsGraph={true}
previewType="skia"
frameProcessor={device.supportsParallelVideoProcessing ? frameProcessor : undefined}
orientation="portrait"
/>

89
example/src/Shaders.ts Normal file
View File

@ -0,0 +1,89 @@
// SkSL runtime effect: samples the camera frame (`image`) at each pixel and
// returns its RGB channels inverted, with full alpha.
export const INVERTED_COLORS_SHADER = `
uniform shader image;
half4 main(vec2 pos) {
vec4 color = image.eval(pos);
return vec4(1.0 - color.rgb, 1.0);
}
`;
// SkSL runtime effect: fakes chromatic aberration by sampling the red channel
// at the original position, green shifted by `offset` pixels on the x-axis,
// and blue shifted by 2*offset — the per-channel misalignment produces the
// color-fringing look. Offset is hard-coded to 50px.
export const CHROMATIC_ABERRATION_SHADER = `
uniform shader image;
vec4 chromatic(vec2 pos, float offset) {
float r = image.eval(pos).r;
float g = image.eval(vec2(pos.x + offset, pos.y)).g;
float b = image.eval(vec2(pos.x + offset * 2.0, pos.y)).b;
return vec4(r, g, b, 1.0);
}
half4 main(vec2 pos) {
float offset = 50.0;
return chromatic(pos, offset);
}
`;
// SkSL runtime effect that ignores its input entirely and paints every pixel
// solid white — useful as a minimal smoke-test shader.
export const NO_SHADER = `
half4 main(vec2 pos) {
return vec4(1.0);
}
`;
// Gaussian blur sampled on a MIP level (LOD) for speed.
// NOTE(review): this shader is written in Shadertoy conventions — it uses
// `iChannel0`, `iResolution`, `iChannelResolution`, `sampler2D`/`textureLod`
// and a `mainImage(out vec4, vec2)` entry point, none of which exist in SkSL.
// As written it will NOT compile via Skia.RuntimeEffect.Make; confirm it is
// unused/WIP before shipping.
export const BLUR_SHADER = `
const int samples = 35,
LOD = 2, // gaussian done on MIPmap at scale LOD
sLOD = 1 << LOD; // tile size = 2^LOD
const float sigma = float(samples) * .25;
float gaussian(vec2 i) {
return exp( -.5* dot(i/=sigma,i) ) / ( 6.28 * sigma*sigma );
}
vec4 blur(sampler2D sp, vec2 U, vec2 scale) {
vec4 O = vec4(0);
int s = samples/sLOD;
for ( int i = 0; i < s*s; i++ ) {
vec2 d = vec2(i%s, i/s)*float(sLOD) - float(samples)/2.;
O += gaussian(d) * textureLod( sp, U + scale * d , float(LOD) );
}
return O / O.a;
}
void mainImage(out vec4 O, vec2 U) {
O = blur( iChannel0, U/iResolution.xy, 1./iChannelResolution[0].xy );
}
`;
// SkSL runtime effect that blurs a circular region of the frame (e.g. to hide
// a face). Uniforms, set from CameraPage via RuntimeShaderBuilder:
//   x, y        — center of the circle in frame pixels
//   r           — circle radius in frame pixels
//   resolution  — frame resolution (declared but unused in the body below)
// Inside the circle a box blur is computed by averaging samples on a
// (2*samples*radius)-wide grid stepped by radius/samples; outside the circle
// the original pixel is returned unchanged.
export const FACE_SHADER = `
uniform shader image;
uniform float x;
uniform float y;
uniform float r;
uniform vec2 resolution;
const float samples = 3.0;
const float radius = 40.0;
const float weight = 1.0;
half4 main(vec2 pos) {
float delta = pow((pow(pos.x - x, 2) + pow(pos.y - y, 2)), 0.5);
if (delta < r) {
vec3 sum = vec3(0.0);
vec3 accumulation = vec3(0);
vec3 weightedsum = vec3(0);
for (float deltaX = -samples * radius; deltaX <= samples * radius; deltaX += radius / samples) {
for (float deltaY = -samples * radius; deltaY <= samples * radius; deltaY += radius / samples) {
accumulation += image.eval(vec2(pos.x + deltaX, pos.y + deltaY)).rgb;
weightedsum += weight;
}
}
sum = accumulation / weightedsum;
return vec4(sum, 1.0);
}
else {
return image.eval(pos);
}
}
`;

View File

@ -1183,6 +1183,20 @@
dependencies:
nanoid "^3.1.23"
"@shopify/react-native-skia@^0.1.175":
version "0.1.175"
resolved "https://registry.yarnpkg.com/@shopify/react-native-skia/-/react-native-skia-0.1.175.tgz#4fc6b30f7d47d3dc9192791021d99e5d11f75739"
integrity sha512-vA5YPGu7GmBi5qliLyMzbpkH9mmCWAZoaoGhM9/g5o9zX8xAmUYcGgg3MOqxtnxCnfTmqFFBj43s+QGgMRTpqg==
dependencies:
"@types/pixelmatch" "^5.2.4"
"@types/pngjs" "^6.0.1"
"@types/ws" "^8.5.3"
canvaskit-wasm "0.38.0"
pixelmatch "^5.3.0"
pngjs "^6.0.0"
react-reconciler "^0.27.0"
ws "^8.11.0"
"@sideway/address@^4.1.3":
version "4.1.4"
resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
@ -1253,6 +1267,20 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.13.0.tgz#0400d1e6ce87e9d3032c19eb6c58205b0d3f7850"
integrity sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==
"@types/pixelmatch@^5.2.4":
version "5.2.4"
resolved "https://registry.yarnpkg.com/@types/pixelmatch/-/pixelmatch-5.2.4.tgz#ca145cc5ede1388c71c68edf2d1f5190e5ddd0f6"
integrity sha512-HDaSHIAv9kwpMN7zlmwfTv6gax0PiporJOipcrGsVNF3Ba+kryOZc0Pio5pn6NhisgWr7TaajlPEKTbTAypIBQ==
dependencies:
"@types/node" "*"
"@types/pngjs@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@types/pngjs/-/pngjs-6.0.1.tgz#c711ec3fbbf077fed274ecccaf85dd4673130072"
integrity sha512-J39njbdW1U/6YyVXvC9+1iflZghP8jgRf2ndYghdJb5xL49LYDB+1EuAxfbuJ2IBbWIL3AjHPQhgaTxT3YaYeg==
dependencies:
"@types/node" "*"
"@types/prop-types@*":
version "15.7.5"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
@ -1312,6 +1340,13 @@
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"
integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==
"@types/ws@^8.5.3":
version "8.5.4"
resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.4.tgz#bb10e36116d6e570dd943735f86c933c1587b8a5"
integrity sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==
dependencies:
"@types/node" "*"
"@types/yargs-parser@*":
version "21.0.0"
resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b"
@ -1874,6 +1909,11 @@ caniuse-lite@^1.0.30001449:
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001452.tgz#dff7b8bb834b3a91808f0a9ff0453abb1fbba02a"
integrity sha512-Lkp0vFjMkBB3GTpLR8zk4NwW5EdRdnitwYJHDOOKIU85x4ckYCPQ+9WlVvSVClHxVReefkUMtWZH2l9KGlD51w==
canvaskit-wasm@0.38.0:
version "0.38.0"
resolved "https://registry.yarnpkg.com/canvaskit-wasm/-/canvaskit-wasm-0.38.0.tgz#83e6c46f3015c2ff3f6503157f47453af76a7be7"
integrity sha512-ZEG6lucpbQ4Ld+mY8C1Ng+PMLVP+/AX02jS0Sdl28NyMxuKSa9uKB8oGd1BYp1XWPyO2Jgr7U8pdyjJ/F3xR5Q==
chalk@^2.0.0:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
@ -4866,6 +4906,13 @@ pirates@^4.0.5:
resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b"
integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==
pixelmatch@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/pixelmatch/-/pixelmatch-5.3.0.tgz#5e5321a7abedfb7962d60dbf345deda87cb9560a"
integrity sha512-o8mkY4E/+LNUf6LzX96ht6k6CEDi65k9G2rjMtBe9Oo+VPKSvl+0GKHuH/AlG+GA5LPG/i5hrekkxUc3s2HU+Q==
dependencies:
pngjs "^6.0.0"
pkg-dir@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3"
@ -4880,6 +4927,11 @@ pkg-up@^3.1.0:
dependencies:
find-up "^3.0.0"
pngjs@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-6.0.0.tgz#ca9e5d2aa48db0228a52c419c3308e87720da821"
integrity sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg==
posix-character-classes@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
@ -5090,9 +5142,9 @@ react-native-video@^5.2.1:
prop-types "^15.7.2"
shaka-player "^2.5.9"
"react-native-worklets@https://github.com/chrfalch/react-native-worklets#15d52dd":
"react-native-worklets@https://github.com/chrfalch/react-native-worklets#d62d76c":
version "0.1.0"
resolved "https://github.com/chrfalch/react-native-worklets#15d52dd1289831cecc7906823f613172e0c6cd2e"
resolved "https://github.com/chrfalch/react-native-worklets#d62d76c20ed7a3bbfebe5623bc976e5c2d9beabd"
react-native@^0.71.3:
version "0.71.3"
@ -5134,6 +5186,14 @@ react-native@^0.71.3:
whatwg-fetch "^3.0.0"
ws "^6.2.2"
react-reconciler@^0.27.0:
version "0.27.0"
resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.27.0.tgz#360124fdf2d76447c7491ee5f0e04503ed9acf5b"
integrity sha512-HmMDKciQjYmBRGuuhIaKA1ba/7a+UsM5FzOZsMO2JYHt9Jh8reCb7j1eDC95NOyUlKM9KRyvdx0flBuDvYSBoA==
dependencies:
loose-envify "^1.1.0"
scheduler "^0.21.0"
react-refresh@^0.4.0:
version "0.4.3"
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.4.3.tgz#966f1750c191672e76e16c2efa569150cc73ab53"
@ -5377,6 +5437,13 @@ safe-regex@^1.1.0:
dependencies:
ret "~0.1.10"
scheduler@^0.21.0:
version "0.21.0"
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.21.0.tgz#6fd2532ff5a6d877b6edb12f00d8ab7e8f308820"
integrity sha512-1r87x5fz9MXqswA2ERLo0EbOAU74DpIUO090gIasYTqlVoJeMcl+Z1Rg7WHz+qtPujhS/hGIt9kxZOYBV3faRQ==
dependencies:
loose-envify "^1.1.0"
scheduler@^0.23.0:
version "0.23.0"
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe"
@ -6184,6 +6251,11 @@ ws@^7, ws@^7.5.1:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
ws@^8.11.0:
version "8.12.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.12.1.tgz#c51e583d79140b5e42e39be48c934131942d4a8f"
integrity sha512-1qo+M9Ba+xNhPB+YTWUlK6M17brTut5EXbcBaMRN5pH5dFrXz7lzz1ChFSUq3bOUl8yEvSenhHmYUNJxFzdJew==
xtend@~4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"

View File

@ -12,6 +12,7 @@
#import <React/RCTViewManager.h>
#import <React/RCTUIManager.h>
#import <React/RCTFPSGraph.h>
#import "FrameProcessorCallback.h"
#import "FrameProcessorRuntimeManager.h"

View File

@ -134,6 +134,15 @@ extension CameraView {
}
videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
videoOutput!.alwaysDiscardsLateVideoFrames = false
if previewType == "skia" {
// If the PreviewView is a Skia view, we need to use the RGB format since Skia works in the RGB colorspace instead of YUV.
// This does introduce a performance overhead, but it's inevitable since Skia would internally convert
// YUV frames to RGB anyways since all Shaders and draw operations operate in the RGB space.
videoOutput!.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_32BGRA, // default: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
]
}
captureSession.addOutput(videoOutput!)
}

View File

@ -9,6 +9,61 @@
import Foundation
extension CameraView {
/// Rotates a camera-sensor frame size into the given interface orientation.
/// The sensor delivers landscape-oriented buffers, so portrait orientations
/// swap width and height while landscape orientations pass through unchanged.
private func rotateFrameSize(frameSize: CGSize, orientation: UIInterfaceOrientation) -> CGSize {
  switch orientation {
  case .landscapeLeft, .landscapeRight:
    // already matches the camera sensor's native orientation
    return frameSize
  case .portrait, .portraitUpsideDown, .unknown:
    // input is rotated 90° relative to the sensor, so swap the axes
    return CGSize(width: frameSize.height, height: frameSize.width)
  @unknown default:
    return frameSize
  }
}
/// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system.
/// Emits `.session(.cameraNotReady)` / `.unknown` via invokeOnError and returns `.zero`
/// when the preview view, device input, or parent window is unavailable.
/// NOTE(review): the scale/overlap math assumes the preview is rendered
/// aspect-FILL (frame cropped to the view) — confirm against the preview layer's
/// videoGravity before relying on this for hit-testing.
func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
guard let previewView = previewView else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
guard let viewScale = window?.screen.scale else {
invokeOnError(.unknown(message: "View has no parent Window!"))
return .zero
}
// Frame size in the current output orientation (sensor size, possibly axis-swapped).
let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
orientation: outputOrientation)
// View size in physical pixels (points * screen scale).
let viewSize = CGSize(width: previewView.bounds.width * viewScale,
height: previewView.bounds.height * viewScale)
// Uniform scale from view pixels to frame pixels.
let scale = min(frameSize.width / viewSize.width, frameSize.height / viewSize.height)
let scaledViewSize = CGSize(width: viewSize.width * scale, height: viewSize.height * scale)
// How much of the scaled view extends past the frame on each axis (centered crop).
let overlapX = scaledViewSize.width - frameSize.width
let overlapY = scaledViewSize.height - frameSize.height
let scaledPoint = CGPoint(x: point.x * scale, y: point.y * scale)
// Shift by half the overlap so the point is relative to the visible (centered) region.
return CGPoint(x: scaledPoint.x - (overlapX / 2), y: scaledPoint.y - (overlapY / 2))
}
/// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1]).
/// Used for tap-to-focus: first maps the layer point into frame pixels via
/// convertLayerPointToFramePoint, then normalizes by the frame dimensions.
/// Returns `.zero` (after invoking the error callback) when the device input is missing.
func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
// Frame size in the current output orientation, for normalization below.
let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
orientation: outputOrientation)
let pointInFrame = convertLayerPointToFramePoint(layerPoint: pointInLayer)
// Normalize frame-pixel coordinates into the [0..1] sensor coordinate space.
return CGPoint(x: pointInFrame.x / frameSize.width, y: pointInFrame.y / frameSize.height)
}
func focus(point: CGPoint, promise: Promise) {
withPromise(promise) {
guard let device = self.videoDeviceInput?.device else {
@ -18,7 +73,8 @@ extension CameraView {
throw CameraError.device(DeviceError.focusNotSupported)
}
let normalizedPoint = self.videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: point)
// in {0..1} system
let normalizedPoint = captureDevicePointConverted(fromLayerPoint: point)
do {
try device.lockForConfiguration()

View File

@ -16,7 +16,7 @@ extension CameraView {
}
// Orientation of the output connections (photo, video, frame processor)
private var outputOrientation: UIInterfaceOrientation {
var outputOrientation: UIInterfaceOrientation {
if let userOrientation = orientation as String?,
let parsedOrientation = try? UIInterfaceOrientation(withString: userOrientation) {
// user is overriding output orientation
@ -27,7 +27,7 @@ extension CameraView {
}
}
internal func updateOrientation() {
func updateOrientation() {
// Updates the Orientation for all rotable
let isMirrored = videoDeviceInput?.device.position == .front

View File

@ -190,8 +190,26 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
// Draw Frame to Preview View Canvas (and call Frame Processor)
if captureOutput is AVCaptureVideoDataOutput {
if let previewView = previewView as? PreviewSkiaView {
// Render to Skia PreviewView
previewView.drawFrame(sampleBuffer) { canvas in
// Call JS Frame Processor before passing Frame to GPU - allows user to draw
guard let frameProcessor = self.frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
frameProcessor(frame, canvas)
}
} else {
// Call JS Frame Processor. User cannot draw, since we don't have a Skia Canvas.
guard let frameProcessor = frameProcessorCallback else { return }
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor(frame, nil)
}
}
// Record Video Frame/Audio Sample to File
if isRecording {
// Write Video / Audio frame to file
guard let recordingSession = recordingSession else {
invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
return
@ -210,14 +228,21 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
}
if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
// Call the JavaScript Frame Processor func (worklet)
let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
frameProcessor(frame)
}
#if DEBUG
if captureOutput is AVCaptureVideoDataOutput {
// Update FPS Graph per Frame
if let fpsGraph = fpsGraph {
DispatchQueue.main.async {
fpsGraph.onTick(CACurrentMediaTime())
}
}
}
#endif
}
private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput, fileType: AVFileType, videoCodec: AVVideoCodecType?) -> [String: Any]? {
private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
fileType: AVFileType,
videoCodec: AVVideoCodecType?) -> [String: Any]? {
if videoCodec != nil {
return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec!, assetWriterOutputFileType: fileType)
} else {
@ -233,7 +258,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
return .up
}
switch UIDevice.current.orientation {
switch outputOrientation {
case .portrait:
return cameraPosition == .front ? .leftMirrored : .right
@ -246,8 +271,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
case .landscapeRight:
return cameraPosition == .front ? .upMirrored : .down
case .unknown, .faceUp, .faceDown:
fallthrough
case .unknown:
return .up
@unknown default:
return .up
}

View File

@ -26,7 +26,8 @@ private let propsThatRequireReconfiguration = ["cameraId",
"preset",
"photo",
"video",
"enableFrameProcessor"]
"enableFrameProcessor",
"previewType"]
private let propsThatRequireDeviceReconfiguration = ["fps",
"hdr",
"lowLightBoost",
@ -60,7 +61,9 @@ public final class CameraView: UIView {
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 1.0 // in "factor"
@objc var enableFpsGraph = false
@objc var videoStabilizationMode: NSString?
@objc var previewType: NSString?
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@ -92,7 +95,6 @@ public final class CameraView: UIView {
internal var isRecording = false
internal var recordingSession: RecordingSession?
@objc public var frameProcessorCallback: FrameProcessorCallback?
internal var lastFrameProcessorCall = DispatchTime.now().uptimeNanoseconds
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+Zoom
@ -103,27 +105,19 @@ public final class CameraView: UIView {
internal let videoQueue = CameraQueues.videoQueue
internal let audioQueue = CameraQueues.audioQueue
internal var previewView: UIView?
#if DEBUG
internal var fpsGraph: RCTFPSGraph?
#endif
/// Returns whether the AVCaptureSession is currently running (reflected by isActive)
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
// pragma MARK: Setup
override public init(frame: CGRect) {
super.init(frame: frame)
videoPreviewLayer.session = captureSession
videoPreviewLayer.videoGravity = .resizeAspectFill
videoPreviewLayer.frame = layer.bounds
NotificationCenter.default.addObserver(self,
selector: #selector(sessionRuntimeError),
@ -141,6 +135,8 @@ public final class CameraView: UIView {
selector: #selector(onOrientationChanged),
name: UIDevice.orientationDidChangeNotification,
object: nil)
setupPreviewView()
}
@available(*, unavailable)
@ -165,15 +161,55 @@ public final class CameraView: UIView {
override public func willMove(toSuperview newSuperview: UIView?) {
super.willMove(toSuperview: newSuperview)
if !isMounted {
isMounted = true
guard let onViewReady = onViewReady else {
return
if newSuperview != nil {
if !isMounted {
isMounted = true
guard let onViewReady = onViewReady else {
return
}
onViewReady(nil)
}
onViewReady(nil)
}
}
/// Keeps the preview view sized to this view. Fix: a `layoutSubviews`
/// override must call `super.layoutSubviews()` — the original omitted it,
/// which can break Auto Layout of other subviews (e.g. the FPS graph).
override public func layoutSubviews() {
  super.layoutSubviews()
  if let previewView = previewView {
    // NOTE(review): assigning `frame` (superview coordinates) and then `bounds`
    // is unusual — the conventional pattern is `previewView.frame = bounds`.
    // Kept as-is to preserve existing behavior; confirm intent.
    previewView.frame = frame
    previewView.bounds = bounds
  }
}
/// (Re)creates the preview view to match the `previewType` prop and attaches it
/// as a subview. Idempotent: returns early if the correct view type is already
/// installed, otherwise removes the old one first.
/// NOTE(review): "skia" is a magic string also used in CameraView+AVCaptureSession —
/// consider extracting a shared constant.
func setupPreviewView() {
if previewType == "skia" {
// Skia Preview View allows user to draw onto a Frame in a Frame Processor
if previewView is PreviewSkiaView { return }
previewView?.removeFromSuperview()
previewView = PreviewSkiaView(frame: frame)
} else {
// Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
if previewView is PreviewView { return }
previewView?.removeFromSuperview()
previewView = PreviewView(frame: frame, session: captureSession)
}
// Force-unwrap is safe: both branches above assign previewView.
addSubview(previewView!)
}
/// Adds or removes the React Native FPS graph overlay according to the
/// `enableFpsGraph` prop. Compiled out entirely in release builds (#if DEBUG).
/// The graph is ticked per-frame from the video capture delegate.
func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
// Already installed — nothing to do.
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
// Keep the graph above the preview view and any other subviews.
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
// Prop turned off: tear the graph down.
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
// pragma MARK: Props updating
override public final func didSetProps(_ changedProps: [String]!) {
ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@ -188,7 +224,18 @@ public final class CameraView: UIView {
let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
let shouldUpdateOrientation = changedProps.contains("orientation")
let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")
if changedProps.contains("previewType") {
DispatchQueue.main.async {
self.setupPreviewView()
}
}
if changedProps.contains("enableFpsGraph") {
DispatchQueue.main.async {
self.setupFpsGraph()
}
}
if shouldReconfigure ||
shouldReconfigureAudioSession ||
@ -199,6 +246,7 @@ public final class CameraView: UIView {
shouldReconfigureDevice ||
shouldUpdateVideoStabilization ||
shouldUpdateOrientation {
// Video Configuration
cameraQueue.async {
if shouldReconfigure {
self.configureCaptureSession()

View File

@ -42,8 +42,10 @@ RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFpsGraph, BOOL);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSString);
// Camera View Events
RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);

View File

@ -10,13 +10,20 @@
#import <jsi/jsi.h>
#import <CoreMedia/CMSampleBuffer.h>
#import "Frame.h"
#import "SkCanvas.h"
#import "JsiSkCanvas.h"
using namespace facebook;
class JSI_EXPORT FrameHostObject: public jsi::HostObject {
public:
explicit FrameHostObject(Frame* frame): frame(frame) { }
explicit FrameHostObject(Frame* frame): frame(frame) {}
explicit FrameHostObject(Frame* frame,
std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
frame(frame), canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
@ -24,4 +31,5 @@ public:
public:
Frame* frame;
std::shared_ptr<RNSkia::JsiSkCanvas> canvas;
};

View File

@ -9,8 +9,11 @@
#import "FrameHostObject.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>
#import "JsiHostObject.h"
#import "JsiSharedValue.h"
#import "WKTJsiHostObject.h"
#import "SkCanvas.h"
#import "../Skia Render Layer/SkImageHelpers.h"
#import "../../cpp/JSITypedArray.h"
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
std::vector<jsi::PropNameID> result;
@ -18,15 +21,37 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
// Debugging
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
// Ref Management
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
// Skia
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
if (canvas != nullptr) {
auto canvasPropNames = canvas->getPropertyNames(rt);
for (auto& prop : canvasPropNames) {
result.push_back(std::move(prop));
}
}
return result;
}
// Returns a rect of the given `size`, centered inside `rect`:
// the leftover space on each axis is split evenly on both sides.
SkRect inscribe(SkSize size, SkRect rect) {
  auto dx = (rect.width() - size.width()) / 2.0;
  auto dy = (rect.height() - size.height()) / 2.0;
  return SkRect::MakeXYWH(rect.x() + dx, rect.y() + dy, size.width(), size.height());
}
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
@ -67,6 +92,59 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
decrementRefCount);
}
if (name == "render") {
auto render = JSI_HOST_FUNCTION_LAMBDA {
if (canvas == nullptr) {
throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
}
// convert CMSampleBuffer to SkImage
auto context = canvas->getCanvas()->recordingContext();
auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
// draw SkImage
if (count > 0) {
// ..with paint/shader
auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
auto paint = paintHostObject->getObject();
canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
} else {
// ..without paint/shader
canvas->getCanvas()->drawImage(image, 0, 0);
}
return jsi::Value::undefined();
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
}
if (name == "toArrayBuffer") {
auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
auto bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
auto height = CVPixelBufferGetHeight(pixelBuffer);
auto buffer = (uint8_t*) CVPixelBufferGetBaseAddress(pixelBuffer);
auto arraySize = bytesPerRow * height;
static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
auto arrayBufferCache = runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache).get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);
if (arrayBuffer.size(runtime) != arraySize) {
arrayBuffer = vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, arraySize);
runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
}
arrayBuffer.updateUnsafe(runtime, buffer, arraySize);
return arrayBuffer;
};
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
if (name == "isValid") {
auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
@ -82,6 +160,41 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
auto height = CVPixelBufferGetHeight(imageBuffer);
return jsi::Value((double) height);
}
if (name == "orientation") {
// Maps the Frame's UIImageOrientation to the JS Orientation string union.
// Mirrored variants collapse onto the same base orientation here;
// mirroring is exposed separately via the `isMirrored` property below.
switch (frame.orientation) {
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
return jsi::String::createFromUtf8(runtime, "portrait");
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
return jsi::String::createFromUtf8(runtime, "portraitUpsideDown");
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
return jsi::String::createFromUtf8(runtime, "landscapeLeft");
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
return jsi::String::createFromUtf8(runtime, "landscapeRight");
}
}
if (name == "isMirrored") {
// True for any of the four *Mirrored orientation variants, false otherwise.
switch (frame.orientation) {
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
return jsi::Value(false);
case UIImageOrientationDownMirrored:
case UIImageOrientationUpMirrored:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
return jsi::Value(true);
}
}
if (name == "timestamp") {
// Presentation timestamp of the sample buffer, converted to milliseconds.
auto timestamp = CMSampleBufferGetPresentationTimeStamp(frame.buffer);
auto seconds = static_cast<double>(CMTimeGetSeconds(timestamp));
return jsi::Value(seconds * 1000.0);
}
if (name == "bytesPerRow") {
auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
@ -93,6 +206,11 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Value((double) planesCount);
}
if (canvas != nullptr) {
// If we have a Canvas, try to access the property on there.
return canvas->get(runtime, propName);
}
// fallback to base implementation
return HostObject::get(runtime, propName);
}

View File

@ -11,4 +11,4 @@
#import <Foundation/Foundation.h>
#import "Frame.h"
typedef void (^FrameProcessorCallback) (Frame* frame);
typedef void (^FrameProcessorCallback) (Frame* frame, void* skCanvas);

View File

@ -19,10 +19,10 @@
#import <React/RCTUIManager.h>
#import <ReactCommon/RCTTurboModuleManager.h>
#import "JsiWorkletContext.h"
#import "JsiWorkletApi.h"
#import "JsiWorklet.h"
#import "JsiHostObject.h"
#import "WKTJsiWorkletContext.h"
#import "WKTJsiWorkletApi.h"
#import "WKTJsiWorklet.h"
#import "WKTJsiHostObject.h"
#import "FrameProcessorUtils.h"
#import "FrameProcessorCallback.h"

View File

@ -17,7 +17,7 @@
#endif
#import <jsi/jsi.h>
#import "JsiWorklet.h"
#import "WKTJsiWorklet.h"
#import <memory>
using namespace facebook;

View File

@ -19,22 +19,41 @@
#import "JSConsoleHelper.h"
#import <ReactCommon/RCTTurboModule.h>
#import "JsiWorklet.h"
#import "WKTJsiWorklet.h"
#import "RNSkPlatformContext.h"
#import "RNSkiOSPlatformContext.h"
#import "JsiSkCanvas.h"
FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet) {
// Wrap Worklet call in invoker
auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
// Create cached Skia Canvas object
auto callInvoker = RCTBridge.currentBridge.jsCallInvoker;
auto skiaPlatformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(&runtime, callInvoker);
auto canvasHostObject = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
// Converts a Worklet to a callable Objective-C block function
return ^(Frame* frame) {
return ^(Frame* frame, void* skiaCanvas) {
try {
// Box the Frame to a JS Host Object
// Create cached Frame object
auto frameHostObject = std::make_shared<FrameHostObject>(frame);
// Update cached Canvas object
if (skiaCanvas != nullptr) {
canvasHostObject->setCanvas((SkCanvas*)skiaCanvas);
frameHostObject->canvas = canvasHostObject;
} else {
frameHostObject->canvas = nullptr;
}
auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
jsi::Value jsValue(std::move(argument));
// Call the Worklet with the Frame JS Host Object as an argument
workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
// After the sync Frame Processor finished executing, remove the Canvas on that Frame instance. It can no longer draw.
frameHostObject->canvas = nullptr;
} catch (jsi::JSError& jsError) {
// JS Error occured, print it to console.
auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");

34
ios/PreviewView.swift Normal file
View File

@ -0,0 +1,34 @@
//
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
/// A `UIView` whose backing layer is an `AVCaptureVideoPreviewLayer`, used to
/// display the live Camera feed of an `AVCaptureSession`.
class PreviewView: UIView {
  /// Convenience wrapper to get layer as its statically known type.
  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // Safe by construction: `layerClass` below guarantees the backing layer type.
    // swiftlint:disable:next force_cast
    return layer as! AVCaptureVideoPreviewLayer
  }

  /// Make UIKit create an `AVCaptureVideoPreviewLayer` as this view's backing layer.
  override public class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  /// Creates a preview view attached to the given capture session.
  /// The video is scaled to fill the view's bounds (aspect-fill).
  init(frame: CGRect, session: AVCaptureSession) {
    super.init(frame: frame)
    videoPreviewLayer.session = session
    videoPreviewLayer.videoGravity = .resizeAspectFill
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented!")
  }
}

View File

@ -0,0 +1,26 @@
//
// PreviewSkiaView.h
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef PreviewSkiaView_h
#define PreviewSkiaView_h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "FrameProcessorCallback.h"
// Callback invoked while rendering a Frame; receives the Skia canvas as an
// opaque pointer (SkCanvas*) so this header stays importable from plain Objective-C.
typedef void (^DrawCallback) (void* _Nonnull skCanvas);

// A UIView that renders Camera Frames through a Skia Metal canvas
// (see PreviewSkiaView.mm / SkiaMetalCanvasProvider).
@interface PreviewSkiaView: UIView

// Call to pass a new Frame to be drawn by the Skia Canvas
- (void) drawFrame:(_Nonnull CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback;

@end
#endif /* PreviewSkiaView_h */

View File

@ -0,0 +1,59 @@
//
// PreviewSkiaView.mm
// VisionCamera
//
// Created by Marc Rousavy on 17.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "PreviewSkiaView.h"
#import <Foundation/Foundation.h>
#import "SkiaMetalCanvasProvider.h"
#include <exception>
#include <string>
#if SHOW_FPS
#import <React/RCTFPSGraph.h>
#endif
@implementation PreviewSkiaView {
  // Renders Camera Frames to an off-screen Metal texture and draws the result
  // into this view's layer. Created when we gain a superview, destroyed when we lose it.
  std::shared_ptr<SkiaMetalCanvasProvider> _canvasProvider;
}

// Renders the given Camera Frame and invokes `callback` with the Skia canvas
// so the JS Frame Processor can draw on top of it.
// Throws if the view is not attached (no canvas provider exists yet).
- (void)drawFrame:(CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback {
  if (_canvasProvider == nullptr) {
    throw std::runtime_error("Cannot draw new Frame to Canvas when SkiaMetalCanvasProvider is null!");
  }
  _canvasProvider->renderFrameToCanvas(buffer, ^(SkCanvas* canvas) {
    callback((void*)canvas);
  });
}

- (void)willMoveToSuperview:(UIView*)newSuperview {
  // Always forward lifecycle overrides to super.
  [super willMoveToSuperview:newSuperview];
  if (newSuperview == nil) {
    // Remove implementation view when the parent view is not set
    if (_canvasProvider != nullptr) {
      [_canvasProvider->getLayer() removeFromSuperlayer];
      _canvasProvider = nullptr;
    }
  } else {
    // Create implementation view when the parent view is set
    if (_canvasProvider == nullptr) {
      _canvasProvider = std::make_shared<SkiaMetalCanvasProvider>();
      [self.layer addSublayer:_canvasProvider->getLayer()];
      _canvasProvider->start();
    }
  }
}

- (void)layoutSubviews {
  // Always forward lifecycle overrides to super.
  [super layoutSubviews];
  if (_canvasProvider != nullptr) {
    _canvasProvider->setSize(self.bounds.size.width, self.bounds.size.height);
  }
}

@end

View File

@ -0,0 +1,39 @@
//
// SkImageHelpers.h
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef SkImageHelpers_h
#define SkImageHelpers_h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <include/gpu/GrRecordingContext.h>
#import "SkImage.h"
#import "SkSize.h"
#import "SkRect.h"
// Static helper functions for converting CoreMedia buffers into Skia objects.
// Not instantiable - all members are static.
class SkImageHelpers {
public:
SkImageHelpers() = delete;

public:
/**
Convert a CMSampleBuffer to an SkImage. Format has to be RGB.
(The implementation throws if the buffer is not kCVPixelFormatType_32BGRA.)
*/
static sk_sp<SkImage> convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer);
/**
Creates a Center Crop Transformation Rect so that the source rect fills (aspectRatio: cover) the destination rect.
The return value should be passed as a sourceRect to a canvas->draw...Rect(..) function, destinationRect should stay the same.
*/
static SkRect createCenterCropRect(SkRect source, SkRect destination);

private:
// Centers `size` within `rect` and returns the resulting rectangle.
static SkRect inscribe(SkSize size, SkRect rect);
};
#endif /* SkImageHelpers_h */

View File

@ -0,0 +1,108 @@
//
// CMSampleBuffer+toSkImage.m
// VisionCamera
//
// Created by Marc Rousavy on 23.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "SkImageHelpers.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkData.h>
#import <include/gpu/GrRecordingContext.h>
#include <TargetConditionals.h>
#if TARGET_RT_BIG_ENDIAN
# define FourCC2Str(fourcc) (const char[]){*((char*)&fourcc), *(((char*)&fourcc)+1), *(((char*)&fourcc)+2), *(((char*)&fourcc)+3),0}
#else
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
// Returns the process-wide Metal Texture Cache used to wrap CVPixelBuffers
// as Metal textures. Created lazily exactly once.
// Note: the `context` parameter is currently unused; it is kept for interface
// stability with callers.
// The cache is intentionally never released (it lives for the process lifetime).
CVMetalTextureCacheRef getTextureCache(GrRecordingContext* context) {
  // A C++11 "magic static": initialization is thread-safe, and if the
  // initializer throws, the next call retries. This avoids the
  // check-then-create race of a nil-checked static.
  static CVMetalTextureCacheRef textureCache = [] {
    CVMetalTextureCacheRef cache = nil;
    auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
                                            nil,
                                            MTLCreateSystemDefaultDevice(),
                                            nil,
                                            &cache);
    if (result != kCVReturnSuccess || cache == nil) {
      throw std::runtime_error("Failed to create Metal Texture Cache!");
    }
    return cache;
  }();
  return textureCache;
}
// Converts a CMSampleBuffer (must be kCVPixelFormatType_32BGRA) into an SkImage
// backed by a Metal texture, without copying pixel data.
// Throws std::runtime_error on unsupported pixel formats or texture-creation failure.
sk_sp<SkImage> SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext* context, CMSampleBufferRef sampleBuffer) {
  auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  double width = CVPixelBufferGetWidth(pixelBuffer);
  double height = CVPixelBufferGetHeight(pixelBuffer);

  // Make sure the format is RGB (BGRA_8888)
  auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
  if (format != kCVPixelFormatType_32BGRA) {
    auto fourCharCode = @(FourCC2Str(format));
    auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + fourCharCode.UTF8String + std::string(") - cannot convert to SkImage!");
    throw std::runtime_error(error);
  }

  auto textureCache = getTextureCache(context);

  // Convert CMSampleBuffer* -> CVMetalTexture*
  CVMetalTextureRef cvTexture = nil;
  auto result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                          textureCache,
                                                          pixelBuffer,
                                                          nil,
                                                          MTLPixelFormatBGRA8Unorm,
                                                          width,
                                                          height,
                                                          0, // plane index
                                                          &cvTexture);
  // The result code must be checked - on failure `cvTexture` is not valid and
  // using it below would crash.
  if (result != kCVReturnSuccess || cvTexture == nil) {
    throw std::runtime_error("Failed to create Metal Texture from CMSampleBuffer!");
  }

  GrMtlTextureInfo textureInfo;
  auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
  textureInfo.fTexture.retain((__bridge void*)mtlTexture);

  // Wrap it in a GrBackendTexture
  GrBackendTexture texture(width, height, GrMipmapped::kNo, textureInfo);
  // Create an SkImage from the existing texture
  auto image = SkImage::MakeFromTexture(context,
                                        texture,
                                        kTopLeft_GrSurfaceOrigin,
                                        kBGRA_8888_SkColorType,
                                        kOpaque_SkAlphaType,
                                        SkColorSpace::MakeSRGB());

  // Release the Texture wrapper (it will still be strong)
  CFRelease(cvTexture);

  return image;
}
// Computes the largest sub-rect of `sourceRect` that matches the aspect ratio
// of `destinationRect`, centered inside `sourceRect` (aspectRatio: cover).
SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinationRect) {
  SkSize cropSize;
  if (destinationRect.width() / destinationRect.height() > sourceRect.width() / sourceRect.height()) {
    // Destination is wider than the source: keep full width, shrink height.
    auto croppedHeight = (sourceRect.width() * destinationRect.height()) / destinationRect.width();
    cropSize = SkSize::Make(sourceRect.width(), croppedHeight);
  } else {
    // Destination is taller (or equal): keep full height, shrink width.
    auto croppedWidth = (sourceRect.height() * destinationRect.width()) / destinationRect.height();
    cropSize = SkSize::Make(croppedWidth, sourceRect.height());
  }
  return inscribe(cropSize, sourceRect);
}
// Centers `size` within `rect` and returns the resulting rectangle.
SkRect SkImageHelpers::inscribe(SkSize size, SkRect rect) {
  auto left = rect.x() + (rect.width() - size.width()) / 2.0;
  auto top = rect.y() + (rect.height() - size.height()) / 2.0;
  return SkRect::MakeXYWH(left, top, size.width(), size.height());
}

View File

@ -0,0 +1,56 @@
#pragma once
#ifndef __cplusplus
#error This header has to be compiled with C++!
#endif
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <functional>
#include <include/gpu/GrDirectContext.h>
#include <mutex>
#include <memory>
#include <atomic>
#import "VisionDisplayLink.h"
#import "SkiaMetalRenderContext.h"
// Owns the two Metal/Skia render pipelines used for the Camera preview:
// 1. Camera Frame -> off-screen MTLTexture (renderFrameToCanvas, camera thread)
// 2. off-screen MTLTexture -> on-screen CAMetalLayer (render, DisplayLink thread)
// enable_shared_from_this: the DisplayLink callback holds only a weak_ptr
// to this provider (see start()).
class SkiaMetalCanvasProvider: public std::enable_shared_from_this<SkiaMetalCanvasProvider> {
public:
SkiaMetalCanvasProvider();
~SkiaMetalCanvasProvider();

// Render a Camera Frame to the off-screen canvas
void renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback);

// Start updating the DisplayLink (runLoop @ screen refresh rate) and draw Frames to the Layer
void start();
// Update the size of the View (Layer)
void setSize(int width, int height);
CALayer* getLayer();

private:
// Set true by the constructor, false by the destructor.
bool _isValid = false;
// View size in points; -1 until setSize() is first called.
float _width = -1;
float _height = -1;

// For rendering Camera Frame -> off-screen MTLTexture
OffscreenRenderContext _offscreenContext;

// For rendering off-screen MTLTexture -> on-screen CAMetalLayer
LayerRenderContext _layerContext;

// For synchronization between the two Threads/Contexts
std::mutex _textureMutex;
// Set by renderFrameToCanvas(), cleared by render() once drawn to screen.
std::atomic<bool> _hasNewFrame = false;

private:
void render();
id<MTLTexture> getTexture(int width, int height);
float getPixelDensity();
};

View File

@ -0,0 +1,240 @@
#import "SkiaMetalCanvasProvider.h"
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import <include/core/SkColorSpace.h>
#import <include/core/SkSurface.h>
#import <include/core/SkCanvas.h>
#import <include/core/SkFont.h>
#import <include/gpu/GrDirectContext.h>
#import "SkImageHelpers.h"
#include <memory>
SkiaMetalCanvasProvider::SkiaMetalCanvasProvider(): std::enable_shared_from_this<SkiaMetalCanvasProvider>() {
  // Configure the on-screen CAMetalLayer which render() draws into.
  CAMetalLayer* metalLayer = [CAMetalLayer layer];
  metalLayer.framebufferOnly = NO;
  metalLayer.device = _layerContext.device;
  metalLayer.opaque = false;
  metalLayer.contentsScale = getPixelDensity();
  metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;
  _layerContext.layer = metalLayer;

  // The DisplayLink drives render() at screen refresh rate once start() is called.
  _layerContext.displayLink = [[VisionDisplayLink alloc] init];

  _isValid = true;
}
SkiaMetalCanvasProvider::~SkiaMetalCanvasProvider() {
  // Mark invalid and stop the DisplayLink so no further render() callbacks fire.
  _isValid = false;
  NSLog(@"VisionCamera: Stopping SkiaMetalCanvasProvider DisplayLink...");
  [_layerContext.displayLink stop];
}
void SkiaMetalCanvasProvider::start() {
NSLog(@"VisionCamera: Starting SkiaMetalCanvasProvider DisplayLink...");
[_layerContext.displayLink start:[weakThis = weak_from_this()](double time) {
auto thiz = weakThis.lock();
if (thiz) {
thiz->render();
}
}];
}
// Returns the off-screen render-target texture, re-creating it when the
// requested dimensions differ from the cached one.
id<MTLTexture> SkiaMetalCanvasProvider::getTexture(int width, int height) {
  id<MTLTexture> cached = _offscreenContext.texture;
  if (cached != nil && cached.width == width && cached.height == height) {
    // Cached texture still matches - re-use it.
    return cached;
  }

  // First call, or dimensions changed: allocate a new texture.
  MTLTextureDescriptor* descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
                                                                                        width:width
                                                                                       height:height
                                                                                    mipmapped:NO];
  descriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
  _offscreenContext.texture = [_offscreenContext.device newTextureWithDescriptor:descriptor];
  return _offscreenContext.texture;
}
/**
 Callback from the DisplayLink - renders the current in-memory off-screen texture to the on-screen CAMetalLayer.
 Runs on the DisplayLink's background queue at screen refresh rate; skips work
 when the view has no size yet or no new Frame has arrived since the last draw.
 */
void SkiaMetalCanvasProvider::render() {
if (_width == -1 && _height == -1) {
// Not laid out yet (setSize() has not been called) - nothing to draw into.
return;
}

if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
return;
}

@autoreleasepool {
auto context = _layerContext.skiaContext.get();

// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurface::MakeFromCAMetalLayer(context,
(__bridge GrMTLHandle)_layerContext.layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
nullptr,
nullptr,
&drawableHandle);
if (surface == nullptr || surface->getCanvas() == nullptr) {
throw std::runtime_error("Skia surface could not be created from parameters.");
}

auto canvas = surface->getCanvas();

// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);

// Get the texture
auto texture = _offscreenContext.texture;
if (texture == nil) return;

// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
sourceRect = SkImageHelpers::createCenterCropRect(sourceRect, destinationRect);
// Negative offsets: translating by these moves the cropped region's origin to (0, 0).
auto offsetX = -sourceRect.left();
auto offsetY = -sourceRect.top();

// The Canvas is equal to the View size, where-as the Frame has a different size (e.g. 4k)
// We scale the Canvas to the exact dimensions of the Frame so that the user can use the Frame as a coordinate system
canvas->save();

auto scaleW = static_cast<double>(surface->width()) / texture.width;
auto scaleH = static_cast<double>(surface->height()) / texture.height;
// MAX of the two scales -> the Frame covers the whole surface (may overflow one axis).
auto scale = MAX(scaleW, scaleH);
canvas->scale(scale, scale);
canvas->translate(offsetX, offsetY);

// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
GrBackendTexture backendTexture(texture.width, texture.height, GrMipmapped::kNo, textureInfo);
auto image = SkImage::MakeFromTexture(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
kBGRA_8888_SkColorType,
kOpaque_SkAlphaType,
SkColorSpace::MakeSRGB());

// Draw the Texture (Frame) to the Canvas
canvas->drawImage(image, 0, 0);

// Restore the scale & transform
canvas->restore();

surface->flushAndSubmit();

// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
id<MTLCommandBuffer> commandBuffer([_layerContext.commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];

// Consumed this Frame - render() will skip until renderFrameToCanvas() sets it again.
_hasNewFrame = false;

lock.unlock();
}
}
float SkiaMetalCanvasProvider::getPixelDensity() {
return UIScreen.mainScreen.scale;
}
/**
 Render to a canvas. This uses the current in-memory off-screen texture and draws to it.
 The buffer is expected to be in RGB (`BGRA_8888`) format.
 While rendering, `drawCallback` will be invoked with a Skia Canvas instance which can be used for Frame Processing (JS).
 Throws std::runtime_error if the PixelBuffer is invalid or the Skia surface cannot be created.
 */
void SkiaMetalCanvasProvider::renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback) {
  if (_width == -1 && _height == -1) {
    // Not laid out yet (setSize() has not been called) - nothing to render into.
    return;
  }

  // Wrap in auto release pool since we want the system to clean up after rendering
  // and not wait until later - we've seen some example of memory usage growing very
  // fast in the simulator without this.
  @autoreleasepool {
    // Get the Frame's PixelBuffer
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == nil) {
      throw std::runtime_error("drawFrame: Pixel Buffer is corrupt/empty.");
    }

    // Lock the Mutex to block the DisplayLink's render() from reading the texture
    // while we overwrite it. unique_lock releases the mutex automatically,
    // even if one of the calls below throws.
    std::unique_lock lock(_textureMutex);

    // Get the Metal Texture we use for in-memory drawing
    auto texture = getTexture(CVPixelBufferGetWidth(pixelBuffer),
                              CVPixelBufferGetHeight(pixelBuffer));

    // Get & Lock the writeable Texture from the Metal Drawable
    GrMtlTextureInfo fbInfo;
    fbInfo.fTexture.retain((__bridge void*)texture);
    GrBackendRenderTarget backendRT(texture.width,
                                    texture.height,
                                    1,
                                    fbInfo);

    auto context = _offscreenContext.skiaContext.get();

    // Create a Skia Surface from the writable Texture
    auto surface = SkSurface::MakeFromBackendRenderTarget(context,
                                                          backendRT,
                                                          kTopLeft_GrSurfaceOrigin,
                                                          kBGRA_8888_SkColorType,
                                                          nullptr,
                                                          nullptr);

    if (surface == nullptr || surface->getCanvas() == nullptr) {
      throw std::runtime_error("Skia surface could not be created from parameters.");
    }

    // Lock the Frame's PixelBuffer for the duration of the Frame Processor so the user can safely do operations on it.
    // try/catch guarantees the PixelBuffer is unlocked again even if the
    // conversion or the JS Frame Processor throws - otherwise the buffer
    // would stay locked forever.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    try {
      // Converts the CMSampleBuffer to an SkImage - RGB.
      auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);

      auto canvas = surface->getCanvas();
      // Clear everything so we keep it at a clean state
      canvas->clear(SkColors::kBlack);

      // Draw the Image into the Frame (aspectRatio: cover)
      // The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
      // but that'll just overwrite the existing one - no need to worry.
      canvas->drawImage(image, 0, 0);

      // Call the JS Frame Processor.
      drawCallback(canvas);

      // Flush all appended operations on the canvas and commit it to the SkSurface
      surface->flushAndSubmit();

      // Signal the DisplayLink's render() that a new Frame is ready to be drawn on-screen.
      _hasNewFrame = true;
    } catch (...) {
      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
      throw;
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
  }
}
// Updates the layer's size. `width`/`height` are in points; the drawable is
// sized in physical pixels (points * screen scale).
void SkiaMetalCanvasProvider::setSize(int width, int height) {
  _width = width;
  _height = height;
  float density = getPixelDensity();
  _layerContext.layer.frame = CGRectMake(0, 0, width, height);
  _layerContext.layer.drawableSize = CGSizeMake(width * density, height * density);
}

CALayer* SkiaMetalCanvasProvider::getLayer() {
  return _layerContext.layer;
}

View File

@ -0,0 +1,41 @@
//
// SkiaMetalRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef SkiaMetalRenderContext_h
#define SkiaMetalRenderContext_h
#import <MetalKit/MetalKit.h>
#import <QuartzCore/CAMetalLayer.h>
#import <AVFoundation/AVFoundation.h>
#include <include/gpu/GrDirectContext.h>
// Shared base for both render paths: owns the Metal device, a command queue
// and the Skia GrDirectContext bound to them.
struct RenderContext {
id<MTLDevice> device;
id<MTLCommandQueue> commandQueue;
sk_sp<GrDirectContext> skiaContext;

RenderContext() {
device = MTLCreateSystemDefaultDevice();
// NOTE(review): the CFRetain appears to balance the ownership that
// GrDirectContext::MakeMetal takes over the raw handle below - confirm
// against the RN Skia / Skia Metal backend docs before changing.
commandQueue = id<MTLCommandQueue>(CFRetain((GrMTLHandle)[device newCommandQueue]));
skiaContext = GrDirectContext::MakeMetal((__bridge void*)device,
(__bridge void*)commandQueue);
}
};

// For rendering to an off-screen in-memory Metal Texture (MTLTexture)
struct OffscreenRenderContext: public RenderContext {
id<MTLTexture> texture;
};

// For rendering to a Metal Layer (CAMetalLayer)
// NOTE(review): `VisionDisplayLink` is not imported by this header - importers
// must include VisionDisplayLink.h first (SkiaMetalCanvasProvider.h does).
struct LayerRenderContext: public RenderContext {
CAMetalLayer* layer;
VisionDisplayLink* displayLink;
};
#endif /* SkiaMetalRenderContext_h */

View File

@ -0,0 +1,38 @@
//
// VisionDisplayLink.h
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#ifndef DisplayLink_h
#define DisplayLink_h
#import <CoreFoundation/CoreFoundation.h>
#import <UIKit/UIKit.h>
typedef void (^block_t)(double);
// Wraps a CADisplayLink running on a dedicated background queue and reports
// the measured frames-per-second between ticks.
@interface VisionDisplayLink : NSObject {
CADisplayLink *_displayLink;
// FPS measured from the delta between the last two ticks.
double _currentFps;
// Timestamp of the previous tick; 0 before the first tick.
double _previousFrameTimestamp;
}

// Block invoked on every DisplayLink tick with the tick's timestamp (seconds).
@property(nonatomic, copy) block_t updateBlock;

// Start the DisplayLink's runLoop
- (void)start:(block_t)block;

// Stop the DisplayLink's runLoop
- (void)stop;

// Get the current FPS value
- (double)currentFps;

// The FPS value this DisplayLink is targeting
- (double)targetFps;

@end
#endif /* VisionDisplayLink_h */

View File

@ -0,0 +1,63 @@
//
// VisionDisplayLink.m
// VisionCamera
//
// Created by Marc Rousavy on 28.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
#import "VisionDisplayLink.h"
#import <Foundation/Foundation.h>
@implementation VisionDisplayLink

// Starts the CADisplayLink on a dedicated high-priority serial queue and
// invokes `block` on every tick with the tick's timestamp (in seconds).
// Calling start while already running only replaces the update block.
- (void)start:(block_t)block {
  self.updateBlock = block;

  // check whether the loop is already running
  if (_displayLink == nil) {
    // specify update method
    // NOTE: CADisplayLink retains its target (self) until invalidate is
    // called - stop() breaks that cycle.
    _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update:)];

    // Start a new Queue/Thread that will run the runLoop
    dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, -1);
    dispatch_queue_t queue = dispatch_queue_create("mrousavy/VisionCamera.preview", qos);
    dispatch_async(queue, ^{
      // Add the display link to the current run loop (thread on which we're currently running on)
      NSRunLoop* loop = [NSRunLoop currentRunLoop];
      [self->_displayLink addToRunLoop:loop forMode:NSRunLoopCommonModes];
      // Run the runLoop (blocking)
      [loop run];
      NSLog(@"VisionCamera: DisplayLink runLoop ended.");
    });
  }
}

// Invalidates the CADisplayLink, which also ends the background runLoop.
- (void)stop {
  // check whether the loop is already stopped
  if (_displayLink != nil) {
    // if the display link is present, it gets invalidated (loop stops)
    [_displayLink invalidate];
    _displayLink = nil;
  }
}

// CADisplayLink tick: updates the measured FPS and calls the update block.
- (void)update:(CADisplayLink *)sender {
  double time = sender.timestamp;

  // Skip the FPS calculation on the very first tick - there is no previous
  // timestamp yet, so the diff against 0 would produce a bogus, huge value.
  double diff = time - _previousFrameTimestamp;
  if (_previousFrameTimestamp > 0 && diff > 0) {
    _currentFps = 1.0 / diff;
  }
  _previousFrameTimestamp = time;

  // Guard against a nil block (e.g. if it was ever cleared between ticks).
  if (_updateBlock != nil) {
    _updateBlock(time);
  }
}

- (double)targetFps {
  // duration is 0 until the DisplayLink has fired at least once - avoid
  // returning infinity from a division by zero.
  double duration = _displayLink.duration;
  return duration > 0 ? 1.0 / duration : 0;
}

- (double)currentFps {
  return _currentFps;
}

@end

View File

@ -11,7 +11,11 @@
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8248867292644EF00729383 /* PreviewSkiaView.mm */; };
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */; };
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */; };
@ -54,6 +58,7 @@
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */ = {isa = PBXBuildFile; fileRef = B88A020C2934FC22009E035A /* VisionDisplayLink.m */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
@ -78,8 +83,7 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraScheduler.mm; sourceTree = "<group>"; };
B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraScheduler.h; sourceTree = "<group>"; };
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaMetalRenderContext.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginRegistry.mm; sourceTree = "<group>"; };
B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorCallback.h; sourceTree = "<group>"; };
@ -89,8 +93,15 @@
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
B8248866292644E300729383 /* PreviewSkiaView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PreviewSkiaView.h; sourceTree = "<group>"; };
B8248867292644EF00729383 /* PreviewSkiaView.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PreviewSkiaView.mm; sourceTree = "<group>"; };
B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */ = {isa = PBXFileReference; explicitFileType = sourcecode.cpp.h; fileEncoding = 4; path = SkiaMetalCanvasProvider.h; sourceTree = "<group>"; };
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaMetalCanvasProvider.mm; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
@ -137,6 +148,8 @@
B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
B88A020C2934FC22009E035A /* VisionDisplayLink.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionDisplayLink.m; sourceTree = "<group>"; };
B88A020E2934FC29009E035A /* VisionDisplayLink.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionDisplayLink.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
@ -186,7 +199,9 @@
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B83D5EE629377117000AFD2F /* PreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
B887516D25E0102000DB86D6 /* React Utils */,
@ -270,12 +285,26 @@
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */,
B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */,
);
path = "Frame Processor";
sourceTree = "<group>";
};
B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
isa = PBXGroup;
children = (
B8248866292644E300729383 /* PreviewSkiaView.h */,
B8248867292644EF00729383 /* PreviewSkiaView.mm */,
B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */,
B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */,
B8412630292E41AD001AB448 /* SkImageHelpers.h */,
B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
B88A020E2934FC29009E035A /* VisionDisplayLink.h */,
B88A020C2934FC22009E035A /* VisionDisplayLink.m */,
B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */,
);
path = "Skia Render Layer";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@ -379,18 +408,22 @@
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */,
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
@ -411,6 +444,7 @@
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,

View File

@ -12,6 +12,8 @@
"lib/commonjs",
"lib/module",
"lib/typescript",
"cpp/**/*.h",
"cpp/**/*.cpp",
"android/build.gradle",
"android/gradle.properties",
"android/CMakeLists.txt",
@ -27,6 +29,7 @@
"README.md"
],
"scripts": {
"prepare": "sh temp-patch.sh",
"typescript": "tsc --noEmit",
"lint": "eslint \"**/*.{js,ts,tsx}\"",
"lint-ci": "yarn lint -f ./node_modules/@firmnav/eslint-github-actions-formatter/dist/formatter.js",
@ -74,6 +77,7 @@
"@react-native-community/eslint-config": "^3.2.0",
"@react-native-community/eslint-plugin": "^1.3.0",
"@release-it/conventional-changelog": "^5.1.1",
"@shopify/react-native-skia": "^0.1.175",
"@types/react": "^18.0.27",
"@types/react-native": "^0.71.2",
"eslint": "^8.33.0",
@ -82,11 +86,12 @@
"react": "^18.2.0",
"react-native": "^0.71.3",
"react-native-builder-bob": "^0.20.3",
"react-native-worklets": "https://github.com/chrfalch/react-native-worklets#15d52dd",
"react-native-worklets": "https://github.com/chrfalch/react-native-worklets#d62d76c",
"release-it": "^15.6.0",
"typescript": "^4.9.5"
},
"peerDependencies": {
"@shopify/react-native-skia": "*",
"react": "*",
"react-native": "*",
"react-native-worklets": "*"

View File

@ -3,6 +3,7 @@ import type { CameraDevice, CameraDeviceFormat, ColorSpace, VideoStabilizationMo
import type { CameraRuntimeError } from './CameraError';
import type { CameraPreset } from './CameraPreset';
import type { Frame } from './Frame';
import type { Orientation } from './Orientation';
export interface CameraProps extends ViewProps {
/**
@ -151,10 +152,26 @@ export interface CameraProps extends ViewProps {
* @default false
*/
enableHighQualityPhotos?: boolean;
/**
* If `true`, show a debug view to display the FPS of the Camera session.
* This is useful for debugging your Frame Processor's speed.
*
* @default false
*/
enableFpsGraph?: boolean;
/**
* Represents the orientation of all Camera Outputs (Photo, Video, and Frame Processor). If this value is not set, the device orientation is used.
*/
orientation?: 'portrait' | 'portraitUpsideDown' | 'landscapeLeft' | 'landscapeRight';
orientation?: Orientation;
/**
* Render type of the Camera Preview Layer.
*
* * `native`: Uses the default platform native preview Layer. Uses less resources and is more efficient.
* * `skia`: Uses a Skia Canvas for rendering Camera frames to the screen. This allows you to draw to the screen using the react-native-skia API inside a Frame Processor.
*
* @default 'native'
*/
previewType?: 'native' | 'skia';
//#region Events
/**
@ -168,10 +185,12 @@ export interface CameraProps extends ViewProps {
/**
* A worklet which will be called for every frame the Camera "sees".
*
* > See [the Frame Processors documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) for more information
* If {@linkcode CameraProps.previewType | previewType} is set to `"skia"`, you can draw content to the `Frame` using the react-native-skia API.
*
* Note: If you want to use `video` and `frameProcessor` simultaneously, make sure [`supportsParallelVideoProcessing`](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop) is `true`.
*
* > See [the Frame Processors documentation](https://mrousavy.github.io/react-native-vision-camera/docs/guides/frame-processors) for more information
*
* @example
* ```tsx
* const frameProcessor = useFrameProcessor((frame) => {

View File

@ -1,7 +1,10 @@
import type { SkCanvas, SkPaint } from '@shopify/react-native-skia';
import type { Orientation } from './Orientation';
/**
* A single frame, as seen by the camera.
*/
export interface Frame {
export interface Frame extends SkCanvas {
/**
* Whether the underlying buffer is still valid or not. The buffer will be released after the frame processor returns, or `close()` is called.
*/
@ -22,7 +25,28 @@ export interface Frame {
* Returns the number of planes this frame contains.
*/
planesCount: number;
/**
* Returns whether the Frame is mirrored (selfie camera) or not.
*/
isMirrored: boolean;
/**
 * Returns the timestamp of the Frame relative to the host system's clock.
*/
timestamp: number;
/**
* Represents the orientation of the Frame.
*
* Some ML Models are trained for specific orientations, so they need to be taken into
* consideration when running a frame processor. See also: `isMirrored`
*/
orientation: Orientation;
/**
* Get the underlying data of the Frame as a uint8 array buffer.
*
* Note that Frames are allocated on the GPU, so calling `toArrayBuffer()` will copy from the GPU to the CPU.
*/
toArrayBuffer(): Uint8Array;
/**
* Returns a string representation of the frame.
* @example
@ -31,6 +55,35 @@ export interface Frame {
* ```
*/
toString(): string;
/**
* Renders the Frame to the screen.
*
* By default a Frame has already been rendered to the screen once, so if you call this method again,
* previously drawn content will be overwritten.
*
* @param paint (Optional) A Paint object to use to draw the Frame with. For example, this can contain a Shader (ImageFilter)
* @example
* ```ts
* const INVERTED_COLORS_SHADER = `
* uniform shader image;
* half4 main(vec2 pos) {
* vec4 color = image.eval(pos);
* return vec4(1.0 - color.rgb, 1.0);
* }`
 * const runtimeEffect = Skia.RuntimeEffect.Make(INVERTED_COLORS_SHADER)
* if (runtimeEffect == null) throw new Error('Shader failed to compile!')
* const shaderBuilder = Skia.RuntimeShaderBuilder(runtimeEffect)
* const imageFilter = Skia.ImageFilter.MakeRuntimeShader(shaderBuilder, null, null)
* const paint = Skia.Paint()
* paint.setImageFilter(imageFilter)
*
* const frameProcessor = useFrameProcessor((frame) => {
* 'worklet'
* frame.render(paint) // <-- draws frame with inverted colors now
* }, [paint])
* ```
*/
render: (paint?: SkPaint) => void;
}
export interface FrameInternal extends Frame {

View File

@ -56,6 +56,7 @@ export function runAtTargetFps<T>(fps: number, func: () => T): T | undefined {
return undefined;
}
const isAsyncContextBusy = Worklets.createSharedValue(false);
const asyncContext = Worklets.createContext('VisionCamera.async');
const runOnAsyncContext = Worklets.createRunInContextFn((frame: Frame, func: () => void) => {
'worklet';
@ -65,6 +66,8 @@ const runOnAsyncContext = Worklets.createRunInContextFn((frame: Frame, func: ()
} finally {
// Potentially delete Frame if we were the last ref
(frame as FrameInternal).decrementRefCount();
isAsyncContextBusy.value = false;
}
}, asyncContext);
@ -94,9 +97,18 @@ const runOnAsyncContext = Worklets.createRunInContextFn((frame: Frame, func: ()
*/
export function runAsync(frame: Frame, func: () => void): void {
'worklet';
if (isAsyncContextBusy.value) {
// async context is currently busy, we cannot schedule new work in time.
// drop this frame/runAsync call.
return;
}
// Increment ref count by one
(frame as FrameInternal).incrementRefCount();
isAsyncContextBusy.value = true;
// Call in separate background context
runOnAsyncContext(frame, func);
}

1
src/Orientation.ts Normal file
View File

@ -0,0 +1 @@
/**
 * Physical orientation of a Camera output or Frame.
 *
 * Used by the `orientation` prop on the Camera (applies to all outputs: Photo,
 * Video and Frame Processor) and by `Frame.orientation`.
 */
export type Orientation = 'portrait' | 'portraitUpsideDown' | 'landscapeLeft' | 'landscapeRight';

3
temp-patch.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
# Temporary hotfix (run via the package "prepare" script): patches the
# react-native-reanimated babel plugin in the example app by replacing
# "enter" with "exit" in plugin.js.
#
# Note: `sed -i` syntax differs between BSD/macOS sed (requires an explicit
# backup-suffix argument, here '') and GNU sed (takes an optional suffix glued
# to -i). Detect which sed we have so the patch works on both platforms;
# the original `sed -i ''` form silently fails on GNU sed, which would treat
# '' as the input filename.
PLUGIN_FILE="./example/node_modules/react-native-reanimated/plugin.js"
if sed --version >/dev/null 2>&1; then
  # GNU sed (Linux)
  sed -i -e "s/enter/exit/g" "$PLUGIN_FILE"
else
  # BSD sed (macOS)
  sed -i '' -e "s/enter/exit/g" "$PLUGIN_FILE"
fi

View File

@ -1689,6 +1689,20 @@
conventional-recommended-bump "^6.1.0"
semver "7.3.8"
"@shopify/react-native-skia@^0.1.175":
version "0.1.175"
resolved "https://registry.yarnpkg.com/@shopify/react-native-skia/-/react-native-skia-0.1.175.tgz#4fc6b30f7d47d3dc9192791021d99e5d11f75739"
integrity sha512-vA5YPGu7GmBi5qliLyMzbpkH9mmCWAZoaoGhM9/g5o9zX8xAmUYcGgg3MOqxtnxCnfTmqFFBj43s+QGgMRTpqg==
dependencies:
"@types/pixelmatch" "^5.2.4"
"@types/pngjs" "^6.0.1"
"@types/ws" "^8.5.3"
canvaskit-wasm "0.38.0"
pixelmatch "^5.3.0"
pngjs "^6.0.0"
react-reconciler "^0.27.0"
ws "^8.11.0"
"@sideway/address@^4.1.3":
version "4.1.4"
resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
@ -1791,6 +1805,20 @@
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==
"@types/pixelmatch@^5.2.4":
version "5.2.4"
resolved "https://registry.yarnpkg.com/@types/pixelmatch/-/pixelmatch-5.2.4.tgz#ca145cc5ede1388c71c68edf2d1f5190e5ddd0f6"
integrity sha512-HDaSHIAv9kwpMN7zlmwfTv6gax0PiporJOipcrGsVNF3Ba+kryOZc0Pio5pn6NhisgWr7TaajlPEKTbTAypIBQ==
dependencies:
"@types/node" "*"
"@types/pngjs@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@types/pngjs/-/pngjs-6.0.1.tgz#c711ec3fbbf077fed274ecccaf85dd4673130072"
integrity sha512-J39njbdW1U/6YyVXvC9+1iflZghP8jgRf2ndYghdJb5xL49LYDB+1EuAxfbuJ2IBbWIL3AjHPQhgaTxT3YaYeg==
dependencies:
"@types/node" "*"
"@types/prop-types@*":
version "15.7.5"
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
@ -1827,6 +1855,13 @@
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"
integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==
"@types/ws@^8.5.3":
version "8.5.4"
resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.4.tgz#bb10e36116d6e570dd943735f86c933c1587b8a5"
integrity sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==
dependencies:
"@types/node" "*"
"@types/yargs-parser@*":
version "21.0.0"
resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b"
@ -2567,6 +2602,11 @@ caniuse-lite@^1.0.30001449:
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001451.tgz#2e197c698fc1373d63e1406d6607ea4617c613f1"
integrity sha512-XY7UbUpGRatZzoRft//5xOa69/1iGJRBlrieH6QYrkKLIFn3m7OVEJ81dSrKoy2BnKsdbX5cLrOispZNYo9v2w==
canvaskit-wasm@0.38.0:
version "0.38.0"
resolved "https://registry.yarnpkg.com/canvaskit-wasm/-/canvaskit-wasm-0.38.0.tgz#83e6c46f3015c2ff3f6503157f47453af76a7be7"
integrity sha512-ZEG6lucpbQ4Ld+mY8C1Ng+PMLVP+/AX02jS0Sdl28NyMxuKSa9uKB8oGd1BYp1XWPyO2Jgr7U8pdyjJ/F3xR5Q==
chalk@5.1.2:
version "5.1.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.1.2.tgz#d957f370038b75ac572471e83be4c5ca9f8e8c45"
@ -6616,6 +6656,13 @@ pirates@^4.0.5:
resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b"
integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==
pixelmatch@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/pixelmatch/-/pixelmatch-5.3.0.tgz#5e5321a7abedfb7962d60dbf345deda87cb9560a"
integrity sha512-o8mkY4E/+LNUf6LzX96ht6k6CEDi65k9G2rjMtBe9Oo+VPKSvl+0GKHuH/AlG+GA5LPG/i5hrekkxUc3s2HU+Q==
dependencies:
pngjs "^6.0.0"
pkg-dir@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3"
@ -6631,6 +6678,11 @@ plist@^3.0.5:
base64-js "^1.5.1"
xmlbuilder "^15.1.1"
pngjs@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-6.0.0.tgz#ca9e5d2aa48db0228a52c419c3308e87720da821"
integrity sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg==
pod-install@^0.1.38:
version "0.1.38"
resolved "https://registry.yarnpkg.com/pod-install/-/pod-install-0.1.38.tgz#1c16a800a5fc1abea0cafcc0e190f376368c76ab"
@ -6882,9 +6934,9 @@ react-native-gradle-plugin@^0.71.15:
resolved "https://registry.yarnpkg.com/react-native-gradle-plugin/-/react-native-gradle-plugin-0.71.15.tgz#9e6b506f30729fe8eb086981702f4e3c891d2b13"
integrity sha512-7S3pAuPaQJlhax6EZ4JMsDNpj05TfuzX9gPgWLrFfAIWIFLuJ6aDQYAZy2TEI9QJALPoWrj8LWaqP/DGYh14pw==
"react-native-worklets@https://github.com/chrfalch/react-native-worklets#15d52dd":
"react-native-worklets@https://github.com/chrfalch/react-native-worklets#d62d76c":
version "0.1.0"
resolved "https://github.com/chrfalch/react-native-worklets#15d52dd1289831cecc7906823f613172e0c6cd2e"
resolved "https://github.com/chrfalch/react-native-worklets#d62d76c20ed7a3bbfebe5623bc976e5c2d9beabd"
react-native@^0.71.3:
version "0.71.3"
@ -6926,6 +6978,14 @@ react-native@^0.71.3:
whatwg-fetch "^3.0.0"
ws "^6.2.2"
react-reconciler@^0.27.0:
version "0.27.0"
resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.27.0.tgz#360124fdf2d76447c7491ee5f0e04503ed9acf5b"
integrity sha512-HmMDKciQjYmBRGuuhIaKA1ba/7a+UsM5FzOZsMO2JYHt9Jh8reCb7j1eDC95NOyUlKM9KRyvdx0flBuDvYSBoA==
dependencies:
loose-envify "^1.1.0"
scheduler "^0.21.0"
react-refresh@^0.4.0:
version "0.4.3"
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.4.3.tgz#966f1750c191672e76e16c2efa569150cc73ab53"
@ -7330,6 +7390,13 @@ sax@>=0.6.0:
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
scheduler@^0.21.0:
version "0.21.0"
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.21.0.tgz#6fd2532ff5a6d877b6edb12f00d8ab7e8f308820"
integrity sha512-1r87x5fz9MXqswA2ERLo0EbOAU74DpIUO090gIasYTqlVoJeMcl+Z1Rg7WHz+qtPujhS/hGIt9kxZOYBV3faRQ==
dependencies:
loose-envify "^1.1.0"
scheduler@^0.23.0:
version "0.23.0"
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe"
@ -8461,6 +8528,11 @@ ws@^7, ws@^7.5.1:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
ws@^8.11.0:
version "8.12.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.12.1.tgz#c51e583d79140b5e42e39be48c934131942d4a8f"
integrity sha512-1qo+M9Ba+xNhPB+YTWUlK6M17brTut5EXbcBaMRN5pH5dFrXz7lzz1ChFSUq3bOUl8yEvSenhHmYUNJxFzdJew==
xcode@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/xcode/-/xcode-3.0.1.tgz#3efb62aac641ab2c702458f9a0302696146aa53c"