diff --git a/.github/workflows/build-ios.yml b/.github/workflows/build-ios.yml
index e8e775c..c4606cd 100644
--- a/.github/workflows/build-ios.yml
+++ b/.github/workflows/build-ios.yml
@@ -47,7 +47,129 @@ jobs:
- name: Setup Ruby (bundle)
uses: ruby/setup-ruby@v1
with:
- ruby-version: 2.6.8
+ ruby-version: 2.6.10
+ bundler-cache: true
+ working-directory: example/ios
+
+ - name: Restore Pods cache
+ uses: actions/cache@v2
+ with:
+ path: |
+ example/ios/Pods
+ ~/Library/Caches/CocoaPods
+ ~/.cocoapods
+ key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-pods-
+ - name: Install Pods
+ run: bundle exec pod check || bundle exec pod install
+ - name: Install xcpretty
+ run: gem install xcpretty
+ - name: Build App
+ run: "set -o pipefail && xcodebuild \
+ CC=clang CPLUSPLUS=clang++ LD=clang LDPLUSPLUS=clang++ \
+ -derivedDataPath build -UseModernBuildSystem=YES \
+ -workspace VisionCameraExample.xcworkspace \
+ -scheme VisionCameraExample \
+ -sdk iphonesimulator \
+ -configuration Debug \
+ -destination 'platform=iOS Simulator,name=iPhone 11 Pro' \
+ build \
+ CODE_SIGNING_ALLOWED=NO | xcpretty"
+ build-no-skia:
+ name: Build iOS Example App without Skia
+ runs-on: macOS-latest
+ defaults:
+ run:
+ working-directory: example/ios
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Get yarn cache directory path
+ id: yarn-cache-dir-path
+ run: echo "::set-output name=dir::$(yarn cache dir)"
+ - name: Restore node_modules from cache
+ uses: actions/cache@v2
+ id: yarn-cache
+ with:
+ path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
+ key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-yarn-
+ - name: Install node_modules for example/
+ run: yarn install --frozen-lockfile --cwd ..
+ - name: Remove react-native-skia
+ run: yarn remove @shopify/react-native-skia --cwd ..
+
+ - name: Restore buildcache
+ uses: mikehardy/buildcache-action@v1
+ continue-on-error: true
+
+ - name: Setup Ruby (bundle)
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 2.6.10
+ bundler-cache: true
+ working-directory: example/ios
+
+ - name: Restore Pods cache
+ uses: actions/cache@v2
+ with:
+ path: |
+ example/ios/Pods
+ ~/Library/Caches/CocoaPods
+ ~/.cocoapods
+ key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-pods-
+ - name: Install Pods
+ run: bundle exec pod check || bundle exec pod install
+ - name: Install xcpretty
+ run: gem install xcpretty
+ - name: Build App
+ run: "set -o pipefail && xcodebuild \
+ CC=clang CPLUSPLUS=clang++ LD=clang LDPLUSPLUS=clang++ \
+ -derivedDataPath build -UseModernBuildSystem=YES \
+ -workspace VisionCameraExample.xcworkspace \
+ -scheme VisionCameraExample \
+ -sdk iphonesimulator \
+ -configuration Debug \
+ -destination 'platform=iOS Simulator,name=iPhone 11 Pro' \
+ build \
+ CODE_SIGNING_ALLOWED=NO | xcpretty"
+ build-no-frame-processors:
+ name: Build iOS Example App without Frame Processors
+ runs-on: macOS-latest
+ defaults:
+ run:
+ working-directory: example/ios
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Get yarn cache directory path
+ id: yarn-cache-dir-path
+ run: echo "::set-output name=dir::$(yarn cache dir)"
+ - name: Restore node_modules from cache
+ uses: actions/cache@v2
+ id: yarn-cache
+ with:
+ path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
+ key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+ restore-keys: |
+ ${{ runner.os }}-yarn-
+ - name: Install node_modules for example/
+ run: yarn install --frozen-lockfile --cwd ..
+ - name: Remove react-native-worklets
+ run: yarn remove react-native-worklets --cwd ..
+
+ - name: Restore buildcache
+ uses: mikehardy/buildcache-action@v1
+ continue-on-error: true
+
+ - name: Setup Ruby (bundle)
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 2.6.10
bundler-cache: true
working-directory: example/ios
diff --git a/VisionCamera.podspec b/VisionCamera.podspec
index 79e0936..88bdf93 100644
--- a/VisionCamera.podspec
+++ b/VisionCamera.podspec
@@ -2,10 +2,22 @@ require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
-nodeModules = Dir.exist?(File.join(__dir__, "node_modules")) ? File.join(__dir__, "node_modules") : File.join(__dir__, "..")
+nodeModules = File.join(__dir__)
+tries = 0
+while !Dir.exist?(File.join(nodeModules, "node_modules")) && tries < 10
+ nodeModules = File.join(nodeModules, "..")
+ tries += 1
+end
+nodeModules = File.join(nodeModules, "node_modules")
+
+puts("[VisionCamera] node modules #{Dir.exist?(nodeModules) ? "found at #{nodeModules}" : "not found!"}")
+workletsPath = File.join(nodeModules, "react-native-worklets")
+hasWorklets = File.exist?(workletsPath)
+puts "[VisionCamera] react-native-worklets #{hasWorklets ? "found" : "not found"}, Frame Processors #{hasWorklets ? "enabled" : "disabled"}!"
+
skiaPath = File.join(nodeModules, "@shopify", "react-native-skia")
-hasSkia = File.exist?(skiaPath)
-puts "VisionCamera: Skia integration #{hasSkia ? "enabled" : "disabled"}!"
+hasSkia = hasWorklets && File.exist?(skiaPath)
+puts "[VisionCamera] react-native-skia #{hasSkia ? "found" : "not found"}, Skia Frame Processors #{hasSkia ? "enabled" : "disabled"}!"
Pod::Spec.new do |s|
s.name = "VisionCamera"
@@ -16,11 +28,12 @@ Pod::Spec.new do |s|
s.license = package["license"]
s.authors = package["author"]
- s.platforms = { :ios => "12.4" }
+ s.platforms = { :ios => "13.0" }
s.source = { :git => "https://github.com/mrousavy/react-native-vision-camera.git", :tag => "#{s.version}" }
s.pod_target_xcconfig = {
- "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) SK_METAL=1 SK_GANESH=1",
+ "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) SK_METAL=1 SK_GANESH=1 VISION_CAMERA_ENABLE_FRAME_PROCESSORS=#{hasWorklets} VISION_CAMERA_ENABLE_SKIA=#{hasSkia}",
+ "OTHER_SWIFT_FLAGS" => "$(inherited) #{hasWorklets ? "-D VISION_CAMERA_ENABLE_FRAME_PROCESSORS" : ""} #{hasSkia ? "-D VISION_CAMERA_ENABLE_SKIA" : ""}",
"CLANG_CXX_LANGUAGE_STANDARD" => "c++17",
"HEADER_SEARCH_PATHS" => "\"$(PODS_TARGET_SRCROOT)/cpp/\"/** \"#{skiaPath}/cpp/skia/**\" "
}
@@ -30,17 +43,24 @@ Pod::Spec.new do |s|
# All source files that should be publicly visible
# Note how this does not include headers, since those can nameclash.
s.source_files = [
- "ios/**/*.{m,mm,swift}",
+ # Core
+ "ios/*.{m,mm,swift}",
+ "ios/Extensions/*.{m,mm,swift}",
+ "ios/Parsers/*.{m,mm,swift}",
+ "ios/React Utils/*.{m,mm,swift}",
"ios/CameraBridge.h",
- "ios/Skia Render Layer/PreviewSkiaView.h",
- "ios/Frame Processor/Frame.h",
- "ios/Frame Processor/FrameProcessorCallback.h",
- "ios/Frame Processor/FrameProcessorRuntimeManager.h",
- "ios/Frame Processor/FrameProcessorPluginRegistry.h",
- "ios/Frame Processor/FrameProcessorPlugin.h",
- "ios/React Utils/RCTBridge+runOnJS.h",
- "ios/React Utils/JSConsoleHelper.h",
- "cpp/**/*.{cpp}",
+
+ # Frame Processors
+ hasWorklets ? "ios/Frame Processor/*.{m,mm,swift}" : "",
+ hasWorklets ? "ios/Frame Processor/Frame.h" : "",
+ hasWorklets ? "ios/Frame Processor/FrameProcessor.h" : "",
+ hasWorklets ? "ios/Frame Processor/FrameProcessorRuntimeManager.h" : "",
+ hasWorklets ? "ios/Frame Processor/FrameProcessorPlugin.h" : "",
+ hasWorklets ? "cpp/**/*.{cpp}" : "",
+
+ # Skia Frame Processors
+ hasSkia ? "ios/Skia Render Layer/*.{m,mm,swift}" : "",
+ hasSkia ? "ios/Skia Render Layer/SkiaRenderer.h" : "",
]
# Any private headers that are not globally unique should be mentioned here.
# Otherwise there will be a nameclash, since CocoaPods flattens out any header directories
@@ -51,8 +71,13 @@ Pod::Spec.new do |s|
]
s.dependency "React"
- s.dependency "React-callinvoker"
s.dependency "React-Core"
- s.dependency "react-native-worklets"
- s.dependency "react-native-skia"
+ s.dependency "React-callinvoker"
+
+ if hasWorklets
+ s.dependency "react-native-worklets"
+ if hasSkia
+ s.dependency "react-native-skia"
+ end
+ end
end
diff --git a/docs/docs/guides/FORMATS.mdx b/docs/docs/guides/FORMATS.mdx
index 5a2b936..4f63024 100644
--- a/docs/docs/guides/FORMATS.mdx
+++ b/docs/docs/guides/FORMATS.mdx
@@ -18,34 +18,6 @@ Each camera device (see [Camera Devices](devices)) provides a number of capture
If you don't want to specify the best format for your camera device, you don't have to. The Camera _automatically chooses the best matching format_ for the current camera device. This is why the Camera's `format` property is _optional_.
-If you don't want to do a lot of filtering, but still want to let the camera know what your intentions are, you can use the Camera's `preset` property.
-
-For example, use the `'medium'` preset if you want to create a video-chat application that shouldn't excessively use mobile data:
-
-```tsx
-function App() {
- const devices = useCameraDevices()
- const device = devices.back
-
-  if (device == null) return <LoadingView />
-  return (
-    <Camera
-      style={StyleSheet.absoluteFill}
-      device={device}
-      isActive={true}
-      preset="medium"
-    />
-  )
-}
-```
-
-:::note
-See the [CameraPreset.ts](https://github.com/mrousavy/react-native-vision-camera/blob/main/src/CameraPreset.ts) type for more information about presets
-:::
-
-:::warning
-You cannot set `preset` and `format` at the same time; if `format` is set, `preset` must be `undefined` and vice versa!
-:::
-
### What you need to know about cameras
To understand a bit more about camera formats, you first need to understand a few "general camera basics":
@@ -110,7 +82,7 @@ export const sortFormatsByResolution = (left: CameraDeviceFormat, right: CameraD
// in this case, points aren't "normalized" (e.g. higher resolution = 1 point, lower resolution = -1 points)
let leftPoints = left.photoHeight * left.photoWidth
let rightPoints = right.photoHeight * right.photoWidth
-
+
// we also care about video dimensions, not only photo.
leftPoints += left.videoWidth * left.videoHeight
rightPoints += right.videoWidth * right.videoHeight
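
> Note: the `sortFormatsByResolution` comparator shown above is meant for `Array.prototype.sort`. A minimal usage sketch follows; the `./formatSorter` import path is illustrative (it stands for wherever you keep the comparator from the docs) and it assumes the comparator sorts descending:

```tsx
import { useMemo } from 'react'
import { useCameraDevices } from 'react-native-vision-camera'
import type { CameraDeviceFormat } from 'react-native-vision-camera'
// hypothetical helper module holding the comparator from the docs snippet above
import { sortFormatsByResolution } from './formatSorter'

export function useHighestResolutionFormat(): CameraDeviceFormat | undefined {
  const devices = useCameraDevices()
  const device = devices.back

  return useMemo(() => {
    if (device == null) return undefined
    // sort a copy so the device's own format list stays untouched
    const formats = [...device.formats].sort(sortFormatsByResolution)
    return formats[0] // highest combined photo + video pixel count first
  }, [device])
}
```
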
diff --git a/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m b/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
index dba0033..ba2cf7d 100644
--- a/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
+++ b/example/ios/Frame Processor Plugins/Example Plugin (Objective-C)/ExampleFrameProcessorPlugin.m
@@ -5,6 +5,7 @@
// Created by Marc Rousavy on 01.05.21.
//
+#if __has_include(<VisionCamera/FrameProcessorPlugin.h>)
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>
#import <UIKit/UIKit.h>
@@ -45,3 +46,4 @@
}
@end
+#endif
diff --git a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift b/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
index cdc70b5..9f22a1e 100644
--- a/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
+++ b/example/ios/Frame Processor Plugins/Example Plugin (Swift)/ExamplePluginSwift.swift
@@ -9,6 +9,7 @@
import AVKit
import Vision
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
@objc
public class ExamplePluginSwift : FrameProcessorPlugin {
@@ -44,3 +45,4 @@ public class ExamplePluginSwift : FrameProcessorPlugin {
]
}
}
+#endif
diff --git a/example/ios/Gemfile b/example/ios/Gemfile
index 1142b1b..ee72848 100644
--- a/example/ios/Gemfile
+++ b/example/ios/Gemfile
@@ -4,3 +4,4 @@ source 'https://rubygems.org'
ruby '>= 2.6.10'
gem 'cocoapods', '>= 1.11.3'
+gem 'cocoapods-check', '>= 1.1.0'
diff --git a/example/ios/Gemfile.lock b/example/ios/Gemfile.lock
index b93ee93..f84c4c2 100644
--- a/example/ios/Gemfile.lock
+++ b/example/ios/Gemfile.lock
@@ -94,8 +94,11 @@ PLATFORMS
x86_64-darwin-19
DEPENDENCIES
- cocoapods (= 1.11.3)
- cocoapods-check
+ cocoapods (>= 1.11.3)
+ cocoapods-check (>= 1.1.0)
+
+RUBY VERSION
+ ruby 2.6.10p210
BUNDLED WITH
- 2.2.27
+ 2.3.22
diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index 3cd632e..b5423f7 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -713,7 +713,7 @@ SPEC CHECKSUMS:
RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8
RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8
SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17
- VisionCamera: 0ee46c2c5dd7b8aa3cc3152ff4deda60ac58276f
+ VisionCamera: b4e91836f577249470ae42707782f4b44d875cd9
Yoga: 65286bb6a07edce5e4fe8c90774da977ae8fc009
PODFILE CHECKSUM: ab9c06b18c63e741c04349c0fd630c6d3145081c
diff --git a/example/ios/VisionCameraExample-Bridging-Header.h b/example/ios/VisionCameraExample-Bridging-Header.h
index 29b41c9..96d1ae1 100644
--- a/example/ios/VisionCameraExample-Bridging-Header.h
+++ b/example/ios/VisionCameraExample-Bridging-Header.h
@@ -2,5 +2,7 @@
// Use this file to import your target's public headers that you would like to expose to Swift.
//
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import <VisionCamera/FrameProcessorPlugin.h>
#import <VisionCamera/Frame.h>
+#endif
diff --git a/example/ios/VisionCameraExample/AppDelegate.mm b/example/ios/VisionCameraExample/AppDelegate.mm
index 19f7c92..30595c4 100644
--- a/example/ios/VisionCameraExample/AppDelegate.mm
+++ b/example/ios/VisionCameraExample/AppDelegate.mm
@@ -2,7 +2,9 @@
#import <React/RCTBundleURLProvider.h>
#import "VisionCameraExample-Swift.h"
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+#import <VisionCamera/FrameProcessorPlugin.h>
+#endif
@implementation AppDelegate
@@ -13,7 +15,9 @@
// They will be passed down to the ViewController used by React Native.
self.initialProps = @{};
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
[FrameProcessorPlugin registerPlugin:[[ExamplePluginSwift alloc] init]];
+#endif
return [super application:application didFinishLaunchingWithOptions:launchOptions];
}
diff --git a/example/package.json b/example/package.json
index 1737309..02c0fb8 100644
--- a/example/package.json
+++ b/example/package.json
@@ -39,6 +39,8 @@
"@react-native/metro-config": "^0.72.7",
"@react-native/typescript-config": "^0.73.0",
"@types/react": "^18.2.14",
+ "@types/react-native-vector-icons": "^6.4.13",
+ "@types/react-native-video": "^5.0.15",
"babel-plugin-module-resolver": "^5.0.0",
"eslint": "^8.44.0",
"jest": "^29.5.0",
diff --git a/example/src/CameraPage.tsx b/example/src/CameraPage.tsx
index 7bcf064..fbc6be3 100644
--- a/example/src/CameraPage.tsx
+++ b/example/src/CameraPage.tsx
@@ -8,7 +8,7 @@ import {
PhotoFile,
sortFormats,
useCameraDevices,
- useFrameProcessor,
+ useSkiaFrameProcessor,
VideoFile,
} from 'react-native-vision-camera';
import { Camera, frameRateIncluded } from 'react-native-vision-camera';
@@ -21,7 +21,6 @@ import { CaptureButton } from './views/CaptureButton';
import { PressableOpacity } from 'react-native-pressable-opacity';
import MaterialIcon from 'react-native-vector-icons/MaterialCommunityIcons';
import IonIcon from 'react-native-vector-icons/Ionicons';
-import { examplePlugin } from './frame-processors/ExamplePlugin';
import type { Routes } from './Routes';
import type { NativeStackScreenProps } from '@react-navigation/native-stack';
import { useIsFocused } from '@react-navigation/core';
@@ -219,13 +218,12 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
paint.setImageFilter(imageFilter);
const isIOS = Platform.OS === 'ios';
- const frameProcessor = useFrameProcessor(
+ const frameProcessor = useSkiaFrameProcessor(
(frame) => {
'worklet';
console.log(`Width: ${frame.width}`);
- if (isIOS) frame.render(paint);
- else console.log('Drawing to the Frame is not yet available on Android. WIP PR');
+ if (frame.isDrawable) frame.render(paint);
},
[isIOS, paint],
);
@@ -253,9 +251,8 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
video={true}
audio={hasMicrophonePermission}
enableFpsGraph={true}
- previewType="skia"
- frameProcessor={device.supportsParallelVideoProcessing ? frameProcessor : undefined}
orientation="portrait"
+ frameProcessor={device.supportsParallelVideoProcessing ? frameProcessor : undefined}
/>
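
> The hunk above moves the example from `useFrameProcessor` to `useSkiaFrameProcessor` and gates drawing on `frame.isDrawable` instead of a platform check. A condensed, self-contained sketch of that pattern (props trimmed to the essentials):

```tsx
import React from 'react'
import { Camera, useCameraDevices, useSkiaFrameProcessor } from 'react-native-vision-camera'
import { Skia, PaintStyle } from '@shopify/react-native-skia'

export function SkiaCameraExample(): React.ReactElement | null {
  const devices = useCameraDevices()
  const device = devices.back

  const paint = Skia.Paint()
  paint.setStyle(PaintStyle.Fill)

  const frameProcessor = useSkiaFrameProcessor(
    (frame) => {
      'worklet'
      // isDrawable is only true when a Skia canvas is attached to this Frame
      if (frame.isDrawable) frame.render(paint)
    },
    [paint],
  )

  if (device == null) return null
  return <Camera style={{ flex: 1 }} device={device} isActive={true} frameProcessor={frameProcessor} />
}
```
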
diff --git a/example/yarn.lock b/example/yarn.lock
index 0bfb592..59a7f4a 100644
--- a/example/yarn.lock
+++ b/example/yarn.lock
@@ -2340,7 +2340,7 @@
resolved "https://registry.yarnpkg.com/@react-native/typescript-config/-/typescript-config-0.73.0.tgz#198abaeaf5588ae59dc86fcab67a27a0be7f7e31"
integrity sha512-LDl7LN+/965O/c7RkHcT+RutGQSdNOc0MqaWKk4SHtM1UzbG0VswA+/g8sJj+AdQh8vFWsHgG08hnhwb2hZoIg==
-"@react-native/virtualized-lists@^0.72.6":
+"@react-native/virtualized-lists@^0.72.4", "@react-native/virtualized-lists@^0.72.6":
version "0.72.6"
resolved "https://registry.yarnpkg.com/@react-native/virtualized-lists/-/virtualized-lists-0.72.6.tgz#375f88a1371927d803afad8d8a0ede3261464030"
integrity sha512-JhT6ydu35LvbSKdwnhWDuGHMOwM0WAh9oza/X8vXHA8ELHRyQ/4p8eKz/bTQcbQziJaaleUURToGhFuCtgiMoA==
@@ -2518,7 +2518,38 @@
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==
-"@types/react@^18.2.14":
+"@types/react-native-vector-icons@^6.4.13":
+ version "6.4.13"
+ resolved "https://registry.yarnpkg.com/@types/react-native-vector-icons/-/react-native-vector-icons-6.4.13.tgz#28b34d15094e040718beefb67cb3eff0c4994cb6"
+ integrity sha512-1PqFoKuXTSzMHwGMAr+REdYJBQAbe9xrww3ecZR0FsHcD1K+vGS/rxuAriL4rsI6+p69sZQjDzpEVAbDQcjSwA==
+ dependencies:
+ "@types/react" "*"
+ "@types/react-native" "^0.70"
+
+"@types/react-native-video@^5.0.15":
+ version "5.0.15"
+ resolved "https://registry.yarnpkg.com/@types/react-native-video/-/react-native-video-5.0.15.tgz#7af0a0df46293333d069102788d4f7db2961a122"
+ integrity sha512-li3yBYQ+D5GqZl0Y+M/vCTPfZwVyUU67CtSjEg+/ERkgEpvHDH+gQaoc9O00ttXr8kvqEzpiC6Ca9juIfeIlMA==
+ dependencies:
+ "@types/react" "*"
+ "@types/react-native" "*"
+
+"@types/react-native@*":
+ version "0.72.2"
+ resolved "https://registry.yarnpkg.com/@types/react-native/-/react-native-0.72.2.tgz#27c931a899c555b28e20cdd12e570b017808de96"
+ integrity sha512-/eEjr04Zqo7mTMszuSdrLx90+j5nWhDMMOgtnKZfAYyV3RwmlpSb7F17ilmMMxZWJY81n/JZ4e6wdhMJFpjrCg==
+ dependencies:
+ "@react-native/virtualized-lists" "^0.72.4"
+ "@types/react" "*"
+
+"@types/react-native@^0.70":
+ version "0.70.14"
+ resolved "https://registry.yarnpkg.com/@types/react-native/-/react-native-0.70.14.tgz#8619b8c94296f6456c5362d74a3d1b4fad3f54ab"
+ integrity sha512-Kwc+BYBrnDqvacNxKp1UtcZJnJJnTih2NYmi/ieAKlHdxEPN6sYMwmIwgHdoLHggvml6bf3DYRaH2jt+gzaLjw==
+ dependencies:
+ "@types/react" "*"
+
+"@types/react@*", "@types/react@^18.2.14":
version "18.2.14"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.14.tgz#fa7a6fecf1ce35ca94e74874f70c56ce88f7a127"
integrity sha512-A0zjq+QN/O0Kpe30hA1GidzyFjatVvrpIvWLxD+xv67Vt91TWWgco9IvrJBkeyHm1trGaFS/FSGqPlhyeZRm0g==
diff --git a/ios/CameraBridge.h b/ios/CameraBridge.h
index a7ac8cf..c602df9 100644
--- a/ios/CameraBridge.h
+++ b/ios/CameraBridge.h
@@ -13,13 +13,10 @@
#import <Foundation/Foundation.h>
#import <React/RCTViewManager.h>
#import <React/RCTUIManager.h>
+#import <React/RCTFPSGraph.h>
-#import "FrameProcessorCallback.h"
+#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+#import "FrameProcessor.h"
#import "FrameProcessorRuntimeManager.h"
#import "Frame.h"
-#import "RCTBridge+runOnJS.h"
-#import "JSConsoleHelper.h"
-
-@interface CameraBridge: RCTViewManager
-
-@end
+#endif
diff --git a/ios/CameraError.swift b/ios/CameraError.swift
index 65573b2..cb8678a 100644
--- a/ios/CameraError.swift
+++ b/ios/CameraError.swift
@@ -113,7 +113,6 @@ enum FormatError {
case invalidHdr
case invalidFormat
case invalidColorSpace(colorSpace: String)
- case invalidPreset(preset: String)
var code: String {
switch self {
@@ -123,8 +122,6 @@ enum FormatError {
return "invalid-fps"
case .invalidHdr:
return "invalid-hdr"
- case .invalidPreset:
- return "invalid-preset"
case .invalidColorSpace:
return "invalid-color-space"
}
@@ -141,8 +138,6 @@ enum FormatError {
case let .invalidColorSpace(colorSpace):
return "The currently selected format does not support the colorSpace \"\(colorSpace)\"! " +
"Make sure you select a format which `colorSpaces` includes \"\(colorSpace)\"!"
- case let .invalidPreset(preset):
- return "The preset \"\(preset)\" is not available for the current camera device."
}
}
}
@@ -256,6 +251,8 @@ enum CaptureError {
enum SystemError: String {
case noManager = "no-camera-manager"
+ case skiaUnavailable = "skia-unavailable"
+ case frameProcessorsUnavailable = "frame-processors-unavailable"
var code: String {
return rawValue
@@ -265,6 +262,10 @@ enum SystemError: String {
switch self {
case .noManager:
return "No Camera Manager was found."
+ case .skiaUnavailable:
+ return "Skia Integration is unavailable - is @shopify/react-native-skia installed?"
+ case .frameProcessorsUnavailable:
+ return "Frame Processors are unavailable - is react-native-worklets installed?"
}
}
}
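
> The two new `SystemError` cases surface to JS through the Camera's `onError` event. Assuming they follow the library's existing `domain/code` convention (`system/skia-unavailable`, `system/frame-processors-unavailable`), a handler sketch:

```tsx
import type { CameraRuntimeError } from 'react-native-vision-camera'

export function onCameraError(error: CameraRuntimeError): void {
  switch (error.code) {
    case 'system/frame-processors-unavailable':
      // react-native-worklets is missing: run the Camera without a frameProcessor
      console.warn(error.message)
      break
    case 'system/skia-unavailable':
      // @shopify/react-native-skia is missing: avoid the Skia preview/frame processors
      console.warn(error.message)
      break
    default:
      console.error(`${error.code}: ${error.message}`)
  }
}
// usage: <Camera {...cameraProps} onError={onCameraError} />
```
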
diff --git a/ios/CameraView+AVAudioSession.swift b/ios/CameraView+AVAudioSession.swift
index a14de3d..4ddef2c 100644
--- a/ios/CameraView+AVAudioSession.swift
+++ b/ios/CameraView+AVAudioSession.swift
@@ -72,7 +72,7 @@ extension CameraView {
invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
return
}
- audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
+ audioOutput!.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
audioCaptureSession.addOutput(audioOutput!)
}
}
@@ -135,7 +135,7 @@ extension CameraView {
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
if isRecording {
- audioQueue.async {
+ CameraQueues.audioQueue.async {
ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
// restart audio session because interruption is over
self.activateAudioSession()
diff --git a/ios/CameraView+AVCaptureSession.swift b/ios/CameraView+AVCaptureSession.swift
index 081050b..5d289ab 100644
--- a/ios/CameraView+AVCaptureSession.swift
+++ b/ios/CameraView+AVCaptureSession.swift
@@ -39,28 +39,6 @@ extension CameraView {
captureSession.commitConfiguration()
}
- // If preset is set, use preset. Otherwise use format.
- if let preset = preset {
- var sessionPreset: AVCaptureSession.Preset?
- do {
- sessionPreset = try AVCaptureSession.Preset(withString: preset)
- } catch let EnumParserError.unsupportedOS(supportedOnOS: os) {
- invokeOnError(.parameter(.unsupportedOS(unionName: "Preset", receivedValue: preset, supportedOnOs: os)))
- return
- } catch {
- invokeOnError(.parameter(.invalid(unionName: "Preset", receivedValue: preset)))
- return
- }
- if sessionPreset != nil {
- if captureSession.canSetSessionPreset(sessionPreset!) {
- captureSession.sessionPreset = sessionPreset!
- } else {
- // non-fatal error, so continue with configuration
- invokeOnError(.format(.invalidPreset(preset: preset)))
- }
- }
- }
-
// pragma MARK: Capture Session Inputs
// Video Input
do {
@@ -132,7 +110,7 @@ extension CameraView {
invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
return
}
- videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
+ videoOutput!.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
videoOutput!.alwaysDiscardsLateVideoFrames = false
if previewType == "skia" {
@@ -273,7 +251,7 @@ extension CameraView {
if isActive {
// restart capture session after an error occurred
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
self.captureSession.startRunning()
}
}
diff --git a/ios/CameraView+Focus.swift b/ios/CameraView+Focus.swift
index e82fbba..073eb0c 100644
--- a/ios/CameraView+Focus.swift
+++ b/ios/CameraView+Focus.swift
@@ -23,7 +23,7 @@ extension CameraView {
}
/// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
- func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
+ private func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
guard let previewView = previewView else {
invokeOnError(.session(.cameraNotReady))
return .zero
@@ -53,7 +53,7 @@ extension CameraView {
}
/// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1])
- func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
+ private func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero
diff --git a/ios/CameraView+Preview.swift b/ios/CameraView+Preview.swift
new file mode 100644
index 0000000..8867936
--- /dev/null
+++ b/ios/CameraView+Preview.swift
@@ -0,0 +1,57 @@
+//
+// CameraView+Preview.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 20.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension CameraView {
+ #if VISION_CAMERA_ENABLE_SKIA
+ @objc
+ func getSkiaRenderer() -> SkiaRenderer {
+ if skiaRenderer == nil {
+ skiaRenderer = SkiaRenderer()
+ }
+ return skiaRenderer!
+ }
+ #endif
+
+ public func setupPreviewView() {
+ if previewType == "skia" {
+ // Skia Preview View allows user to draw onto a Frame in a Frame Processor
+ #if VISION_CAMERA_ENABLE_SKIA
+ if previewView is SkiaPreviewView { return }
+ previewView?.removeFromSuperview()
+ previewView = SkiaPreviewView(frame: frame, skiaRenderer: getSkiaRenderer())
+ #else
+ invokeOnError(.system(.skiaUnavailable))
+ return
+ #endif
+ } else {
+ // Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
+ if previewView is NativePreviewView { return }
+ previewView?.removeFromSuperview()
+ previewView = NativePreviewView(frame: frame, session: captureSession)
+ }
+
+ addSubview(previewView!)
+ }
+
+ internal func setupFpsGraph() {
+ #if DEBUG
+ if enableFpsGraph {
+ if fpsGraph != nil { return }
+ fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
+ fpsGraph!.layer.zPosition = 9999.0
+ addSubview(fpsGraph!)
+ } else {
+ fpsGraph?.removeFromSuperview()
+ fpsGraph = nil
+ }
+ #endif
+ }
+}
diff --git a/ios/CameraView+RecordVideo.swift b/ios/CameraView+RecordVideo.swift
index c9ab122..f986497 100644
--- a/ios/CameraView+RecordVideo.swift
+++ b/ios/CameraView+RecordVideo.swift
@@ -15,7 +15,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
Starts a video + audio recording with a custom Asset Writer.
*/
func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
ReactLogger.log(level: .info, message: "Starting Video recording...")
let callback = Callback(jsCallbackFunc)
@@ -67,7 +67,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
defer {
if enableAudio {
- self.audioQueue.async {
+ CameraQueues.audioQueue.async {
self.deactivateAudioSession()
}
}
@@ -127,10 +127,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
recordingSession.initializeVideoWriter(withSettings: videoSettings,
pixelFormat: pixelFormat)
- // Init Audio (optional, async)
+ // Init Audio (optional)
if enableAudio {
- // Activate Audio Session (blocking)
- self.activateAudioSession()
+ // Activate Audio Session asynchronously
+ CameraQueues.audioQueue.async {
+ self.activateAudioSession()
+ }
if let audioOutput = self.audioOutput,
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) {
@@ -150,7 +152,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func stopRecording(promise: Promise) {
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
self.isRecording = false
withPromise(promise) {
@@ -164,7 +166,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func pauseRecording(promise: Promise) {
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
withPromise(promise) {
guard self.recordingSession != nil else {
// there's no active recording!
@@ -177,7 +179,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func resumeRecording(promise: Promise) {
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
withPromise(promise) {
guard self.recordingSession != nil else {
// there's no active recording!
@@ -190,23 +192,15 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
- // Draw Frame to Preview View Canvas (and call Frame Processor)
- if captureOutput is AVCaptureVideoDataOutput {
- if let previewView = previewView as? PreviewSkiaView {
- // Render to Skia PreviewView
- previewView.drawFrame(sampleBuffer) { canvas in
- // Call JS Frame Processor before passing Frame to GPU - allows user to draw
- guard let frameProcessor = self.frameProcessorCallback else { return }
- let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
- frameProcessor(frame, canvas)
+ #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+ if captureOutput is AVCaptureVideoDataOutput {
+ if let frameProcessor = frameProcessor {
+ // Call Frame Processor
+ let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
+ frameProcessor.call(frame)
}
- } else {
- // Call JS Frame Processor. User cannot draw, since we don't have a Skia Canvas.
- guard let frameProcessor = frameProcessorCallback else { return }
- let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
- frameProcessor(frame, nil)
}
- }
+ #endif
// Record Video Frame/Audio Sample to File
if isRecording {
@@ -220,8 +214,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
case is AVCaptureAudioDataOutput:
let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
- from: audioCaptureSession.masterClock!,
- to: captureSession.masterClock!)
+ from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(),
+ to: captureSession.masterClock ?? CMClockGetHostTimeClock())
recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
default:
break
@@ -253,7 +247,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
/**
Gets the orientation of the CameraView's images (CMSampleBuffers).
*/
- var bufferOrientation: UIImage.Orientation {
+ private var bufferOrientation: UIImage.Orientation {
guard let cameraPosition = videoDeviceInput?.device.position else {
return .up
}
@@ -261,16 +255,12 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
switch outputOrientation {
case .portrait:
return cameraPosition == .front ? .leftMirrored : .right
-
case .landscapeLeft:
return cameraPosition == .front ? .downMirrored : .up
-
case .portraitUpsideDown:
return cameraPosition == .front ? .rightMirrored : .left
-
case .landscapeRight:
return cameraPosition == .front ? .upMirrored : .down
-
case .unknown:
return .up
@unknown default:
diff --git a/ios/CameraView+TakePhoto.swift b/ios/CameraView+TakePhoto.swift
index d9cb3eb..df0d02c 100644
--- a/ios/CameraView+TakePhoto.swift
+++ b/ios/CameraView+TakePhoto.swift
@@ -8,23 +8,9 @@
import AVFoundation
-// MARK: - TakePhotoOptions
-
-struct TakePhotoOptions {
- init(fromDictionary dictionary: NSDictionary) {
- if let videoCodec = dictionary.value(forKey: "videoCodec") as? String {
- self.videoCodec = AVVideoCodecType(withString: videoCodec)
- }
- qualityPrioritization = dictionary.value(forKey: "qualityPrioritization") as? String
- }
-
- var videoCodec: AVVideoCodecType?
- var qualityPrioritization: String?
-}
-
extension CameraView {
func takePhoto(options: NSDictionary, promise: Promise) {
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
guard let photoOutput = self.photoOutput,
let videoDeviceInput = self.videoDeviceInput else {
if self.photo?.boolValue == true {
diff --git a/ios/CameraView+Torch.swift b/ios/CameraView+Torch.swift
new file mode 100644
index 0000000..c5b0c02
--- /dev/null
+++ b/ios/CameraView+Torch.swift
@@ -0,0 +1,51 @@
+//
+// CameraView+Torch.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 20.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension CameraView {
+ final func setTorchMode(_ torchMode: String) {
+ guard let device = videoDeviceInput?.device else {
+ invokeOnError(.session(.cameraNotReady))
+ return
+ }
+ guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
+ invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
+ return
+ }
+ if !captureSession.isRunning {
+ torchMode = .off
+ }
+ if device.torchMode == torchMode {
+ // no need to run the whole lock/unlock bs
+ return
+ }
+ if !device.hasTorch || !device.isTorchAvailable {
+ if torchMode == .off {
+ // ignore it, when it's off and not supported, it's off.
+ return
+ } else {
+ // torch mode is .auto or .on, but no torch is available.
+ invokeOnError(.device(.torchUnavailable))
+ return
+ }
+ }
+ do {
+ try device.lockForConfiguration()
+ device.torchMode = torchMode
+ if torchMode == .on {
+ try device.setTorchModeOn(level: 1.0)
+ }
+ device.unlockForConfiguration()
+ } catch let error as NSError {
+ invokeOnError(.device(.configureError), cause: error)
+ return
+ }
+ }
+}
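
> `setTorchMode` is driven by the JS `torch` prop; changing the prop re-runs `didSetProps`, which eventually calls into the method above. A minimal sketch (required props like `device` are assumed to be passed in):

```tsx
import React, { useState } from 'react'
import { Camera } from 'react-native-vision-camera'

export function TorchToggle(props: React.ComponentProps<typeof Camera>): React.ReactElement {
  const [torch, setTorch] = useState<'off' | 'on'>('off')
  // tapping the view flips the torch prop, which native code maps to AVCaptureDevice.TorchMode
  return <Camera {...props} torch={torch} onTouchEnd={() => setTorch((t) => (t === 'on' ? 'off' : 'on'))} />
}
```
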
diff --git a/ios/CameraView.swift b/ios/CameraView.swift
index 49a68d5..8b2868d 100644
--- a/ios/CameraView.swift
+++ b/ios/CameraView.swift
@@ -23,7 +23,6 @@ private let propsThatRequireReconfiguration = ["cameraId",
"enableDepthData",
"enableHighQualityPhotos",
"enablePortraitEffectsMatteDelivery",
- "preset",
"photo",
"video",
"enableFrameProcessor",
@@ -37,14 +36,11 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
public final class CameraView: UIView {
// pragma MARK: React Properties
-
- // pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighQualityPhotos: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
- @objc var preset: String?
// use cases
@objc var photo: NSNumber? // nullable bool
@objc var video: NSNumber? // nullable bool
@@ -85,27 +81,26 @@ public final class CameraView: UIView {
// Capture Session
internal let captureSession = AVCaptureSession()
internal let audioCaptureSession = AVCaptureSession()
- // Inputs
+ // Inputs & Outputs
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var videoOutput: AVCaptureVideoDataOutput?
internal var audioOutput: AVCaptureAudioDataOutput?
- // CameraView+RecordView (+ FrameProcessorDelegate.mm)
+ // CameraView+RecordView (+ Frame Processor)
internal var isRecording = false
internal var recordingSession: RecordingSession?
- @objc public var frameProcessorCallback: FrameProcessorCallback?
- // CameraView+TakePhoto
- internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
+ #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+ @objc public var frameProcessor: FrameProcessor?
+ #endif
+ #if VISION_CAMERA_ENABLE_SKIA
+ internal var skiaRenderer: SkiaRenderer?
+ #endif
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
- internal let cameraQueue = CameraQueues.cameraQueue
- internal let videoQueue = CameraQueues.videoQueue
- internal let audioQueue = CameraQueues.audioQueue
-
- internal var previewView: UIView?
+ internal var previewView: PreviewView?
#if DEBUG
internal var fpsGraph: RCTFPSGraph?
#endif
@@ -165,10 +160,7 @@ public final class CameraView: UIView {
if newSuperview != nil {
if !isMounted {
isMounted = true
- guard let onViewReady = onViewReady else {
- return
- }
- onViewReady(nil)
+ onViewReady?(nil)
}
}
}
@@ -180,36 +172,6 @@ public final class CameraView: UIView {
}
}
- func setupPreviewView() {
- if previewType == "skia" {
- // Skia Preview View allows user to draw onto a Frame in a Frame Processor
- if previewView is PreviewSkiaView { return }
- previewView?.removeFromSuperview()
- previewView = PreviewSkiaView(frame: frame)
- } else {
- // Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
- if previewView is PreviewView { return }
- previewView?.removeFromSuperview()
- previewView = PreviewView(frame: frame, session: captureSession)
- }
-
- addSubview(previewView!)
- }
-
- func setupFpsGraph() {
- #if DEBUG
- if enableFpsGraph {
- if fpsGraph != nil { return }
- fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
- fpsGraph!.layer.zPosition = 9999.0
- addSubview(fpsGraph!)
- } else {
- fpsGraph?.removeFromSuperview()
- fpsGraph = nil
- }
- #endif
- }
-
// pragma MARK: Props updating
override public final func didSetProps(_ changedProps: [String]!) {
ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
@@ -246,8 +208,8 @@ public final class CameraView: UIView {
shouldReconfigureDevice ||
shouldUpdateVideoStabilization ||
shouldUpdateOrientation {
- // Video Configuration
- cameraQueue.async {
+ CameraQueues.cameraQueue.async {
+ // Video Configuration
if shouldReconfigure {
self.configureCaptureSession()
}
@@ -285,7 +247,7 @@ public final class CameraView: UIView {
// This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
if shouldUpdateTorch {
- self.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
+ CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
self.setTorchMode(self.torch)
}
}
@@ -293,52 +255,13 @@ public final class CameraView: UIView {
// Audio Configuration
if shouldReconfigureAudioSession {
- audioQueue.async {
+ CameraQueues.audioQueue.async {
self.configureAudioSession()
}
}
}
}
- internal final func setTorchMode(_ torchMode: String) {
- guard let device = videoDeviceInput?.device else {
- invokeOnError(.session(.cameraNotReady))
- return
- }
- guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
- invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
- return
- }
- if !captureSession.isRunning {
- torchMode = .off
- }
- if device.torchMode == torchMode {
- // no need to run the whole lock/unlock bs
- return
- }
- if !device.hasTorch || !device.isTorchAvailable {
- if torchMode == .off {
- // ignore it, when it's off and not supported, it's off.
- return
- } else {
- // torch mode is .auto or .on, but no torch is available.
- invokeOnError(.device(.torchUnavailable))
- return
- }
- }
- do {
- try device.lockForConfiguration()
- device.torchMode = torchMode
- if torchMode == .on {
- try device.setTorchModeOn(level: 1.0)
- }
- device.unlockForConfiguration()
- } catch let error as NSError {
- invokeOnError(.device(.configureError), cause: error)
- return
- }
- }
-
@objc
func onOrientationChanged() {
updateOrientation()
diff --git a/ios/CameraViewManager.m b/ios/CameraViewManager.m
index 8c4ed31..ee79095 100644
--- a/ios/CameraViewManager.m
+++ b/ios/CameraViewManager.m
@@ -40,7 +40,6 @@ RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(colorSpace, NSString);
RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
// other props
-RCT_EXPORT_VIEW_PROPERTY(preset, NSString);
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(previewType, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
diff --git a/ios/CameraViewManager.swift b/ios/CameraViewManager.swift
index c405369..c66418e 100644
--- a/ios/CameraViewManager.swift
+++ b/ios/CameraViewManager.swift
@@ -13,7 +13,9 @@ import Foundation
final class CameraViewManager: RCTViewManager {
// pragma MARK: Properties
- private var runtimeManager: FrameProcessorRuntimeManager?
+ #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+ private var runtimeManager: FrameProcessorRuntimeManager?
+ #endif
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
@@ -31,10 +33,14 @@ final class CameraViewManager: RCTViewManager {
@objc
final func installFrameProcessorBindings() -> NSNumber {
- // Runs on JS Thread
- runtimeManager = FrameProcessorRuntimeManager()
- runtimeManager!.installFrameProcessorBindings()
- return true as NSNumber
+ #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+ // Runs on JS Thread
+ runtimeManager = FrameProcessorRuntimeManager()
+ runtimeManager!.installFrameProcessorBindings()
+ return true as NSNumber
+ #else
+ return false as NSNumber
+ #endif
}
@objc
@@ -101,15 +107,10 @@ final class CameraViewManager: RCTViewManager {
@objc
final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
withPromise(resolve: resolve, reject: reject) {
- let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(), mediaType: .video, position: .unspecified)
- let devices = discoverySession.devices.filter {
- if #available(iOS 11.1, *) {
- // exclude the true-depth camera. The True-Depth camera has YUV and Infrared, can't take photos!
- return $0.deviceType != .builtInTrueDepthCamera
- }
- return true
- }
- return devices.map {
+ let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
+ mediaType: .video,
+ position: .unspecified)
+ return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
@@ -171,6 +172,7 @@ final class CameraViewManager: RCTViewManager {
private func getCameraView(withTag tag: NSNumber) -> CameraView {
// swiftlint:disable force_cast
return bridge.uiManager.view(forReactTag: tag) as! CameraView
+ // swiftlint:enable force_cast
}
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
@@ -180,9 +182,6 @@ final class CameraViewManager: RCTViewManager {
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
- if #available(iOS 11.1, *) {
- deviceTypes.append(.builtInTrueDepthCamera)
- }
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
diff --git a/ios/Extensions/FourCharCode+toString.swift b/ios/Extensions/FourCharCode+toString.swift
index f841e90..0e9b69d 100644
--- a/ios/Extensions/FourCharCode+toString.swift
+++ b/ios/Extensions/FourCharCode+toString.swift
@@ -12,6 +12,6 @@ extension FourCharCode {
s.append(String(UnicodeScalar((self >> 16) & 255)!))
s.append(String(UnicodeScalar((self >> 8) & 255)!))
s.append(String(UnicodeScalar(self & 255)!))
- return (s)
+ return s
}
}
diff --git a/ios/Frame Processor/Frame.h b/ios/Frame Processor/Frame.h
index 70d2179..486ab96 100644
--- a/ios/Frame Processor/Frame.h
+++ b/ios/Frame Processor/Frame.h
@@ -14,9 +14,9 @@
@interface Frame : NSObject
-- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation;
+- (instancetype _Nonnull) initWithBuffer:(CMSampleBufferRef _Nonnull)buffer orientation:(UIImageOrientation)orientation;
-@property (nonatomic, readonly) CMSampleBufferRef buffer;
+@property (nonatomic, readonly) CMSampleBufferRef _Nonnull buffer;
@property (nonatomic, readonly) UIImageOrientation orientation;
@end
diff --git a/ios/Frame Processor/Frame.m b/ios/Frame Processor/Frame.m
index 25e9ff7..27a7c68 100644
--- a/ios/Frame Processor/Frame.m
+++ b/ios/Frame Processor/Frame.m
@@ -11,11 +11,11 @@
#import <CoreMedia/CMSampleBuffer.h>
@implementation Frame {
- CMSampleBufferRef buffer;
+ CMSampleBufferRef _Nonnull buffer;
UIImageOrientation orientation;
}
-- (instancetype) initWithBuffer:(CMSampleBufferRef)buffer orientation:(UIImageOrientation)orientation {
+- (instancetype) initWithBuffer:(CMSampleBufferRef _Nonnull)buffer orientation:(UIImageOrientation)orientation {
self = [super init];
if (self) {
_buffer = buffer;
diff --git a/ios/Frame Processor/FrameHostObject.h b/ios/Frame Processor/FrameHostObject.h
index b58b182..98ec24c 100644
--- a/ios/Frame Processor/FrameHostObject.h
+++ b/ios/Frame Processor/FrameHostObject.h
@@ -13,17 +13,11 @@
#import "Frame.h"
-#import "SkCanvas.h"
-#import "JsiSkCanvas.h"
-
using namespace facebook;
class JSI_EXPORT FrameHostObject: public jsi::HostObject {
public:
explicit FrameHostObject(Frame* frame): frame(frame) {}
- explicit FrameHostObject(Frame* frame,
-                           std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
- frame(frame), canvas(canvas) {}
public:
jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
@@ -31,5 +25,4 @@ public:
public:
Frame* frame;
-  std::shared_ptr<RNSkia::JsiSkCanvas> canvas;
};
diff --git a/ios/Frame Processor/FrameHostObject.mm b/ios/Frame Processor/FrameHostObject.mm
index 5e524b1..aa427a5 100644
--- a/ios/Frame Processor/FrameHostObject.mm
+++ b/ios/Frame Processor/FrameHostObject.mm
@@ -11,8 +11,6 @@
#import <jsi/jsi.h>
#import "WKTJsiHostObject.h"
-#import "SkCanvas.h"
-#import "../Skia Render Layer/SkImageHelpers.h"
#import "../../cpp/JSITypedArray.h"
std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
@@ -24,6 +22,7 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
+ result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isDrawable")));
// Conversion
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
@@ -31,27 +30,10 @@ std::vector FrameHostObject::getPropertyNames(jsi::Runtime& rt)
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));
- // Skia
- result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
-
- if (canvas != nullptr) {
- auto canvasPropNames = canvas->getPropertyNames(rt);
- for (auto& prop : canvasPropNames) {
- result.push_back(std::move(prop));
- }
- }
return result;
}
-SkRect inscribe(SkSize size, SkRect rect) {
- auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
- auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
- return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
- rect.y() + halfHeightDelta, size.width(),
- size.height());
-}
-
jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
@@ -80,7 +62,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
incrementRefCount);
}
-
if (name == "decrementRefCount") {
auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
// Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame Buffer.
@@ -92,31 +73,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
0,
decrementRefCount);
}
- if (name == "render") {
- auto render = JSI_HOST_FUNCTION_LAMBDA {
- if (canvas == nullptr) {
- throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
- }
-
- // convert CMSampleBuffer to SkImage
- auto context = canvas->getCanvas()->recordingContext();
- auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
-
- // draw SkImage
- if (count > 0) {
- // ..with paint/shader
-        auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
- auto paint = paintHostObject->getObject();
- canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
- } else {
- // ..without paint/shader
- canvas->getCanvas()->drawImage(image, 0, 0);
- }
-
- return jsi::Value::undefined();
- };
- return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
- }
if (name == "toArrayBuffer") {
auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
@@ -146,6 +102,9 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
}
+ if (name == "isDrawable") {
+ return jsi::Value(false);
+ }
if (name == "isValid") {
auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 && CMSampleBufferIsValid(frame.buffer);
return jsi::Value(isValid);
@@ -206,11 +165,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
return jsi::Value((double) planesCount);
}
- if (canvas != nullptr) {
- // If we have a Canvas, try to access the property on there.
- return canvas->get(runtime, propName);
- }
-
// fallback to base implementation
return HostObject::get(runtime, propName);
}
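
> Every name resolved by `FrameHostObject::get` above is what a worklet sees on its `frame` argument. A small sketch exercising those properties (wrapped in a custom hook so the rules of hooks hold):

```ts
import { useFrameProcessor } from 'react-native-vision-camera'

export function useFrameLogger() {
  return useFrameProcessor((frame) => {
    'worklet'
    // width/height/isDrawable are plain properties on the host object
    console.log(`${frame.width}x${frame.height}, drawable: ${frame.isDrawable}`)

    if (frame.isValid) {
      // toArrayBuffer() copies the pixel data into a per-runtime cached ArrayBuffer
      const buffer = frame.toArrayBuffer()
      console.log(`first byte: ${new Uint8Array(buffer)[0]}`)
    }
  }, [])
}
```
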
diff --git a/ios/Frame Processor/FrameProcessor.h b/ios/Frame Processor/FrameProcessor.h
new file mode 100644
index 0000000..9ba38ad
--- /dev/null
+++ b/ios/Frame Processor/FrameProcessor.h
@@ -0,0 +1,33 @@
+//
+// FrameProcessor.h
+// VisionCamera
+//
+// Created by Marc Rousavy on 13.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#pragma once
+
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CMSampleBuffer.h>
+#import "Frame.h"
+
+#ifdef __cplusplus
+#import "WKTJsiWorklet.h"
+#import <jsi/jsi.h>
+#import "FrameHostObject.h"
+#import <memory>
+#endif
+
+@interface FrameProcessor : NSObject
+
+#ifdef __cplusplus
+- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
+                                  worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet;
+
+- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject;
+#endif
+
+- (void)call:(Frame* _Nonnull)frame;
+
+@end
diff --git a/ios/Frame Processor/FrameProcessor.mm b/ios/Frame Processor/FrameProcessor.mm
new file mode 100644
index 0000000..414a773
--- /dev/null
+++ b/ios/Frame Processor/FrameProcessor.mm
@@ -0,0 +1,61 @@
+//
+// FrameProcessor.mm
+// VisionCamera
+//
+// Created by Marc Rousavy on 13.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "FrameProcessor.h"
+
+#import <jsi/jsi.h>
+#import <memory>
+#import "WKTJsiWorklet.h"
+#import "FrameHostObject.h"
+
+using namespace facebook;
+
+@implementation FrameProcessor {
+  std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
+  std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
+}
+
+- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
+                        worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet {
+ if (self = [super init]) {
+ _workletContext = context;
+    _workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
+ }
+ return self;
+}
+
+- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject {
+ // Call the Frame Processor on the Worklet Runtime
+ jsi::Runtime& runtime = _workletContext->getWorkletRuntime();
+
+ try {
+ // Wrap HostObject as JSI Value
+ auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
+ jsi::Value jsValue(std::move(argument));
+
+ // Call the Worklet with the Frame JS Host Object as an argument
+ _workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
+ } catch (jsi::JSError& jsError) {
+    // A JS error occurred, print it to the console.
+ auto message = jsError.getMessage();
+
+ _workletContext->invokeOnJsThread([message](jsi::Runtime& jsRuntime) {
+ auto logFn = jsRuntime.global().getPropertyAsObject(jsRuntime, "console").getPropertyAsFunction(jsRuntime, "error");
+ logFn.call(jsRuntime, jsi::String::createFromUtf8(jsRuntime, "Frame Processor threw an error: " + message));
+ });
+ }
+}
+
+- (void)call:(Frame* _Nonnull)frame {
+ // Create the Frame Host Object wrapping the internal Frame
+  auto frameHostObject = std::make_shared<FrameHostObject>(frame);
+ [self callWithFrameHostObject:frameHostObject];
+}
+
+@end
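
> Since `callWithFrameHostObject:` catches `jsi::JSError` and forwards the message to `console.error` on the React JS thread, a throwing worklet no longer takes down the capture pipeline. Sketch:

```ts
import { useFrameProcessor } from 'react-native-vision-camera'

export function useStrictFrameProcessor() {
  return useFrameProcessor((frame) => {
    'worklet'
    if (frame.width === 0) {
      // caught natively in FrameProcessor.mm and re-surfaced as
      // "Frame Processor threw an error: ..." via console.error
      throw new Error('Received an empty Frame!')
    }
  }, [])
}
```
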
diff --git a/ios/Frame Processor/FrameProcessorCallback.h b/ios/Frame Processor/FrameProcessorCallback.h
deleted file mode 100644
index c5a1e32..0000000
--- a/ios/Frame Processor/FrameProcessorCallback.h
+++ /dev/null
@@ -1,14 +0,0 @@
-//
-// FrameProcessorCallback.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 11.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#pragma once
-
-#import <CoreMedia/CMSampleBuffer.h>
-#import "Frame.h"
-
-typedef void (^FrameProcessorCallback) (Frame* frame, void* skCanvas);
diff --git a/ios/Frame Processor/FrameProcessorPlugin.h b/ios/Frame Processor/FrameProcessorPlugin.h
index 38ca26d..dc63ca4 100644
--- a/ios/Frame Processor/FrameProcessorPlugin.h
+++ b/ios/Frame Processor/FrameProcessorPlugin.h
@@ -20,13 +20,13 @@
/// Get the name of the Frame Processor Plugin.
/// This will be exposed to JS under the `FrameProcessorPlugins` Proxy object.
-- (NSString *)name;
+- (NSString * _Nonnull)name;
/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
/// Make sure your code is optimized, as this is a hot path.
-- (id) callback:(Frame*)frame withArguments:(NSArray*)arguments;
+- (id _Nullable) callback:(Frame* _Nonnull)frame withArguments:(NSArray* _Nullable)arguments;
/// Register the given plugin in the Plugin Registry. This should be called on App Startup.
-+ (void) registerPlugin:(FrameProcessorPlugin*)plugin;
++ (void) registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
@end
diff --git a/ios/Frame Processor/FrameProcessorPlugin.m b/ios/Frame Processor/FrameProcessorPlugin.m
index 417b1d3..ec4275e 100644
--- a/ios/Frame Processor/FrameProcessorPlugin.m
+++ b/ios/Frame Processor/FrameProcessorPlugin.m
@@ -18,13 +18,13 @@
return nil;
}
-- (id)callback:(Frame *)frame withArguments:(NSArray *)arguments {
+- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSArray* _Nullable)arguments {
[NSException raise:NSInternalInconsistencyException
format:@"Frame Processor Plugin \"%@\" does not override the `callback(frame:withArguments:)` method!", [self name]];
return nil;
}
-+ (void)registerPlugin:(FrameProcessorPlugin *)plugin {
++ (void)registerPlugin:(FrameProcessorPlugin* _Nonnull)plugin {
[FrameProcessorPluginRegistry addFrameProcessorPlugin:plugin];
}
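
> As the header comment notes, registered plugins are exposed to JS on the `FrameProcessorPlugins` proxy. A sketch of calling one from a worklet (the `example_plugin` name is an assumption based on the example app; use whatever your plugin's `name` returns):

```ts
import { FrameProcessorPlugins, useFrameProcessor } from 'react-native-vision-camera'

export function usePluginFrameProcessor() {
  return useFrameProcessor((frame) => {
    'worklet'
    // forwards the Frame to the native callback:withArguments: implementation
    const result = FrameProcessorPlugins.example_plugin(frame)
    console.log(`plugin returned: ${JSON.stringify(result)}`)
  }, [])
}
```
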
diff --git a/ios/Frame Processor/FrameProcessorPluginRegistry.h b/ios/Frame Processor/FrameProcessorPluginRegistry.h
index 326d9dd..80ae89b 100644
--- a/ios/Frame Processor/FrameProcessorPluginRegistry.h
+++ b/ios/Frame Processor/FrameProcessorPluginRegistry.h
@@ -15,6 +15,6 @@
@interface FrameProcessorPluginRegistry : NSObject
+ (NSMutableDictionary*)frameProcessorPlugins;
-+ (void) addFrameProcessorPlugin:(FrameProcessorPlugin*)plugin;
++ (void) addFrameProcessorPlugin:(FrameProcessorPlugin* _Nonnull)plugin;
@end
diff --git a/ios/Frame Processor/FrameProcessorPluginRegistry.mm b/ios/Frame Processor/FrameProcessorPluginRegistry.m
similarity index 100%
rename from ios/Frame Processor/FrameProcessorPluginRegistry.mm
rename to ios/Frame Processor/FrameProcessorPluginRegistry.m
diff --git a/ios/Frame Processor/FrameProcessorRuntimeManager.h b/ios/Frame Processor/FrameProcessorRuntimeManager.h
index 9142e43..a137abb 100644
--- a/ios/Frame Processor/FrameProcessorRuntimeManager.h
+++ b/ios/Frame Processor/FrameProcessorRuntimeManager.h
@@ -13,8 +13,6 @@
@interface FrameProcessorRuntimeManager : NSObject
-- (instancetype) init;
-
- (void) installFrameProcessorBindings;
@end
diff --git a/ios/Frame Processor/FrameProcessorRuntimeManager.mm b/ios/Frame Processor/FrameProcessorRuntimeManager.mm
index ef98055..46aa2d5 100644
--- a/ios/Frame Processor/FrameProcessorRuntimeManager.mm
+++ b/ios/Frame Processor/FrameProcessorRuntimeManager.mm
@@ -10,6 +10,7 @@
#import "FrameProcessorRuntimeManager.h"
#import "FrameProcessorPluginRegistry.h"
#import "FrameProcessorPlugin.h"
+#import "FrameProcessor.h"
#import "FrameHostObject.h"
#import <memory>
@@ -21,15 +22,15 @@
#import <ReactCommon/RCTTurboModuleManager.h>
#import "WKTJsiWorkletContext.h"
-#import "WKTJsiWorkletApi.h"
#import "WKTJsiWorklet.h"
-#import "WKTJsiHostObject.h"
-#import "FrameProcessorUtils.h"
-#import "FrameProcessorCallback.h"
#import "../React Utils/JSIUtils.h"
#import "../../cpp/JSITypedArray.h"
+#if VISION_CAMERA_ENABLE_SKIA
+#import "../Skia Render Layer/SkiaFrameProcessor.h"
+#endif
+
// Forward declarations for the Swift classes
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
@@ -37,21 +38,15 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@end
__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
-@property (nonatomic, copy) FrameProcessorCallback _Nullable frameProcessorCallback;
+@property (nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
+- (SkiaRenderer* _Nonnull)getSkiaRenderer;
@end
@implementation FrameProcessorRuntimeManager {
- // Running Frame Processors on camera's video thread (synchronously)
+ // Separate Camera Worklet Context
std::shared_ptr workletContext;
}
-- (instancetype)init {
- if (self = [super init]) {
- // Initialize self
- }
- return self;
-}
-
- (void) setupWorkletContext:(jsi::Runtime&)runtime {
NSLog(@"FrameProcessorBindings: Creating Worklet Context...");
@@ -136,7 +131,7 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
// HostObject that attaches the cache to the lifecycle of the Runtime. On Runtime destroy, we destroy the cache.
auto propNameCacheObject = std::make_shared<vision::InvalidateCacheOnDestroy>(jsiRuntime);
jsiRuntime.global().setProperty(jsiRuntime,
- "__visionCameraPropNameCache",
+ "__visionCameraArrayBufferCache",
jsi::Object::createFromHostObject(jsiRuntime, propNameCacheObject));
// Install the Worklet Runtime in the main React JS Runtime
@@ -148,14 +143,30 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
auto setFrameProcessor = JSI_HOST_FUNCTION_LAMBDA {
NSLog(@"FrameProcessorBindings: Setting new frame processor...");
auto viewTag = arguments[0].asNumber();
- auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, arguments[1]);
+ auto object = arguments[1].asObject(runtime);
+ auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
+ auto worklet = std::make_shared<RNWorklet::JsiWorklet>(runtime, object.getProperty(runtime, "frameProcessor"));
RCTExecuteOnMainQueue(^{
auto currentBridge = [RCTBridge currentBridge];
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
auto view = static_cast<CameraView*>(anonymousView);
- auto callback = convertWorkletToFrameProcessorCallback(self->workletContext->getWorkletRuntime(), worklet);
- view.frameProcessorCallback = callback;
+ if (frameProcessorType == "frame-processor") {
+ view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:self->workletContext
+ worklet:worklet];
+
+ } else if (frameProcessorType == "skia-frame-processor") {
+#if VISION_CAMERA_ENABLE_SKIA
+ SkiaRenderer* skiaRenderer = [view getSkiaRenderer];
+ view.frameProcessor = [[SkiaFrameProcessor alloc] initWithWorklet:self->workletContext
+ worklet:worklet
+ skiaRenderer:skiaRenderer];
+#else
+ throw std::runtime_error("system/skia-unavailable: Skia is not installed!");
+#endif
+ } else {
+ throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " + frameProcessorType);
+ }
});
return jsi::Value::undefined();
@@ -175,10 +186,8 @@ __attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
if (!currentBridge) return;
auto anonymousView = [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
- if (!anonymousView) return;
-
auto view = static_cast<CameraView*>(anonymousView);
- view.frameProcessorCallback = nil;
+ view.frameProcessor = nil;
});
return jsi::Value::undefined();
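
Note: this binding changes the JS-facing contract - callers now pass a descriptor object instead of a bare worklet. A hedged sketch of a matching call from the runtime side (`setFrameProcessor`, `workletValue` and `viewTag` are stand-ins for values the caller already holds):

```objc++
// Sketch only - mirrors the object shape the host function above destructures.
jsi::Object descriptor(runtime);
descriptor.setProperty(runtime, "type",
                       jsi::String::createFromUtf8(runtime, "frame-processor"));
// "skia-frame-processor" selects the SkiaFrameProcessor path instead and
// requires VISION_CAMERA_ENABLE_SKIA to be defined (see the #if above).
descriptor.setProperty(runtime, "frameProcessor", workletValue);
setFrameProcessor.call(runtime, jsi::Value(viewTag), std::move(descriptor));
```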
diff --git a/ios/Frame Processor/FrameProcessorUtils.h b/ios/Frame Processor/FrameProcessorUtils.h
deleted file mode 100644
index 29027d3..0000000
--- a/ios/Frame Processor/FrameProcessorUtils.h
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// FrameProcessorUtils.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 15.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#pragma once
-
-#import
-#import
-#import "FrameProcessorCallback.h"
-
-#ifndef __cplusplus
-#error FrameProcessorUtils.h has to be compiled with C++!
-#endif
-
-#import
-#import "WKTJsiWorklet.h"
-#import
-
-using namespace facebook;
-
-FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet);
diff --git a/ios/Frame Processor/FrameProcessorUtils.mm b/ios/Frame Processor/FrameProcessorUtils.mm
deleted file mode 100644
index 510c0d7..0000000
--- a/ios/Frame Processor/FrameProcessorUtils.mm
+++ /dev/null
@@ -1,72 +0,0 @@
-//
-// FrameProcessorUtils.m
-// VisionCamera
-//
-// Created by Marc Rousavy on 15.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#import "FrameProcessorUtils.h"
-#import
-#import
-#import
-
-#import "FrameHostObject.h"
-#import "Frame.h"
-
-#import
-#import
-#import "JSConsoleHelper.h"
-#import
-
-#import "WKTJsiWorklet.h"
-
-#import "RNSkPlatformContext.h"
-#import "RNSkiOSPlatformContext.h"
-#import "JsiSkCanvas.h"
-
-FrameProcessorCallback convertWorkletToFrameProcessorCallback(jsi::Runtime& runtime, std::shared_ptr<RNWorklet::JsiWorklet> worklet) {
- // Wrap Worklet call in invoker
- auto workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
- // Create cached Skia Canvas object
- auto skiaPlatformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(&runtime, RCTBridge.currentBridge);
- auto canvasHostObject = std::make_shared<RNSkia::JsiSkCanvas>(skiaPlatformContext);
-
- // Converts a Worklet to a callable Objective-C block function
- return ^(Frame* frame, void* skiaCanvas) {
-
- try {
- // create HostObject which holds the Frame
- auto frameHostObject = std::make_shared<FrameHostObject>(frame);
- // Update cached Canvas object
- if (skiaCanvas != nullptr) {
- canvasHostObject->setCanvas((SkCanvas*)skiaCanvas);
- frameHostObject->canvas = canvasHostObject;
- } else {
- frameHostObject->canvas = nullptr;
- }
-
- auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
- jsi::Value jsValue(std::move(argument));
- // Call the Worklet with the Frame JS Host Object as an argument
- workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
-
- // After the sync Frame Processor finished executing, remove the Canvas on that Frame instance. It can no longer draw.
- frameHostObject->canvas = nullptr;
- } catch (jsi::JSError& jsError) {
- // JS Error occured, print it to console.
- auto stack = std::regex_replace(jsError.getStack(), std::regex("\n"), "\n ");
- auto message = [NSString stringWithFormat:@"Frame Processor threw an error: %s\nIn: %s", jsError.getMessage().c_str(), stack.c_str()];
-
- RCTBridge* bridge = [RCTBridge currentBridge];
- if (bridge != nil && bridge.jsCallInvoker != nullptr) {
- bridge.jsCallInvoker->invokeAsync([bridge, message]() {
- auto logFn = [JSConsoleHelper getLogFunctionForBridge:bridge];
- logFn(RCTLogLevelError, message);
- });
- } else {
- NSLog(@"%@", message);
- }
- }
- };
-}
diff --git a/ios/NativePreviewView.swift b/ios/NativePreviewView.swift
new file mode 100644
index 0000000..c298f0f
--- /dev/null
+++ b/ios/NativePreviewView.swift
@@ -0,0 +1,35 @@
+//
+// NativePreviewView.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 30.11.22.
+// Copyright © 2022 mrousavy. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import UIKit
+
+class NativePreviewView: PreviewView {
+ /// Convenience wrapper to get layer as its statically known type.
+ var videoPreviewLayer: AVCaptureVideoPreviewLayer {
+ // swiftlint:disable force_cast
+ return layer as! AVCaptureVideoPreviewLayer
+ // swiftlint:enable force_cast
+ }
+
+ override public class var layerClass: AnyClass {
+ return AVCaptureVideoPreviewLayer.self
+ }
+
+ init(frame: CGRect, session: AVCaptureSession) {
+ super.init(frame: frame)
+ videoPreviewLayer.session = session
+ videoPreviewLayer.videoGravity = .resizeAspectFill
+ }
+
+ @available(*, unavailable)
+ required init?(coder _: NSCoder) {
+ fatalError("init(coder:) is not implemented!")
+ }
+}
diff --git a/ios/Parsers/AVCaptureSession.Preset+descriptor.swift b/ios/Parsers/AVCaptureSession.Preset+descriptor.swift
deleted file mode 100644
index 3102444..0000000
--- a/ios/Parsers/AVCaptureSession.Preset+descriptor.swift
+++ /dev/null
@@ -1,55 +0,0 @@
-//
-// AVCaptureSession.Preset+descriptor.swift
-// mrousavy
-//
-// Created by Marc Rousavy on 15.12.20.
-// Copyright © 2020 mrousavy. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension AVCaptureSession.Preset {
- init(withString string: String) throws {
- switch string {
- case "cif-352x288":
- self = .cif352x288
- return
- case "hd-1280x720":
- self = .hd1280x720
- return
- case "hd-1920x1080":
- self = .hd1920x1080
- return
- case "hd-3840x2160":
- self = .hd4K3840x2160
- return
- case "high":
- self = .high
- return
- case "iframe-1280x720":
- self = .iFrame1280x720
- return
- case "iframe-960x540":
- self = .iFrame960x540
- return
- case "input-priority":
- self = .inputPriority
- return
- case "low":
- self = .low
- return
- case "medium":
- self = .medium
- return
- case "photo":
- self = .photo
- return
- case "vga-640x480":
- self = .vga640x480
- return
- default:
- throw EnumParserError.invalidValue
- }
- }
-}
diff --git a/ios/PreviewView.swift b/ios/PreviewView.swift
index 5d5e54e..7cfcde7 100644
--- a/ios/PreviewView.swift
+++ b/ios/PreviewView.swift
@@ -2,33 +2,11 @@
// PreviewView.swift
// VisionCamera
//
-// Created by Marc Rousavy on 30.11.22.
-// Copyright © 2022 mrousavy. All rights reserved.
+// Created by Marc Rousavy on 20.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
//
-import AVFoundation
import Foundation
import UIKit
-class PreviewView: UIView {
- /// Convenience wrapper to get layer as its statically known type.
- var videoPreviewLayer: AVCaptureVideoPreviewLayer {
- // swiftlint:disable force_cast
- return layer as! AVCaptureVideoPreviewLayer
- }
-
- override public class var layerClass: AnyClass {
- return AVCaptureVideoPreviewLayer.self
- }
-
- init(frame: CGRect, session: AVCaptureSession) {
- super.init(frame: frame)
- videoPreviewLayer.session = session
- videoPreviewLayer.videoGravity = .resizeAspectFill
- }
-
- @available(*, unavailable)
- required init?(coder _: NSCoder) {
- fatalError("init(coder:) is not implemented!")
- }
-}
+class PreviewView: UIView {}
diff --git a/ios/React Utils/JSConsoleHelper.h b/ios/React Utils/JSConsoleHelper.h
deleted file mode 100644
index 7fb9af7..0000000
--- a/ios/React Utils/JSConsoleHelper.h
+++ /dev/null
@@ -1,20 +0,0 @@
-//
-// JSConsoleHelper.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 02.06.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#pragma once
-
-#import
-#import
-
-@interface JSConsoleHelper : NSObject
-
-typedef void (^ConsoleLogFunction) (RCTLogLevel level, NSString* message);
-
-+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge;
-
-@end
diff --git a/ios/React Utils/JSConsoleHelper.mm b/ios/React Utils/JSConsoleHelper.mm
deleted file mode 100644
index b167104..0000000
--- a/ios/React Utils/JSConsoleHelper.mm
+++ /dev/null
@@ -1,60 +0,0 @@
-//
-// JSConsoleHelper.mm
-// VisionCamera
-//
-// Created by Marc Rousavy on 02.06.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#import
-#import "JSConsoleHelper.h"
-
-#import
-#import
-#import
-#import
-#import "RCTBridge+runOnJS.h"
-
-@implementation JSConsoleHelper
-
-+ (const char *) getLogFunctionNameForLogLevel:(RCTLogLevel)level {
- switch (level) {
- case RCTLogLevelTrace:
- return "trace";
- case RCTLogLevelInfo:
- return "log";
- case RCTLogLevelWarning:
- return "warn";
- case RCTLogLevelError:
- case RCTLogLevelFatal:
- return "error";
- }
-}
-
-+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge {
- RCTCxxBridge *cxxBridge = (RCTCxxBridge *)bridge;
- if (!cxxBridge.runtime) {
- return nil;
- }
-
- facebook::jsi::Runtime* jsiRuntime = (facebook::jsi::Runtime*)cxxBridge.runtime;
-
- return ^(RCTLogLevel level, NSString* message) {
- [bridge runOnJS:^{
- if (jsiRuntime != nullptr) {
- facebook::jsi::Runtime& runtime = *jsiRuntime;
- auto logFunctionName = [JSConsoleHelper getLogFunctionNameForLogLevel:level];
- try {
- auto console = runtime.global().getPropertyAsObject(runtime, "console");
- auto log = console.getPropertyAsFunction(runtime, logFunctionName);
- log.call(runtime, facebook::jsi::String::createFromAscii(runtime, [message UTF8String]));
- } catch (facebook::jsi::JSError& jsError) {
- NSLog(@"%@", message);
- NSLog(@"Failed to call `console.%s`: %s", logFunctionName, jsError.getMessage().c_str());
- }
- }
- }];
- };
-}
-
-@end
diff --git a/ios/React Utils/JSIUtils.mm b/ios/React Utils/JSIUtils.mm
index adbb762..60fce85 100644
--- a/ios/React Utils/JSIUtils.mm
+++ b/ios/React Utils/JSIUtils.mm
@@ -20,6 +20,7 @@
#import
#import
#import
+#import <React/RCTBlockGuard.h>
#import "../Frame Processor/Frame.h"
#import "../Frame Processor/FrameHostObject.h"
@@ -173,6 +174,13 @@ id convertJSIValueToObjCObject(jsi::Runtime &runtime, const jsi::Value &value, s
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const jsi::Function &value, std::shared_ptr<react::CallInvoker> jsInvoker)
{
auto weakWrapper = CallbackWrapper::createWeak(value.getFunction(runtime), runtime, jsInvoker);
+ RCTBlockGuard *blockGuard = [[RCTBlockGuard alloc] initWithCleanup:^() {
+ auto strongWrapper = weakWrapper.lock();
+ if (strongWrapper) {
+ strongWrapper->destroy();
+ }
+ }];
+
BOOL __block wrapperWasCalled = NO;
RCTResponseSenderBlock callback = ^(NSArray *responses) {
if (wrapperWasCalled) {
@@ -184,7 +192,7 @@ RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const
return;
}
- strongWrapper->jsInvoker().invokeAsync([weakWrapper, responses]() {
+ strongWrapper->jsInvoker().invokeAsync([weakWrapper, responses, blockGuard]() {
auto strongWrapper2 = weakWrapper.lock();
if (!strongWrapper2) {
return;
@@ -194,6 +202,9 @@ RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime &runtime, const
strongWrapper2->callback().call(strongWrapper2->runtime(), args, static_cast<size_t>(responses.count));
strongWrapper2->destroy();
delete[] args;
+
+ // Delete the CallbackWrapper when the block gets dealloced without being invoked.
+ (void)blockGuard;
});
wrapperWasCalled = YES;
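
Note: the `RCTBlockGuard` added above ties the `CallbackWrapper`'s lifetime to the blocks capturing it - its cleanup runs when the last capturing block is released, so the wrapper is destroyed even if the callback is never invoked. A minimal sketch of the pattern (illustrative, not VisionCamera code):

```objc++
RCTBlockGuard *guard = [[RCTBlockGuard alloc] initWithCleanup:^{
  // Runs once no block that captured `guard` is alive anymore.
  NSLog(@"cleanup: release native resources here");
}];

void (^callback)(void) = ^{
  (void)guard; // strong capture keeps the guard (and its cleanup) alive
};

callback = nil; // last strong reference gone -> guard deallocates -> cleanup runs
```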
diff --git a/ios/React Utils/RCTBridge+runOnJS.h b/ios/React Utils/RCTBridge+runOnJS.h
deleted file mode 100644
index 623d1e5..0000000
--- a/ios/React Utils/RCTBridge+runOnJS.h
+++ /dev/null
@@ -1,18 +0,0 @@
-//
-// RCTBridge+runOnJS.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 23.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#pragma once
-
-#import
-#import
-
-@interface RCTBridge (RunOnJS)
-
-- (void) runOnJS:(void (^)(void))block NS_SWIFT_NAME( runOnJS(_:) );
-
-@end
diff --git a/ios/React Utils/RCTBridge+runOnJS.mm b/ios/React Utils/RCTBridge+runOnJS.mm
deleted file mode 100644
index 423a0b3..0000000
--- a/ios/React Utils/RCTBridge+runOnJS.mm
+++ /dev/null
@@ -1,23 +0,0 @@
-//
-// RCTBridge+runOnJS.mm
-// VisionCamera
-//
-// Created by Marc Rousavy on 23.03.21.
-// Copyright © 2021 mrousavy. All rights reserved.
-//
-
-#import "RCTBridge+runOnJS.h"
-#import
-#import
-#import
-
-@implementation RCTBridge (RunOnJS)
-
-- (void) runOnJS:(void (^)())block {
- auto callInvoker = [self jsCallInvoker];
- callInvoker->invokeAsync([block]() {
- block();
- });
-}
-
-@end
diff --git a/ios/Skia Render Layer/DrawableFrameHostObject.h b/ios/Skia Render Layer/DrawableFrameHostObject.h
new file mode 100644
index 0000000..3fe17bc
--- /dev/null
+++ b/ios/Skia Render Layer/DrawableFrameHostObject.h
@@ -0,0 +1,35 @@
+//
+// DrawableFrameHostObject.h
+// VisionCamera
+//
+// Created by Marc Rousavy on 20.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#pragma once
+
+#import
+#import "../Frame Processor/FrameHostObject.h"
+#import "../Frame Processor/Frame.h"
+#import
+
+#import "SkCanvas.h"
+#import "JsiSkCanvas.h"
+
+using namespace facebook;
+
+class JSI_EXPORT DrawableFrameHostObject: public FrameHostObject {
+public:
+ explicit DrawableFrameHostObject(Frame* frame,
+                                   std::shared_ptr<RNSkia::JsiSkCanvas> canvas):
+ FrameHostObject(frame), _canvas(canvas) {}
+
+public:
+ jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
+  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;
+
+ void invalidateCanvas();
+
+private:
+  std::shared_ptr<RNSkia::JsiSkCanvas> _canvas;
+};
diff --git a/ios/Skia Render Layer/DrawableFrameHostObject.mm b/ios/Skia Render Layer/DrawableFrameHostObject.mm
new file mode 100644
index 0000000..a8174c4
--- /dev/null
+++ b/ios/Skia Render Layer/DrawableFrameHostObject.mm
@@ -0,0 +1,83 @@
+//
+// DrawableFrameHostObject.mm
+// VisionCamera
+//
+// Created by Marc Rousavy on 20.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#import "DrawableFrameHostObject.h"
+#import "SkCanvas.h"
+#import "SkImageHelpers.h"
+
+std::vector<jsi::PropNameID> DrawableFrameHostObject::getPropertyNames(jsi::Runtime& rt) {
+ auto result = FrameHostObject::getPropertyNames(rt);
+
+ // Skia - Render Frame
+ result.push_back(jsi::PropNameID::forUtf8(rt, std::string("render")));
+
+ if (_canvas != nullptr) {
+ auto canvasPropNames = _canvas->getPropertyNames(rt);
+ for (auto& prop : canvasPropNames) {
+ result.push_back(std::move(prop));
+ }
+ }
+
+ return result;
+}
+
+SkRect inscribe(SkSize size, SkRect rect) {
+ auto halfWidthDelta = (rect.width() - size.width()) / 2.0;
+ auto halfHeightDelta = (rect.height() - size.height()) / 2.0;
+ return SkRect::MakeXYWH(rect.x() + halfWidthDelta,
+ rect.y() + halfHeightDelta, size.width(),
+ size.height());
+}
+
+jsi::Value DrawableFrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
+ auto name = propName.utf8(runtime);
+
+ if (name == "render") {
+ auto render = JSI_HOST_FUNCTION_LAMBDA {
+ if (_canvas == nullptr) {
+ throw jsi::JSError(runtime, "Trying to render a Frame without a Skia Canvas! Did you install Skia?");
+ }
+
+ // convert CMSampleBuffer to SkImage
+ auto context = _canvas->getCanvas()->recordingContext();
+ auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, frame.buffer);
+
+ // draw SkImage
+ if (count > 0) {
+ // ..with paint/shader
+      auto paintHostObject = arguments[0].asObject(runtime).asHostObject<RNSkia::JsiSkPaint>(runtime);
+ auto paint = paintHostObject->getObject();
+ _canvas->getCanvas()->drawImage(image, 0, 0, SkSamplingOptions(), paint.get());
+ } else {
+ // ..without paint/shader
+ _canvas->getCanvas()->drawImage(image, 0, 0);
+ }
+
+ return jsi::Value::undefined();
+ };
+ return jsi::Function::createFromHostFunction(runtime, jsi::PropNameID::forUtf8(runtime, "render"), 1, render);
+ }
+ if (name == "isDrawable") {
+ return jsi::Value(_canvas != nullptr);
+ }
+
+ if (_canvas != nullptr) {
+ // If we have a Canvas, try to access the property on there.
+ auto result = _canvas->get(runtime, propName);
+ if (!result.isUndefined()) {
+ return result;
+ }
+ }
+
+ // fallback to base implementation
+ return FrameHostObject::get(runtime, propName);
+}
+
+void DrawableFrameHostObject::invalidateCanvas() {
+ _canvas = nullptr;
+}
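
Note: the `inscribe()` helper defined above (used by `SkImageHelpers::createCenterCropRect` further down) centers a size within a rect. A quick worked example:

```objc++
SkRect rect = SkRect::MakeXYWH(0, 0, 200, 300);
SkRect centered = inscribe(SkSize::Make(100, 100), rect);
// halfWidthDelta = (200 - 100) / 2 = 50, halfHeightDelta = (300 - 100) / 2 = 100,
// so centered == SkRect::MakeXYWH(50, 100, 100, 100): the 100x100 square
// centered inside the 200x300 rect.
```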
diff --git a/ios/Skia Render Layer/PreviewSkiaView.h b/ios/Skia Render Layer/PreviewSkiaView.h
deleted file mode 100644
index 54e7102..0000000
--- a/ios/Skia Render Layer/PreviewSkiaView.h
+++ /dev/null
@@ -1,26 +0,0 @@
-//
-// PreviewSkiaView.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 17.11.22.
-// Copyright © 2022 mrousavy. All rights reserved.
-//
-
-#ifndef PreviewSkiaView_h
-#define PreviewSkiaView_h
-
-#import
-#import
-#import "FrameProcessorCallback.h"
-
-typedef void (^DrawCallback) (void* _Nonnull skCanvas);
-
-@interface PreviewSkiaView: UIView
-
-// Call to pass a new Frame to be drawn by the Skia Canvas
-- (void) drawFrame:(_Nonnull CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback;
-
-@end
-
-
-#endif /* PreviewSkiaView_h */
diff --git a/ios/Skia Render Layer/PreviewSkiaView.mm b/ios/Skia Render Layer/PreviewSkiaView.mm
deleted file mode 100644
index f7c7c8d..0000000
--- a/ios/Skia Render Layer/PreviewSkiaView.mm
+++ /dev/null
@@ -1,60 +0,0 @@
-//
-// PreviewSkiaView.mm
-// VisionCamera
-//
-// Created by Marc Rousavy on 17.11.22.
-// Copyright © 2022 mrousavy. All rights reserved.
-//
-
-#import "PreviewSkiaView.h"
-#import
-
-#import "SkiaMetalCanvasProvider.h"
-#include
-
-#include
-#include
-
-#if SHOW_FPS
-#import
-#endif
-
-@implementation PreviewSkiaView {
-  std::shared_ptr<SkiaMetalCanvasProvider> _canvasProvider;
-}
-
-- (void)drawFrame:(CMSampleBufferRef)buffer withCallback:(DrawCallback _Nonnull)callback {
- if (_canvasProvider == nullptr) {
- throw std::runtime_error("Cannot draw new Frame to Canvas when SkiaMetalCanvasProvider is null!");
- }
-
- _canvasProvider->renderFrameToCanvas(buffer, ^(SkCanvas* canvas) {
- callback((void*)canvas);
- });
-}
-
-- (void) willMoveToSuperview:(UIView *)newWindow {
- if (newWindow == NULL) {
- // Remove implementation view when the parent view is not set
- if (_canvasProvider != nullptr) {
- [_canvasProvider->getLayer() removeFromSuperlayer];
- _canvasProvider = nullptr;
- }
- } else {
- // Create implementation view when the parent view is set
- if (_canvasProvider == nullptr) {
-      _canvasProvider = std::make_shared<SkiaMetalCanvasProvider>();
- [self.layer addSublayer: _canvasProvider->getLayer()];
- _canvasProvider->start();
- }
- }
-}
-
-- (void) layoutSubviews {
- if (_canvasProvider != nullptr) {
- _canvasProvider->setSize(self.bounds.size.width, self.bounds.size.height);
- }
-}
-
-@end
-
diff --git a/ios/Skia Render Layer/SkImageHelpers.mm b/ios/Skia Render Layer/SkImageHelpers.mm
index 24820b2..812c6ab 100644
--- a/ios/Skia Render Layer/SkImageHelpers.mm
+++ b/ios/Skia Render Layer/SkImageHelpers.mm
@@ -25,18 +25,18 @@
# define FourCC2Str(fourcc) (const char[]){*(((char*)&fourcc)+3), *(((char*)&fourcc)+2), *(((char*)&fourcc)+1), *(((char*)&fourcc)+0),0}
#endif
-CVMetalTextureCacheRef getTextureCache(GrRecordingContext* context) {
+inline CVMetalTextureCacheRef getTextureCache() {
static CVMetalTextureCacheRef textureCache = nil;
if (textureCache == nil) {
// Create a new Texture Cache
- auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
- nil,
- MTLCreateSystemDefaultDevice(),
- nil,
- &textureCache);
- if (result != kCVReturnSuccess || textureCache == nil) {
- throw std::runtime_error("Failed to create Metal Texture Cache!");
- }
+ auto result = CVMetalTextureCacheCreate(kCFAllocatorDefault,
+ nil,
+ MTLCreateSystemDefaultDevice(),
+ nil,
+ &textureCache);
+ if (result != kCVReturnSuccess || textureCache == nil) {
+ throw std::runtime_error("Failed to create Metal Texture Cache!");
+ }
}
return textureCache;
}
@@ -45,46 +45,34 @@ sk_sp SkImageHelpers::convertCMSampleBufferToSkImage(GrRecordingContext
auto pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
double width = CVPixelBufferGetWidth(pixelBuffer);
double height = CVPixelBufferGetHeight(pixelBuffer);
-
+
// Make sure the format is RGB (BGRA_8888)
auto format = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (format != kCVPixelFormatType_32BGRA) {
- auto fourCharCode = @(FourCC2Str(format));
- auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + fourCharCode.UTF8String + std::string(") - cannot convert to SkImage!");
+ auto error = std::string("VisionCamera: Frame has unknown Pixel Format (") + FourCC2Str(format) + std::string(") - cannot convert to SkImage!");
throw std::runtime_error(error);
}
- auto textureCache = getTextureCache(context);
-
+ auto textureCache = getTextureCache();
+
// Convert CMSampleBuffer* -> CVMetalTexture*
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
- textureCache,
- pixelBuffer,
- nil,
- MTLPixelFormatBGRA8Unorm,
- width,
- height,
- 0, // plane index
- &cvTexture);
- GrMtlTextureInfo textureInfo;
+ textureCache,
+ pixelBuffer,
+ nil,
+ MTLPixelFormatBGRA8Unorm,
+ width,
+ height,
+ 0, // plane index
+ &cvTexture);
auto mtlTexture = CVMetalTextureGetTexture(cvTexture);
- textureInfo.fTexture.retain((__bridge void*)mtlTexture);
- // Wrap it in a GrBackendTexture
- GrBackendTexture texture(width, height, GrMipmapped::kNo, textureInfo);
+ auto image = convertMTLTextureToSkImage(context, mtlTexture);
- // Create an SkImage from the existing texture
- auto image = SkImages::AdoptTextureFrom(context,
- texture,
- kTopLeft_GrSurfaceOrigin,
- kBGRA_8888_SkColorType,
- kOpaque_SkAlphaType,
- SkColorSpace::MakeSRGB());
-
// Release the Texture wrapper (it will still be strong)
CFRelease(cvTexture);
-
+
return image;
}
@@ -92,7 +80,11 @@ sk_sp SkImageHelpers::convertMTLTextureToSkImage(GrRecordingContext* co
// Convert the rendered MTLTexture to an SkImage
GrMtlTextureInfo textureInfo;
textureInfo.fTexture.retain((__bridge void*)texture);
- GrBackendTexture backendTexture(texture.width, texture.height, GrMipmapped::kNo, textureInfo);
+ GrBackendTexture backendTexture((int)texture.width,
+ (int)texture.height,
+ GrMipmapped::kNo,
+ textureInfo);
+ // TODO: Adopt or Borrow?
auto image = SkImages::AdoptTextureFrom(context,
backendTexture,
kTopLeft_GrSurfaceOrigin,
@@ -109,7 +101,7 @@ SkRect SkImageHelpers::createCenterCropRect(SkRect sourceRect, SkRect destinatio
} else {
src = SkSize::Make((sourceRect.height() * destinationRect.width()) / destinationRect.height(), sourceRect.height());
}
-
+
return inscribe(src, sourceRect);
}
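
Note: condensed, the zero-copy path above is CMSampleBuffer -> CVPixelBuffer -> CVMetalTexture (via the cached texture cache) -> MTLTexture -> SkImage. A sketch with error handling elided (`context` stands for the `GrRecordingContext*` the caller already holds):

```objc++
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVMetalTextureRef cvTexture;
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, getTextureCache(),
                                          pixelBuffer, nil, MTLPixelFormatBGRA8Unorm,
                                          CVPixelBufferGetWidth(pixelBuffer),
                                          CVPixelBufferGetHeight(pixelBuffer),
                                          0 /* plane index */, &cvTexture);
id<MTLTexture> mtlTexture = CVMetalTextureGetTexture(cvTexture);
sk_sp<SkImage> image = SkImageHelpers::convertMTLTextureToSkImage(context, mtlTexture);
CFRelease(cvTexture); // the SkImage holds its own reference to the texture
```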
diff --git a/ios/Skia Render Layer/SkiaFrameProcessor.h b/ios/Skia Render Layer/SkiaFrameProcessor.h
new file mode 100644
index 0000000..be00b2c
--- /dev/null
+++ b/ios/Skia Render Layer/SkiaFrameProcessor.h
@@ -0,0 +1,27 @@
+//
+// SkiaFrameProcessor.h
+// VisionCamera
+//
+// Created by Marc Rousavy on 14.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#pragma once
+
+#import
+#import "FrameProcessor.h"
+#import "SkiaRenderer.h"
+
+#ifdef __cplusplus
+#import "WKTJsiWorklet.h"
+#endif
+
+@interface SkiaFrameProcessor: FrameProcessor
+
+#ifdef __cplusplus
+- (instancetype _Nonnull) initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
+                                  worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
+ skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer;
+#endif
+
+@end
diff --git a/ios/Skia Render Layer/SkiaFrameProcessor.mm b/ios/Skia Render Layer/SkiaFrameProcessor.mm
new file mode 100644
index 0000000..90f0639
--- /dev/null
+++ b/ios/Skia Render Layer/SkiaFrameProcessor.mm
@@ -0,0 +1,56 @@
+//
+// SkiaFrameProcessor.mm
+// VisionCamera
+//
+// Created by Marc Rousavy on 14.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#import
+#import "SkiaFrameProcessor.h"
+#import "SkiaRenderer.h"
+
+#import
+
+#import
+#import "DrawableFrameHostObject.h"
+
+#import
+#import
+
+using namespace facebook;
+
+@implementation SkiaFrameProcessor {
+ SkiaRenderer* _skiaRenderer;
+  std::shared_ptr<RNSkia::JsiSkCanvas> _skiaCanvas;
+}
+
+- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context
+                                 worklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
+ skiaRenderer:(SkiaRenderer* _Nonnull)skiaRenderer {
+ if (self = [super initWithWorklet:context
+ worklet:worklet]) {
+ _skiaRenderer = skiaRenderer;
+    auto platformContext = std::make_shared<RNSkia::RNSkiOSPlatformContext>(context->getJsRuntime(),
+                                                                            RCTBridge.currentBridge);
+    _skiaCanvas = std::make_shared<RNSkia::JsiSkCanvas>(platformContext);
+ }
+ return self;
+}
+
+- (void)call:(Frame*)frame {
+ [_skiaRenderer renderCameraFrameToOffscreenCanvas:frame.buffer
+ withDrawCallback:^(SkiaCanvas _Nonnull canvas) {
+ // Create the Frame Host Object wrapping the internal Frame and Skia Canvas
+    self->_skiaCanvas->setCanvas(static_cast<SkCanvas*>(canvas));
+    auto frameHostObject = std::make_shared<DrawableFrameHostObject>(frame, self->_skiaCanvas);
+
+ // Call JS Frame Processor
+ [self callWithFrameHostObject:frameHostObject];
+
+ // Remove Skia Canvas from Host Object because it is no longer valid
+ frameHostObject->invalidateCanvas();
+ }];
+}
+
+@end
diff --git a/ios/Skia Render Layer/SkiaMetalCanvasProvider.h b/ios/Skia Render Layer/SkiaMetalCanvasProvider.h
deleted file mode 100644
index 86dc59c..0000000
--- a/ios/Skia Render Layer/SkiaMetalCanvasProvider.h
+++ /dev/null
@@ -1,57 +0,0 @@
-#pragma once
-
-#ifndef __cplusplus
-#error This header has to be compiled with C++!
-#endif
-
-#import
-#import
-#import
-
-#include
-#include
-
-#include
-#include
-#include
-#include
-
-#import "VisionDisplayLink.h"
-#import "SkiaMetalRenderContext.h"
-
-class SkiaMetalCanvasProvider: public std::enable_shared_from_this<SkiaMetalCanvasProvider> {
-public:
- SkiaMetalCanvasProvider();
- ~SkiaMetalCanvasProvider();
-
- // Render a Camera Frame to the off-screen canvas
-  void renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback);
-
- // Start updating the DisplayLink (runLoop @ screen refresh rate) and draw Frames to the Layer
- void start();
- // Update the size of the View (Layer)
- void setSize(int width, int height);
- CALayer* getLayer();
-
-private:
- bool _isValid = false;
- float _width = -1;
- float _height = -1;
-
- // For rendering Camera Frame -> off-screen MTLTexture
- OffscreenRenderContext _offscreenContext;
-
- // For rendering off-screen MTLTexture -> on-screen CAMetalLayer
- LayerRenderContext _layerContext;
-
- // For synchronization between the two Threads/Contexts
- std::mutex _textureMutex;
-  std::atomic<bool> _hasNewFrame = false;
-
-private:
- void render();
-  id<MTLTexture> getTexture(int width, int height);
-
- float getPixelDensity();
-};
-
diff --git a/ios/Skia Render Layer/SkiaPreviewDisplayLink.swift b/ios/Skia Render Layer/SkiaPreviewDisplayLink.swift
new file mode 100644
index 0000000..9d8c139
--- /dev/null
+++ b/ios/Skia Render Layer/SkiaPreviewDisplayLink.swift
@@ -0,0 +1,51 @@
+//
+// SkiaPreviewDisplayLink.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 19.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import Foundation
+
+class SkiaPreviewDisplayLink {
+ private var displayLink: CADisplayLink?
+ private let callback: (_ timestamp: Double) -> Void
+
+ init(callback: @escaping (_ timestamp: Double) -> Void) {
+ self.callback = callback
+ }
+
+ deinit {
+ stop()
+ }
+
+ @objc
+ func update(_ displayLink: CADisplayLink) {
+ callback(displayLink.timestamp)
+ }
+
+ func start() {
+ if displayLink == nil {
+ let displayLink = CADisplayLink(target: self, selector: #selector(update))
+ let queue = DispatchQueue(label: "mrousavy/VisionCamera.preview",
+ qos: .userInteractive,
+ attributes: [],
+ autoreleaseFrequency: .inherit,
+ target: nil)
+ queue.async {
+ displayLink.add(to: .current, forMode: .common)
+ self.displayLink = displayLink
+
+ ReactLogger.log(level: .info, message: "Starting Skia Preview Display Link...")
+ RunLoop.current.run()
+ ReactLogger.log(level: .info, message: "Skia Preview Display Link stopped.")
+ }
+ }
+ }
+
+ func stop() {
+ displayLink?.invalidate()
+ displayLink = nil
+ }
+}
diff --git a/ios/Skia Render Layer/SkiaPreviewView.swift b/ios/Skia Render Layer/SkiaPreviewView.swift
new file mode 100644
index 0000000..2d2efe5
--- /dev/null
+++ b/ios/Skia Render Layer/SkiaPreviewView.swift
@@ -0,0 +1,82 @@
+//
+// SkiaPreviewView.swift
+// VisionCamera
+//
+// Created by Marc Rousavy on 19.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+import Foundation
+
+// MARK: - SkiaPreviewLayer
+
+@available(iOS 13.0, *)
+class SkiaPreviewLayer: CAMetalLayer {
+ private var pixelRatio: CGFloat {
+ return UIScreen.main.scale
+ }
+
+ init(device: MTLDevice) {
+ super.init()
+
+ framebufferOnly = true
+ self.device = device
+ isOpaque = false
+ pixelFormat = .bgra8Unorm
+ contentsScale = pixelRatio
+ }
+
+ @available(*, unavailable)
+ required init?(coder _: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ func setSize(width: CGFloat, height: CGFloat) {
+ frame = CGRect(x: 0, y: 0, width: width, height: height)
+ drawableSize = CGSize(width: width * pixelRatio,
+ height: height * pixelRatio)
+ }
+}
+
+// MARK: - SkiaPreviewView
+
+class SkiaPreviewView: PreviewView {
+ private let skiaRenderer: SkiaRenderer
+ private let previewLayer: SkiaPreviewLayer
+ private lazy var displayLink = SkiaPreviewDisplayLink(callback: { [weak self] _ in
+    // Called every time the screen is rendered - e.g. at 60 FPS
+ if let self = self {
+ self.skiaRenderer.renderLatestFrame(to: self.previewLayer)
+ }
+ })
+
+ init(frame: CGRect, skiaRenderer: SkiaRenderer) {
+ self.skiaRenderer = skiaRenderer
+ previewLayer = SkiaPreviewLayer(device: skiaRenderer.metalDevice)
+ super.init(frame: frame)
+ }
+
+ deinit {
+ self.displayLink.stop()
+ }
+
+ @available(*, unavailable)
+ required init?(coder _: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
+ override func willMove(toSuperview newSuperview: UIView?) {
+ if newSuperview != nil {
+ layer.addSublayer(previewLayer)
+ displayLink.start()
+ } else {
+ previewLayer.removeFromSuperlayer()
+ displayLink.stop()
+ }
+ }
+
+ override func layoutSubviews() {
+ previewLayer.setSize(width: bounds.size.width,
+ height: bounds.size.height)
+ }
+}
diff --git a/ios/Skia Render Layer/SkiaMetalRenderContext.h b/ios/Skia Render Layer/SkiaRenderContext.h
similarity index 50%
rename from ios/Skia Render Layer/SkiaMetalRenderContext.h
rename to ios/Skia Render Layer/SkiaRenderContext.h
index 7b736ce..52782b2 100644
--- a/ios/Skia Render Layer/SkiaMetalRenderContext.h
+++ b/ios/Skia Render Layer/SkiaRenderContext.h
@@ -1,18 +1,15 @@
//
-// SkiaMetalRenderContext.h
+// SkiaRenderContext.h
// VisionCamera
//
// Created by Marc Rousavy on 02.12.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
-#ifndef SkiaMetalRenderContext_h
-#define SkiaMetalRenderContext_h
+#pragma once
#import
-#import
-#import
-#include
+#import
struct RenderContext {
id<MTLDevice> device;
@@ -26,16 +23,3 @@ struct RenderContext {
(__bridge void*)commandQueue);
}
};
-
-// For rendering to an off-screen in-memory Metal Texture (MTLTexture)
-struct OffscreenRenderContext: public RenderContext {
-  id<MTLTexture> texture;
-};
-
-// For rendering to a Metal Layer (CAMetalLayer)
-struct LayerRenderContext: public RenderContext {
- CAMetalLayer* layer;
- VisionDisplayLink* displayLink;
-};
-
-#endif /* SkiaMetalRenderContext_h */
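
Note: only fragments of `RenderContext` are visible in this hunk. For orientation, the shape it implies (assumed initializers - the exact member definitions are not shown in this diff):

```objc++
// Assumed shape of RenderContext (SkiaRenderContext.h): one Metal device and
// command queue, plus a Skia GrDirectContext backed by them.
struct RenderContext {
  id<MTLDevice> device = MTLCreateSystemDefaultDevice();
  id<MTLCommandQueue> commandQueue = [device newCommandQueue];
  sk_sp<GrDirectContext> skiaContext = GrDirectContext::MakeMetal((__bridge void*)device,
                                                                  (__bridge void*)commandQueue);
};
```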
diff --git a/ios/Skia Render Layer/SkiaRenderer.h b/ios/Skia Render Layer/SkiaRenderer.h
new file mode 100644
index 0000000..043f47a
--- /dev/null
+++ b/ios/Skia Render Layer/SkiaRenderer.h
@@ -0,0 +1,45 @@
+//
+// SkiaRenderer.h
+// VisionCamera
+//
+// Created by Marc Rousavy on 19.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+
+#pragma once
+
+#import
+#import
+#import
+
+typedef void* SkiaCanvas;
+typedef void(^draw_callback_t)(SkiaCanvas _Nonnull);
+
+/**
+ A Camera Frame Renderer powered by Skia.
+ It provides two Contexts, one offscreen and one onscreen.
+ - Offscreen Context: Allows you to render a Frame into a Skia Canvas and draw onto it using Skia commands
+ - Onscreen Context: Allows you to render a Frame from the offscreen context onto a Layer allowing it to be displayed for Preview.
+
+ The two contexts may run at different Frame Rates.
+ */
+@interface SkiaRenderer : NSObject
+
+/**
+ Renders the given Camera Frame to the offscreen Skia Canvas.
+ The given callback will be executed with a reference to the Skia Canvas
+ for the user to perform draw operations on (in this case, through a JS Frame Processor)
+ */
+- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef _Nonnull)sampleBuffer withDrawCallback:(draw_callback_t _Nonnull)callback;
+/**
+ Renders the latest Frame to the onscreen Layer.
+ This should be called every time you want the UI to update, e.g. at 60 FPS; every 16.66ms.
+ */
+- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer;
+
+/**
+ The Metal Device used for Rendering to the Layer
+ */
+@property (nonatomic, readonly) id<MTLDevice> _Nonnull metalDevice;
+
+@end
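
Note: the two contexts are pumped from different places - the camera queue drives `renderCameraFrameToOffscreenCanvas:withDrawCallback:` once per captured frame, while something display-synced calls `renderLatestFrameToLayer:`. The Swift `SkiaPreviewView`/`SkiaPreviewDisplayLink` pair above does exactly that; an equivalent Objective-C sketch (`PreviewDriver` is purely illustrative):

```objc++
// Illustrative driver - not part of this PR.
@interface PreviewDriver : NSObject
- (instancetype)initWithRenderer:(SkiaRenderer*)renderer layer:(CAMetalLayer*)layer;
- (void)start;
@end

@implementation PreviewDriver {
  SkiaRenderer* _renderer;
  CAMetalLayer* _layer;
  CADisplayLink* _displayLink;
}

- (instancetype)initWithRenderer:(SkiaRenderer*)renderer layer:(CAMetalLayer*)layer {
  if (self = [super init]) {
    _renderer = renderer;
    _layer = layer;
    _layer.device = renderer.metalDevice; // draw on the same Metal device
  }
  return self;
}

- (void)start {
  _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(onFrame:)];
  [_displayLink addToRunLoop:NSRunLoop.mainRunLoop forMode:NSRunLoopCommonModes];
}

- (void)onFrame:(CADisplayLink*)link {
  // Cheap no-op if the camera hasn't produced a new frame since the last tick.
  [_renderer renderLatestFrameToLayer:_layer];
}

@end
```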
diff --git a/ios/Skia Render Layer/SkiaMetalCanvasProvider.mm b/ios/Skia Render Layer/SkiaRenderer.mm
similarity index 59%
rename from ios/Skia Render Layer/SkiaMetalCanvasProvider.mm
rename to ios/Skia Render Layer/SkiaRenderer.mm
index ee8c415..0feac15 100644
--- a/ios/Skia Render Layer/SkiaMetalCanvasProvider.mm
+++ b/ios/Skia Render Layer/SkiaRenderer.mm
@@ -1,72 +1,137 @@
-#import "SkiaMetalCanvasProvider.h"
+//
+// SkiaRenderer.mm
+// VisionCamera
+//
+// Created by Marc Rousavy on 19.07.23.
+// Copyright © 2023 mrousavy. All rights reserved.
+//
+#import
+#import "SkiaRenderer.h"
#import
#import
-#import
+#import "SkiaRenderContext.h"
+
#import
#import
-#import
+#import
#import
#import
-
#import "SkImageHelpers.h"
-#include
+#import
+#import
+#import
-SkiaMetalCanvasProvider::SkiaMetalCanvasProvider(): std::enable_shared_from_this<SkiaMetalCanvasProvider>() {
- // Configure Metal Layer
- _layerContext.layer = [CAMetalLayer layer];
- _layerContext.layer.framebufferOnly = NO;
- _layerContext.layer.device = _layerContext.device;
- _layerContext.layer.opaque = false;
- _layerContext.layer.contentsScale = getPixelDensity();
- _layerContext.layer.pixelFormat = MTLPixelFormatBGRA8Unorm;
- // Set up DisplayLink
- _layerContext.displayLink = [[VisionDisplayLink alloc] init];
-
- _isValid = true;
+@implementation SkiaRenderer {
+ // The context we draw each Frame on
+  std::unique_ptr<RenderContext> _offscreenContext;
+  // The context the preview runs on
+  std::unique_ptr<RenderContext> _layerContext;
+  // The texture holding the drawn-to Frame
+  id<MTLTexture> _texture;
+
+ // For synchronization between the two Threads/Contexts
+ std::mutex _textureMutex;
+  std::atomic<bool> _hasNewFrame;
}
-SkiaMetalCanvasProvider::~SkiaMetalCanvasProvider() {
- _isValid = false;
- NSLog(@"VisionCamera: Stopping SkiaMetalCanvasProvider DisplayLink...");
- [_layerContext.displayLink stop];
+- (instancetype)init {
+ if (self = [super init]) {
+    _offscreenContext = std::make_unique<RenderContext>();
+    _layerContext = std::make_unique<RenderContext>();
+ _texture = nil;
+ _hasNewFrame = false;
+ }
+ return self;
}
-void SkiaMetalCanvasProvider::start() {
- NSLog(@"VisionCamera: Starting SkiaMetalCanvasProvider DisplayLink...");
- [_layerContext.displayLink start:[weakThis = weak_from_this()](double time) {
- auto thiz = weakThis.lock();
- if (thiz) {
- thiz->render();
- }
- }];
+- (id<MTLDevice>)metalDevice {
+ return _layerContext->device;
}
-id<MTLTexture> SkiaMetalCanvasProvider::getTexture(int width, int height) {
- if (_offscreenContext.texture == nil
- || _offscreenContext.texture.width != width
- || _offscreenContext.texture.height != height) {
+- (id<MTLTexture>)getTexture:(NSUInteger)width height:(NSUInteger)height {
+ if (_texture == nil
+ || _texture.width != width
+ || _texture.height != height) {
// Create new texture with the given width and height
MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:width
height:height
mipmapped:NO];
textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
- _offscreenContext.texture = [_offscreenContext.device newTextureWithDescriptor:textureDescriptor];
+ _texture = [_offscreenContext->device newTextureWithDescriptor:textureDescriptor];
}
- return _offscreenContext.texture;
+ return _texture;
}
-/**
- Callback from the DisplayLink - renders the current in-memory off-screen texture to the on-screen CAMetalLayer
- */
-void SkiaMetalCanvasProvider::render() {
- if (_width == -1 && _height == -1) {
- return;
- }
+- (void)renderCameraFrameToOffscreenCanvas:(CMSampleBufferRef)sampleBuffer withDrawCallback:(draw_callback_t)callback {
+ // Wrap in auto release pool since we want the system to clean up after rendering
+ @autoreleasepool {
+ // Get the Frame's PixelBuffer
+ CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ if (pixelBuffer == nil) {
+ throw std::runtime_error("SkiaRenderer: Pixel Buffer is corrupt/empty.");
+ }
+ // Lock Mutex to block the runLoop from overwriting the _currentDrawable
+ std::unique_lock lock(_textureMutex);
+
+ // Get the Metal Texture we use for in-memory drawing
+ auto texture = [self getTexture:CVPixelBufferGetWidth(pixelBuffer)
+ height:CVPixelBufferGetHeight(pixelBuffer)];
+
+ // Get & Lock the writeable Texture from the Metal Drawable
+ GrMtlTextureInfo textureInfo;
+ textureInfo.fTexture.retain((__bridge void*)texture);
+ GrBackendRenderTarget backendRenderTarget((int)texture.width,
+ (int)texture.height,
+ 1,
+ textureInfo);
+
+ auto context = _offscreenContext->skiaContext.get();
+
+ // Create a Skia Surface from the writable Texture
+ auto surface = SkSurface::MakeFromBackendRenderTarget(context,
+ backendRenderTarget,
+ kTopLeft_GrSurfaceOrigin,
+ kBGRA_8888_SkColorType,
+ SkColorSpace::MakeSRGB(),
+ nullptr);
+
+ if (surface == nullptr || surface->getCanvas() == nullptr) {
+ throw std::runtime_error("Skia surface could not be created from parameters.");
+ }
+
+ // Converts the CMSampleBuffer to an SkImage - RGB.
+ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
+
+ auto canvas = surface->getCanvas();
+
+ // Clear everything so we keep it at a clean state
+ canvas->clear(SkColors::kBlack);
+
+ // Draw the Image into the Frame (aspectRatio: cover)
+ // The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
+ // but that'll just overwrite the existing one - no need to worry.
+ canvas->drawImage(image, 0, 0);
+
+ // Call the draw callback - probably a JS Frame Processor.
+    callback(static_cast<SkiaCanvas>(canvas));
+
+ // Flush all appended operations on the canvas and commit it to the SkSurface
+ surface->flushAndSubmit();
+
+ // Set dirty & free locks
+ _hasNewFrame = true;
+ lock.unlock();
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+ }
+}
+
+- (void)renderLatestFrameToLayer:(CALayer* _Nonnull)layer {
if (!_hasNewFrame) {
// No new Frame has arrived in the meantime.
// We don't need to re-draw the texture to the screen if nothing has changed, abort.
@@ -74,12 +139,12 @@ void SkiaMetalCanvasProvider::render() {
}
@autoreleasepool {
- auto context = _layerContext.skiaContext.get();
-
+ auto context = _layerContext->skiaContext.get();
+
// Create a Skia Surface from the CAMetalLayer (use to draw to the View)
GrMTLHandle drawableHandle;
auto surface = SkSurface::MakeFromCAMetalLayer(context,
- (__bridge GrMTLHandle)_layerContext.layer,
+ (__bridge GrMTLHandle)layer,
kTopLeft_GrSurfaceOrigin,
1,
kBGRA_8888_SkColorType,
@@ -91,15 +156,14 @@ void SkiaMetalCanvasProvider::render() {
}
auto canvas = surface->getCanvas();
-
+
// Lock the Mutex so we can operate on the Texture atomically without
// renderFrameToCanvas() overwriting in between from a different thread
std::unique_lock lock(_textureMutex);
- // Get the texture
- auto texture = _offscreenContext.texture;
+ auto texture = _texture;
if (texture == nil) return;
-
+
// Calculate Center Crop (aspectRatio: cover) transform
auto sourceRect = SkRect::MakeXYWH(0, 0, texture.width, texture.height);
auto destinationRect = SkRect::MakeXYWH(0, 0, surface->width(), surface->height());
@@ -130,104 +194,14 @@ void SkiaMetalCanvasProvider::render() {
// Pass the drawable into the Metal Command Buffer and submit it to the GPU
id<CAMetalDrawable> drawable = (__bridge id<CAMetalDrawable>)drawableHandle;
- id<MTLCommandBuffer> commandBuffer([_layerContext.commandQueue commandBuffer]);
+ id<MTLCommandBuffer> commandBuffer([_layerContext->commandQueue commandBuffer]);
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
-
+
+ // Set flag back to false
_hasNewFrame = false;
-
lock.unlock();
}
}
-float SkiaMetalCanvasProvider::getPixelDensity() {
- return UIScreen.mainScreen.scale;
-}
-
-/**
- Render to a canvas. This uses the current in-memory off-screen texture and draws to it.
- The buffer is expected to be in RGB (`BGRA_8888`) format.
- While rendering, `drawCallback` will be invoked with a Skia Canvas instance which can be used for Frame Processing (JS).
- */
-void SkiaMetalCanvasProvider::renderFrameToCanvas(CMSampleBufferRef sampleBuffer, const std::function<void(SkCanvas*)>& drawCallback) {
- if (_width == -1 && _height == -1) {
- return;
- }
-
- // Wrap in auto release pool since we want the system to clean up after rendering
- // and not wait until later - we've seen some example of memory usage growing very
- // fast in the simulator without this.
- @autoreleasepool {
- // Get the Frame's PixelBuffer
- CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- if (pixelBuffer == nil) {
- throw std::runtime_error("drawFrame: Pixel Buffer is corrupt/empty.");
- }
-
- // Lock Mutex to block the runLoop from overwriting the _currentDrawable
- std::unique_lock lock(_textureMutex);
-
- // Get the Metal Texture we use for in-memory drawing
- auto texture = getTexture(CVPixelBufferGetWidth(pixelBuffer),
- CVPixelBufferGetHeight(pixelBuffer));
-
- // Get & Lock the writeable Texture from the Metal Drawable
- GrMtlTextureInfo fbInfo;
- fbInfo.fTexture.retain((__bridge void*)texture);
- GrBackendRenderTarget backendRT(texture.width,
- texture.height,
- 1,
- fbInfo);
-
- auto context = _offscreenContext.skiaContext.get();
-
- // Create a Skia Surface from the writable Texture
- auto surface = SkSurface::MakeFromBackendRenderTarget(context,
- backendRT,
- kTopLeft_GrSurfaceOrigin,
- kBGRA_8888_SkColorType,
- nullptr,
- nullptr);
-
- if (surface == nullptr || surface->getCanvas() == nullptr) {
- throw std::runtime_error("Skia surface could not be created from parameters.");
- }
-
- // Lock the Frame's PixelBuffer for the duration of the Frame Processor so the user can safely do operations on it
- CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
-
- // Converts the CMSampleBuffer to an SkImage - RGB.
- auto image = SkImageHelpers::convertCMSampleBufferToSkImage(context, sampleBuffer);
-
- auto canvas = surface->getCanvas();
-
- // Clear everything so we keep it at a clean state
- canvas->clear(SkColors::kBlack);
-
- // Draw the Image into the Frame (aspectRatio: cover)
- // The Frame Processor might draw the Frame again (through render()) to pass a custom paint/shader,
- // but that'll just overwrite the existing one - no need to worry.
- canvas->drawImage(image, 0, 0);
-
- // Call the JS Frame Processor.
- drawCallback(canvas);
-
- // Flush all appended operations on the canvas and commit it to the SkSurface
- surface->flushAndSubmit();
-
- _hasNewFrame = true;
-
- lock.unlock();
- CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
- }
-}
-
-void SkiaMetalCanvasProvider::setSize(int width, int height) {
- _width = width;
- _height = height;
- _layerContext.layer.frame = CGRectMake(0, 0, width, height);
- _layerContext.layer.drawableSize = CGSizeMake(width * getPixelDensity(),
- height* getPixelDensity());
-}
-
-CALayer* SkiaMetalCanvasProvider::getLayer() { return _layerContext.layer; }
+@end
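
Note: stripped of the Metal/Skia specifics, the synchronization between the two render methods above is a single-slot producer/consumer - a mutex guards the shared texture and an atomic dirty flag lets the display link skip ticks without new content. A minimal sketch:

```objc++
#include <atomic>
#include <mutex>

std::mutex textureMutex;           // guards the shared offscreen texture
std::atomic<bool> hasNewFrame{false};

// Camera thread - once per captured frame.
void renderOffscreen(/* CMSampleBufferRef frame */) {
  std::unique_lock<std::mutex> lock(textureMutex);
  // ... draw the frame (and the JS Frame Processor's commands) to the texture ...
  hasNewFrame = true;
}

// Display link thread - once per screen refresh (e.g. 60 FPS).
void renderOnscreen(/* CAMetalLayer* layer */) {
  if (!hasNewFrame) {
    return; // nothing changed, skip the re-draw entirely
  }
  std::unique_lock<std::mutex> lock(textureMutex);
  // ... blit the texture to the layer's drawable ...
  hasNewFrame = false;
}
```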
diff --git a/ios/Skia Render Layer/VisionDisplayLink.h b/ios/Skia Render Layer/VisionDisplayLink.h
deleted file mode 100644
index 79fd0f7..0000000
--- a/ios/Skia Render Layer/VisionDisplayLink.h
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// VisionDisplayLink.h
-// VisionCamera
-//
-// Created by Marc Rousavy on 28.11.22.
-// Copyright © 2022 mrousavy. All rights reserved.
-//
-
-#ifndef DisplayLink_h
-#define DisplayLink_h
-
-#import
-#import
-
-typedef void (^block_t)(double);
-@interface VisionDisplayLink : NSObject {
- CADisplayLink *_displayLink;
- double _currentFps;
- double _previousFrameTimestamp;
-}
-
-@property(nonatomic, copy) block_t updateBlock;
-
-// Start the DisplayLink's runLoop
-- (void)start:(block_t)block;
-
-// Stop the DisplayLink's runLoop
-- (void)stop;
-
-// Get the current FPS value
-- (double)currentFps;
-
-// The FPS value this DisplayLink is targeting
-- (double)targetFps;
-
-@end
-
-#endif /* VisionDisplayLink_h */
diff --git a/ios/Skia Render Layer/VisionDisplayLink.m b/ios/Skia Render Layer/VisionDisplayLink.m
deleted file mode 100644
index d5348bc..0000000
--- a/ios/Skia Render Layer/VisionDisplayLink.m
+++ /dev/null
@@ -1,63 +0,0 @@
-//
-// VisionDisplayLink.m
-// VisionCamera
-//
-// Created by Marc Rousavy on 28.11.22.
-// Copyright © 2022 mrousavy. All rights reserved.
-//
-
-#import "VisionDisplayLink.h"
-#import
-
-@implementation VisionDisplayLink
-
-- (void)start:(block_t)block {
- self.updateBlock = block;
- // check whether the loop is already running
- if (_displayLink == nil) {
- // specify update method
- _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update:)];
-
- // Start a new Queue/Thread that will run the runLoop
- dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, -1);
- dispatch_queue_t queue = dispatch_queue_create("mrousavy/VisionCamera.preview", qos);
- dispatch_async(queue, ^{
- // Add the display link to the current run loop (thread on which we're currently running on)
- NSRunLoop* loop = [NSRunLoop currentRunLoop];
- [self->_displayLink addToRunLoop:loop forMode:NSRunLoopCommonModes];
- // Run the runLoop (blocking)
- [loop run];
- NSLog(@"VisionCamera: DisplayLink runLoop ended.");
- });
- }
-}
-
-- (void)stop {
- // check whether the loop is already stopped
- if (_displayLink != nil) {
- // if the display link is present, it gets invalidated (loop stops)
-
- [_displayLink invalidate];
- _displayLink = nil;
- }
-}
-
-- (void)update:(CADisplayLink *)sender {
- double time = sender.timestamp;
-
- double diff = time - _previousFrameTimestamp;
- _currentFps = 1.0 / diff;
- _previousFrameTimestamp = time;
-
- _updateBlock(time);
-}
-
-- (double)targetFps {
- return 1.0 / _displayLink.duration;
-}
-
-- (double)currentFps {
- return _currentFps;
-}
-
-@end
diff --git a/ios/VisionCamera.xcodeproj/project.pbxproj b/ios/VisionCamera.xcodeproj/project.pbxproj
index bc20ec0..f2b0f77 100644
--- a/ios/VisionCamera.xcodeproj/project.pbxproj
+++ b/ios/VisionCamera.xcodeproj/project.pbxproj
@@ -7,14 +7,11 @@
objects = {
/* Begin PBXBuildFile section */
- B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */; };
+ B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
- B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
- B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8248867292644EF00729383 /* PreviewSkiaView.mm */; };
- B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
- B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */; };
- B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
+ B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B82F3A0A2A6896E3002BB804 /* PreviewView.swift */; };
+ B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* NativePreviewView.swift */; };
B841262F292E41A1001AB448 /* SkImageHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = B841262E292E41A1001AB448 /* SkImageHelpers.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
@@ -23,7 +20,6 @@
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
- B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */; };
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
@@ -44,7 +40,6 @@
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; };
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */; };
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; };
- B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */; };
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; };
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; };
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */; };
@@ -58,7 +53,6 @@
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
- B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */ = {isa = PBXBuildFile; fileRef = B88A020C2934FC22009E035A /* VisionDisplayLink.m */; };
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
@@ -67,6 +61,8 @@
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
+ B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */; };
+ B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@@ -83,23 +79,16 @@
/* Begin PBXFileReference section */
134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
- B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaMetalRenderContext.h; sourceTree = "<group>"; };
+ B80A319E293A5C10003EE681 /* SkiaRenderContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderContext.h; sourceTree = "<group>"; };
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
- B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginRegistry.mm; sourceTree = "<group>"; };
- B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorCallback.h; sourceTree = "<group>"; };
+ B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
- B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorUtils.mm; sourceTree = "<group>"; };
- B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorUtils.h; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
+ B8127E382A68871C00B06972 /* SkiaPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewView.swift; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSIUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSIUtils.h; sourceTree = "<group>"; };
- B8248866292644E300729383 /* PreviewSkiaView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PreviewSkiaView.h; sourceTree = "<group>"; };
- B8248867292644EF00729383 /* PreviewSkiaView.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PreviewSkiaView.mm; sourceTree = "<group>"; };
- B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "RCTBridge+runOnJS.h"; sourceTree = "<group>"; };
- B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "RCTBridge+runOnJS.mm"; sourceTree = "<group>"; };
- B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */ = {isa = PBXFileReference; explicitFileType = sourcecode.cpp.h; fileEncoding = 4; path = SkiaMetalCanvasProvider.h; sourceTree = "<group>"; };
- B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaMetalCanvasProvider.mm; sourceTree = "<group>"; };
- B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
+ B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
+ B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B841262E292E41A1001AB448 /* SkImageHelpers.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkImageHelpers.mm; sourceTree = "<group>"; };
B8412630292E41AD001AB448 /* SkImageHelpers.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkImageHelpers.h; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
@@ -107,12 +96,11 @@
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
+ B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SkiaPreviewDisplayLink.swift; sourceTree = "<group>"; };
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
- B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSConsoleHelper.h; sourceTree = "<group>"; };
- B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSConsoleHelper.mm; sourceTree = "<group>"; };
B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
@@ -134,7 +122,6 @@
B887517325E0102000DB86D6 /* EnumParserError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EnumParserError.swift; sourceTree = "<group>"; };
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoStabilizationMode+descriptor.swift"; sourceTree = "<group>"; };
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVVideoCodecType+descriptor.swift"; sourceTree = "<group>"; };
- B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession.Preset+descriptor.swift"; sourceTree = "<group>"; };
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.TorchMode+descriptor.swift"; sourceTree = "<group>"; };
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput.QualityPrioritization+descriptor.swift"; sourceTree = "<group>"; };
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.DeviceType+descriptor.swift"; sourceTree = "<group>"; };
@@ -149,17 +136,25 @@
B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
- B88A020C2934FC22009E035A /* VisionDisplayLink.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionDisplayLink.m; sourceTree = "<group>"; };
- B88A020E2934FC29009E035A /* VisionDisplayLink.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionDisplayLink.h; sourceTree = "<group>"; };
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
+ B89A28742A68795E0092207F /* SkiaRenderer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaRenderer.mm; sourceTree = "<group>"; };
+ B89A28752A68796A0092207F /* SkiaRenderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaRenderer.h; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
+ B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SkiaFrameProcessor.h; sourceTree = "<group>"; };
+ B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = SkiaFrameProcessor.mm; sourceTree = "<group>"; };
B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
+ B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = DrawableFrameHostObject.mm; sourceTree = "<group>"; };
+ B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DrawableFrameHostObject.h; sourceTree = "<group>"; };
+ B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
+ B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
+ B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
+ B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
/* End PBXFileReference section */
@@ -192,15 +187,18 @@
B887518425E0102000DB86D6 /* CameraView.swift */,
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */,
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */,
+ B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */,
B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
+ B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */,
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
- B83D5EE629377117000AFD2F /* PreviewView.swift */,
+ B82F3A0A2A6896E3002BB804 /* PreviewView.swift */,
+ B83D5EE629377117000AFD2F /* NativePreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8FCA20C292669B800F1AC82 /* Skia Render Layer */,
B887516125E0102000DB86D6 /* Extensions */,
@@ -239,12 +237,8 @@
B887516F25E0102000DB86D6 /* ReactLogger.swift */,
B887517025E0102000DB86D6 /* Promise.swift */,
B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
- B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */,
- B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */,
B81D41EF263C86F900B041FD /* JSIUtils.h */,
B8994E6B263F03E100069589 /* JSIUtils.mm */,
- B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */,
- B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */,
);
path = "React Utils";
sourceTree = "";
@@ -256,7 +250,6 @@
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */,
B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */,
B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */,
- B887517625E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift */,
B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */,
B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */,
B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */,
@@ -274,9 +267,8 @@
B8DCF2D725EA940700EA5C72 /* Frame Processor */ = {
isa = PBXGroup;
children = (
- B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */,
- B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */,
- B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */,
+ B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */,
+ B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */,
B8103E5725FF56F0007A1684 /* Frame.h */,
B8F7DDD1266F715D00120533 /* Frame.m */,
B84760A22608EE38004C3180 /* FrameHostObject.h */,
@@ -284,7 +276,7 @@
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */,
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */,
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
- B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */,
+ B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */,
B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
B86F803429A90DBD00205E48 /* FrameProcessorPlugin.m */,
);
@@ -294,15 +286,17 @@
B8FCA20C292669B800F1AC82 /* Skia Render Layer */ = {
isa = PBXGroup;
children = (
- B8248866292644E300729383 /* PreviewSkiaView.h */,
- B8248867292644EF00729383 /* PreviewSkiaView.mm */,
- B83373B329266A350092E380 /* SkiaMetalCanvasProvider.h */,
- B83373B429266A350092E380 /* SkiaMetalCanvasProvider.mm */,
+ B8C1FD222A613607007A06D6 /* SkiaFrameProcessor.h */,
+ B8C1FD232A613612007A06D6 /* SkiaFrameProcessor.mm */,
B8412630292E41AD001AB448 /* SkImageHelpers.h */,
B841262E292E41A1001AB448 /* SkImageHelpers.mm */,
- B88A020E2934FC29009E035A /* VisionDisplayLink.h */,
- B88A020C2934FC22009E035A /* VisionDisplayLink.m */,
- B80A319E293A5C10003EE681 /* SkiaMetalRenderContext.h */,
+ B80A319E293A5C10003EE681 /* SkiaRenderContext.h */,
+ B89A28752A68796A0092207F /* SkiaRenderer.h */,
+ B89A28742A68795E0092207F /* SkiaRenderer.mm */,
+ B8127E382A68871C00B06972 /* SkiaPreviewView.swift */,
+ B865BC5F2A6888DA0093DF1A /* SkiaPreviewDisplayLink.swift */,
+ B8DFBA372A68A17E00941736 /* DrawableFrameHostObject.h */,
+ B8DFBA362A68A17E00941736 /* DrawableFrameHostObject.mm */,
);
path = "Skia Render Layer";
sourceTree = "";
@@ -410,13 +404,13 @@
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
B88751A225E0102000DB86D6 /* AVCaptureColorSpace+descriptor.swift in Sources */,
- B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
+ B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
- B83373B529266A350092E380 /* SkiaMetalCanvasProvider.mm in Sources */,
+ B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
@@ -425,44 +419,40 @@
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
- B8248868292644EF00729383 /* PreviewSkiaView.mm in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
- B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */,
B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */,
- B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */,
B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */,
+ B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */,
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
+ B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */,
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
- B88A020D2934FC22009E035A /* VisionDisplayLink.m in Sources */,
B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
- B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */,
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
- B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */,
+ B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
- B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */,
B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
);
diff --git a/package.json b/package.json
index 2225e80..2cf5e4e 100644
--- a/package.json
+++ b/package.json
@@ -164,5 +164,8 @@
}
]
]
+ },
+ "dependencies": {
+ "string-hash-64": "^1.0.3"
}
}
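The new `string-hash-64` runtime dependency gives the JS layer a fast 64-bit hash for fingerprinting worklet code strings. Below is a minimal sketch of that idea, assuming the package's default export is a `(str: string) => number` function; the hook name `useCachedFrameProcessor` and the `toString()` trick are illustrative, not this PR's actual implementation:

```ts
import { useMemo } from 'react';
import hash from 'string-hash-64';

// Re-create a frame processor only when its source code actually changes:
// hashing the worklet's code string is cheaper than deep-comparing closures.
export function useCachedFrameProcessor<T extends (...args: never[]) => void>(frameProcessor: T): T {
  const codeHash = hash(frameProcessor.toString());
  // eslint-disable-next-line react-hooks/exhaustive-deps
  return useMemo(() => frameProcessor, [codeHash]);
}
```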
diff --git a/src/Camera.tsx b/src/Camera.tsx
index 6dffae0..655b66e 100644
--- a/src/Camera.tsx
+++ b/src/Camera.tsx
@@ -4,8 +4,7 @@ import type { VideoFileType } from '.';
import type { CameraDevice } from './CameraDevice';
import type { ErrorWithCause } from './CameraError';
import { CameraCaptureError, CameraRuntimeError, tryParseNativeCameraError, isErrorWithCause } from './CameraError';
-import type { CameraProps } from './CameraProps';
-import type { Frame } from './Frame';
+import type { CameraProps, FrameProcessor } from './CameraProps';
import { assertFrameProcessorsAvailable, assertJSIAvailable } from './JSIHelper';
import { CameraModule } from './NativeCameraModule';
import type { PhotoFile, TakePhotoOptions } from './PhotoFile';
@@ -25,6 +24,7 @@ interface OnErrorEvent {
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor'> & {
cameraId: string;
enableFrameProcessor: boolean;
+ previewType: 'native' | 'skia';
onInitialized?: (event: NativeSyntheticEvent<void>) => void;
onError?: (event: NativeSyntheticEvent<OnErrorEvent>) => void;
onViewReady: () => void;
@@ -67,7 +67,7 @@ export class Camera extends React.PureComponent<CameraProps> {
static displayName = 'Camera';
/** @internal */
displayName = Camera.displayName;
- private lastFrameProcessor: ((frame: Frame) => void) | undefined;
+ private lastFrameProcessor: FrameProcessor | undefined;
private isNativeViewMounted = false;
private readonly ref: React.RefObject<RefType>;
@@ -417,7 +417,7 @@ export class Camera extends React.PureComponent<CameraProps> {
//#endregion
//#region Lifecycle
- private setFrameProcessor(frameProcessor: (frame: Frame) => void): void {
+ private setFrameProcessor(frameProcessor: FrameProcessor): void {
assertFrameProcessorsAvailable();
// @ts-expect-error JSI functions aren't typed
global.setFrameProcessor(this.handle, frameProcessor);
@@ -473,6 +473,7 @@ export class Camera extends React.PureComponent {
onInitialized={this.onInitialized}
onError={this.onError}
enableFrameProcessor={frameProcessor != null}
+ previewType={frameProcessor?.type === 'skia-frame-processor' ? 'skia' : 'native'}
/>
);
}
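For reference, the `FrameProcessor` type now imported from './CameraProps' behaves as a tagged union: `render()` above only inspects its `type` field. A hedged sketch of a compatible shape; only the `'skia-frame-processor'` tag is confirmed by this diff, the other names are assumptions:

```ts
import type { Frame } from './Frame';

// Assumed shape: the Skia variant receives a frame it can also draw on.
type DrawableFrame = Frame;

export type FrameProcessor =
  | { type: 'frame-processor'; frameProcessor: (frame: Frame) => void }
  | { type: 'skia-frame-processor'; frameProcessor: (frame: DrawableFrame) => void };

// previewType is then derived exactly as in render() above:
const previewTypeOf = (fp?: FrameProcessor): 'native' | 'skia' =>
  fp?.type === 'skia-frame-processor' ? 'skia' : 'native';
```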
diff --git a/src/CameraError.ts b/src/CameraError.ts
index 322331c..9acb6b6 100644
--- a/src/CameraError.ts
+++ b/src/CameraError.ts
@@ -15,14 +15,12 @@ export type DeviceError =
| 'device/low-light-boost-not-supported'
| 'device/focus-not-supported'
| 'device/camera-not-available-on-simulator';
-export type FrameProcessorError = 'frame-processor/unavailable';
export type FormatError =
| 'format/invalid-fps'
| 'format/invalid-hdr'
| 'format/invalid-low-light-boost'
| 'format/invalid-format'
- | 'format/invalid-color-space'
- | 'format/invalid-preset';
+ | 'format/invalid-color-space';
export type SessionError =
| 'session/camera-not-ready'
| 'session/audio-session-setup-failed'
@@ -50,7 +48,12 @@ export type CaptureError =
| 'capture/photo-not-enabled'
| 'capture/aborted'
| 'capture/unknown';
-export type SystemError = 'system/camera-module-not-found' | 'system/no-camera-manager' | 'system/view-not-found';
+export type SystemError =
+ | 'system/camera-module-not-found'
+ | 'system/no-camera-manager'
+ | 'system/frame-processors-unavailable'
+ | 'system/skia-unavailable'
+ | 'system/view-not-found';
export type UnknownError = 'unknown/unknown';
/**
@@ -105,7 +108,6 @@ type CameraErrorCode =
| PermissionError
| ParameterError
| DeviceError
- | FrameProcessorError
| FormatError
| SessionError
| CaptureError
@@ -162,7 +164,7 @@ export class CameraCaptureError extends CameraError<CaptureErrorCode> {
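The two new `system/` codes surface through the same `onError` path as every other `CameraErrorCode`. A hedged usage sketch, assuming `CameraRuntimeError` exposes the `code` and `message` getters defined elsewhere in this file:

```ts
import type { CameraRuntimeError } from './CameraError';

// Branch on the new SystemError codes in an onError handler.
function onCameraError(error: CameraRuntimeError): void {
  switch (error.code) {
    case 'system/frame-processors-unavailable':
      console.warn('Frame Processors are not available in this build.');
      break;
    case 'system/skia-unavailable':
      console.warn('@shopify/react-native-skia is not installed, so the Skia preview is unavailable.');
      break;
    default:
      console.error(`Camera error: ${error.code} (${error.message})`);
  }
}
```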