feat: Complete iOS Codebase rewrite (#1647)

* Make Frame Processors an extra subspec

* Update VisionCamera.podspec

* Make optional

* Make VisionCamera compile without Skia

* Fix

* Add skia again

* Update VisionCamera.podspec

* Make VisionCamera build without Frame Processors

* Rename error to `system/frame-processors-unavailable`

* Fix Frame Processor returning early

* Remove `preset`, FP partial rewrite

* Only warn on frame drop

* Fix wrong queue

* fix: Run on CameraQueue again

* Update CameraView.swift

* fix: Activate audio session asynchronously on audio queue (see the audio-queue sketch after this list)

* Update CameraView+RecordVideo.swift

* Update PreviewView.h

* Cleanups

* Cleanup

* fix cast

* feat: Add LiDAR Depth Camera support (see the device-discovery sketch after this list)

* Upgrade Ruby

* Add vector icons type

* Update Gemfile.lock

* fix: Stop queues on deinit

* Also load `builtInTrueDepthCamera`

* Update CameraViewManager.swift

* Update SkImageHelpers.mm

* Extract FrameProcessorCallback to FrameProcessor

Holds more context now :)

* Rename to .m

* fix: Add `RCTLog` import

* Create SkiaFrameProcessor

* Update CameraBridge.h

* Call Frame Processor

* Fix defines

* fix: Allow deleting callback funcs

* fix Skia build

* batch

* Just call `setSkiaFrameProcessor`

* Rewrite in Swift

* Pass `SkiaRenderer`

* Fix Import

* Move `PreviewView` to Swift

* Fix Layer

* Set Skia Canvas to Frame Host Object

* Make `DrawableFrameHostObject` subclass

* Fix TS types

* Use same MTLDevice and apply scale

* Make getter

* Extract `setTorch` and `Preview`

* fix: Fix nil metal device

* Don't wait for session stop in deinit

* Use main pixel ratio

* Use unique_ptr for Render Contexts

* fix: Fix SkiaPreviewDisplayLink broken after deinit

* inline `getTextureCache`

* Update CameraPage.tsx

* chore: Format iOS

* perf: Allow MTLLayer to be optimized for only frame buffers (see the framebufferOnly sketch after this list)

* Add RN Video types

* fix: Fix Frame Processors if guard

* Find nodeModules recursively

* Create `Frame.isDrawable`

* Add `cocoapods-check` dependency
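
The "Activate audio session asynchronously on audio queue" bullet refers to moving AVAudioSession activation off the camera/main queue so that a slow activation cannot stall capture. A minimal sketch of that pattern, assuming a dedicated serial queue (the queue label and error handling are illustrative, not VisionCamera's actual code):

```swift
import AVFoundation

// Illustrative serial queue; the library's real queue names may differ.
let audioQueue = DispatchQueue(label: "camera.audio-queue")

func activateAudioSessionAsynchronously() {
  // Activation can block for a noticeable amount of time, so it runs on the
  // audio queue instead of the camera queue that drives the capture session.
  audioQueue.async {
    do {
      let session = AVAudioSession.sharedInstance()
      try session.setCategory(.playAndRecord, options: [.mixWithOthers, .allowBluetooth])
      try session.setActive(true)
    } catch {
      print("Failed to activate audio session: \(error)")
    }
  }
}
```

The LiDAR and TrueDepth bullets, together with the `getAllDeviceTypes()` hunks in the diff below, describe widening the discovery session to include depth-capable cameras instead of filtering them out. A hedged sketch of what such a device-type list can look like (the availability versions are Apple's documented minimums; this is not the library's exact list):

```swift
import AVFoundation

func allDeviceTypes() -> [AVCaptureDevice.DeviceType] {
  var deviceTypes: [AVCaptureDevice.DeviceType] = [
    .builtInDualCamera,
    .builtInWideAngleCamera,
    .builtInTelephotoCamera,
    .builtInTrueDepthCamera, // no longer excluded from discovery
  ]
  if #available(iOS 13.0, *) {
    deviceTypes.append(.builtInDualWideCamera)
    deviceTypes.append(.builtInUltraWideCamera)
  }
  if #available(iOS 15.4, *) {
    deviceTypes.append(.builtInLiDARDepthCamera) // LiDAR Depth Camera support
  }
  return deviceTypes
}

let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: allDeviceTypes(),
                                                        mediaType: .video,
                                                        position: .unspecified)
```

The "Allow MTLLayer to be optimized for only frame buffers" bullet refers to CAMetalLayer's `framebufferOnly` flag: when the layer's drawables are only ever used as render targets (never sampled or read back), Metal can pick a more efficient storage layout for them. A minimal sketch, assuming a preview layer that only renders into its drawables (the surrounding setup is illustrative):

```swift
import QuartzCore
import Metal

let previewLayer = CAMetalLayer()
previewLayer.device = MTLCreateSystemDefaultDevice()
previewLayer.pixelFormat = .bgra8Unorm
// The drawables are used purely as render targets, so Metal may optimize their storage.
previewLayer.framebufferOnly = true
```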
Marc Rousavy authored on 2023-07-20 15:30:04 +02:00, committed by GitHub
parent 5fb594ce6b
commit 375e894038
78 changed files with 1278 additions and 1245 deletions

CameraViewManager.swift

@@ -13,7 +13,9 @@ import Foundation
 final class CameraViewManager: RCTViewManager {
   // pragma MARK: Properties
 
-  private var runtimeManager: FrameProcessorRuntimeManager?
+  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+    private var runtimeManager: FrameProcessorRuntimeManager?
+  #endif
 
   override var methodQueue: DispatchQueue! {
     return DispatchQueue.main
@@ -31,10 +33,14 @@ final class CameraViewManager: RCTViewManager {
 
   @objc
   final func installFrameProcessorBindings() -> NSNumber {
-    // Runs on JS Thread
-    runtimeManager = FrameProcessorRuntimeManager()
-    runtimeManager!.installFrameProcessorBindings()
-    return true as NSNumber
+    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+      // Runs on JS Thread
+      runtimeManager = FrameProcessorRuntimeManager()
+      runtimeManager!.installFrameProcessorBindings()
+      return true as NSNumber
+    #else
+      return false as NSNumber
+    #endif
   }
 
   @objc
@@ -101,15 +107,10 @@ final class CameraViewManager: RCTViewManager {
   @objc
   final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
     withPromise(resolve: resolve, reject: reject) {
-      let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(), mediaType: .video, position: .unspecified)
-      let devices = discoverySession.devices.filter {
-        if #available(iOS 11.1, *) {
-          // exclude the true-depth camera. The True-Depth camera has YUV and Infrared, can't take photos!
-          return $0.deviceType != .builtInTrueDepthCamera
-        }
-        return true
-      }
-      return devices.map {
+      let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
+                                                              mediaType: .video,
+                                                              position: .unspecified)
+      return discoverySession.devices.map {
         return [
           "id": $0.uniqueID,
           "devices": $0.physicalDevices.map(\.deviceType.descriptor),
@@ -171,6 +172,7 @@ final class CameraViewManager: RCTViewManager {
   private func getCameraView(withTag tag: NSNumber) -> CameraView {
     // swiftlint:disable force_cast
     return bridge.uiManager.view(forReactTag: tag) as! CameraView
+    // swiftlint:enable force_cast
   }
 
   private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
@@ -180,9 +182,6 @@ final class CameraViewManager: RCTViewManager {
       deviceTypes.append(.builtInDualWideCamera)
       deviceTypes.append(.builtInUltraWideCamera)
     }
-    if #available(iOS 11.1, *) {
-      deviceTypes.append(.builtInTrueDepthCamera)
-    }
     deviceTypes.append(.builtInDualCamera)
     deviceTypes.append(.builtInWideAngleCamera)
     deviceTypes.append(.builtInTelephotoCamera)