feat: Complete iOS Codebase rewrite (#1647)

* Make Frame Processors an extra subspec

* Update VisionCamera.podspec

* Make optional

* Make VisionCamera compile without Skia

* Fix

* Add skia again

* Update VisionCamera.podspec

* Make VisionCamera build without Frame Processors

* Rename error to `system/frame-processors-unavailable`

* Fix Frame Processor returning early

* Remove `preset`, FP partial rewrite

* Only warn on frame drop

* Fix wrong queue

* fix: Run on CameraQueue again

* Update CameraView.swift

* fix: Activate audio session asynchronously on audio queue

* Update CameraView+RecordVideo.swift

* Update PreviewView.h

* Cleanups

* Cleanup

* fix cast

* feat: Add LiDAR Depth Camera support

* Upgrade Ruby

* Add vector icons type

* Update Gemfile.lock

* fix: Stop queues on deinit

* Also load `builtInTrueDepthCamera`

* Update CameraViewManager.swift

* Update SkImageHelpers.mm

* Extract FrameProcessorCallback to FrameProcessor

Holds more context now :)

* Rename to .m

* fix: Add `RCTLog` import

* Create SkiaFrameProcessor

* Update CameraBridge.h

* Call Frame Processor

* Fix defines

* fix: Allow deleting callback funcs

* fix Skia build

* batch

* Just call `setSkiaFrameProcessor`

* Rewrite in Swift

* Pass `SkiaRenderer`

* Fix Import

* Move `PreviewView` to Swift

* Fix Layer

* Set Skia Canvas to Frame Host Object

* Make `DrawableFrameHostObject` subclass

* Fix TS types

* Use same MTLDevice and apply scale

* Make getter

* Extract `setTorch` and `Preview`

* fix: Fix nil metal device

* Don't wait for session stop in deinit

* Use main pixel ratio

* Use unique_ptr for Render Contexts

* fix: Fix SkiaPreviewDisplayLink broken after deinit

* inline `getTextureCache`

* Update CameraPage.tsx

* chore: Format iOS

* perf: Allow MTLLayer to be optimized for only frame buffers

* Add RN Video types

* fix: Fix Frame Processors if guard

* Find nodeModules recursively

* Create `Frame.isDrawable`

* Add `cocoapods-check` dependency

Marc Rousavy
2023-07-20 15:30:04 +02:00
committed by GitHub
parent 5fb594ce6b
commit 375e894038
78 changed files with 1278 additions and 1245 deletions

@@ -23,7 +23,6 @@ private let propsThatRequireReconfiguration = ["cameraId",
"enableDepthData",
"enableHighQualityPhotos",
"enablePortraitEffectsMatteDelivery",
"preset",
"photo",
"video",
"enableFrameProcessor",
@@ -37,14 +36,11 @@ private let propsThatRequireDeviceReconfiguration = ["fps",
 public final class CameraView: UIView {
   // pragma MARK: React Properties
   // pragma MARK: Exported Properties
   // props that require reconfiguring
   @objc var cameraId: NSString?
   @objc var enableDepthData = false
   @objc var enableHighQualityPhotos: NSNumber? // nullable bool
   @objc var enablePortraitEffectsMatteDelivery = false
-  @objc var preset: String?
   // use cases
   @objc var photo: NSNumber? // nullable bool
   @objc var video: NSNumber? // nullable bool
@@ -85,27 +81,26 @@ public final class CameraView: UIView {
   // Capture Session
   internal let captureSession = AVCaptureSession()
   internal let audioCaptureSession = AVCaptureSession()
-  // Inputs
+  // Inputs & Outputs
   internal var videoDeviceInput: AVCaptureDeviceInput?
   internal var audioDeviceInput: AVCaptureDeviceInput?
   internal var photoOutput: AVCapturePhotoOutput?
   internal var videoOutput: AVCaptureVideoDataOutput?
   internal var audioOutput: AVCaptureAudioDataOutput?
-  // CameraView+RecordView (+ FrameProcessorDelegate.mm)
+  // CameraView+RecordView (+ Frame Processor)
   internal var isRecording = false
   internal var recordingSession: RecordingSession?
-  @objc public var frameProcessorCallback: FrameProcessorCallback?
   // CameraView+TakePhoto
   internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
+  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
+    @objc public var frameProcessor: FrameProcessor?
+  #endif
+  #if VISION_CAMERA_ENABLE_SKIA
+    internal var skiaRenderer: SkiaRenderer?
+  #endif
   // CameraView+Zoom
   internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
   internal var pinchScaleOffset: CGFloat = 1.0
-  internal let cameraQueue = CameraQueues.cameraQueue
-  internal let videoQueue = CameraQueues.videoQueue
-  internal let audioQueue = CameraQueues.audioQueue
-  internal var previewView: UIView?
+  internal var previewView: PreviewView?
   #if DEBUG
     internal var fpsGraph: RCTFPSGraph?
   #endif
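
The new `frameProcessor` and `skiaRenderer` properties only exist when the matching subspec is installed, which is what the "Make VisionCamera compile without Skia" / "Make VisionCamera build without Frame Processors" commits above refer to. A minimal sketch of how such a compile-time flag is consumed on the Swift side (the flag name matches the diff; the class and callback here are hypothetical stand-ins, assuming the flag is injected via `SWIFT_ACTIVE_COMPILATION_CONDITIONS`):

```swift
import AVFoundation

// Sketch only: VISION_CAMERA_ENABLE_FRAME_PROCESSORS is assumed to be set through
// SWIFT_ACTIVE_COMPILATION_CONDITIONS when the Frame Processors subspec is installed.
final class FrameStreamSketch {
  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
    // Hypothetical stand-in for the real FrameProcessor host object.
    var onFrame: ((CMSampleBuffer) -> Void)?
  #endif

  func forward(_ buffer: CMSampleBuffer) {
    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
      // Only compiled (and only called) when Frame Processors are available.
      onFrame?(buffer)
    #endif
    // Without the subspec, this method is a no-op and the callback never exists.
  }
}
```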
@@ -165,10 +160,7 @@ public final class CameraView: UIView {
     if newSuperview != nil {
       if !isMounted {
         isMounted = true
-        guard let onViewReady = onViewReady else {
-          return
-        }
-        onViewReady(nil)
+        onViewReady?(nil)
       }
     }
   }
@@ -180,36 +172,6 @@ public final class CameraView: UIView {
     }
   }
-  func setupPreviewView() {
-    if previewType == "skia" {
-      // Skia Preview View allows user to draw onto a Frame in a Frame Processor
-      if previewView is PreviewSkiaView { return }
-      previewView?.removeFromSuperview()
-      previewView = PreviewSkiaView(frame: frame)
-    } else {
-      // Normal iOS PreviewView is lighter and more performant (YUV Format, GPU only)
-      if previewView is PreviewView { return }
-      previewView?.removeFromSuperview()
-      previewView = PreviewView(frame: frame, session: captureSession)
-    }
-    addSubview(previewView!)
-  }
-  func setupFpsGraph() {
-    #if DEBUG
-      if enableFpsGraph {
-        if fpsGraph != nil { return }
-        fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
-        fpsGraph!.layer.zPosition = 9999.0
-        addSubview(fpsGraph!)
-      } else {
-        fpsGraph?.removeFromSuperview()
-        fpsGraph = nil
-      }
-    #endif
-  }
   // pragma MARK: Props updating
   override public final func didSetProps(_ changedProps: [String]!) {
     ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
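
`setupPreviewView()` and `setupFpsGraph()` are removed from `CameraView` here; per the "Move `PreviewView` to Swift" and "Extract `setTorch` and `Preview`" commits, the preview handling now lives in the Swift `PreviewView` that the property change above starts referencing. For orientation, a minimal `AVCaptureVideoPreviewLayer`-backed view of the kind the non-Skia path uses looks roughly like this (a sketch, not the actual `PreviewView` from this commit):

```swift
import AVFoundation
import UIKit

// Minimal sketch of an AVCaptureVideoPreviewLayer-backed preview view.
final class PreviewViewSketch: UIView {
  // Back the view with a preview layer so the capture session renders straight into it.
  override class var layerClass: AnyClass {
    AVCaptureVideoPreviewLayer.self
  }

  private var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // layerClass guarantees this cast always succeeds.
    return layer as! AVCaptureVideoPreviewLayer
  }

  init(frame: CGRect, session: AVCaptureSession) {
    super.init(frame: frame)
    videoPreviewLayer.session = session
    videoPreviewLayer.videoGravity = .resizeAspectFill
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not supported")
  }
}
```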
@@ -246,8 +208,8 @@ public final class CameraView: UIView {
        shouldReconfigureDevice ||
        shouldUpdateVideoStabilization ||
        shouldUpdateOrientation {
-      // Video Configuration
-      cameraQueue.async {
+      CameraQueues.cameraQueue.async {
+        // Video Configuration
         if shouldReconfigure {
           self.configureCaptureSession()
         }
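
From here on, the per-instance `cameraQueue`/`videoQueue`/`audioQueue` properties (deleted further up) are replaced with direct calls to the shared `CameraQueues` statics, so every `CameraView` dispatches onto the same serial queues. A rough sketch of what such a holder looks like (the property names follow the diff; the labels and QoS values are assumptions):

```swift
import Foundation

// Sketch of a shared-queue holder in the shape of CameraQueues.
enum CameraQueuesSketch {
  /// Serial queue for (re)configuring the capture session.
  static let cameraQueue = DispatchQueue(label: "camera-queue", qos: .userInteractive)
  /// Serial queue that receives video sample buffers.
  static let videoQueue = DispatchQueue(label: "video-queue", qos: .userInteractive)
  /// Serial queue that receives audio sample buffers.
  static let audioQueue = DispatchQueue(label: "audio-queue", qos: .userInteractive)
}

// Usage mirrors the change above:
// CameraQueuesSketch.cameraQueue.async { /* reconfigure the session */ }
```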
@@ -285,7 +247,7 @@ public final class CameraView: UIView {
         // This is a wack workaround, but if I immediately set torch mode after `startRunning()`, the session isn't quite ready yet and will ignore torch.
         if shouldUpdateTorch {
-          self.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
+          CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
             self.setTorchMode(self.torch)
           }
         }
@@ -293,52 +255,13 @@ public final class CameraView: UIView {
       // Audio Configuration
       if shouldReconfigureAudioSession {
-        audioQueue.async {
+        CameraQueues.audioQueue.async {
           self.configureAudioSession()
         }
       }
     }
   }
-  internal final func setTorchMode(_ torchMode: String) {
-    guard let device = videoDeviceInput?.device else {
-      invokeOnError(.session(.cameraNotReady))
-      return
-    }
-    guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
-      invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
-      return
-    }
-    if !captureSession.isRunning {
-      torchMode = .off
-    }
-    if device.torchMode == torchMode {
-      // no need to run the whole lock/unlock bs
-      return
-    }
-    if !device.hasTorch || !device.isTorchAvailable {
-      if torchMode == .off {
-        // ignore it, when it's off and not supported, it's off.
-        return
-      } else {
-        // torch mode is .auto or .on, but no torch is available.
-        invokeOnError(.device(.torchUnavailable))
-        return
-      }
-    }
-    do {
-      try device.lockForConfiguration()
-      device.torchMode = torchMode
-      if torchMode == .on {
-        try device.setTorchModeOn(level: 1.0)
-      }
-      device.unlockForConfiguration()
-    } catch let error as NSError {
-      invokeOnError(.device(.configureError), cause: error)
-      return
-    }
-  }
   @objc
   func onOrientationChanged() {
     updateOrientation()