feat: frameProcessorFps="auto" and automatic performance suggestions (throttle or increase FPS) (#393)

* Add `onFrameProcessorPerformanceSuggestionAvailable` and make `frameProcessorFps` support `auto`
* Implement performance suggestion and auto-adjusting
* Fix FPS setting, evaluate correctly
* Floor suggested FPS
* Remove `console.log` for frame drop warnings
* Swift format
* Use `30` magic number
* Only call if FPS is different
* Update CameraView.swift
* Implement Android 1/2
* Cleanup
* Update `frameProcessorFps` if available
* Optimize `FrameProcessorPerformanceDataCollector` initialization
* Cache call
* Set frameProcessorFps directly (Kotlin setter)
* Don't suggest if same value
* Call suggestion every second
* Reset time on set
* Always store 15 last samples
* Reset counter too
* Update FrameProcessorPerformanceDataCollector.swift
* Update CameraView+RecordVideo.swift
* Update CameraView.kt
* iOS: Redesign evaluation
* Update CameraView+RecordVideo.swift
* Android: Redesign evaluation
* Update CameraView.kt
* Update REA to latest alpha and install RNScreens
* Fix frameProcessorFps updating
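Taken together, the diff below adds a new JS-facing prop value and event. As a usage sketch (not part of this diff): the `Camera` component and `useFrameProcessor` hook are the library's public API, and the suggestion payload follows the keys emitted by `invokeOnFrameProcessorPerformanceSuggestionAvailable` further down; treat the exact TypeScript shapes as illustrative.

```tsx
import * as React from 'react';
import { Camera, useFrameProcessor } from 'react-native-vision-camera';
import type { CameraDevice } from 'react-native-vision-camera';

export function AutoFpsCamera(props: { device: CameraDevice }): React.ReactElement {
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet';
    // ...run your per-frame work (e.g. ML inference) on `frame` here...
  }, []);

  return (
    <Camera
      style={{ flex: 1 }}
      device={props.device}
      isActive={true}
      frameProcessor={frameProcessor}
      // "auto": the native side starts at 30 FPS and adjusts itself from measured timings.
      frameProcessorFps="auto"
      // Fired at most once per second, and only when the suggestion actually changes.
      onFrameProcessorPerformanceSuggestionAvailable={(suggestion) => {
        console.log(`${suggestion.type}: try frameProcessorFps=${suggestion.suggestedFrameProcessorFps}`);
      }}
    />
  );
}
```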
@@ -118,10 +118,11 @@ extension CameraView {
       return
     }
 
     // TODO: Add JS-Event for Audio Session interruptions?
     switch type {
     case .began:
       // Something interrupted our Audio Session, stop recording audio.
-      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!", alsoLogToJS: true)
+      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
     case .ended:
       ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
       guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
@@ -129,13 +130,13 @@ extension CameraView {
       if options.contains(.shouldResume) {
         if isRecording {
           audioQueue.async {
-            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...", alsoLogToJS: true)
+            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
             // restart audio session because interruption is over
             self.activateAudioSession()
           }
         }
       } else {
-        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!", alsoLogToJS: true)
+        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
       }
     @unknown default: ()
     }
@@ -255,7 +255,7 @@ extension CameraView {
 
   @objc
   func sessionRuntimeError(notification: Notification) {
-    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!", alsoLogToJS: true)
+    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
     guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
       return
     }
@@ -8,9 +8,6 @@
 
 import AVFoundation
 
-private var hasLoggedVideoFrameDropWarning = false
-private var hasLoggedFrameProcessorFrameDropWarning = false
-
 // MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
 
 extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
@@ -203,63 +200,61 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
       }
     }
 
    // TODO: resize using VideoToolbox (VTPixelTransferSession)
 
-    if let frameProcessor = frameProcessorCallback,
-       captureOutput is AVCaptureVideoDataOutput {
+    if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
      // check if last frame was x nanoseconds ago, effectively throttling FPS
-      let diff = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
-      let secondsPerFrame = 1.0 / frameProcessorFps.doubleValue
+      let lastFrameProcessorCallElapsedTime = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
+      let secondsPerFrame = 1.0 / actualFrameProcessorFps
      let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0
 
-      if diff > UInt64(nanosecondsPerFrame) {
+      if lastFrameProcessorCallElapsedTime > UInt64(nanosecondsPerFrame) {
        if !isRunningFrameProcessor {
          // we're not in the middle of executing the Frame Processor, so prepare for next call.
          CameraQueues.frameProcessorQueue.async {
            self.isRunningFrameProcessor = true
 
+            let perfSample = self.frameProcessorPerformanceDataCollector.beginPerformanceSampleCollection()
            let frame = Frame(buffer: sampleBuffer, orientation: self.bufferOrientation)
            frameProcessor(frame)
+            perfSample.endPerformanceSampleCollection()
 
            self.isRunningFrameProcessor = false
          }
          lastFrameProcessorCall = DispatchTime.now()
        } else {
-          // we're still in the middle of executing a Frame Processor for a previous frame, notify user about dropped frame.
-          if !hasLoggedFrameProcessorFrameDropWarning {
-            ReactLogger.log(level: .warning,
-                            message: "Your Frame Processor took so long to execute that a frame was dropped. " +
-                              "Either throttle your Frame Processor's frame rate using the `frameProcessorFps` prop, or optimize " +
-                              "it's execution speed. (This warning will only be shown once)",
-                            alsoLogToJS: true)
-            hasLoggedFrameProcessorFrameDropWarning = true
-          }
+          // we're still in the middle of executing a Frame Processor for a previous frame, so a frame was dropped.
+          ReactLogger.log(level: .warning, message: "The Frame Processor took so long to execute that a frame was dropped.")
        }
      }
 
+      if isReadyForNewEvaluation {
+        // last evaluation was more than 1sec ago, evaluate again
+        evaluateNewPerformanceSamples()
+      }
    }
  }
 
-  #if DEBUG
-    public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
-      if !hasLoggedVideoFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
-        let reason = findFrameDropReason(inBuffer: buffer)
-        ReactLogger.log(level: .warning,
-                        message: "Dropped a Frame - This might indicate that your frame rate is higher than the phone can currently process. " +
-                          "Throttle the Camera frame rate using the `fps` prop and make sure the device stays in optimal condition for recording. " +
-                          "Frame drop reason: \(reason). (This warning will only be shown once)",
-                        alsoLogToJS: true)
-        hasLoggedVideoFrameDropWarning = true
-      }
-    }
-
-    private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
-      var mode: CMAttachmentMode = 0
-      guard let reason = CMGetAttachment(buffer,
-                                         key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
-                                         attachmentModeOut: &mode) else {
-        return "unknown"
-      }
-      return String(describing: reason)
-    }
-  #endif
+  private func evaluateNewPerformanceSamples() {
+    lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
+    guard let videoDevice = videoDeviceInput?.device else { return }
+
+    let maxFrameProcessorFps = Double(videoDevice.activeVideoMinFrameDuration.timescale) * Double(videoDevice.activeVideoMinFrameDuration.value)
+    let averageFps = 1.0 / frameProcessorPerformanceDataCollector.averageExecutionTimeSeconds
+    let suggestedFrameProcessorFps = floor(min(averageFps, maxFrameProcessorFps))
+
+    if frameProcessorFps.intValue == -1 {
+      // frameProcessorFps="auto"
+      actualFrameProcessorFps = suggestedFrameProcessorFps
+    } else {
+      // frameProcessorFps={someCustomFpsValue}
+      invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: frameProcessorFps.doubleValue,
+                                                           suggestedFps: suggestedFrameProcessorFps)
+    }
+  }
+
+  private var isReadyForNewEvaluation: Bool {
+    let lastPerformanceEvaluationElapsedTime = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorPerformanceEvaluation.uptimeNanoseconds
+    return lastPerformanceEvaluationElapsedTime > 1_000_000_000
+  }
 
   /**
    Gets the orientation of the CameraView's images (CMSampleBuffers).
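Two formulas do the work in the hunk above: the throttle runs the Frame Processor only when more than `1 / actualFrameProcessorFps` seconds have elapsed, and the evaluation suggests `floor(min(1 / averageExecutionTime, maxDeviceFps))`. A small TypeScript sketch with a worked example (function names are mine, for illustration only):

```ts
// Throttle check: only run the frame processor when a full frame interval has elapsed.
function shouldRunFrameProcessor(elapsedNanoseconds: number, targetFps: number): boolean {
  const nanosecondsPerFrame = (1.0 / targetFps) * 1_000_000_000.0;
  return elapsedNanoseconds > nanosecondsPerFrame;
}

// Suggested FPS: what the measured execution time can sustain, capped at the device's frame rate.
function suggestFrameProcessorFps(averageExecutionTimeSeconds: number, maxDeviceFps: number): number {
  return Math.floor(Math.min(1.0 / averageExecutionTimeSeconds, maxDeviceFps));
}

// A frame processor averaging 40ms per call on a 30 FPS device can only sustain 25 FPS:
console.log(suggestFrameProcessorFps(0.04, 30)); // 25 -> "should-use-lower-fps" if currently at 30
```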
@@ -52,7 +52,7 @@ public final class CameraView: UIView {
   // props that require format reconfiguring
   @objc var format: NSDictionary?
   @objc var fps: NSNumber?
-  @objc var frameProcessorFps: NSNumber = 1.0
+  @objc var frameProcessorFps: NSNumber = -1.0 // "auto"
   @objc var hdr: NSNumber? // nullable bool
   @objc var lowLightBoost: NSNumber? // nullable bool
   @objc var colorSpace: NSString?
@@ -64,6 +64,7 @@ public final class CameraView: UIView {
   // events
   @objc var onInitialized: RCTDirectEventBlock?
   @objc var onError: RCTDirectEventBlock?
+  @objc var onFrameProcessorPerformanceSuggestionAvailable: RCTDirectEventBlock?
   // zoom
   @objc var enableZoomGesture = false {
     didSet {
@@ -104,6 +105,10 @@ public final class CameraView: UIView {
 
   /// Specifies whether the frameProcessor() function is currently executing. used to drop late frames.
   internal var isRunningFrameProcessor = false
+  internal let frameProcessorPerformanceDataCollector = FrameProcessorPerformanceDataCollector()
+  internal var actualFrameProcessorFps = 30.0
+  internal var lastSuggestedFrameProcessorFps = 0.0
+  internal var lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
 
   /// Returns whether the AVCaptureSession is currently running (reflected by isActive)
   var isRunning: Bool {
@@ -244,6 +249,18 @@ public final class CameraView: UIView {
         }
       }
     }
+
+    // Frame Processor FPS Configuration
+    if changedProps.contains("frameProcessorFps") {
+      if frameProcessorFps.doubleValue == -1 {
+        // "auto"
+        actualFrameProcessorFps = 30.0
+      } else {
+        actualFrameProcessorFps = frameProcessorFps.doubleValue
+      }
+      lastFrameProcessorPerformanceEvaluation = DispatchTime.now()
+      frameProcessorPerformanceDataCollector.clear()
+    }
   }
 
   internal final func setTorchMode(_ torchMode: String) {
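On the native side, `-1` is the sentinel for `frameProcessorFps="auto"`, and until the first evaluation arrives the view bootstraps at 30 FPS. The JS layer presumably maps the string `"auto"` to that sentinel before the prop crosses the bridge; a hypothetical sketch of that mapping (the helper name is mine, not from this diff):

```ts
// Hypothetical JS-side normalization: the native prop is an NSNumber,
// so "auto" must travel across the bridge as a numeric sentinel.
function normalizeFrameProcessorFps(fps: number | 'auto'): number {
  return fps === 'auto' ? -1 : fps;
}

normalizeFrameProcessorFps('auto'); // -1 (native bootstraps at 30 FPS until samples arrive)
normalizeFrameProcessorFps(5); // 5
```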
@@ -336,4 +353,18 @@ public final class CameraView: UIView {
     guard let onInitialized = self.onInitialized else { return }
     onInitialized([String: Any]())
   }
+
+  internal final func invokeOnFrameProcessorPerformanceSuggestionAvailable(currentFps: Double, suggestedFps: Double) {
+    ReactLogger.log(level: .info, message: "Frame Processor Performance Suggestion available!")
+    guard let onFrameProcessorPerformanceSuggestionAvailable = self.onFrameProcessorPerformanceSuggestionAvailable else { return }
+
+    if lastSuggestedFrameProcessorFps == suggestedFps { return }
+    if suggestedFps == currentFps { return }
+
+    onFrameProcessorPerformanceSuggestionAvailable([
+      "type": suggestedFps > currentFps ? "can-use-higher-fps" : "should-use-lower-fps",
+      "suggestedFrameProcessorFps": suggestedFps,
+    ])
+    lastSuggestedFrameProcessorFps = suggestedFps
+  }
 }
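The dictionary passed to the event block above fixes the payload contract, and the two early returns ensure the event only fires when the suggestion is new and differs from the current FPS. In TypeScript terms the payload is (type name is mine; keys and values are taken verbatim from the call above):

```ts
type FrameProcessorPerformanceSuggestion = {
  type: 'can-use-higher-fps' | 'should-use-lower-fps';
  suggestedFrameProcessorFps: number;
};
```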
@@ -48,6 +48,7 @@ RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
 // Camera View Properties
 RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onFrameProcessorPerformanceSuggestionAvailable, RCTDirectEventBlock);
 
 // Camera View Functions
 RCT_EXTERN_METHOD(startRecording:(nonnull NSNumber *)node options:(NSDictionary *)options onRecordCallback:(RCTResponseSenderBlock)onRecordCallback);
@@ -17,11 +17,6 @@ final class CameraViewManager: RCTViewManager {
 
   override var bridge: RCTBridge! {
     didSet {
-      #if DEBUG
-        // Install console.log bindings
-        ReactLogger.ConsoleLogFunction = JSConsoleHelper.getLogFunction(for: bridge)
-      #endif
-
       // Install Frame Processor bindings and setup Runtime
       if VISION_CAMERA_ENABLE_FRAME_PROCESSORS {
         CameraQueues.frameProcessorQueue.async {
@@ -16,8 +16,7 @@ extension AVAudioSession {
   func updateCategory(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
     if self.category != category || categoryOptions.rawValue != options.rawValue {
       ReactLogger.log(level: .info,
-                      message: "Changing AVAudioSession category from \(self.category.rawValue) -> \(category.rawValue)",
-                      alsoLogToJS: true)
+                      message: "Changing AVAudioSession category from \(self.category.rawValue) -> \(category.rawValue)")
       try setCategory(category, options: options)
     }
   }
@@ -0,0 +1,63 @@
+//
+//  FrameProcessorPerformanceDataCollector.swift
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 30.08.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import Foundation
+
+// keep a maximum of `maxSampleSize` historical performance data samples cached.
+private let maxSampleSize = 15
+
+// MARK: - PerformanceSampleCollection
+
+struct PerformanceSampleCollection {
+  var endPerformanceSampleCollection: () -> Void
+
+  init(end: @escaping () -> Void) {
+    endPerformanceSampleCollection = end
+  }
+}
+
+// MARK: - FrameProcessorPerformanceDataCollector
+
+class FrameProcessorPerformanceDataCollector {
+  private var performanceSamples: [Double] = []
+  private var counter = 0
+  private var lastEvaluation = -1
+
+  var averageExecutionTimeSeconds: Double {
+    let sum = performanceSamples.reduce(0, +)
+    let average = sum / Double(performanceSamples.count)
+
+    lastEvaluation = counter
+
+    return average
+  }
+
+  func beginPerformanceSampleCollection() -> PerformanceSampleCollection {
+    let begin = DispatchTime.now()
+
+    return PerformanceSampleCollection {
+      let end = DispatchTime.now()
+      let seconds = Double(end.uptimeNanoseconds - begin.uptimeNanoseconds) / 1_000_000_000.0
+
+      let index = self.counter % maxSampleSize
+
+      if self.performanceSamples.count > index {
+        self.performanceSamples[index] = seconds
+      } else {
+        self.performanceSamples.append(seconds)
+      }
+
+      self.counter += 1
+    }
+  }
+
+  func clear() {
+    counter = 0
+    performanceSamples.removeAll()
+  }
+}
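The collector is a fixed-size ring buffer: `counter % maxSampleSize` picks the slot, so once 15 samples exist each new timing overwrites the oldest one, and `averageExecutionTimeSeconds` always reflects the 15 most recent runs. A compact TypeScript rendering of the same idea (a rough equivalent for illustration, not part of the codebase):

```ts
class RingBufferAverage {
  private samples: number[] = [];
  private counter = 0;

  constructor(private readonly maxSampleSize: number = 15) {}

  // Record one sample, overwriting the oldest slot once the buffer is full.
  push(seconds: number): void {
    const index = this.counter % this.maxSampleSize;
    if (this.samples.length > index) {
      this.samples[index] = seconds; // overwrite the oldest sample
    } else {
      this.samples.push(seconds); // buffer still filling up
    }
    this.counter += 1;
  }

  // Average over whatever samples exist so far (NaN when empty, like the Swift original).
  get average(): number {
    return this.samples.reduce((a, b) => a + b, 0) / this.samples.length;
  }
}
```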
@@ -11,11 +11,6 @@ import Foundation
 
 // MARK: - ReactLogger
 
 enum ReactLogger {
-  /**
-   A function that logs to the JavaScript console.
-   */
-  static var ConsoleLogFunction: ConsoleLogFunction?
-
   /**
    Log a message to the console in the format of `VisionCamera.[caller-function-name]: [message]`
@@ -27,14 +22,10 @@ enum ReactLogger {
   @inlinable
   static func log(level: RCTLogLevel,
                   message: String,
-                  alsoLogToJS: Bool = false,
                   _ file: String = #file,
                   _ lineNumber: Int = #line,
                   _ function: String = #function) {
     #if DEBUG
-      if alsoLogToJS, let log = ConsoleLogFunction {
-        log(level, "[native] VisionCamera.\(function): \(message)")
-      }
       RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "VisionCamera.\(function): \(message)")
     #endif
   }
@@ -60,7 +60,7 @@ class RecordingSession {
 
   deinit {
     if assetWriter.status == .writing {
-      ReactLogger.log(level: .info, message: "Cancelling AssetWriter...", alsoLogToJS: true)
+      ReactLogger.log(level: .info, message: "Cancelling AssetWriter...")
       assetWriter.cancelWriting()
     }
   }
@@ -70,11 +70,11 @@ class RecordingSession {
   */
   func initializeVideoWriter(withSettings settings: [String: Any], pixelFormat: OSType) {
     guard !settings.isEmpty else {
-      ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!", alsoLogToJS: true)
+      ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!")
       return
     }
     guard bufferAdaptor == nil else {
-      ReactLogger.log(level: .error, message: "Tried to add Video Writer twice!", alsoLogToJS: true)
+      ReactLogger.log(level: .error, message: "Tried to add Video Writer twice!")
       return
     }
 
@@ -93,11 +93,11 @@ class RecordingSession {
   */
   func initializeAudioWriter(withSettings settings: [String: Any]) {
     guard !settings.isEmpty else {
-      ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!", alsoLogToJS: true)
+      ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!")
       return
     }
     guard audioWriter == nil else {
-      ReactLogger.log(level: .error, message: "Tried to add Audio Writer twice!", alsoLogToJS: true)
+      ReactLogger.log(level: .error, message: "Tried to add Audio Writer twice!")
       return
     }
 
@@ -139,8 +139,7 @@ class RecordingSession {
     }
     guard let initialTimestamp = initialTimestamp else {
       ReactLogger.log(level: .error,
-                      message: "A frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
-                      alsoLogToJS: true)
+                      message: "A frame arrived, but initialTimestamp was nil. Is this RecordingSession running?")
       return
     }
 
@@ -149,17 +148,16 @@ class RecordingSession {
     switch bufferType {
     case .video:
       guard let bufferAdaptor = bufferAdaptor else {
-        ReactLogger.log(level: .error, message: "Video Frame arrived but VideoWriter was nil!", alsoLogToJS: true)
+        ReactLogger.log(level: .error, message: "Video Frame arrived but VideoWriter was nil!")
        return
      }
      if !bufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
        ReactLogger.log(level: .warning,
-                       message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?",
-                       alsoLogToJS: true)
+                       message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?")
        return
      }
      guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
-        ReactLogger.log(level: .error, message: "Failed to get the CVImageBuffer!", alsoLogToJS: true)
+        ReactLogger.log(level: .error, message: "Failed to get the CVImageBuffer!")
        return
      }
      bufferAdaptor.append(imageBuffer, withPresentationTime: timestamp)
@@ -169,7 +167,7 @@ class RecordingSession {
      }
    case .audio:
      guard let audioWriter = audioWriter else {
-        ReactLogger.log(level: .error, message: "Audio Frame arrived but AudioWriter was nil!", alsoLogToJS: true)
+        ReactLogger.log(level: .error, message: "Audio Frame arrived but AudioWriter was nil!")
        return
      }
      if !audioWriter.isReadyForMoreMediaData {
@@ -184,8 +182,7 @@ class RecordingSession {
 
    if assetWriter.status == .failed {
      ReactLogger.log(level: .error,
-                     message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")",
-                     alsoLogToJS: true)
+                     message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")")
      finish()
    }
  }
@@ -135,6 +135,7 @@
 		B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
 		B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
 		B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
+		B8948BDF26DCEE2B00B430E2 /* FrameProcessorPerformanceDataCollector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameProcessorPerformanceDataCollector.swift; sourceTree = "<group>"; };
 		B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
 		B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
 		B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
@@ -268,6 +269,7 @@
 				B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
 				B80416F026AB16E8000DEB6A /* VisionCameraScheduler.mm */,
 				B80416F126AB16F3000DEB6A /* VisionCameraScheduler.h */,
+				B8948BDF26DCEE2B00B430E2 /* FrameProcessorPerformanceDataCollector.swift */,
 			);
 			path = "Frame Processor";
 			sourceTree = "<group>";