fix: Frame Processor FPS (#1288)
* fix: Build using Xcode 14
* fix: Throttle Frame Processor by start time rather than end time
parent 4781ad9835
commit 52a1d50d91
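The second fix changes how the Frame Processor is throttled: instead of measuring wall-clock time with `DispatchTime.now()` around the previous call, the elapsed time is now derived from each frame's presentation timestamp, so the target FPS is enforced against when frames start on the capture timeline. A minimal standalone sketch of that idea in Swift follows; `FrameThrottler`, `targetFps` and `shouldProcess` are illustrative names and not part of the library.

import CoreMedia

// Sketch only: illustrative type, not VisionCamera's actual implementation.
final class FrameThrottler {
  private var lastFrameTimestampNs: UInt64 = 0
  private let targetFps: Double

  init(targetFps: Double) {
    self.targetFps = targetFps
  }

  /// Returns true if this frame should be processed to hit `targetFps`.
  func shouldProcess(_ sampleBuffer: CMSampleBuffer) -> Bool {
    // Use the frame's own presentation timestamp (its "start time"), in nanoseconds.
    let frameTimeNs = UInt64(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds * 1_000_000_000.0)
    let nanosecondsPerFrame = UInt64(1_000_000_000.0 / targetFps)

    // ">=" so a stream already running exactly at the target FPS is not halved.
    guard frameTimeNs - lastFrameTimestampNs >= nanosecondsPerFrame else {
      return false
    }
    lastFrameTimestampNs = frameTimeNs
    return true
  }
}

Tying the throttle to the buffer's presentation timestamp keeps it anchored to the capture timeline rather than to wall-clock scheduling jitter.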
@@ -16,5 +16,17 @@ target 'VisionCameraExample' do
   post_install do |installer|
     react_native_post_install(installer)
     __apply_Xcode_12_5_M1_post_install_workaround(installer)
+
+    # This is necessary for Xcode 14, because it signs resource bundles by default
+    # when building for devices. See https://github.com/facebook/react-native/issues/34673#issuecomment-1259253787
+    installer.target_installation_results.pod_target_installation_results
+      .each do |pod_name, target_installation_result|
+        target_installation_result.resource_bundle_targets.each do |resource_bundle_target|
+          resource_bundle_target.build_configurations.each do |config|
+            config.build_settings['CODE_SIGNING_ALLOWED'] = 'NO'
+          end
+        end
+      end
+
   end
 end
@@ -324,7 +324,7 @@ PODS:
     - React
   - RNVectorIcons (8.1.0):
     - React-Core
-  - VisionCamera (2.13.3):
+  - VisionCamera (2.14.0):
     - React
     - React-callinvoker
     - React-Core
@@ -504,9 +504,9 @@ SPEC CHECKSUMS:
   RNScreens: 40a2cb40a02a609938137a1e0acfbf8fc9eebf19
   RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
   RNVectorIcons: 31cebfcf94e8cf8686eb5303ae0357da64d7a5a4
-  VisionCamera: 7bcf3a81533a1c9ad13930804377ad13a03fcded
+  VisionCamera: 17d4ce16a6b3646081005292e2ee0e972b5233c9
   Yoga: e7dc4e71caba6472ff48ad7d234389b91dadc280
 
-PODFILE CHECKSUM: 29b1752e05601e9867644e58ce0ed8b9106be6cb
+PODFILE CHECKSUM: d3dcba1fba41a8301df6697eb0ddc5cfe4ea2cc8
 
 COCOAPODS: 1.10.2
@@ -212,11 +212,11 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
 
     if let frameProcessor = frameProcessorCallback, captureOutput is AVCaptureVideoDataOutput {
       // check if last frame was x nanoseconds ago, effectively throttling FPS
-      let lastFrameProcessorCallElapsedTime = DispatchTime.now().uptimeNanoseconds - lastFrameProcessorCall.uptimeNanoseconds
+      let frameTime = UInt64(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds * 1_000_000_000.0)
+      let lastFrameProcessorCallElapsedTime = frameTime - lastFrameProcessorCall
       let secondsPerFrame = 1.0 / actualFrameProcessorFps
       let nanosecondsPerFrame = secondsPerFrame * 1_000_000_000.0
 
-      if lastFrameProcessorCallElapsedTime > UInt64(nanosecondsPerFrame) {
+      if lastFrameProcessorCallElapsedTime >= UInt64(nanosecondsPerFrame) {
         if !isRunningFrameProcessor {
           // we're not in the middle of executing the Frame Processor, so prepare for next call.
           CameraQueues.frameProcessorQueue.async {
@@ -229,7 +229,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
 
             self.isRunningFrameProcessor = false
           }
-          lastFrameProcessorCall = DispatchTime.now()
+          lastFrameProcessorCall = frameTime
         } else {
           // we're still in the middle of executing a Frame Processor for a previous frame, so a frame was dropped.
           ReactLogger.log(level: .warning, message: "The Frame Processor took so long to execute that a frame was dropped.")
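The hunks above keep the existing re-entrancy guard: if the previous Frame Processor call is still executing when the next eligible frame arrives, the new frame is dropped and a warning is logged instead of letting work pile up on the queue. A minimal sketch of that guard pattern follows; `FrameProcessorRunner`, `isBusy` and `processingQueue` are hypothetical stand-ins for the real members (`isRunningFrameProcessor`, `CameraQueues.frameProcessorQueue`).

import CoreMedia
import Dispatch

// Sketch only: hypothetical names, not VisionCamera's actual implementation.
final class FrameProcessorRunner {
  private let processingQueue = DispatchQueue(label: "frame-processor")
  private var isBusy = false

  func process(_ sampleBuffer: CMSampleBuffer, using frameProcessor: @escaping (CMSampleBuffer) -> Void) {
    guard !isBusy else {
      // The previous frame's processor is still running: drop this frame.
      print("The Frame Processor took so long to execute that a frame was dropped.")
      return
    }
    isBusy = true
    processingQueue.async {
      frameProcessor(sampleBuffer)
      // Mark ourselves free again once the processor has finished,
      // mirroring the flag-based guard shown in the diff above.
      self.isBusy = false
    }
  }
}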
@@ -94,7 +94,7 @@ public final class CameraView: UIView {
   internal var isRecording = false
   internal var recordingSession: RecordingSession?
   @objc public var frameProcessorCallback: FrameProcessorCallback?
-  internal var lastFrameProcessorCall = DispatchTime.now()
+  internal var lastFrameProcessorCall = DispatchTime.now().uptimeNanoseconds
   // CameraView+TakePhoto
   internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
   // CameraView+Zoom