fix: Move Audio Input initialization shortly before startRecording (#159)

* rename

* Update AVAudioSession+updateCategory.swift

* fix bootstrap script

* Update CameraView+AVAudioSession.swift

* move audio input adding lower

* Activate AudioSession only when starting recording

* format

* Deactivate Audio Session

* remove audio input before deactivating audio session

* Update CameraView+AVAudioSession.swift

* log time

* Update CameraView+AVAudioSession.swift

* measure time with `measureElapsedTime`

* Update project.pbxproj

* only log in debug builds

* bootstrap with bridge (RNN new API)

* Mark two funcs as `@inlinable`

* format

* Update ReactLogger.swift

* Make audioWriter optional (allow videos without sound)

* only log frame drop reason in DEBUG

* Make audio writing entirely optional

* format

* Use function name as label for measureElapsedTime

* Update MeasureElapsedTime.swift

* Update MeasureElapsedTime.swift

* Mark AudioWriter as finished

* set `automaticallyConfiguresApplicationAudioSession` once

* Add JS console logging

* log to JS console for a few logs

* Update AVAudioSession+updateCategory.swift

* format

* Update JSConsoleHelper.mm

* catch log errors

* Update ReactLogger.swift

* fix docs

* Update RecordingSession.swift

* Immediately add audio input

* Update CameraView+AVCaptureSession.swift

* Update CameraView+AVCaptureSession.swift

* Update ReactLogger.swift

* immediately set audio session

* extract

* format

* Update TROUBLESHOOTING.mdx

* hmm

* Update AVAudioSession+updateCategory.swift

* Create secondary `AVCaptureSession` for audio

* Configure once, start stop on demand

* format

* fix audio notification interruptions

* docs
Marc Rousavy · 2021-06-03 14:16:02 +02:00 · committed by GitHub
parent 71730a73ef
commit eeb765f018
21 changed files with 420 additions and 212 deletions
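
In short: instead of configuring and activating the shared AVAudioSession while the camera starts up, audio now lives on a separate AVCaptureSession that is configured once and only started (together with activating the AVAudioSession) when a recording actually begins, then stopped again when it finishes. A rough sketch of the resulting lifecycle, pieced together from the diffs below (simplified, error handling omitted):

// configure once, during view setup:
configureAudioSession()           // adds the audio input and audio data output to audioCaptureSession

// on startRecording(), off the main thread:
audioQueue.async {
  self.activateAudioSession()     // updateCategory(...) + audioCaptureSession.startRunning(), only now
  self.recordingSession?.start()  // begin writing, with or without audio
}

// once the RecordingSession finishes:
audioQueue.async {
  self.deactivateAudioSession()   // audioCaptureSession.stopRunning(), hand audio back immediately
}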

VisionCamera.podspec

@ -30,6 +30,7 @@ Pod::Spec.new do |s|
"ios/Frame Processor/FrameProcessorPluginRegistry.h",
"ios/Frame Processor/FrameProcessorPlugin.h",
"ios/React Utils/RCTBridge+runOnJS.h",
"ios/React Utils/JSConsoleHelper.h",
"cpp/**/*.{cpp}",
]
# Any private headers that are not globally unique should be mentioned here.

docs/docs/guides/TROUBLESHOOTING.mdx

@ -34,6 +34,7 @@ Before opening an issue, make sure you try the following:
4. Choose whatever name you want, e.g. `File.swift` and press **Create**
5. Press **Create Bridging Header** when prompted.
6. If you're having runtime issues, check the logs in Xcode to find out more. In Xcode, go to **View** > **Debug Area** > **Activate Console** (<kbd>⇧</kbd>+<kbd>⌘</kbd>+<kbd>C</kbd>).
* For errors without messages, there's often an error code attached. Look up the error code on [osstatus.com](https://www.osstatus.com) to get more information about a specific error.
## Android

example/ios/Podfile.lock

@ -490,7 +490,7 @@ SPEC CHECKSUMS:
RNReanimated: 9c13c86454bfd54dab7505c1a054470bfecd2563
RNStaticSafeAreaInsets: 6103cf09647fa427186d30f67b0f5163c1ae8252
RNVectorIcons: 31cebfcf94e8cf8686eb5303ae0357da64d7a5a4
VisionCamera: d274e912758134d5275d1ee3b9873d40d1fbb2a1
VisionCamera: c4e2782fbbca6dcea922fcfeabb0070e1dcda493
Yoga: a7de31c64fe738607e7a3803e3f591a4b1df7393
PODFILE CHECKSUM: 4b093c1d474775c2eac3268011e4b0b80929d3a2

example/ios/VisionCameraExample/AppDelegate.m

@ -15,7 +15,8 @@
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
[ReactNativeNavigation bootstrapWithDelegate:self launchOptions:launchOptions];
RCTBridge* bridge = [[RCTBridge alloc] initWithDelegate:self launchOptions:launchOptions];
[ReactNativeNavigation bootstrapWithBridge:bridge];
return YES;
}

ios/CameraBridge.h

@ -16,6 +16,7 @@
#import "FrameProcessorCallback.h"
#import "FrameProcessorRuntimeManager.h"
#import "RCTBridge+runOnJS.h"
#import "JSConsoleHelper.h"
#ifdef VISION_CAMERA_DISABLE_FRAME_PROCESSORS
static bool enableFrameProcessors = false;

ios/CameraError.swift

@ -146,6 +146,7 @@ enum FormatError {
enum SessionError {
case cameraNotReady
case audioSessionSetupFailed(reason: String)
case audioSessionFailedToActivate
case audioInUseByOtherApp
var code: String {
@ -156,6 +157,8 @@ enum SessionError {
return "audio-session-setup-failed"
case .audioInUseByOtherApp:
return "audio-in-use-by-other-app"
case .audioSessionFailedToActivate:
return "audio-session-failed-to-activate"
}
}
@ -167,6 +170,8 @@ enum SessionError {
return "The audio session failed to setup! \(reason)"
case .audioInUseByOtherApp:
return "The audio session is already in use by another app with higher priority!"
case .audioSessionFailedToActivate:
return "Failed to activate Audio Session!"
}
}
}

ios/CameraView+AVAudioSession.swift

@ -14,71 +14,81 @@ import Foundation
*/
extension CameraView {
/**
Configures the Audio session to allow background-music playback while recording.
Configures the Audio Capture Session with an audio input and audio data output.
*/
final func configureAudioSession() {
let start = DispatchTime.now()
ReactLogger.log(level: .info, message: "Configuring Audio Session...")
audioCaptureSession.beginConfiguration()
defer {
audioCaptureSession.commitConfiguration()
}
audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
// Audio Input
do {
try addAudioInput()
if let audioDeviceInput = self.audioDeviceInput {
audioCaptureSession.removeInput(audioDeviceInput)
self.audioDeviceInput = nil
}
ReactLogger.log(level: .info, message: "Adding Audio input...")
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
return invokeOnError(.device(.microphoneUnavailable))
}
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
}
audioCaptureSession.addInput(audioDeviceInput!)
} catch let error as NSError {
return invokeOnError(.device(.microphoneUnavailable), cause: error)
}
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategoryIfNotSet(AVAudioSession.Category.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker])
audioSession.trySetAllowHaptics(true)
// Audio Output
if let audioOutput = self.audioOutput {
audioCaptureSession.removeOutput(audioOutput)
self.audioOutput = nil
}
ReactLogger.log(level: .info, message: "Adding Audio Data output...")
audioOutput = AVCaptureAudioDataOutput()
guard audioCaptureSession.canAddOutput(audioOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
}
audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
audioCaptureSession.addOutput(audioOutput!)
}
// activate current audio session because camera is active
try audioSession.setActive(true)
/**
Configures the Audio session and activates it. If the session was already active, it is briefly deactivated before reconfiguration.
The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording.
Background audio is allowed to play on speakers or bluetooth speakers.
*/
final func activateAudioSession() {
ReactLogger.log(level: .info, message: "Activating Audio Session...")
do {
try AVAudioSession.sharedInstance().updateCategory(AVAudioSession.Category.playAndRecord,
options: [.mixWithOthers,
.allowBluetoothA2DP,
.defaultToSpeaker,
.allowAirPlay])
audioCaptureSession.startRunning()
} catch let error as NSError {
switch error.code {
case 561_017_449:
self.invokeOnError(.session(.audioInUseByOtherApp), cause: error)
default:
self.invokeOnError(.session(.audioSessionSetupFailed(reason: error.description)), cause: error)
self.invokeOnError(.session(.audioSessionFailedToActivate), cause: error)
}
self.removeAudioInput()
}
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "Configured Audio session in \(Double(nanoTime) / 1_000_000)ms!")
}
/**
Configures the CaptureSession and adds the audio device if it has not already been added yet.
*/
func addAudioInput() throws {
if audioDeviceInput != nil {
// we already added the audio device, don't add it again
return
}
removeAudioInput()
final func deactivateAudioSession() {
ReactLogger.log(level: .info, message: "Deactivating Audio Session...")
ReactLogger.log(level: .info, message: "Adding audio input...")
captureSession.beginConfiguration()
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraError.device(.microphoneUnavailable)
}
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
guard captureSession.canAddInput(audioDeviceInput!) else {
throw CameraError.parameter(.unsupportedInput(inputDescriptor: "audio-input"))
}
captureSession.addInput(audioDeviceInput!)
captureSession.automaticallyConfiguresApplicationAudioSession = false
captureSession.commitConfiguration()
}
/**
Configures the CaptureSession and removes the audio device if it has been added before.
*/
func removeAudioInput() {
guard let audioInput = audioDeviceInput else {
return
}
ReactLogger.log(level: .info, message: "Removing audio input...")
captureSession.beginConfiguration()
captureSession.removeInput(audioInput)
audioDeviceInput = nil
captureSession.commitConfiguration()
audioCaptureSession.stopRunning()
}
@objc
@ -93,18 +103,21 @@ extension CameraView {
switch type {
case .began:
// Something interrupted our Audio Session, stop recording audio.
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
removeAudioInput()
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!", alsoLogToJS: true)
case .ended:
ReactLogger.log(level: .error, message: "The Audio Session interruption has ended.")
ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) {
ReactLogger.log(level: .error, message: "Resuming interrupted Audio Session...")
// restart audio session because interruption is over
configureAudioSession()
if isRecording {
audioQueue.async {
ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...", alsoLogToJS: true)
// restart audio session because interruption is over
self.activateAudioSession()
}
}
} else {
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!", alsoLogToJS: true)
}
@unknown default: ()
}

ios/CameraView+AVCaptureSession.swift

@ -13,6 +13,8 @@ import Foundation
Extension for CameraView that sets up the AVCaptureSession, Device and Format.
*/
extension CameraView {
// pragma MARK: Configure Capture Session
/**
Configures the Capture Session.
*/
@ -35,9 +37,6 @@ extension CameraView {
captureSession.commitConfiguration()
}
// Disable automatic Audio Session configuration because we configure it in CameraView+AVAudioSession.swift (called before Camera gets activated)
captureSession.automaticallyConfiguresApplicationAudioSession = false
// If preset is set, use preset. Otherwise use format.
if let preset = self.preset {
var sessionPreset: AVCaptureSession.Preset?
@ -58,12 +57,14 @@ extension CameraView {
}
}
// INPUTS
// pragma MARK: Capture Session Inputs
// Video Input
do {
if let videoDeviceInput = self.videoDeviceInput {
captureSession.removeInput(videoDeviceInput)
self.videoDeviceInput = nil
}
ReactLogger.log(level: .info, message: "Adding Video input...")
guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
return invokeOnError(.device(.invalid))
}
@ -77,11 +78,14 @@ extension CameraView {
return invokeOnError(.device(.invalid))
}
// OUTPUTS
// pragma MARK: Capture Session Outputs
// Photo Output
if let photoOutput = self.photoOutput {
captureSession.removeOutput(photoOutput)
self.photoOutput = nil
}
// Photo Output
ReactLogger.log(level: .info, message: "Adding Photo output...")
photoOutput = AVCapturePhotoOutput()
photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
@ -115,24 +119,13 @@ extension CameraView {
videoOutput!.mirror()
}
// Audio Output
if let audioOutput = self.audioOutput {
captureSession.removeOutput(audioOutput)
self.audioOutput = nil
}
ReactLogger.log(level: .info, message: "Adding Audio Data output...")
audioOutput = AVCaptureAudioDataOutput()
guard captureSession.canAddOutput(audioOutput!) else {
return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
}
audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
captureSession.addOutput(audioOutput!)
invokeOnInitialized()
isReady = true
ReactLogger.log(level: .info, message: "Session successfully configured!")
}
// pragma MARK: Configure Device
/**
Configures the Video Device with the given FPS, HDR and ColorSpace.
*/
@ -182,6 +175,8 @@ extension CameraView {
}
}
// pragma MARK: Configure Format
/**
Configures the Video Device to find the best matching Format.
*/
@ -216,9 +211,11 @@ extension CameraView {
}
}
// pragma MARK: Notifications/Interruptions
@objc
func sessionRuntimeError(notification: Notification) {
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!")
ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!", alsoLogToJS: true)
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
@ -232,40 +229,4 @@ extension CameraView {
}
}
}
@objc
func sessionInterruptionBegin(notification: Notification) {
ReactLogger.log(level: .error, message: "Capture Session Interruption begin Notification!")
guard let reasonNumber = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? NSNumber else {
return
}
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
switch reason {
case .audioDeviceInUseByAnotherClient:
// remove audio input so iOS thinks nothing is wrong and won't pause the session.
removeAudioInput()
default:
// don't do anything, iOS will automatically pause session
break
}
}
@objc
func sessionInterruptionEnd(notification: Notification) {
ReactLogger.log(level: .error, message: "Capture Session Interruption end Notification!")
guard let reasonNumber = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? NSNumber else {
return
}
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonNumber.intValue)
switch reason {
case .audioDeviceInUseByAnotherClient:
// add audio again because we removed it when we received the interruption.
configureAudioSession()
default:
// don't do anything, iOS will automatically resume session
break
}
}
}

ios/CameraView+RecordVideo.swift

@ -13,9 +13,13 @@ private var hasLoggedFrameDropWarning = false
// MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
/**
Starts a video + audio recording with a custom Asset Writer.
*/
func startRecording(options: NSDictionary, callback: @escaping RCTResponseSenderBlock) {
cameraQueue.async {
ReactLogger.log(level: .info, message: "Starting Video recording...")
do {
let errorPointer = ErrorPointer(nilLiteral: ())
guard let tempFilePath = RCTTempFilePath("mov", errorPointer) else {
@ -41,6 +45,9 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
let onFinish = { (status: AVAssetWriter.Status, error: Error?) -> Void in
defer {
self.recordingSession = nil
self.audioQueue.async {
self.deactivateAudioSession()
}
}
ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
if let error = error {
@ -58,16 +65,36 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
}
let videoSettings = self.videoOutput!.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
let audioSettings = self.audioOutput!.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any]
self.recordingSession = try RecordingSession(url: tempURL,
fileType: fileType,
videoSettings: videoSettings ?? [:],
audioSettings: audioSettings ?? [:],
isVideoMirrored: self.videoOutput!.isMirrored,
completion: onFinish)
self.isRecording = true
// Init Video
guard let videoOutput = self.videoOutput,
let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
!videoSettings.isEmpty else {
throw CameraError.capture(.createRecorderError(message: "Failed to get video settings!"))
}
self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
isVideoMirrored: self.videoOutput!.isMirrored)
// Init Audio (optional, async)
self.audioQueue.async {
// Activate Audio Session (blocking)
self.activateAudioSession()
guard let recordingSession = self.recordingSession else {
// recording has already been cancelled
return
}
if let audioOutput = self.audioOutput,
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) as? [String: Any] {
recordingSession.initializeAudioWriter(withSettings: audioSettings)
}
// Finally start recording, with or without audio.
recordingSession.start()
self.isRecording = true
}
} catch EnumParserError.invalidValue {
return callback([NSNull(), EnumParserError.invalidValue])
} catch let error as NSError {
@ -77,9 +104,9 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
func stopRecording(promise: Promise) {
isRecording = false
cameraQueue.async {
self.isRecording = false
withPromise(promise) {
guard let recordingSession = self.recordingSession else {
throw CameraError.capture(.noRecordingInProgress)
@ -146,13 +173,16 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
}
public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
let reason = findFrameDropReason(inBuffer: buffer)
// TODO: Show in React console?
ReactLogger.log(level: .warning, message: "Dropped a Frame. This might indicate that your Frame Processor is doing too much work. " +
"Either throttle the frame processor's frame rate, or optimize your frame processor's execution speed. Frame drop reason: \(reason)")
hasLoggedFrameDropWarning = true
}
#if DEBUG
if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
let reason = findFrameDropReason(inBuffer: buffer)
ReactLogger.log(level: .warning,
message: "Dropped a Frame. This might indicate that your Frame Processor is doing too much work. " +
"Either throttle the frame processor's frame rate, or optimize your frame processor's execution speed. Frame drop reason: \(reason)",
alsoLogToJS: true)
hasLoggedFrameDropWarning = true
}
#endif
}
private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {

ios/CameraView.swift

@ -74,6 +74,7 @@ public final class CameraView: UIView {
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
// Capture Session
internal let captureSession = AVCaptureSession()
internal let audioCaptureSession = AVCaptureSession()
// Inputs
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
@ -121,17 +122,17 @@ public final class CameraView: UIView {
name: .AVCaptureSessionRuntimeError,
object: captureSession)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionInterruptionBegin),
name: .AVCaptureSessionWasInterrupted,
object: captureSession)
NotificationCenter.default.addObserver(self,
selector: #selector(sessionInterruptionEnd),
name: .AVCaptureSessionInterruptionEnded,
object: captureSession)
selector: #selector(sessionRuntimeError),
name: .AVCaptureSessionRuntimeError,
object: audioCaptureSession)
NotificationCenter.default.addObserver(self,
selector: #selector(audioSessionInterrupted),
name: AVAudioSession.interruptionNotification,
object: AVAudioSession.sharedInstance)
audioQueue.async {
self.configureAudioSession()
}
}
@available(*, unavailable)
@ -144,11 +145,8 @@ public final class CameraView: UIView {
name: .AVCaptureSessionRuntimeError,
object: captureSession)
NotificationCenter.default.removeObserver(self,
name: .AVCaptureSessionWasInterrupted,
object: captureSession)
NotificationCenter.default.removeObserver(self,
name: .AVCaptureSessionInterruptionEnded,
object: captureSession)
name: .AVCaptureSessionRuntimeError,
object: audioCaptureSession)
NotificationCenter.default.removeObserver(self,
name: AVAudioSession.interruptionNotification,
object: AVAudioSession.sharedInstance)
@ -189,7 +187,6 @@ public final class CameraView: UIView {
if shouldCheckActive && self.captureSession.isRunning != self.isActive {
if self.isActive {
ReactLogger.log(level: .info, message: "Starting Session...")
self.configureAudioSession()
self.captureSession.startRunning()
ReactLogger.log(level: .info, message: "Started Session!")
} else {

ios/CameraViewManager.swift

@ -17,12 +17,18 @@ final class CameraViewManager: RCTViewManager {
override var bridge: RCTBridge! {
didSet {
if !enableFrameProcessors { return }
#if DEBUG
// Install console.log bindings
ReactLogger.ConsoleLogFunction = JSConsoleHelper.getLogFunction(for: bridge)
#endif
CameraQueues.videoQueue.async {
self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
self.bridge.runOnJS {
self.runtimeManager!.installFrameProcessorBindings()
// Install Frame Processor bindings and setup Runtime
if enableFrameProcessors {
CameraQueues.videoQueue.async {
self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
self.bridge.runOnJS {
self.runtimeManager!.installFrameProcessorBindings()
}
}
}
}

ios/Extensions/AVAudioSession+setCategoryIfNotSet.swift

@ -1,21 +0,0 @@
//
// AVAudioSession+trySetCategory.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension AVAudioSession {
/**
Calls [setCategory] if the given category or options are not equal to the currently set category and options.
*/
func setCategoryIfNotSet(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
if self.category != category || categoryOptions.rawValue != options.rawValue {
try setCategory(category, options: options)
}
}
}

ios/Extensions/AVAudioSession+updateCategory.swift

@ -0,0 +1,24 @@
//
// AVAudioSession+updateCategory.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension AVAudioSession {
/**
Calls [setCategory] only if the given category or options differ from the ones currently set, to avoid unnecessarily reconfiguring the audio session.
*/
func updateCategory(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
if self.category != category || categoryOptions.rawValue != options.rawValue {
ReactLogger.log(level: .info,
message: "Changing AVAudioSession category from \(self.category.rawValue) -> \(category.rawValue)",
alsoLogToJS: true)
try setCategory(category, options: options)
}
}
}
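
As used by activateAudioSession() above, this keeps repeated recordings cheap: the category is only rewritten when it actually changes. A hypothetical standalone call:

do {
  try AVAudioSession.sharedInstance().updateCategory(.playAndRecord,
                                                     options: [.mixWithOthers, .allowBluetoothA2DP, .defaultToSpeaker, .allowAirPlay])
} catch {
  // setCategory can throw, e.g. OSStatus 561017449 when a higher-priority app holds the audio session
}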

ios/React Utils/JSConsoleHelper.h

@ -0,0 +1,20 @@
//
// JSConsoleHelper.h
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#pragma once
#import <React/RCTBridge.h>
#import <React/RCTLog.h>
@interface JSConsoleHelper : NSObject
typedef void (^ConsoleLogFunction) (RCTLogLevel level, NSString* message);
+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge;
@end

ios/React Utils/JSConsoleHelper.mm

@ -0,0 +1,60 @@
//
// JSConsoleHelper.mm
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "JSConsoleHelper.h"
#import <React/RCTBridge.h>
#import <ReactCommon/RCTTurboModule.h>
#import <React/RCTBridge+Private.h>
#import <jsi/jsi.h>
#import "RCTBridge+runOnJS.h"
@implementation JSConsoleHelper
+ (const char *) getLogFunctionNameForLogLevel:(RCTLogLevel)level {
switch (level) {
case RCTLogLevelTrace:
return "trace";
case RCTLogLevelInfo:
return "log";
case RCTLogLevelWarning:
return "warn";
case RCTLogLevelError:
case RCTLogLevelFatal:
return "error";
}
}
+ (ConsoleLogFunction) getLogFunctionForBridge:(RCTBridge*)bridge {
RCTCxxBridge *cxxBridge = (RCTCxxBridge *)bridge;
if (!cxxBridge.runtime) {
return nil;
}
jsi::Runtime* jsiRuntime = (jsi::Runtime*)cxxBridge.runtime;
return ^(RCTLogLevel level, NSString* message) {
[bridge runOnJS:^{
if (jsiRuntime != nullptr) {
jsi::Runtime& runtime = *jsiRuntime;
auto logFunctionName = [JSConsoleHelper getLogFunctionNameForLogLevel:level];
try {
auto console = runtime.global().getPropertyAsObject(runtime, "console");
auto log = console.getPropertyAsFunction(runtime, logFunctionName);
log.call(runtime, jsi::String::createFromAscii(runtime, [message UTF8String]));
} catch (jsi::JSError& jsError) {
NSLog(@"%@", message);
NSLog(@"Failed to call `console.%s`: %s", logFunctionName, jsError.getMessage().c_str());
}
}
}];
};
}
@end
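
From Swift, the getLogFunctionForBridge: class method is imported as getLogFunction(for:); this is how CameraViewManager installs the binding in the diff above:

ReactLogger.ConsoleLogFunction = JSConsoleHelper.getLogFunction(for: bridge)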

ios/React Utils/ReactLogger.swift

@ -8,16 +8,34 @@
import Foundation
let context = "VisionCamera"
// MARK: - ReactLogger
enum ReactLogger {
/**
A function that logs to the JavaScript console.
*/
static var ConsoleLogFunction: ConsoleLogFunction?
/**
Log a message to the console in the format of `VisionCamera.[caller-function-name]: [message]`
@discussion
If the global ConsoleLogFunction is set, this function also logs to the JavaScript console (console.log, console.trace, console.warn or console.error)
This function also logs to [RCTDefaultLogFunction].
In non-DEBUG builds, this function is a no-op.
*/
@inlinable
static func log(level: RCTLogLevel,
message: String,
alsoLogToJS: Bool = false,
_ file: String = #file,
_ lineNumber: Int = #line,
_ function: String = #function) {
RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "\(context).\(function): \(message)")
#if DEBUG
if alsoLogToJS, let log = ConsoleLogFunction {
log(level, "[native] VisionCamera.\(function): \(message)")
}
RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "VisionCamera.\(function): \(message)")
#endif
}
}
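
Typical call sites, as they appear throughout this commit; pass alsoLogToJS for messages that should also reach the JavaScript console:

ReactLogger.log(level: .info, message: "Starting Video recording...")
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!", alsoLogToJS: true)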

ios/RecordingSession.swift

@ -11,7 +11,7 @@ import Foundation
// MARK: - BufferType
enum BufferType {
enum BufferType: String {
case audio
case video
}
@ -20,12 +20,11 @@ enum BufferType {
class RecordingSession {
private let assetWriter: AVAssetWriter
private let audioWriter: AVAssetWriterInput
private let videoWriter: AVAssetWriterInput
private let bufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
private var audioWriter: AVAssetWriterInput?
private var bufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
private let completionHandler: (AVAssetWriter.Status, Error?) -> Void
private let initialTimestamp: CMTime
private var initialTimestamp: CMTime?
private var latestTimestamp: CMTime?
private var hasWrittenFirstVideoFrame = false
@ -34,7 +33,8 @@ class RecordingSession {
}
var duration: Double {
guard let latestTimestamp = latestTimestamp else {
guard let latestTimestamp = latestTimestamp,
let initialTimestamp = initialTimestamp else {
return 0.0
}
return (latestTimestamp - initialTimestamp).seconds
@ -42,61 +42,98 @@ class RecordingSession {
init(url: URL,
fileType: AVFileType,
videoSettings: [String: Any],
audioSettings: [String: Any],
isVideoMirrored: Bool,
completion: @escaping (AVAssetWriter.Status, Error?) -> Void) throws {
completionHandler = completion
do {
assetWriter = try AVAssetWriter(outputURL: url, fileType: fileType)
audioWriter = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
videoWriter = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
completionHandler = completion
} catch let error as NSError {
throw CameraError.capture(.createRecorderError(message: error.description))
}
}
audioWriter.expectsMediaDataInRealTime = true
deinit {
if assetWriter.status == .writing {
ReactLogger.log(level: .info, message: "Cancelling AssetWriter...", alsoLogToJS: true)
assetWriter.cancelWriting()
}
}
func initializeVideoWriter(withSettings settings: [String: Any], isVideoMirrored: Bool) {
guard !settings.isEmpty else {
ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!", alsoLogToJS: true)
return
}
guard bufferAdaptor == nil else {
ReactLogger.log(level: .error, message: "Tried to add Video Writer twice!", alsoLogToJS: true)
return
}
let videoWriter = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
videoWriter.expectsMediaDataInRealTime = true
if isVideoMirrored {
videoWriter.transform = CGAffineTransform(rotationAngle: -(.pi / 2))
} else {
videoWriter.transform = CGAffineTransform(rotationAngle: .pi / 2)
}
bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriter, withVideoSettings: videoSettings)
assetWriter.add(videoWriter)
assetWriter.add(audioWriter)
assetWriter.startWriting()
initialTimestamp = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 1_000_000_000)
assetWriter.startSession(atSourceTime: initialTimestamp)
ReactLogger.log(level: .info, message: "Initialized Video and Audio AssetWriter.")
bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriter, withVideoSettings: settings)
ReactLogger.log(level: .info, message: "Initialized Video AssetWriter.")
}
deinit {
if assetWriter.status == .writing {
ReactLogger.log(level: .info, message: "Cancelling AssetWriter...")
assetWriter.cancelWriting()
func initializeAudioWriter(withSettings settings: [String: Any]) {
guard !settings.isEmpty else {
ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!", alsoLogToJS: true)
return
}
guard audioWriter == nil else {
ReactLogger.log(level: .error, message: "Tried to add Audio Writer twice!", alsoLogToJS: true)
return
}
audioWriter = AVAssetWriterInput(mediaType: .audio, outputSettings: settings)
audioWriter!.expectsMediaDataInRealTime = true
assetWriter.add(audioWriter!)
ReactLogger.log(level: .info, message: "Initialized Audio AssetWriter.")
}
func start() {
assetWriter.startWriting()
initialTimestamp = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 1_000_000_000)
assetWriter.startSession(atSourceTime: initialTimestamp!)
ReactLogger.log(level: .info, message: "Started RecordingSession at \(initialTimestamp!.seconds) seconds.")
}
func appendBuffer(_ buffer: CMSampleBuffer, type bufferType: BufferType) {
if !CMSampleBufferDataIsReady(buffer) {
return
}
guard let initialTimestamp = initialTimestamp else {
ReactLogger.log(level: .error,
message: "A \(bufferType.rawValue) frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
alsoLogToJS: true)
return
}
let timestamp = CMSampleBufferGetPresentationTimeStamp(buffer)
latestTimestamp = timestamp
switch bufferType {
case .video:
if !videoWriter.isReadyForMoreMediaData {
ReactLogger.log(level: .warning, message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?")
guard let bufferAdaptor = bufferAdaptor else {
ReactLogger.log(level: .error, message: "Video Frame arrived but VideoWriter was nil!", alsoLogToJS: true)
return
}
if !bufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
ReactLogger.log(level: .warning,
message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?",
alsoLogToJS: true)
return
}
guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
ReactLogger.log(level: .error, message: "Failed to get the CVImageBuffer!")
ReactLogger.log(level: .error, message: "Failed to get the CVImageBuffer!", alsoLogToJS: true)
return
}
bufferAdaptor.append(imageBuffer, withPresentationTime: timestamp)
@ -105,6 +142,10 @@ class RecordingSession {
ReactLogger.log(level: .warning, message: "VideoWriter: First frame arrived \((timestamp - initialTimestamp).seconds) seconds late.")
}
case .audio:
guard let audioWriter = audioWriter else {
ReactLogger.log(level: .error, message: "Audio Frame arrived but AudioWriter was nil!", alsoLogToJS: true)
return
}
if !audioWriter.isReadyForMoreMediaData {
return
}
@ -117,14 +158,17 @@ class RecordingSession {
if assetWriter.status == .failed {
// TODO: Should I call the completion handler or is this instance still valid?
ReactLogger.log(level: .error, message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")")
ReactLogger.log(level: .error,
message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")",
alsoLogToJS: true)
}
}
func finish() {
ReactLogger.log(level: .info, message: "Finishing Recording with AssetWriter status \"\(assetWriter.status.descriptor)\"...")
if assetWriter.status == .writing {
videoWriter.markAsFinished()
bufferAdaptor?.assetWriterInput.markAsFinished()
audioWriter?.markAsFinished()
assetWriter.finishWriting {
self.completionHandler(self.assetWriter.status, self.assetWriter.error)
}
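
Pieced together, the new RecordingSession API splits construction, writer setup and starting into separate steps so that audio can be attached late or not at all. A sketch of the call sequence, assuming the initializer now takes only url, fileType and completion (the removed parameters moved into the initialize methods):

let session = try RecordingSession(url: tempURL, fileType: .mov, completion: onFinish)
session.initializeVideoWriter(withSettings: videoSettings, isVideoMirrored: false)
session.initializeAudioWriter(withSettings: audioSettings) // optional: skip for video-only recordings
session.start()                   // starts the asset writer and sets initialTimestamp
// appendBuffer(buffer, type: .video) / appendBuffer(buffer, type: .audio) per sample buffer
session.finish()                  // marks the writers as finished and invokes the completion handler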

ios/Utils/MeasureElapsedTime.swift

@ -0,0 +1,26 @@
//
// MeasureElapsedTime.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//
import Foundation
/**
Measures the amount of time the given codeblock takes to execute.
Results will be printed to the [ReactLogger] with millisecond precision.
*/
@inlinable
func measureElapsedTime<T>(_ label: String = #function, _ code: () -> T) -> T {
#if DEBUG
let start = DispatchTime.now()
defer {
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
ReactLogger.log(level: .info, message: "\(label) took: \(Double(nanoTime) / 1_000_000)ms!")
}
#endif
return code()
}
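
A hypothetical call site; because the label defaults to #function, the DEBUG log automatically names the enclosing function:

func configureAudioSession() {
  measureElapsedTime {
    // ... expensive session configuration ...
  }
  // DEBUG log: "configureAudioSession() took: 1.23ms!"
}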

ios/VisionCamera.xcodeproj/project.pbxproj

@ -8,7 +8,7 @@
/* Begin PBXBuildFile section */
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.mm in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */; };
B80E06A0266632F000728644 /* AVAudioSession+setCategoryIfNotSet.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+setCategoryIfNotSet.swift */; };
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */; };
B82FBA962614B69D00909718 /* RCTBridge+runOnJS.mm in Sources */ = {isa = PBXBuildFile; fileRef = B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
@ -16,6 +16,7 @@
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
@ -52,6 +53,7 @@
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
B8CCC5A1266694B200B3916F /* MeasureElapsedTime.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8CCC5A0266694B200B3916F /* MeasureElapsedTime.swift */; };
B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
@ -75,7 +77,7 @@
B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginRegistry.mm; sourceTree = "<group>"; };
B80D67A825FA25380008FE8D /* FrameProcessorCallback.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorCallback.h; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+setCategoryIfNotSet.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+setCategoryIfNotSet.swift"; sourceTree = "<group>"; };
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
B8103E1B25FF553B007A1684 /* FrameProcessorUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorUtils.mm; sourceTree = "<group>"; };
B8103E1E25FF5550007A1684 /* FrameProcessorUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorUtils.h; sourceTree = "<group>"; };
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
@ -88,6 +90,8 @@
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSConsoleHelper.h; sourceTree = "<group>"; };
B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSConsoleHelper.mm; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
B887515E25E0102000DB86D6 /* CameraBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraBridge.h; sourceTree = "<group>"; };
@ -127,6 +131,7 @@
B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
B8CCC5A0266694B200B3916F /* MeasureElapsedTime.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MeasureElapsedTime.swift; sourceTree = "<group>"; };
B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
@ -171,10 +176,11 @@
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8DCF08F25EA7BEE00EA5C72 /* cpp */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B8CCC59F266694A200B3916F /* Utils */,
B887516D25E0102000DB86D6 /* React Utils */,
134814211AA4EA7D00B7C361 /* Products */,
);
@ -185,7 +191,7 @@
children = (
B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */,
B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */,
B80E069F266632F000728644 /* AVAudioSession+setCategoryIfNotSet.swift */,
B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */,
B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */,
B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
@ -210,6 +216,8 @@
B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */,
B81D41EF263C86F900B041FD /* JSIUtils.h */,
B8994E6B263F03E100069589 /* JSIUtils.mm */,
B8805065266798AB00EAD7F2 /* JSConsoleHelper.h */,
B8805066266798B600EAD7F2 /* JSConsoleHelper.mm */,
);
path = "React Utils";
sourceTree = "<group>";
@ -235,6 +243,14 @@
path = Parsers;
sourceTree = "<group>";
};
B8CCC59F266694A200B3916F /* Utils */ = {
isa = PBXGroup;
children = (
B8CCC5A0266694B200B3916F /* MeasureElapsedTime.swift */,
);
path = Utils;
sourceTree = "<group>";
};
B8DCF08F25EA7BEE00EA5C72 /* cpp */ = {
isa = PBXGroup;
children = (
@ -292,7 +308,7 @@
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1240;
ORGANIZATIONNAME = Facebook;
ORGANIZATIONNAME = mrousavy;
TargetAttributes = {
58B511DA1A9E6C8500147676 = {
CreatedOnToolsVersion = 6.1.1;
@ -372,11 +388,12 @@
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8CCC5A1266694B200B3916F /* MeasureElapsedTime.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+setCategoryIfNotSet.swift in Sources */,
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
B887519B25E0102000DB86D6 /* AVCaptureSession.Preset+descriptor.swift in Sources */,
@ -409,6 +426,7 @@
B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */,
B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
B8805067266798B600EAD7F2 /* JSConsoleHelper.mm in Sources */,
B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;

package.json

@ -33,9 +33,8 @@
"lint-ci": "yarn lint -f ./node_modules/@firmnav/eslint-github-actions-formatter/dist/formatter.js",
"prepare": "bob build",
"release": "release-it",
"example": "yarn --cwd example",
"pods": "cd example && pod-install --quiet",
"bootstrap": "yarn example && yarn && yarn setup && yarn pods",
"pods": "cd example && yarn pods",
"bootstrap": "yarn && cd example && yarn && yarn pods",
"check-android": "scripts/ktlint.sh",
"check-ios": "scripts/swiftformat.sh && scripts/swiftlint.sh",
"check-cpp": "scripts/cpplint.sh",

src/CameraError.ts

@ -20,7 +20,11 @@ export type FormatError =
| 'format/invalid-low-light-boost'
| 'format/invalid-format'
| 'format/invalid-preset';
export type SessionError = 'session/camera-not-ready' | 'session/audio-session-setup-failed' | 'session/audio-in-use-by-other-app';
export type SessionError =
| 'session/camera-not-ready'
| 'session/audio-session-setup-failed'
| 'session/audio-in-use-by-other-app'
| 'session/audio-session-failed-to-activate';
export type CaptureError =
| 'capture/invalid-photo-format'
| 'capture/encoder-error'