//
// CameraView+AVAudioSession.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 Extension for CameraView that sets up the AVAudioSession.
 */
extension CameraView {
  /**
   Configures the Audio Capture Session with an audio input and audio data output.
   */
  final func configureAudioSession() {
    ReactLogger.log(level: .info, message: "Configuring Audio Session...")

    audioCaptureSession.beginConfiguration()
    defer {
      audioCaptureSession.commitConfiguration()
    }

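    // Don't let the capture session configure the app-wide AVAudioSession on its own;
    // `activateAudioSession()` below manages the shared AVAudioSession manually.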
    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
    let enableAudio = audio?.boolValue == true

    // check microphone permission
    if enableAudio {
      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
      if audioPermissionStatus != .authorized {
        invokeOnError(.permission(.microphone))
        return
      }
    }

    // Audio Input
    do {
      if let audioDeviceInput = audioDeviceInput {
        audioCaptureSession.removeInput(audioDeviceInput)
        self.audioDeviceInput = nil
      }
      if enableAudio {
        ReactLogger.log(level: .info, message: "Adding Audio input...")
        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
          invokeOnError(.device(.microphoneUnavailable))
          return
        }
        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
          invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
          return
        }
        audioCaptureSession.addInput(audioDeviceInput!)
      }
    } catch let error as NSError {
      invokeOnError(.device(.microphoneUnavailable), cause: error)
      return
    }

    // Audio Output
    if let audioOutput = audioOutput {
      audioCaptureSession.removeOutput(audioOutput)
      self.audioOutput = nil
    }
    if enableAudio {
      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
      audioOutput = AVCaptureAudioDataOutput()
      guard audioCaptureSession.canAddOutput(audioOutput!) else {
        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
        return
      }
      audioOutput!.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
      audioCaptureSession.addOutput(audioOutput!)
    }
  }

  /**
   Configures the Audio Session and activates it. If the session was already active, it will briefly
   be deactivated before being re-configured.

   The Audio Session will be configured to allow background music, haptics (vibrations) and system
   sound playback while recording. Background audio is allowed to keep playing on the device speakers
   or on Bluetooth speakers.
   */
  final func activateAudioSession() {
    ReactLogger.log(level: .info, message: "Activating Audio Session...")

    do {
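      // Note: `updateCategory` is not part of the stock AVAudioSession API; it is presumably a
      // project-side helper around `setCategory(_:options:)`.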
      try AVAudioSession.sharedInstance().updateCategory(AVAudioSession.Category.playAndRecord,
                                                         options: [.mixWithOthers,
                                                                   .allowBluetoothA2DP,
                                                                   .defaultToSpeaker,
                                                                   .allowAirPlay])

      if #available(iOS 14.5, *) {
        // prevents the audio session from being interrupted by a phone call
        try AVAudioSession.sharedInstance().setPrefersNoInterruptionsFromSystemAlerts(true)
      }

      audioCaptureSession.startRunning()
    } catch let error as NSError {
      switch error.code {
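      // 561_017_449 is '!pri' (AVAudioSession.ErrorCode.insufficientPriority): another app,
      // such as an ongoing phone call, currently has higher-priority access to the audio hardware.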
      case 561_017_449:
        self.invokeOnError(.session(.audioInUseByOtherApp), cause: error)
      default:
        self.invokeOnError(.session(.audioSessionFailedToActivate), cause: error)
      }
    }
  }

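  /**
   Deactivates the Audio Capture Session by stopping it. The shared AVAudioSession configuration is
   left unchanged.
   */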
  final func deactivateAudioSession() {
    ReactLogger.log(level: .info, message: "Deactivating Audio Session...")

    audioCaptureSession.stopRunning()
  }

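  /**
   Handles AVAudioSession interruption notifications; presumably registered elsewhere in CameraView
   as the observer selector for `AVAudioSession.interruptionNotification`.
   If the interruption has ended, the system allows resuming, and a recording is still in progress,
   the Audio Session is re-activated on the audio queue.
   */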
  @objc
  func audioSessionInterrupted(notification: Notification) {
    ReactLogger.log(level: .error, message: "Audio Session Interruption Notification!")
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
      return
    }

    // TODO: Add JS-Event for Audio Session interruptions?
    switch type {
    case .began:
      // Something interrupted our Audio Session; audio capture is paused until the interruption ends.
ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
|
|
case .ended:
|
|
ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
|
|
guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
|
|
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
|
|
if options.contains(.shouldResume) {
|
|
if isRecording {
|
|
CameraQueues.audioQueue.async {
|
|
ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
|
|
// restart audio session because interruption is over
|
|
self.activateAudioSession()
|
|
}
|
|
}
|
|
} else {
|
|
ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
|
|
}
|
|
@unknown default:
|
|
()
|
|
}
|
|
}
|
|
}
|