feat: Separate use-cases (decouple microphone, video, photo) (#168)

* Add props

* add props (iOS)

* Add use-cases conditionally

* Update CameraView+RecordVideo.swift

* Update RecordingSession.swift

* reconfigure on change

* Throw correct errors

* Check for audio permission

* Move `#if` outward

* Throw appropriate errors

* Update CameraView+RecordVideo.swift

* fix Splashscreen

* Dynamic filePath

* Fix video extension

* add `avci` and `m4v` file types

* Fix RecordVideo errors

* Fix audio setup

* Enable `photo`, `video` and `audio`

* Check for `video={true}` in frameProcessor

* format

* Remove unused DispatchQueue

* Update docs

* Add `supportsPhotoAndVideoCapture`

* Fix view manager

* Fix error not being propagated

* Catch normal errors too

* Update DEVICES.mdx

* Update CAPTURING.mdx

* Update classdocs
Author: Marc Rousavy
Date: 2021-06-07 13:08:40 +02:00
Committed by: GitHub
Parent: 555474be7d
Commit: 72a1fad78e
30 changed files with 412 additions and 167 deletions
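The core idea of this commit is that photo, video and audio are separate use-cases that only get wired into the capture session when the corresponding prop is enabled (the diff below shows the audio part). As a rough illustration of that pattern — a minimal sketch using plain AVFoundation types and a placeholder error, not the project's actual configuration code:

```swift
import AVFoundation

// Illustrative only: attach only the outputs for enabled use-cases, so e.g. a
// photo-only camera never sets up video I/O. Names here are placeholders.
enum UseCaseError: Error {
  case unsupportedOutput(String)
}

func configureOutputs(on session: AVCaptureSession, photo: Bool, video: Bool) throws {
  session.beginConfiguration()
  defer { session.commitConfiguration() }

  if photo {
    let photoOutput = AVCapturePhotoOutput()
    guard session.canAddOutput(photoOutput) else {
      throw UseCaseError.unsupportedOutput("photo-output")
    }
    session.addOutput(photoOutput)
  }
  if video {
    let videoOutput = AVCaptureVideoDataOutput()
    guard session.canAddOutput(videoOutput) else {
      throw UseCaseError.unsupportedOutput("video-output")
    }
    session.addOutput(videoOutput)
  }
}
```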


@@ -25,6 +25,15 @@ extension CameraView {
     }
     audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
+    let enableAudio = audio?.boolValue == true
+
+    // check microphone permission
+    if enableAudio {
+      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
+      if audioPermissionStatus != .authorized {
+        return invokeOnError(.permission(.microphone))
+      }
+    }
     // Audio Input
     do {
@@ -32,15 +41,17 @@
         audioCaptureSession.removeInput(audioDeviceInput)
         self.audioDeviceInput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Audio input...")
-      guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
-        return invokeOnError(.device(.microphoneUnavailable))
-      }
-      audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
-      guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
-        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
-      }
-      audioCaptureSession.addInput(audioDeviceInput!)
+      if enableAudio {
+        ReactLogger.log(level: .info, message: "Adding Audio input...")
+        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
+          return invokeOnError(.device(.microphoneUnavailable))
+        }
+        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
+        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
+          return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
+        }
+        audioCaptureSession.addInput(audioDeviceInput!)
+      }
     } catch let error as NSError {
       return invokeOnError(.device(.microphoneUnavailable), cause: error)
     }
@@ -50,13 +61,15 @@
       audioCaptureSession.removeOutput(audioOutput)
       self.audioOutput = nil
     }
-    ReactLogger.log(level: .info, message: "Adding Audio Data output...")
-    audioOutput = AVCaptureAudioDataOutput()
-    guard audioCaptureSession.canAddOutput(audioOutput!) else {
-      return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
-    }
-    audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
-    audioCaptureSession.addOutput(audioOutput!)
+    if enableAudio {
+      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
+      audioOutput = AVCaptureAudioDataOutput()
+      guard audioCaptureSession.canAddOutput(audioOutput!) else {
+        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
+      }
+      audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
+      audioCaptureSession.addOutput(audioOutput!)
+    }
   }
 
   /**
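Note that the hunk above only checks `AVCaptureDevice.authorizationStatus(for: .audio)` and fails with a `.permission(.microphone)` error when the microphone is not authorized; it does not request access itself. A hedged sketch of how a caller could resolve the permission before enabling the audio use-case (illustrative helper, not part of this commit):

```swift
import AVFoundation

// Illustrative helper (not part of this commit): resolve the microphone
// permission up front so the session configuration above sees `.authorized`.
func ensureMicrophonePermission(_ completion: @escaping (Bool) -> Void) {
  switch AVCaptureDevice.authorizationStatus(for: .audio) {
  case .authorized:
    completion(true)
  case .notDetermined:
    // Prompts the user; the completion handler may be called off the main thread.
    AVCaptureDevice.requestAccess(for: .audio) { granted in
      completion(granted)
    }
  default: // .denied or .restricted
    completion(false)
  }
}
```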