feat: Separate usecases (decouple microphone, video, photo) (#168)
* Add props
* add props (iOS)
* Add use-cases conditionally
* Update CameraView+RecordVideo.swift
* Update RecordingSession.swift
* reconfigure on change
* Throw correct errors
* Check for audio permission
* Move `#if` outward
* Throw appropriate errors
* Update CameraView+RecordVideo.swift
* fix Splashscreen
* Dynamic filePath
* Fix video extension
* add `avci` and `m4v` file types
* Fix RecordVideo errors
* Fix audio setup
* Enable `photo`, `video` and `audio`
* Check for `video={true}` in frameProcessor
* format
* Remove unused DispatchQueue
* Update docs
* Add `supportsPhotoAndVideoCapture`
* Fix view manager
* Fix error not being propagated
* Catch normal errors too
* Update DEVICES.mdx
* Update CAPTURING.mdx
* Update classdocs
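The core idea of this change — only attach a capture output when its use case is requested — can be condensed into a short sketch. This is an illustrative summary, not the library's exact code; `configureOutputs` and its call shape are hypothetical stand-ins:

```swift
import AVFoundation

// Hypothetical condensation of the pattern this PR introduces:
// nullable-bool props decide which outputs get attached to the session.
func configureOutputs(session: AVCaptureSession,
                      photo: NSNumber?, video: NSNumber?, audio: NSNumber?) {
  if photo?.boolValue == true {
    let photoOutput = AVCapturePhotoOutput()
    if session.canAddOutput(photoOutput) {
      session.addOutput(photoOutput) // photo use case enabled
    }
  }
  if video?.boolValue == true {
    let videoOutput = AVCaptureVideoDataOutput()
    if session.canAddOutput(videoOutput) {
      session.addOutput(videoOutput) // video / frame-processor use case enabled
    }
  }
  // audio runs on a separate AVCaptureSession in this PR, and is only
  // configured when audio?.boolValue == true.
}
```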
@@ -21,7 +21,7 @@ enum PermissionError: String {
   var message: String {
     switch self {
     case .microphone:
-      return "The Microphone permission was denied!"
+      return "The Microphone permission was denied! If you want to record Videos without sound, pass `audio={false}`."
     case .camera:
       return "The Camera permission was denied!"
     }
@@ -186,6 +186,8 @@ enum CaptureError {
   case createTempFileError
   case createRecorderError(message: String? = nil)
   case invalidPhotoCodec
+  case videoNotEnabled
+  case photoNotEnabled
   case unknown(message: String? = nil)

   var code: String {
@@ -204,6 +206,10 @@ enum CaptureError {
       return "create-recorder-error"
     case .invalidPhotoCodec:
       return "invalid-photo-codec"
+    case .videoNotEnabled:
+      return "video-not-enabled"
+    case .photoNotEnabled:
+      return "photo-not-enabled"
     case .unknown:
       return "unknown"
     }
@@ -225,6 +231,10 @@ enum CaptureError {
       return "Failed to create a temporary file!"
     case let .createRecorderError(message: message):
       return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")"
+    case .videoNotEnabled:
+      return "Video capture is disabled! Pass `video={true}` to enable video recordings."
+    case .photoNotEnabled:
+      return "Photo capture is disabled! Pass `photo={true}` to enable photo capture."
     case let .unknown(message: message):
       return message ?? "An unknown error occured while capturing a video/photo."
     }
@@ -23,7 +23,6 @@ public class CameraQueues: NSObject {
                                                     autoreleaseFrequency: .inherit,
                                                     target: nil)

-  // TODO: Is it a good idea to use a separate queue for audio output processing?
   /// The serial execution queue for output processing of audio buffers.
   @objc public static let audioQueue = DispatchQueue(label: "com.mrousavy.vision.audio-queue",
                                                      qos: .userInteractive,
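For context, the queues above are plain serial GCD queues; a minimal standalone sketch of the same construction (the label here is illustrative, not the library's):

```swift
import Dispatch

// A serial queue like CameraQueues.audioQueue: no .concurrent attribute,
// so submitted work items run one at a time, in FIFO order.
let exampleQueue = DispatchQueue(label: "com.example.audio-queue",
                                 qos: .userInteractive,
                                 attributes: [],
                                 autoreleaseFrequency: .inherit,
                                 target: nil)

exampleQueue.async {
  // output-buffer processing would happen here, off the main thread
  print("processing audio buffer")
}
```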
@@ -25,6 +25,15 @@ extension CameraView {
     }

     audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
+    let enableAudio = audio?.boolValue == true
+
+    // check microphone permission
+    if enableAudio {
+      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
+      if audioPermissionStatus != .authorized {
+        return invokeOnError(.permission(.microphone))
+      }
+    }

     // Audio Input
     do {
@@ -32,15 +41,17 @@ extension CameraView {
         audioCaptureSession.removeInput(audioDeviceInput)
         self.audioDeviceInput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Audio input...")
-      guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
-        return invokeOnError(.device(.microphoneUnavailable))
-      }
-      audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
-      guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
-        return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
-      }
-      audioCaptureSession.addInput(audioDeviceInput!)
+      if enableAudio {
+        ReactLogger.log(level: .info, message: "Adding Audio input...")
+        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
+          return invokeOnError(.device(.microphoneUnavailable))
+        }
+        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
+        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
+          return invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
+        }
+        audioCaptureSession.addInput(audioDeviceInput!)
+      }
     } catch let error as NSError {
       return invokeOnError(.device(.microphoneUnavailable), cause: error)
     }
@@ -50,13 +61,15 @@ extension CameraView {
         audioCaptureSession.removeOutput(audioOutput)
         self.audioOutput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
-      audioOutput = AVCaptureAudioDataOutput()
-      guard audioCaptureSession.canAddOutput(audioOutput!) else {
-        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
-      }
-      audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
-      audioCaptureSession.addOutput(audioOutput!)
+      if enableAudio {
+        ReactLogger.log(level: .info, message: "Adding Audio Data output...")
+        audioOutput = AVCaptureAudioDataOutput()
+        guard audioCaptureSession.canAddOutput(audioOutput!) else {
+          return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
+        }
+        audioOutput!.setSampleBufferDelegate(self, queue: audioQueue)
+        audioCaptureSession.addOutput(audioOutput!)
+      }
     }

     /**
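Note that the permission check above only inspects the current status; the app is expected to have requested access beforehand. A minimal sketch of the check-then-request flow with AVFoundation (illustrative, standalone):

```swift
import AVFoundation

// Check-then-request flow for microphone access:
switch AVCaptureDevice.authorizationStatus(for: .audio) {
case .authorized:
  print("microphone already authorized")
case .notDetermined:
  AVCaptureDevice.requestAccess(for: .audio) { granted in
    print("microphone access granted: \(granted)")
  }
default:
  // .denied or .restricted — configureAudioSession() above would
  // surface this as .permission(.microphone)
  print("microphone access denied")
}
```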
@@ -84,21 +84,23 @@ extension CameraView {
         captureSession.removeOutput(photoOutput)
         self.photoOutput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Photo output...")
-      photoOutput = AVCapturePhotoOutput()
-      photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
-      if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
-        photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
-      }
-      if #available(iOS 12.0, *) {
-        photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
-      }
-      guard captureSession.canAddOutput(photoOutput!) else {
-        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
-      }
-      captureSession.addOutput(photoOutput!)
-      if videoDeviceInput!.device.position == .front {
-        photoOutput!.mirror()
+      if photo?.boolValue == true {
+        ReactLogger.log(level: .info, message: "Adding Photo output...")
+        photoOutput = AVCapturePhotoOutput()
+        photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported && enableDepthData
+        if let enableHighResolutionCapture = self.enableHighResolutionCapture?.boolValue {
+          photoOutput!.isHighResolutionCaptureEnabled = enableHighResolutionCapture
+        }
+        if #available(iOS 12.0, *) {
+          photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported && self.enablePortraitEffectsMatteDelivery
+        }
+        guard captureSession.canAddOutput(photoOutput!) else {
+          return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
+        }
+        captureSession.addOutput(photoOutput!)
+        if videoDeviceInput!.device.position == .front {
+          photoOutput!.mirror()
+        }
       }

       // Video Output + Frame Processor
@@ -106,16 +108,18 @@ extension CameraView {
         captureSession.removeOutput(videoOutput)
         self.videoOutput = nil
       }
-      ReactLogger.log(level: .info, message: "Adding Video Data output...")
-      videoOutput = AVCaptureVideoDataOutput()
-      guard captureSession.canAddOutput(videoOutput!) else {
-        return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
-      }
-      videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
-      videoOutput!.alwaysDiscardsLateVideoFrames = true
-      captureSession.addOutput(videoOutput!)
-      if videoDeviceInput!.device.position == .front {
-        videoOutput!.mirror()
+      if video?.boolValue == true {
+        ReactLogger.log(level: .info, message: "Adding Video Data output...")
+        videoOutput = AVCaptureVideoDataOutput()
+        guard captureSession.canAddOutput(videoOutput!) else {
+          return invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
+        }
+        videoOutput!.setSampleBufferDelegate(self, queue: videoQueue)
+        videoOutput!.alwaysDiscardsLateVideoFrames = true
+        captureSession.addOutput(videoOutput!)
+        if videoDeviceInput!.device.position == .front {
+          videoOutput!.mirror()
+        }
       }

       invokeOnInitialized()
@@ -223,7 +227,7 @@ extension CameraView {

     if isActive {
       // restart capture session after an error occured
-      queue.async {
+      cameraQueue.async {
         self.captureSession.startRunning()
       }
     }
@@ -16,72 +16,95 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
   /**
    Starts a video + audio recording with a custom Asset Writer.
    */
-  func startRecording(options: NSDictionary, callback: @escaping RCTResponseSenderBlock) {
+  func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
     cameraQueue.async {
       ReactLogger.log(level: .info, message: "Starting Video recording...")
-      do {
-        let errorPointer = ErrorPointer(nilLiteral: ())
-        guard let tempFilePath = RCTTempFilePath("mov", errorPointer) else {
-          return callback([NSNull(), makeReactError(.capture(.createTempFileError), cause: errorPointer?.pointee)])
-        }
-        let tempURL = URL(string: "file://\(tempFilePath)")!
-        if let flashMode = options["flash"] as? String {
-          // use the torch as the video's flash
-          self.setTorchMode(flashMode)
-        }
-        var fileType = AVFileType.mov
-        if let fileTypeOption = options["fileType"] as? String {
-          fileType = AVFileType(withString: fileTypeOption)
-        }
-        // TODO: The startRecording() func cannot be async because RN doesn't allow
-        // both a callback and a Promise in a single function. Wait for TurboModules?
-        // This means that any errors that occur in this function have to be delegated through
-        // the callback, but I'd prefer for them to throw for the original function instead.
+      let callback = Callback(jsCallbackFunc)
+
+      var fileType = AVFileType.mov
+      if let fileTypeOption = options["fileType"] as? String {
+        guard let parsed = try? AVFileType(withString: fileTypeOption) else {
+          return callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
+        }
+        fileType = parsed
+      }
+
+      let errorPointer = ErrorPointer(nilLiteral: ())
+      let fileExtension = fileType.descriptor ?? "mov"
+      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
+        return callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
+      }
+
+      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
+      let tempURL = URL(string: "file://\(tempFilePath)")!
+
+      if let flashMode = options["flash"] as? String {
+        // use the torch as the video's flash
+        self.setTorchMode(flashMode)
+      }
+
+      guard let videoOutput = self.videoOutput else {
+        if self.video?.boolValue == true {
+          return callback.reject(error: .session(.cameraNotReady))
+        } else {
+          return callback.reject(error: .capture(.videoNotEnabled))
+        }
+      }
+
+      // TODO: The startRecording() func cannot be async because RN doesn't allow
+      // both a callback and a Promise in a single function. Wait for TurboModules?
+      // This means that any errors that occur in this function have to be delegated through
+      // the callback, but I'd prefer for them to throw for the original function instead.
+      let enableAudio = self.audio?.boolValue == true

-        let onFinish = { (status: AVAssetWriter.Status, error: Error?) -> Void in
-          defer {
-            self.recordingSession = nil
-          }
-          ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
-          if let error = error {
-            let description = (error as NSError).description
-            return callback([NSNull(), CameraError.capture(.unknown(message: "An unknown recording error occured! \(description)"))])
-          } else {
-            if status == .completed {
-              return callback([[
-                "path": self.recordingSession!.url.absoluteString,
-                "duration": self.recordingSession!.duration,
-              ], NSNull()])
-            } else {
-              return callback([NSNull(), CameraError.unknown(message: "AVAssetWriter completed with status: \(status.descriptor)")])
-            }
-          }
-        }
+      let onFinish = { (status: AVAssetWriter.Status, error: Error?) -> Void in
+        defer {
+          self.recordingSession = nil
+          if enableAudio {
+            self.audioQueue.async {
+              self.deactivateAudioSession()
+            }
+          }
+        }
+        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")
+        if let error = error as NSError? {
+          let description = error.description
+          return callback.reject(error: .capture(.unknown(message: "An unknown recording error occured! \(description)")), cause: error)
+        } else {
+          if status == .completed {
+            return callback.resolve([
+              "path": self.recordingSession!.url.absoluteString,
+              "duration": self.recordingSession!.duration,
+            ])
+          } else {
+            return callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
+          }
+        }
+      }

+      do {
         self.recordingSession = try RecordingSession(url: tempURL,
                                                      fileType: fileType,
                                                      completion: onFinish)
+      } catch let error as NSError {
+        return callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
+      }

-        // Init Video
-        guard let videoOutput = self.videoOutput,
-              let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
-              !videoSettings.isEmpty else {
-          throw CameraError.capture(.createRecorderError(message: "Failed to get video settings!"))
-        }
-        self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
-                                                     isVideoMirrored: self.videoOutput!.isMirrored)
+      // Init Video
+      guard let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType),
+            !videoSettings.isEmpty else {
+        return callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
+      }
+      self.recordingSession!.initializeVideoWriter(withSettings: videoSettings,
+                                                   isVideoMirrored: self.videoOutput!.isMirrored)

       // Init Audio (optional, async)
+      if enableAudio {
         self.audioQueue.async {
           // Activate Audio Session (blocking)
           self.activateAudioSession()

           guard let recordingSession = self.recordingSession else {
             // recording has already been cancelled
             return
@@ -95,10 +118,10 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
           recordingSession.start()
           self.isRecording = true
         }
-      } catch EnumParserError.invalidValue {
-        return callback([NSNull(), EnumParserError.invalidValue])
-      } catch let error as NSError {
-        return callback([NSNull(), makeReactError(.capture(.createTempFileError), cause: error)])
+      } else {
+        // start recording session without audio.
+        self.recordingSession!.start()
+        self.isRecording = true
       }
     }
   }
@@ -175,8 +198,8 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
     }
   }

-  public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
-    #if DEBUG
+  #if DEBUG
+  public final func captureOutput(_ captureOutput: AVCaptureOutput, didDrop buffer: CMSampleBuffer, from _: AVCaptureConnection) {
     if frameProcessorCallback != nil && !hasLoggedFrameDropWarning && captureOutput is AVCaptureVideoDataOutput {
       let reason = findFrameDropReason(inBuffer: buffer)
       ReactLogger.log(level: .warning,
@@ -185,16 +208,16 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAud
                       alsoLogToJS: true)
       hasLoggedFrameDropWarning = true
     }
-    #endif
   }

-  private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
-    var mode: CMAttachmentMode = 0
-    guard let reason = CMGetAttachment(buffer,
-                                       key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
-                                       attachmentModeOut: &mode) else {
-      return "unknown"
-    }
-    return String(describing: reason)
-  }
+  private final func findFrameDropReason(inBuffer buffer: CMSampleBuffer) -> String {
+    var mode: CMAttachmentMode = 0
+    guard let reason = CMGetAttachment(buffer,
+                                       key: kCMSampleBufferAttachmentKey_DroppedFrameReason,
+                                       attachmentModeOut: &mode) else {
+      return "unknown"
+    }
+    return String(describing: reason)
+  }
+  #endif
 }
@@ -25,8 +25,13 @@ struct TakePhotoOptions {
 extension CameraView {
   func takePhoto(options: NSDictionary, promise: Promise) {
     cameraQueue.async {
-      guard let photoOutput = self.photoOutput, let videoDeviceInput = self.videoDeviceInput else {
-        return promise.reject(error: .session(.cameraNotReady))
+      guard let photoOutput = self.photoOutput,
+            let videoDeviceInput = self.videoDeviceInput else {
+        if self.photo?.boolValue == true {
+          return promise.reject(error: .session(.cameraNotReady))
+        } else {
+          return promise.reject(error: .capture(.photoNotEnabled))
+        }
       }

       var photoSettings = AVCapturePhotoSettings()
@@ -15,7 +15,6 @@ import UIKit
 //
 // CameraView+RecordVideo
-// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
 // TODO: videoStabilizationMode

 // CameraView+TakePhoto
 // TODO: Photo HDR
@@ -24,7 +23,9 @@ private let propsThatRequireReconfiguration = ["cameraId",
                                                "enableDepthData",
                                                "enableHighResolutionCapture",
                                                "enablePortraitEffectsMatteDelivery",
-                                               "preset"]
+                                               "preset",
+                                               "photo",
+                                               "video"]
 private let propsThatRequireDeviceReconfiguration = ["fps",
                                                      "hdr",
                                                      "lowLightBoost",
@@ -42,6 +43,10 @@ public final class CameraView: UIView {
   @objc var enableHighResolutionCapture: NSNumber? // nullable bool
   @objc var enablePortraitEffectsMatteDelivery = false
   @objc var preset: String?
+  // use cases
+  @objc var photo: NSNumber? // nullable bool
+  @objc var video: NSNumber? // nullable bool
+  @objc var audio: NSNumber? // nullable bool
   // props that require format reconfiguring
   @objc var format: NSDictionary?
   @objc var fps: NSNumber?
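The three new use-case props are nullable booleans, which is why the Swift side uses the `?.boolValue == true` idiom throughout this PR; a tiny standalone illustration:

```swift
import Foundation

// nil means "prop not passed from JS" and is treated the same as false:
let audio: NSNumber? = nil
let video: NSNumber? = NSNumber(value: true)

print(audio?.boolValue == true) // false — audio use case stays off
print(video?.boolValue == true) // true  — video use case is enabled
```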
@@ -71,8 +76,6 @@ public final class CameraView: UIView {
   // pragma MARK: Internal Properties

   internal var isReady = false
-  /// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo and record video)
-  internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
   // Capture Session
   internal let captureSession = AVCaptureSession()
   internal let audioCaptureSession = AVCaptureSession()
@@ -130,10 +133,6 @@ public final class CameraView: UIView {
                                            selector: #selector(audioSessionInterrupted),
                                            name: AVAudioSession.interruptionNotification,
                                            object: AVAudioSession.sharedInstance)
-
-    audioQueue.async {
-      self.configureAudioSession()
-    }
   }

   @available(*, unavailable)
@@ -159,6 +158,7 @@ public final class CameraView: UIView {
     let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
     let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
     let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
+    let shouldReconfigureAudioSession = changedProps.contains("audio")

     let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice

@@ -168,6 +168,7 @@ public final class CameraView: UIView {
     let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")

     if shouldReconfigure ||
+      shouldReconfigureAudioSession ||
       shouldCheckActive ||
       shouldUpdateTorch ||
       shouldUpdateZoom ||
@@ -214,6 +215,13 @@ public final class CameraView: UIView {
         }
       }
     }
+
+    // Audio Configuration
+    if shouldReconfigureAudioSession {
+      audioQueue.async {
+        self.configureAudioSession()
+      }
+    }
   }
 }

@@ -27,6 +27,10 @@ RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
 RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(enableHighResolutionCapture, NSNumber); // nullable bool
 RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
+// use cases
+RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
+RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
+RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
 // device format
 RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
@@ -99,6 +99,7 @@ final class CameraViewManager: RCTViewManager {
         "neutralZoom": $0.neutralZoomFactor,
         "maxZoom": $0.maxAvailableVideoZoomFactor,
         "isMultiCam": $0.isMultiCam,
+        "supportsPhotoAndVideoCapture": true,
         "supportsDepthCapture": false, // TODO: supportsDepthCapture
         "supportsRawCapture": false, // TODO: supportsRawCapture
         "supportsLowLightBoost": $0.isLowLightBoostSupported,
@@ -75,10 +75,6 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
     auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
     return jsi::Value((double) planesCount);
   }
-  if (name == "buffer") {
-    // TODO: Actually return the pixels of the buffer. Not sure if this will be a huge performance hit or not
-    return jsi::Array(runtime, 0);
-  }

   return jsi::Value::undefined();
 }
@@ -10,11 +10,33 @@ import AVFoundation
 import Foundation

 extension AVFileType {
-  init(withString string: String) {
-    self.init(rawValue: string)
+  init(withString string: String) throws {
+    switch string {
+    case "mov":
+      self = .mov
+    case "mp4":
+      self = .mp4
+    case "avci":
+      self = .avci
+    case "m4v":
+      self = .m4v
+    default:
+      throw EnumParserError.invalidValue
+    }
   }

-  var descriptor: String {
-    return rawValue
+  var descriptor: String? {
+    switch self {
+    case .mov:
+      return "mov"
+    case .mp4:
+      return "mp4"
+    case .avci:
+      return "avci"
+    case .m4v:
+      return "m4v"
+    default:
+      return nil
+    }
   }
 }
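Taken together, the throwing initializer and the optional `descriptor` round-trip the supported container formats; a hedged usage sketch (this mirrors how `startRecording` above derives the temp-file extension, but the call site itself is illustrative):

```swift
import AVFoundation

// Assumes the AVFileType extension above is in scope.
do {
  let fileType = try AVFileType(withString: "mp4")
  let fileExtension = fileType.descriptor ?? "mov" // "mp4"
  print("recording to a .\(fileExtension) file")
} catch {
  // "xyz" and other unknown strings throw EnumParserError.invalidValue
  print("invalid fileType option")
}
```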
@@ -20,8 +20,4 @@ enum EnumParserError: Error {
    Raised when the descriptor does not match any of the possible values.
    */
   case invalidValue
-  /**
-   Raised when no descriptor for the given enum is available.
-   */
-  case noDescriptorAvailable
 }
ios/React Utils/Callback.swift (new file, +38)
@@ -0,0 +1,38 @@
+//
+//  Callback.swift
+//  VisionCamera
+//
+//  Created by Marc Rousavy on 07.06.21.
+//  Copyright © 2021 mrousavy. All rights reserved.
+//
+
+import Foundation
+
+/**
+ Represents a callback to JavaScript. Syntax is the same as with Promise.
+ */
+class Callback {
+  init(_ callback: @escaping RCTResponseSenderBlock) {
+    self.callback = callback
+  }
+
+  func reject(error: CameraError, cause: NSError?) {
+    callback([NSNull(), makeReactError(error, cause: cause)])
+  }
+
+  func reject(error: CameraError) {
+    reject(error: error, cause: nil)
+  }
+
+  func resolve(_ value: Any?) {
+    callback([value, NSNull()])
+  }
+
+  func resolve() {
+    resolve(nil)
+  }
+
+  // MARK: Private
+
+  private let callback: RCTResponseSenderBlock
+}
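A minimal usage sketch of `Callback` within the library's module (the call site here is hypothetical; `startRecording` above uses it the same way). It packages the Node-style `[value, error]` pair so call sites read like Promise code:

```swift
import Foundation

// Hypothetical call site: wrap the raw RCTResponseSenderBlock once,
// then settle it Promise-style with resolve/reject.
func doWork(callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
  let callback = Callback(jsCallbackFunc)
  let succeeded = true // stand-in for real work
  if succeeded {
    callback.resolve(["path": "/tmp/video.mov", "duration": 1.5])
  } else {
    callback.reject(error: .capture(.createTempFileError))
  }
}
```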
@@ -11,7 +11,7 @@ import Foundation

 // MARK: - BufferType

-enum BufferType: String {
+enum BufferType {
   case audio
   case video
 }
@@ -112,7 +112,7 @@ class RecordingSession {
     }
     guard let initialTimestamp = initialTimestamp else {
       ReactLogger.log(level: .error,
-                      message: "A \(bufferType.rawValue) frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
+                      message: "A frame arrived, but initialTimestamp was nil. Is this RecordingSession running?",
                       alsoLogToJS: true)
       return
     }
@@ -54,6 +54,7 @@
 B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
 B8994E6C263F03E100069589 /* JSIUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSIUtils.mm */; };
 B8A751D82609E4B30011C623 /* FrameProcessorRuntimeManager.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */; };
+B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
 B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
 B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
 B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
@@ -132,6 +133,7 @@
 B8994E6B263F03E100069589 /* JSIUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSIUtils.mm; sourceTree = "<group>"; };
 B8A751D62609E4980011C623 /* FrameProcessorRuntimeManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorRuntimeManager.h; sourceTree = "<group>"; };
 B8A751D72609E4B30011C623 /* FrameProcessorRuntimeManager.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorRuntimeManager.mm; sourceTree = "<group>"; };
+B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
 B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
 B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
 B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
@@ -212,6 +214,7 @@
 B887516E25E0102000DB86D6 /* MakeReactError.swift */,
 B887516F25E0102000DB86D6 /* ReactLogger.swift */,
 B887517025E0102000DB86D6 /* Promise.swift */,
+B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
 B82FBA942614B69D00909718 /* RCTBridge+runOnJS.h */,
 B82FBA952614B69D00909718 /* RCTBridge+runOnJS.mm */,
 B81D41EF263C86F900B041FD /* JSIUtils.h */,
@@ -391,6 +394,7 @@
 B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
 B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
 B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
+B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
 B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
 B8103E1C25FF553B007A1684 /* FrameProcessorUtils.mm in Sources */,
 B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,