chore: Move everything into package/ (#1745)

* Move everything into package
* Remove .DS_Store
* Move scripts and eslintrc to package
* Create CODE_OF_CONDUCT.md
* fix some links
* Update all links (I think)
* Update generated docs
* Update notice-yarn-changes.yml
* Update validate-android.yml
* Update validate-cpp.yml
* Delete notice-yarn-changes.yml
* Update validate-cpp.yml
* Update validate-cpp.yml
* Update validate-js.yml
* Update validate-cpp.yml
* Update validate-cpp.yml
* wrong c++ style
* Revert "wrong c++ style"

This reverts commit 55a3575589c6f13f8b05134d83384f55e0601ab2.
package/ios/.swift-version (new file, 1 line)
@@ -0,0 +1 @@
5.2
package/ios/.swiftformat (new file, 12 lines)
@@ -0,0 +1,12 @@
--allman false
--indent 2
--exclude Pods,Generated

--disable andOperator
--disable redundantReturn
--disable wrapMultilineStatementBraces
--disable organizeDeclarations

--enable markTypes

--enable isEmpty
package/ios/.swiftlint.yml (new file, 51 lines)
@@ -0,0 +1,51 @@
disabled_rules:
  - identifier_name
  - trailing_comma
  - todo
  - type_body_length
  - cyclomatic_complexity
  - function_body_length
opt_in_rules:
  - contains_over_filter_count
  - contains_over_filter_is_empty
  - contains_over_first_not_nil
  - contains_over_range_nil_comparison
  - empty_collection_literal
  - empty_count
  - empty_string
  - first_where
  - flatmap_over_map_reduce
  - last_where
  - reduce_boolean
  - reduce_into
  - yoda_condition
  - vertical_whitespace_opening_braces
  - vertical_whitespace_closing_braces
  - vertical_parameter_alignment_on_call
  - untyped_error_in_catch
  - unowned_variable_capture
  - unavailable_function
  - switch_case_on_newline
  - static_operator
  - strict_fileprivate
  - sorted_imports
  - sorted_first_last
  - required_enum_case
  - redundant_type_annotation
  - redundant_nil_coalescing
  - attributes
  - convenience_type
analyzer_rules:
  - explicit_self
  - unused_declaration
  - unused_import

excluded: # paths to ignore during linting. Takes precedence over `included`.
  - Pods

# Adjust rule numbers
line_length: 160
file_length: 500

# reporter type (xcode, json, csv, checkstyle, codeclimate, junit, html, emoji, sonarqube, markdown, github-actions-logging)
reporter: "xcode"
package/ios/CameraBridge.h (new file, 22 lines)
@@ -0,0 +1,22 @@
//
//  CameraBridge.h
//  mrousavy
//
//  Created by Marc Rousavy on 09.11.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//

#pragma once

#import <Foundation/Foundation.h>

#import <React/RCTFPSGraph.h>
#import <React/RCTLog.h>
#import <React/RCTUIManager.h>
#import <React/RCTViewManager.h>

#if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
#import "Frame.h"
#import "FrameProcessor.h"
#import "VisionCameraProxy.h"
#endif
package/ios/CameraError.swift (new file, 320 lines)
@@ -0,0 +1,320 @@
//
//  CameraError.swift
//  mrousavy
//
//  Created by Marc Rousavy on 14.01.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

// MARK: - PermissionError

enum PermissionError: String {
  case microphone = "microphone-permission-denied"
  case camera = "camera-permission-denied"

  var code: String {
    return rawValue
  }

  var message: String {
    switch self {
    case .microphone:
      return "The Microphone permission was denied! If you want to record Videos without sound, pass `audio={false}`."
    case .camera:
      return "The Camera permission was denied!"
    }
  }
}

// MARK: - ParameterError

enum ParameterError {
  case invalid(unionName: String, receivedValue: String)
  case unsupportedOS(unionName: String, receivedValue: String, supportedOnOs: String)
  case unsupportedOutput(outputDescriptor: String)
  case unsupportedInput(inputDescriptor: String)
  case invalidCombination(provided: String, missing: String)

  var code: String {
    switch self {
    case .invalid:
      return "invalid-parameter"
    case .unsupportedOS:
      return "unsupported-os"
    case .unsupportedOutput:
      return "unsupported-output"
    case .unsupportedInput:
      return "unsupported-input"
    case .invalidCombination:
      return "invalid-combination"
    }
  }

  var message: String {
    switch self {
    case let .invalid(unionName: unionName, receivedValue: receivedValue):
      return "The value \"\(receivedValue)\" could not be parsed to type \(unionName)!"
    case let .unsupportedOS(unionName: unionName, receivedValue: receivedValue, supportedOnOs: os):
      return "The value \"\(receivedValue)\" for type \(unionName) is not supported on the current iOS version! Required OS: \(os) or higher"
    case let .unsupportedOutput(outputDescriptor: output):
      return "The output \"\(output)\" is not supported!"
    case let .unsupportedInput(inputDescriptor: input):
      return "The input \"\(input)\" is not supported!"
    case let .invalidCombination(provided: provided, missing: missing):
      return "Invalid combination! If \"\(provided)\" is provided, \"\(missing)\" also has to be set!"
    }
  }
}

// MARK: - DeviceError

enum DeviceError: String {
  case configureError = "configuration-error"
  case noDevice = "no-device"
  case invalid = "invalid-device"
  case flashUnavailable = "flash-unavailable"
  case microphoneUnavailable = "microphone-unavailable"
  case lowLightBoostNotSupported = "low-light-boost-not-supported"
  case focusNotSupported = "focus-not-supported"
  case notAvailableOnSimulator = "camera-not-available-on-simulator"
  case pixelFormatNotSupported = "pixel-format-not-supported"

  var code: String {
    return rawValue
  }

  var message: String {
    switch self {
    case .configureError:
      return "Failed to lock the device for configuration."
    case .noDevice:
      return "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device."
    case .invalid:
      return "The given Camera device was invalid. Use `getAvailableCameraDevices()` to select a suitable Camera device."
    case .flashUnavailable:
      return "The Camera Device does not have a flash unit! Make sure you select a device where `hasFlash`/`hasTorch` is true!"
    case .lowLightBoostNotSupported:
      return "The currently selected camera device does not support low-light boost! Make sure you select a device where `supportsLowLightBoost` is true!"
    case .focusNotSupported:
      return "The currently selected camera device does not support focussing!"
    case .microphoneUnavailable:
      return "The microphone was unavailable."
    case .notAvailableOnSimulator:
      return "The Camera is not available on the iOS Simulator!"
    case .pixelFormatNotSupported:
      return "The given pixelFormat is not supported on the given Camera Device!"
    }
  }
}

// MARK: - FormatError

enum FormatError {
  case invalidFps(fps: Int)
  case invalidHdr
  case invalidFormat

  var code: String {
    switch self {
    case .invalidFormat:
      return "invalid-format"
    case .invalidFps:
      return "invalid-fps"
    case .invalidHdr:
      return "invalid-hdr"
    }
  }

  var message: String {
    switch self {
    case .invalidFormat:
      return "The given format was invalid. Did you check if the current device supports the given format by using `getAvailableCameraDevices(...)`?"
    case let .invalidFps(fps):
      return "The given format cannot run at \(fps) FPS! Make sure your FPS is lower than `format.maxFps` but higher than `format.minFps`."
    case .invalidHdr:
      return "The currently selected format does not support HDR capture! Make sure you select a format which includes `supportsPhotoHDR`!"
    }
  }
}

// MARK: - SessionError

enum SessionError {
  case cameraNotReady
  case audioSessionSetupFailed(reason: String)
  case audioSessionFailedToActivate
  case audioInUseByOtherApp

  var code: String {
    switch self {
    case .cameraNotReady:
      return "camera-not-ready"
    case .audioSessionSetupFailed:
      return "audio-session-setup-failed"
    case .audioInUseByOtherApp:
      return "audio-in-use-by-other-app"
    case .audioSessionFailedToActivate:
      return "audio-session-failed-to-activate"
    }
  }

  var message: String {
    switch self {
    case .cameraNotReady:
      return "The Camera is not ready yet! Wait for the onInitialized() callback!"
    case let .audioSessionSetupFailed(reason):
      return "The audio session failed to setup! \(reason)"
    case .audioInUseByOtherApp:
      return "The audio session is already in use by another app with higher priority!"
    case .audioSessionFailedToActivate:
      return "Failed to activate Audio Session!"
    }
  }
}

// MARK: - CaptureError

enum CaptureError {
  case invalidPhotoFormat
  case recordingInProgress
  case noRecordingInProgress
  case fileError
  case createTempFileError
  case createRecorderError(message: String? = nil)
  case invalidPhotoCodec
  case videoNotEnabled
  case photoNotEnabled
  case aborted
  case unknown(message: String? = nil)

  var code: String {
    switch self {
    case .invalidPhotoFormat:
      return "invalid-photo-format"
    case .recordingInProgress:
      return "recording-in-progress"
    case .noRecordingInProgress:
      return "no-recording-in-progress"
    case .fileError:
      return "file-io-error"
    case .createTempFileError:
      return "create-temp-file-error"
    case .createRecorderError:
      return "create-recorder-error"
    case .invalidPhotoCodec:
      return "invalid-photo-codec"
    case .videoNotEnabled:
      return "video-not-enabled"
    case .photoNotEnabled:
      return "photo-not-enabled"
    case .aborted:
      return "aborted"
    case .unknown:
      return "unknown"
    }
  }

  var message: String {
    switch self {
    case .invalidPhotoFormat:
      return "The given photo format was invalid!"
    case .invalidPhotoCodec:
      return "The given photo codec was invalid!"
    case .recordingInProgress:
      return "There is already an active video recording in progress! Did you call startRecording() twice?"
    case .noRecordingInProgress:
      return "There was no active video recording in progress! Did you call stopRecording() twice?"
    case .fileError:
      return "An unexpected File IO error occurred!"
    case .createTempFileError:
      return "Failed to create a temporary file!"
    case let .createRecorderError(message: message):
      return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")"
    case .videoNotEnabled:
      return "Video capture is disabled! Pass `video={true}` to enable video recordings."
    case .photoNotEnabled:
      return "Photo capture is disabled! Pass `photo={true}` to enable photo capture."
    case .aborted:
      return "The capture has been stopped before any input data arrived."
    case let .unknown(message: message):
      return message ?? "An unknown error occurred while capturing a video/photo."
    }
  }
}

// MARK: - SystemError

enum SystemError: String {
  case noManager = "no-camera-manager"
  case frameProcessorsUnavailable = "frame-processors-unavailable"

  var code: String {
    return rawValue
  }

  var message: String {
    switch self {
    case .noManager:
      return "No Camera Manager was found."
    case .frameProcessorsUnavailable:
      return "Frame Processors are unavailable - is react-native-worklets-core installed?"
    }
  }
}

// MARK: - CameraError

enum CameraError: Error {
  case permission(_ id: PermissionError)
  case parameter(_ id: ParameterError)
  case device(_ id: DeviceError)
  case format(_ id: FormatError)
  case session(_ id: SessionError)
  case capture(_ id: CaptureError)
  case system(_ id: SystemError)
  case unknown(message: String? = nil)

  var code: String {
    switch self {
    case let .permission(id: id):
      return "permission/\(id.code)"
    case let .parameter(id: id):
      return "parameter/\(id.code)"
    case let .device(id: id):
      return "device/\(id.code)"
    case let .format(id: id):
      return "format/\(id.code)"
    case let .session(id: id):
      return "session/\(id.code)"
    case let .capture(id: id):
      return "capture/\(id.code)"
    case let .system(id: id):
      return "system/\(id.code)"
    case .unknown:
      return "unknown/unknown"
    }
  }

  var message: String {
    switch self {
    case let .permission(id: id):
      return id.message
    case let .parameter(id: id):
      return id.message
    case let .device(id: id):
      return id.message
    case let .format(id: id):
      return id.message
    case let .session(id: id):
      return id.message
    case let .capture(id: id):
      return id.message
    case let .system(id: id):
      return id.message
    case let .unknown(message: message):
      return message ?? "An unexpected error occurred."
    }
  }
}
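Each domain enum above pairs a string code with a human-readable message, and CameraError composes them into a "domain/code" identifier for the JS side. A minimal usage sketch; the selectDevice helper is hypothetical and not part of this commit:

import AVFoundation

// Hypothetical helper, for illustration only.
func selectDevice(withId id: String?) throws -> AVCaptureDevice {
  guard let id = id else {
    throw CameraError.device(.noDevice)
  }
  guard let device = AVCaptureDevice(uniqueID: id) else {
    throw CameraError.device(.invalid)
  }
  return device
}

do {
  _ = try selectDevice(withId: nil)
} catch let error as CameraError {
  // Prints "device/no-device: No device was set! ..."
  print("\(error.code): \(error.message)")
}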
package/ios/CameraQueues.swift (new file, 33 lines)
@@ -0,0 +1,33 @@
//
//  CameraQueues.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 22.03.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

@objc
public class CameraQueues: NSObject {
  /// The serial execution queue for the camera preview layer (input stream) as well as output processing of photos.
  @objc public static let cameraQueue = DispatchQueue(label: "mrousavy/VisionCamera.main",
                                                      qos: .userInteractive,
                                                      attributes: [],
                                                      autoreleaseFrequency: .inherit,
                                                      target: nil)

  /// The serial execution queue for output processing of videos for recording or synchronous frame processing.
  @objc public static let videoQueue = DispatchQueue(label: "mrousavy/VisionCamera.video",
                                                     qos: .userInteractive,
                                                     attributes: [],
                                                     autoreleaseFrequency: .inherit,
                                                     target: nil)

  /// The serial execution queue for output processing of audio buffers.
  @objc public static let audioQueue = DispatchQueue(label: "mrousavy/VisionCamera.audio",
                                                     qos: .userInteractive,
                                                     attributes: [],
                                                     autoreleaseFrequency: .inherit,
                                                     target: nil)
}
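All three queues are serial DispatchQueues (attributes: [] means non-concurrent), so work submitted to the same queue runs in order. A minimal sketch of the usage pattern the extensions below follow:

// Mutate the capture session off the main thread, serialized with
// all other camera work on the same queue.
CameraQueues.cameraQueue.async {
  // e.g. captureSession.beginConfiguration() ... commitConfiguration()
}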
package/ios/CameraView+AVAudioSession.swift (new file, 151 lines)
@@ -0,0 +1,151 @@
//
//  CameraView+AVAudioSession.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 26.03.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 Extension for CameraView that sets up the AVAudioSession.
 */
extension CameraView {
  /**
   Configures the Audio Capture Session with an audio input and audio data output.
   */
  final func configureAudioSession() {
    ReactLogger.log(level: .info, message: "Configuring Audio Session...")

    audioCaptureSession.beginConfiguration()
    defer {
      audioCaptureSession.commitConfiguration()
    }

    audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
    let enableAudio = audio?.boolValue == true

    // check microphone permission
    if enableAudio {
      let audioPermissionStatus = AVCaptureDevice.authorizationStatus(for: .audio)
      if audioPermissionStatus != .authorized {
        invokeOnError(.permission(.microphone))
        return
      }
    }

    // Audio Input
    do {
      if let audioDeviceInput = audioDeviceInput {
        audioCaptureSession.removeInput(audioDeviceInput)
        self.audioDeviceInput = nil
      }
      if enableAudio {
        ReactLogger.log(level: .info, message: "Adding Audio input...")
        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
          invokeOnError(.device(.microphoneUnavailable))
          return
        }
        audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        guard audioCaptureSession.canAddInput(audioDeviceInput!) else {
          invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "audio-input")))
          return
        }
        audioCaptureSession.addInput(audioDeviceInput!)
      }
    } catch let error as NSError {
      invokeOnError(.device(.microphoneUnavailable), cause: error)
      return
    }

    // Audio Output
    if let audioOutput = audioOutput {
      audioCaptureSession.removeOutput(audioOutput)
      self.audioOutput = nil
    }
    if enableAudio {
      ReactLogger.log(level: .info, message: "Adding Audio Data output...")
      audioOutput = AVCaptureAudioDataOutput()
      guard audioCaptureSession.canAddOutput(audioOutput!) else {
        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "audio-output")))
        return
      }
      audioOutput!.setSampleBufferDelegate(self, queue: CameraQueues.audioQueue)
      audioCaptureSession.addOutput(audioOutput!)
    }
  }

  /**
   Configures the Audio session and activates it. If the session was active it will shortly be deactivated before configuration.

   The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording.
   Background audio is allowed to play on speakers or bluetooth speakers.
   */
  final func activateAudioSession() {
    ReactLogger.log(level: .info, message: "Activating Audio Session...")

    do {
      try AVAudioSession.sharedInstance().updateCategory(AVAudioSession.Category.playAndRecord,
                                                         options: [.mixWithOthers,
                                                                   .allowBluetoothA2DP,
                                                                   .defaultToSpeaker,
                                                                   .allowAirPlay])

      if #available(iOS 14.5, *) {
        // prevents the audio session from being interrupted by a phone call
        try AVAudioSession.sharedInstance().setPrefersNoInterruptionsFromSystemAlerts(true)
      }

      audioCaptureSession.startRunning()
    } catch let error as NSError {
      switch error.code {
      case 561_017_449:
        self.invokeOnError(.session(.audioInUseByOtherApp), cause: error)
      default:
        self.invokeOnError(.session(.audioSessionFailedToActivate), cause: error)
      }
    }
  }

  final func deactivateAudioSession() {
    ReactLogger.log(level: .info, message: "Deactivating Audio Session...")

    audioCaptureSession.stopRunning()
  }

  @objc
  func audioSessionInterrupted(notification: Notification) {
    ReactLogger.log(level: .error, message: "Audio Session Interruption Notification!")
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
      return
    }

    // TODO: Add JS-Event for Audio Session interruptions?
    switch type {
    case .began:
      // Something interrupted our Audio Session, stop recording audio.
      ReactLogger.log(level: .error, message: "The Audio Session was interrupted!")
    case .ended:
      ReactLogger.log(level: .info, message: "The Audio Session interruption has ended.")
      guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
      let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
      if options.contains(.shouldResume) {
        if isRecording {
          CameraQueues.audioQueue.async {
            ReactLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
            // restart audio session because interruption is over
            self.activateAudioSession()
          }
        }
      } else {
        ReactLogger.log(level: .error, message: "Cannot resume interrupted Audio Session!")
      }
    @unknown default:
      ()
    }
  }
}
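The magic number 561_017_449 matched in activateAudioSession() is the four-character code "!pri", AVAudioSession's insufficient-priority error, raised when another app holds the audio hardware. A standalone sketch for decoding such codes:

// Decode an OSStatus-style FourCC error code into its ASCII form.
func fourCharCode(_ code: Int) -> String {
  let bytes = (0..<4).reversed().map { UInt8((code >> ($0 * 8)) & 0xFF) }
  return String(bytes: bytes, encoding: .ascii) ?? "????"
}

print(fourCharCode(561_017_449)) // "!pri"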
package/ios/CameraView+AVCaptureSession.swift (new file, 275 lines)
@@ -0,0 +1,275 @@
//
//  CameraView+AVCaptureSession.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 26.03.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

/**
 Extension for CameraView that sets up the AVCaptureSession, Device and Format.
 */
extension CameraView {
  // pragma MARK: Configure Capture Session

  /**
   Configures the Capture Session.
   */
  final func configureCaptureSession() {
    ReactLogger.log(level: .info, message: "Configuring Session...")
    isReady = false

    #if targetEnvironment(simulator)
      invokeOnError(.device(.notAvailableOnSimulator))
      return
    #endif

    guard cameraId != nil else {
      invokeOnError(.device(.noDevice))
      return
    }
    let cameraId = self.cameraId! as String

    ReactLogger.log(level: .info, message: "Initializing Camera with device \(cameraId)...")
    captureSession.beginConfiguration()
    defer {
      captureSession.commitConfiguration()
    }

    // pragma MARK: Capture Session Inputs
    // Video Input
    do {
      if let videoDeviceInput = videoDeviceInput {
        captureSession.removeInput(videoDeviceInput)
        self.videoDeviceInput = nil
      }
      ReactLogger.log(level: .info, message: "Adding Video input...")
      guard let videoDevice = AVCaptureDevice(uniqueID: cameraId) else {
        invokeOnError(.device(.invalid))
        return
      }
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
      guard captureSession.canAddInput(videoDeviceInput!) else {
        invokeOnError(.parameter(.unsupportedInput(inputDescriptor: "video-input")))
        return
      }
      captureSession.addInput(videoDeviceInput!)
    } catch {
      invokeOnError(.device(.invalid))
      return
    }

    // pragma MARK: Capture Session Outputs

    // Photo Output
    if let photoOutput = photoOutput {
      captureSession.removeOutput(photoOutput)
      self.photoOutput = nil
    }
    if photo?.boolValue == true {
      ReactLogger.log(level: .info, message: "Adding Photo output...")
      photoOutput = AVCapturePhotoOutput()

      if enableHighQualityPhotos?.boolValue == true {
        // TODO: In iOS 16 this will be removed in favor of maxPhotoDimensions.
        photoOutput!.isHighResolutionCaptureEnabled = true
        if #available(iOS 13.0, *) {
          // TODO: Test if this actually does any fusion or if this just calls the captureOutput twice. If the latter, remove it.
          photoOutput!.isVirtualDeviceConstituentPhotoDeliveryEnabled = photoOutput!.isVirtualDeviceConstituentPhotoDeliverySupported
          photoOutput!.maxPhotoQualityPrioritization = .quality
        } else {
          photoOutput!.isDualCameraDualPhotoDeliveryEnabled = photoOutput!.isDualCameraDualPhotoDeliverySupported
        }
      }
      if enableDepthData {
        photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported
      }
      if #available(iOS 12.0, *), enablePortraitEffectsMatteDelivery {
        photoOutput!.isPortraitEffectsMatteDeliveryEnabled = photoOutput!.isPortraitEffectsMatteDeliverySupported
      }
      guard captureSession.canAddOutput(photoOutput!) else {
        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "photo-output")))
        return
      }
      captureSession.addOutput(photoOutput!)
      if videoDeviceInput!.device.position == .front {
        photoOutput!.mirror()
      }
    }

    // Video Output + Frame Processor
    if let videoOutput = videoOutput {
      captureSession.removeOutput(videoOutput)
      self.videoOutput = nil
    }
    if video?.boolValue == true || enableFrameProcessor {
      ReactLogger.log(level: .info, message: "Adding Video Data output...")
      videoOutput = AVCaptureVideoDataOutput()
      guard captureSession.canAddOutput(videoOutput!) else {
        invokeOnError(.parameter(.unsupportedOutput(outputDescriptor: "video-output")))
        return
      }
      videoOutput!.setSampleBufferDelegate(self, queue: CameraQueues.videoQueue)
      videoOutput!.alwaysDiscardsLateVideoFrames = false

      if let pixelFormat = pixelFormat as? String {
        let supportedPixelFormats = videoOutput!.availableVideoPixelFormatTypes
        let defaultFormat = supportedPixelFormats.first! // first value is always the most efficient format
        var pixelFormatType: OSType = defaultFormat
        switch pixelFormat {
        case "yuv":
          if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
            pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
          } else if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
            pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
          } else {
            invokeOnError(.device(.pixelFormatNotSupported))
          }
        case "rgb":
          if supportedPixelFormats.contains(kCVPixelFormatType_32BGRA) {
            pixelFormatType = kCVPixelFormatType_32BGRA
          } else {
            invokeOnError(.device(.pixelFormatNotSupported))
          }
        case "native":
          pixelFormatType = defaultFormat
        default:
          invokeOnError(.parameter(.invalid(unionName: "pixelFormat", receivedValue: pixelFormat)))
        }
        videoOutput!.videoSettings = [
          String(kCVPixelBufferPixelFormatTypeKey): pixelFormatType,
        ]
      }
      captureSession.addOutput(videoOutput!)
    }

    if outputOrientation != .portrait {
      updateOrientation()
    }

    invokeOnInitialized()
    isReady = true
    ReactLogger.log(level: .info, message: "Session successfully configured!")
  }

  // pragma MARK: Configure Device

  /**
   Configures the Video Device with the given FPS and HDR modes.
   */
  final func configureDevice() {
    ReactLogger.log(level: .info, message: "Configuring Device...")
    guard let device = videoDeviceInput?.device else {
      invokeOnError(.session(.cameraNotReady))
      return
    }

    do {
      try device.lockForConfiguration()

      if let fps = fps?.int32Value {
        let supportsGivenFps = device.activeFormat.videoSupportedFrameRateRanges.contains { range in
          return range.includes(fps: Double(fps))
        }
        if !supportsGivenFps {
          invokeOnError(.format(.invalidFps(fps: Int(fps))))
          return
        }

        let duration = CMTimeMake(value: 1, timescale: fps)
        device.activeVideoMinFrameDuration = duration
        device.activeVideoMaxFrameDuration = duration
      } else {
        device.activeVideoMinFrameDuration = CMTime.invalid
        device.activeVideoMaxFrameDuration = CMTime.invalid
      }
      if hdr != nil {
        if hdr == true && !device.activeFormat.isVideoHDRSupported {
          invokeOnError(.format(.invalidHdr))
          return
        }
        if !device.automaticallyAdjustsVideoHDREnabled {
          if device.isVideoHDREnabled != hdr!.boolValue {
            device.isVideoHDREnabled = hdr!.boolValue
          }
        }
      }
      if lowLightBoost != nil {
        if lowLightBoost == true && !device.isLowLightBoostSupported {
          invokeOnError(.device(.lowLightBoostNotSupported))
          return
        }
        if device.automaticallyEnablesLowLightBoostWhenAvailable != lowLightBoost!.boolValue {
          device.automaticallyEnablesLowLightBoostWhenAvailable = lowLightBoost!.boolValue
        }
      }

      device.unlockForConfiguration()
      ReactLogger.log(level: .info, message: "Device successfully configured!")
    } catch let error as NSError {
      invokeOnError(.device(.configureError), cause: error)
      return
    }
  }

  // pragma MARK: Configure Format

  /**
   Configures the Video Device to find the best matching Format.
   */
  final func configureFormat() {
    ReactLogger.log(level: .info, message: "Configuring Format...")
    guard let filter = format else {
      // Format Filter was null. Ignore it.
      return
    }
    guard let device = videoDeviceInput?.device else {
      invokeOnError(.session(.cameraNotReady))
      return
    }

    if device.activeFormat.matchesFilter(filter) {
      ReactLogger.log(level: .info, message: "Active format already matches filter.")
      return
    }

    // get matching format
    let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }.sorted { $0.isBetterThan($1) }
    guard let format = matchingFormats.first else {
      invokeOnError(.format(.invalidFormat))
      return
    }

    do {
      try device.lockForConfiguration()
      device.activeFormat = format
      device.unlockForConfiguration()
      ReactLogger.log(level: .info, message: "Format successfully configured!")
    } catch let error as NSError {
      invokeOnError(.device(.configureError), cause: error)
      return
    }
  }

  // pragma MARK: Notifications/Interruptions

  @objc
  func sessionRuntimeError(notification: Notification) {
    ReactLogger.log(level: .error, message: "Unexpected Camera Runtime Error occurred!")
    guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
      return
    }

    invokeOnError(.unknown(message: error._nsError.description), cause: error._nsError)

    if isActive {
      // restart capture session after an error occurred
      CameraQueues.cameraQueue.async {
        self.captureSession.startRunning()
      }
    }
  }
}
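configureDevice() leans on an includes(fps:) helper on AVFrameRateRange that lives elsewhere in the package. A plausible sketch of its shape, stated as an assumption rather than the shipped extension:

import AVFoundation

// Assumed shape of the helper used in configureDevice(): an FPS value is
// supported if it falls inside the range's min/max frame rates.
extension AVFrameRateRange {
  func includes(fps: Double) -> Bool {
    return fps >= minFrameRate && fps <= maxFrameRate
  }
}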
package/ios/CameraView+Focus.swift (new file, 93 lines)
@@ -0,0 +1,93 @@
//
//  CameraView+Focus.swift
//  mrousavy
//
//  Created by Marc Rousavy on 19.02.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

extension CameraView {
  private func rotateFrameSize(frameSize: CGSize, orientation: UIInterfaceOrientation) -> CGSize {
    switch orientation {
    case .portrait, .portraitUpsideDown, .unknown:
      // swap width and height since the input orientation is rotated
      return CGSize(width: frameSize.height, height: frameSize.width)
    case .landscapeLeft, .landscapeRight:
      // is same as camera sensor orientation
      return frameSize
    @unknown default:
      return frameSize
    }
  }

  /// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
  private func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
    guard let videoDeviceInput = videoDeviceInput else {
      invokeOnError(.session(.cameraNotReady))
      return .zero
    }
    guard let viewScale = window?.screen.scale else {
      invokeOnError(.unknown(message: "View has no parent Window!"))
      return .zero
    }

    let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
                                    orientation: outputOrientation)
    let viewSize = CGSize(width: previewView.bounds.width * viewScale,
                          height: previewView.bounds.height * viewScale)
    let scale = min(frameSize.width / viewSize.width, frameSize.height / viewSize.height)
    let scaledViewSize = CGSize(width: viewSize.width * scale, height: viewSize.height * scale)

    let overlapX = scaledViewSize.width - frameSize.width
    let overlapY = scaledViewSize.height - frameSize.height

    let scaledPoint = CGPoint(x: point.x * scale, y: point.y * scale)

    return CGPoint(x: scaledPoint.x - (overlapX / 2), y: scaledPoint.y - (overlapY / 2))
  }

  /// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1])
  private func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
    guard let videoDeviceInput = videoDeviceInput else {
      invokeOnError(.session(.cameraNotReady))
      return .zero
    }
    let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
                                    orientation: outputOrientation)
    let pointInFrame = convertLayerPointToFramePoint(layerPoint: pointInLayer)
    return CGPoint(x: pointInFrame.x / frameSize.width, y: pointInFrame.y / frameSize.height)
  }

  func focus(point: CGPoint, promise: Promise) {
    withPromise(promise) {
      guard let device = self.videoDeviceInput?.device else {
        throw CameraError.session(SessionError.cameraNotReady)
      }
      if !device.isFocusPointOfInterestSupported {
        throw CameraError.device(DeviceError.focusNotSupported)
      }

      // in {0..1} system
      let normalizedPoint = captureDevicePointConverted(fromLayerPoint: point)

      do {
        try device.lockForConfiguration()

        device.focusPointOfInterest = normalizedPoint
        device.focusMode = .continuousAutoFocus

        if device.isExposurePointOfInterestSupported {
          device.exposurePointOfInterest = normalizedPoint
          device.exposureMode = .continuousAutoExposure
        }

        device.unlockForConfiguration()
        return nil
      } catch {
        throw CameraError.device(DeviceError.configureError)
      }
    }
  }
}
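The two conversion helpers implement an aspect-fill mapping: scale the layer point into frame pixels, subtract half of the overlap the aspect-fill crop discards, then normalize by the frame size. A hypothetical tap handler feeding focus(point:promise:), where makePromise() merely stands in for however the caller builds the package's Promise wrapper:

// Hypothetical gesture callback inside CameraView; `makePromise()` is
// a placeholder, not part of this commit.
@objc func onTapToFocus(_ gesture: UITapGestureRecognizer) {
  let layerPoint = gesture.location(in: self)
  focus(point: layerPoint, promise: makePromise())
}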
package/ios/CameraView+Orientation.swift (new file, 45 lines)
@@ -0,0 +1,45 @@
//
//  CameraView+Orientation.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 04.01.22.
//  Copyright © 2022 mrousavy. All rights reserved.
//

import Foundation
import UIKit

extension CameraView {
  /// Orientation of the input connection (preview)
  private var inputOrientation: UIInterfaceOrientation {
    return .portrait
  }

  /// Orientation of the output connections (photo, video, frame processor)
  var outputOrientation: UIInterfaceOrientation {
    if let userOrientation = orientation as String?,
       let parsedOrientation = try? UIInterfaceOrientation(withString: userOrientation) {
      // user is overriding output orientation
      return parsedOrientation
    } else {
      // use same as input orientation
      return inputOrientation
    }
  }

  func updateOrientation() {
    // Updates the orientation for all rotatable connections (photo, video, frame processor).
    let isMirrored = videoDeviceInput?.device.position == .front

    let connectionOrientation = outputOrientation
    captureSession.outputs.forEach { output in
      output.connections.forEach { connection in
        if connection.isVideoMirroringSupported {
          connection.automaticallyAdjustsVideoMirroring = false
          connection.isVideoMirrored = isMirrored
        }
        connection.setInterfaceOrientation(connectionOrientation)
      }
    }
  }
}
package/ios/CameraView+RecordVideo.swift (new file, 270 lines)
@@ -0,0 +1,270 @@
//
//  CameraView+RecordVideo.swift
//  mrousavy
//
//  Created by Marc Rousavy on 16.12.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

// MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate

extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
  /**
   Starts a video + audio recording with a custom Asset Writer.
   */
  func startRecording(options: NSDictionary, callback jsCallbackFunc: @escaping RCTResponseSenderBlock) {
    CameraQueues.cameraQueue.async {
      ReactLogger.log(level: .info, message: "Starting Video recording...")
      let callback = Callback(jsCallbackFunc)

      var fileType = AVFileType.mov
      if let fileTypeOption = options["fileType"] as? String {
        guard let parsed = try? AVFileType(withString: fileTypeOption) else {
          callback.reject(error: .parameter(.invalid(unionName: "fileType", receivedValue: fileTypeOption)))
          return
        }
        fileType = parsed
      }

      let errorPointer = ErrorPointer(nilLiteral: ())
      let fileExtension = fileType.descriptor ?? "mov"
      guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
        callback.reject(error: .capture(.createTempFileError), cause: errorPointer?.pointee)
        return
      }

      ReactLogger.log(level: .info, message: "File path: \(tempFilePath)")
      let tempURL = URL(string: "file://\(tempFilePath)")!

      if let flashMode = options["flash"] as? String {
        // use the torch as the video's flash
        self.setTorchMode(flashMode)
      }

      guard let videoOutput = self.videoOutput else {
        if self.video?.boolValue == true {
          callback.reject(error: .session(.cameraNotReady))
          return
        } else {
          callback.reject(error: .capture(.videoNotEnabled))
          return
        }
      }
      guard let videoInput = self.videoDeviceInput else {
        callback.reject(error: .session(.cameraNotReady))
        return
      }

      // TODO: The startRecording() func cannot be async because RN doesn't allow
      //   both a callback and a Promise in a single function. Wait for TurboModules?
      //   This means that any errors that occur in this function have to be delegated through
      //   the callback, but I'd prefer for them to throw for the original function instead.

      let enableAudio = self.audio?.boolValue == true

      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
        defer {
          if enableAudio {
            CameraQueues.audioQueue.async {
              self.deactivateAudioSession()
            }
          }
          if options["flash"] != nil {
            // Set torch mode back to what it was before if we used it for the video flash.
            self.setTorchMode(self.torch)
          }
        }

        self.recordingSession = nil
        self.isRecording = false
        ReactLogger.log(level: .info, message: "RecordingSession finished with status \(status.descriptor).")

        if let error = error as NSError? {
          if error.domain == "capture/aborted" {
            callback.reject(error: .capture(.aborted), cause: error)
          } else {
            callback.reject(error: .capture(.unknown(message: "An unknown recording error occurred! \(error.description)")), cause: error)
          }
        } else {
          if status == .completed {
            callback.resolve([
              "path": recordingSession.url.absoluteString,
              "duration": recordingSession.duration,
            ])
          } else {
            callback.reject(error: .unknown(message: "AVAssetWriter completed with status: \(status.descriptor)"))
          }
        }
      }

      let recordingSession: RecordingSession
      do {
        recordingSession = try RecordingSession(url: tempURL,
                                                fileType: fileType,
                                                completion: onFinish)
      } catch let error as NSError {
        callback.reject(error: .capture(.createRecorderError(message: nil)), cause: error)
        return
      }
      self.recordingSession = recordingSession

      var videoCodec: AVVideoCodecType?
      if let codecString = options["videoCodec"] as? String {
        videoCodec = AVVideoCodecType(withString: codecString)
      }

      // Init Video
      guard let videoSettings = self.recommendedVideoSettings(videoOutput: videoOutput, fileType: fileType, videoCodec: videoCodec),
            !videoSettings.isEmpty else {
        callback.reject(error: .capture(.createRecorderError(message: "Failed to get video settings!")))
        return
      }

      // get pixel format (420f, 420v, x420)
      let pixelFormat = CMFormatDescriptionGetMediaSubType(videoInput.device.activeFormat.formatDescription)
      recordingSession.initializeVideoWriter(withSettings: videoSettings,
                                             pixelFormat: pixelFormat)

      // Init Audio (optional)
      if enableAudio {
        // Activate Audio Session asynchronously
        CameraQueues.audioQueue.async {
          self.activateAudioSession()
        }

        if let audioOutput = self.audioOutput,
           let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: fileType) {
          recordingSession.initializeAudioWriter(withSettings: audioSettings)
        }
      }

      // start recording session with or without audio.
      do {
        try recordingSession.startAssetWriter()
      } catch let error as NSError {
        callback.reject(error: .capture(.createRecorderError(message: "RecordingSession failed to start asset writer.")), cause: error)
        return
      }
      self.isRecording = true
    }
  }

  func stopRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      self.isRecording = false

      withPromise(promise) {
        guard let recordingSession = self.recordingSession else {
          throw CameraError.capture(.noRecordingInProgress)
        }
        recordingSession.finish()
        return nil
      }
    }
  }

  func pauseRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = false
        return nil
      }
    }
  }

  func resumeRecording(promise: Promise) {
    CameraQueues.cameraQueue.async {
      withPromise(promise) {
        guard self.recordingSession != nil else {
          // there's no active recording!
          throw CameraError.capture(.noRecordingInProgress)
        }
        self.isRecording = true
        return nil
      }
    }
  }

  public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
      if captureOutput is AVCaptureVideoDataOutput {
        if let frameProcessor = frameProcessor {
          // Call Frame Processor
          let frame = Frame(buffer: sampleBuffer, orientation: bufferOrientation)
          frameProcessor.call(frame)
        }
      }
    #endif

    // Record Video Frame/Audio Sample to File
    if isRecording {
      guard let recordingSession = recordingSession else {
        invokeOnError(.capture(.unknown(message: "isRecording was true but the RecordingSession was null!")))
        return
      }

      switch captureOutput {
      case is AVCaptureVideoDataOutput:
        recordingSession.appendBuffer(sampleBuffer, type: .video, timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
      case is AVCaptureAudioDataOutput:
        let timestamp = CMSyncConvertTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                                          from: audioCaptureSession.masterClock ?? CMClockGetHostTimeClock(),
                                          to: captureSession.masterClock ?? CMClockGetHostTimeClock())
        recordingSession.appendBuffer(sampleBuffer, type: .audio, timestamp: timestamp)
      default:
        break
      }
    }

    #if DEBUG
      if captureOutput is AVCaptureVideoDataOutput {
        // Update FPS Graph per Frame
        if let fpsGraph = fpsGraph {
          DispatchQueue.main.async {
            fpsGraph.onTick(CACurrentMediaTime())
          }
        }
      }
    #endif
  }

  private func recommendedVideoSettings(videoOutput: AVCaptureVideoDataOutput,
                                        fileType: AVFileType,
                                        videoCodec: AVVideoCodecType?) -> [String: Any]? {
    if videoCodec != nil {
      return videoOutput.recommendedVideoSettings(forVideoCodecType: videoCodec!, assetWriterOutputFileType: fileType)
    } else {
      return videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: fileType)
    }
  }

  /**
   Gets the orientation of the CameraView's images (CMSampleBuffers).
   */
  private var bufferOrientation: UIImage.Orientation {
    guard let cameraPosition = videoDeviceInput?.device.position else {
      return .up
    }

    switch outputOrientation {
    case .portrait:
      return cameraPosition == .front ? .leftMirrored : .right
    case .landscapeLeft:
      return cameraPosition == .front ? .downMirrored : .up
    case .portraitUpsideDown:
      return cameraPosition == .front ? .rightMirrored : .left
    case .landscapeRight:
      return cameraPosition == .front ? .upMirrored : .down
    case .unknown:
      return .up
    @unknown default:
      return .up
    }
  }
}
package/ios/CameraView+TakePhoto.swift (new file, 87 lines)
@@ -0,0 +1,87 @@
//
//  CameraView+TakePhoto.swift
//  mrousavy
//
//  Created by Marc Rousavy on 16.12.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension CameraView {
  func takePhoto(options: NSDictionary, promise: Promise) {
    CameraQueues.cameraQueue.async {
      guard let photoOutput = self.photoOutput,
            let videoDeviceInput = self.videoDeviceInput else {
        if self.photo?.boolValue == true {
          promise.reject(error: .session(.cameraNotReady))
          return
        } else {
          promise.reject(error: .capture(.photoNotEnabled))
          return
        }
      }

      ReactLogger.log(level: .info, message: "Capturing photo...")

      // Create photo settings
      let photoSettings = AVCapturePhotoSettings()

      // default, overridable settings if high quality capture was enabled
      if self.enableHighQualityPhotos?.boolValue == true {
        // TODO: On iOS 16+ this will be removed in favor of maxPhotoDimensions.
        photoSettings.isHighResolutionPhotoEnabled = true
        if #available(iOS 13.0, *) {
          photoSettings.photoQualityPrioritization = .quality
        }
      }

      // flash
      if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String {
        guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else {
          promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash)))
          return
        }
        photoSettings.flashMode = flashMode
      }

      // shutter sound
      let enableShutterSound = options["enableShutterSound"] as? Bool ?? true

      // depth data
      photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled
      if #available(iOS 12.0, *) {
        photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled
      }

      // quality prioritization
      if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? String {
        guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else {
          promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization)))
          return
        }
        photoSettings.photoQualityPrioritization = photoQualityPrioritization
      }

      // red-eye reduction
      if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
        photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
      }

      // stabilization
      if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool {
        photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization
      }

      // distortion correction
      if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
        photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection
      }

      photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise, enableShutterSound: enableShutterSound))

      // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too.
      photoOutput.setPreparedPhotoSettingsArray([photoSettings], completionHandler: nil)
    }
  }
}
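takePhoto(options:promise:) pulls loosely-typed keys out of the bridged NSDictionary. A sketch of a call with every optional key populated; cameraView and promise are assumed to be in scope:

// All keys below are ones the method above actually reads.
let options: NSDictionary = [
  "flash": "auto",
  "qualityPrioritization": "balanced",
  "enableAutoRedEyeReduction": true,
  "enableAutoStabilization": true,
  "enableAutoDistortionCorrection": true,
  "enableShutterSound": false,
]
cameraView.takePhoto(options: options, promise: promise)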
package/ios/CameraView+Torch.swift (new file, 51 lines)
@@ -0,0 +1,51 @@
//
//  CameraView+Torch.swift
//  VisionCamera
//
//  Created by Marc Rousavy on 20.07.23.
//  Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension CameraView {
  final func setTorchMode(_ torchMode: String) {
    guard let device = videoDeviceInput?.device else {
      invokeOnError(.session(.cameraNotReady))
      return
    }
    guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
      invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
      return
    }
    if !captureSession.isRunning {
      torchMode = .off
    }
    if device.torchMode == torchMode {
      // no need to run the whole lock/unlock bs
      return
    }
    if !device.hasTorch || !device.isTorchAvailable {
      if torchMode == .off {
        // ignore it, when it's off and not supported, it's off.
        return
      } else {
        // torch mode is .auto or .on, but no torch is available.
        invokeOnError(.device(.flashUnavailable))
        return
      }
    }
    do {
      try device.lockForConfiguration()
      device.torchMode = torchMode
      if torchMode == .on {
        try device.setTorchModeOn(level: 1.0)
      }
      device.unlockForConfiguration()
    } catch let error as NSError {
      invokeOnError(.device(.configureError), cause: error)
      return
    }
  }
}
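setTorchMode(_:) parses its string argument with an AVCaptureDevice.TorchMode(withString:) initializer defined elsewhere in the package. A plausible sketch of that extension, an assumption rather than the shipped code:

import AVFoundation

// Assumed shape of the String-based initializer used by setTorchMode(_:).
extension AVCaptureDevice.TorchMode {
  init?(withString string: String) {
    switch string {
    case "off": self = .off
    case "on": self = .on
    case "auto": self = .auto
    default: return nil
    }
  }
}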
package/ios/CameraView+Zoom.swift (new file, 73 lines)
@@ -0,0 +1,73 @@
//
//  CameraView+Zoom.swift
//  mrousavy
//
//  Created by Marc Rousavy on 18.12.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//

import Foundation

extension CameraView {
  var minAvailableZoom: CGFloat {
    return videoDeviceInput?.device.minAvailableVideoZoomFactor ?? 1
  }

  var maxAvailableZoom: CGFloat {
    return videoDeviceInput?.device.activeFormat.videoMaxZoomFactor ?? 1
  }

  @objc
  final func onPinch(_ gesture: UIPinchGestureRecognizer) {
    guard let device = videoDeviceInput?.device else {
      return
    }

    let scale = max(min(gesture.scale * pinchScaleOffset, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0))
    if gesture.state == .ended {
      pinchScaleOffset = scale
      return
    }

    do {
      try device.lockForConfiguration()
      device.videoZoomFactor = scale
      device.unlockForConfiguration()
    } catch {
      invokeOnError(.device(.configureError))
    }
  }

  func addPinchGestureRecognizer() {
    removePinchGestureRecognizer()
    pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(onPinch(_:)))
    addGestureRecognizer(pinchGestureRecognizer!)
  }

  func removePinchGestureRecognizer() {
    if let pinchGestureRecognizer = pinchGestureRecognizer {
      removeGestureRecognizer(pinchGestureRecognizer)
      self.pinchGestureRecognizer = nil
    }
  }

  @objc
  final func zoom(factor: CGFloat, animated: Bool) {
    guard let device = videoDeviceInput?.device else {
      return
    }

    do {
      try device.lockForConfiguration()
      let clamped = max(min(factor, device.activeFormat.videoMaxZoomFactor), CGFloat(1.0))
      if animated {
        device.ramp(toVideoZoomFactor: clamped, withRate: 1)
      } else {
        device.videoZoomFactor = clamped
      }
      device.unlockForConfiguration()
    } catch {
      invokeOnError(.device(.configureError))
    }
  }
}
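Both onPinch(_:) and zoom(factor:animated:) clamp the requested factor to [1.0, videoMaxZoomFactor] before touching the device. The clamping rule in isolation:

// Standalone sketch of the clamp shared by the pinch handler and zoom().
func clampZoomFactor(_ factor: CGFloat, maxZoom: CGFloat) -> CGFloat {
  return max(min(factor, maxZoom), 1.0)
}

clampZoomFactor(0.5, maxZoom: 16.0)  // 1.0  (cannot zoom out below 1x)
clampZoomFactor(4.0, maxZoom: 16.0)  // 4.0
clampZoomFactor(32.0, maxZoom: 16.0) // 16.0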
287
package/ios/CameraView.swift
Normal file
287
package/ios/CameraView.swift
Normal file
@@ -0,0 +1,287 @@
|
||||
//
// CameraView.swift
// mrousavy
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation
import UIKit

//
// TODOs for the CameraView which are currently too hard to implement either because of AVFoundation's limitations, or my brain capacity
//
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)

// CameraView+TakePhoto
// TODO: Photo HDR

private let propsThatRequireReconfiguration = ["cameraId",
                                               "enableDepthData",
                                               "enableHighQualityPhotos",
                                               "enablePortraitEffectsMatteDelivery",
                                               "photo",
                                               "video",
                                               "enableFrameProcessor",
                                               "pixelFormat"]
private let propsThatRequireDeviceReconfiguration = ["fps",
                                                     "hdr",
                                                     "lowLightBoost"]

// MARK: - CameraView

public final class CameraView: UIView {
  // pragma MARK: React Properties
  // props that require reconfiguring
  @objc var cameraId: NSString?
  @objc var enableDepthData = false
  @objc var enableHighQualityPhotos: NSNumber? // nullable bool
  @objc var enablePortraitEffectsMatteDelivery = false
  // use cases
  @objc var photo: NSNumber? // nullable bool
  @objc var video: NSNumber? // nullable bool
  @objc var audio: NSNumber? // nullable bool
  @objc var enableFrameProcessor = false
  @objc var pixelFormat: NSString?
  // props that require format reconfiguring
  @objc var format: NSDictionary?
  @objc var fps: NSNumber?
  @objc var hdr: NSNumber? // nullable bool
  @objc var lowLightBoost: NSNumber? // nullable bool
  @objc var orientation: NSString?
  // other props
  @objc var isActive = false
  @objc var torch = "off"
  @objc var zoom: NSNumber = 1.0 // in "factor"
  @objc var enableFpsGraph = false
  @objc var videoStabilizationMode: NSString?
  // events
  @objc var onInitialized: RCTDirectEventBlock?
  @objc var onError: RCTDirectEventBlock?
  @objc var onViewReady: RCTDirectEventBlock?
  // zoom
  @objc var enableZoomGesture = false {
    didSet {
      if enableZoomGesture {
        addPinchGestureRecognizer()
      } else {
        removePinchGestureRecognizer()
      }
    }
  }

  // pragma MARK: Internal Properties
  var isMounted = false
  var isReady = false
  // Capture Session
  let captureSession = AVCaptureSession()
  let audioCaptureSession = AVCaptureSession()
  // Inputs & Outputs
  var videoDeviceInput: AVCaptureDeviceInput?
  var audioDeviceInput: AVCaptureDeviceInput?
  var photoOutput: AVCapturePhotoOutput?
  var videoOutput: AVCaptureVideoDataOutput?
  var audioOutput: AVCaptureAudioDataOutput?
  // CameraView+RecordView (+ Frame Processor)
  var isRecording = false
  var recordingSession: RecordingSession?
  #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
    @objc public var frameProcessor: FrameProcessor?
  #endif
  // CameraView+Zoom
  var pinchGestureRecognizer: UIPinchGestureRecognizer?
  var pinchScaleOffset: CGFloat = 1.0

  var previewView: PreviewView
  #if DEBUG
    var fpsGraph: RCTFPSGraph?
  #endif

  /// Returns whether the AVCaptureSession is currently running (reflected by isActive)
  var isRunning: Bool {
    return captureSession.isRunning
  }

  // pragma MARK: Setup
  override public init(frame: CGRect) {
    previewView = PreviewView(frame: frame, session: captureSession)
    super.init(frame: frame)

    addSubview(previewView)

    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: captureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(sessionRuntimeError),
                                           name: .AVCaptureSessionRuntimeError,
                                           object: audioCaptureSession)
    NotificationCenter.default.addObserver(self,
                                           selector: #selector(audioSessionInterrupted),
                                           name: AVAudioSession.interruptionNotification,
                                           object: AVAudioSession.sharedInstance())
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented.")
  }

  deinit {
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: captureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: .AVCaptureSessionRuntimeError,
                                              object: audioCaptureSession)
    NotificationCenter.default.removeObserver(self,
                                              name: AVAudioSession.interruptionNotification,
                                              object: AVAudioSession.sharedInstance())
  }

  override public func willMove(toSuperview newSuperview: UIView?) {
    super.willMove(toSuperview: newSuperview)

    if newSuperview != nil {
      if !isMounted {
        isMounted = true
        onViewReady?(nil)
      }
    }
  }

  override public func layoutSubviews() {
    previewView.frame = frame
    previewView.bounds = bounds
  }

  // pragma MARK: Props updating
  override public final func didSetProps(_ changedProps: [String]!) {
    ReactLogger.log(level: .info, message: "Updating \(changedProps.count) prop(s)...")
    let shouldReconfigure = changedProps.contains { propsThatRequireReconfiguration.contains($0) }
    let shouldReconfigureFormat = shouldReconfigure || changedProps.contains("format")
    let shouldReconfigureDevice = shouldReconfigureFormat || changedProps.contains { propsThatRequireDeviceReconfiguration.contains($0) }
    let shouldReconfigureAudioSession = changedProps.contains("audio")

    let willReconfigure = shouldReconfigure || shouldReconfigureFormat || shouldReconfigureDevice

    let shouldCheckActive = willReconfigure || changedProps.contains("isActive") || captureSession.isRunning != isActive
    let shouldUpdateTorch = willReconfigure || changedProps.contains("torch") || shouldCheckActive
    let shouldUpdateZoom = willReconfigure || changedProps.contains("zoom") || shouldCheckActive
    let shouldUpdateVideoStabilization = willReconfigure || changedProps.contains("videoStabilizationMode")
    let shouldUpdateOrientation = willReconfigure || changedProps.contains("orientation")

    if changedProps.contains("enableFpsGraph") {
      DispatchQueue.main.async {
        self.setupFpsGraph()
      }
    }

    if shouldReconfigure ||
      shouldReconfigureAudioSession ||
      shouldCheckActive ||
      shouldUpdateTorch ||
      shouldUpdateZoom ||
      shouldReconfigureFormat ||
      shouldReconfigureDevice ||
      shouldUpdateVideoStabilization ||
      shouldUpdateOrientation {
      CameraQueues.cameraQueue.async {
        // Video Configuration
        if shouldReconfigure {
          self.configureCaptureSession()
        }
        if shouldReconfigureFormat {
          self.configureFormat()
        }
        if shouldReconfigureDevice {
          self.configureDevice()
        }
        if shouldUpdateVideoStabilization, let videoStabilizationMode = self.videoStabilizationMode as String? {
          self.captureSession.setVideoStabilizationMode(videoStabilizationMode)
        }

        if shouldUpdateZoom {
          let zoomClamped = max(min(CGFloat(self.zoom.doubleValue), self.maxAvailableZoom), self.minAvailableZoom)
          self.zoom(factor: zoomClamped, animated: false)
          self.pinchScaleOffset = zoomClamped
        }

        if shouldCheckActive && self.captureSession.isRunning != self.isActive {
          if self.isActive {
            ReactLogger.log(level: .info, message: "Starting Session...")
            self.captureSession.startRunning()
            ReactLogger.log(level: .info, message: "Started Session!")
          } else {
            ReactLogger.log(level: .info, message: "Stopping Session...")
            self.captureSession.stopRunning()
            ReactLogger.log(level: .info, message: "Stopped Session!")
          }
        }

        if shouldUpdateOrientation {
          self.updateOrientation()
        }

        // This is a wacky workaround: if torch mode is set immediately after `startRunning()`, the session isn't quite ready yet and will ignore torch.
        if shouldUpdateTorch {
          CameraQueues.cameraQueue.asyncAfter(deadline: .now() + 0.1) {
            self.setTorchMode(self.torch)
          }
        }
      }

      // Audio Configuration
      if shouldReconfigureAudioSession {
        CameraQueues.audioQueue.async {
          self.configureAudioSession()
        }
      }
    }
  }

  func setupFpsGraph() {
    #if DEBUG
      if enableFpsGraph {
        if fpsGraph != nil { return }
        fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
        fpsGraph!.layer.zPosition = 9999.0
        addSubview(fpsGraph!)
      } else {
        fpsGraph?.removeFromSuperview()
        fpsGraph = nil
      }
    #endif
  }

  // pragma MARK: Event Invokers
  final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
    ReactLogger.log(level: .error, message: "Invoking onError(): \(error.message)")
    guard let onError = onError else { return }

    var causeDictionary: [String: Any]?
    if let cause = cause {
      causeDictionary = [
        "code": cause.code,
        "domain": cause.domain,
        "message": cause.description,
        "details": cause.userInfo,
      ]
    }
    onError([
      "code": error.code,
      "message": error.message,
      "cause": causeDictionary ?? NSNull(),
    ])
  }

  final func invokeOnInitialized() {
    ReactLogger.log(level: .info, message: "Camera initialized!")
    guard let onInitialized = onInitialized else { return }
    onInitialized([String: Any]())
  }
}
94
package/ios/CameraViewManager.m
Normal file
@@ -0,0 +1,94 @@
//
// CameraViewManager.m
// mrousavy
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

#import <Foundation/Foundation.h>

#import <React/RCTUtils.h>
#import <React/RCTViewManager.h>

@interface RCT_EXTERN_REMAP_MODULE (CameraView, CameraViewManager, RCTViewManager)

// Module Functions
RCT_EXTERN_METHOD(getCameraPermissionStatus
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(getMicrophonePermissionStatus
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestCameraPermission
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(requestMicrophonePermission
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);

RCT_EXTERN_METHOD(getAvailableCameraDevices
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);

// Camera View Properties
RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL);
RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
// use cases
RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(audio, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enableFrameProcessor, BOOL);
// device format
RCT_EXPORT_VIEW_PROPERTY(format, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(fps, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(hdr, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(lowLightBoost, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(videoStabilizationMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(pixelFormat, NSString);
// other props
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableFpsGraph, BOOL);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSString);
// Camera View Events
RCT_EXPORT_VIEW_PROPERTY(onError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onViewReady, RCTDirectEventBlock);

// Camera View Functions
RCT_EXTERN_METHOD(startRecording
                  : (nonnull NSNumber*)node options
                  : (NSDictionary*)options onRecordCallback
                  : (RCTResponseSenderBlock)onRecordCallback);
RCT_EXTERN_METHOD(pauseRecording
                  : (nonnull NSNumber*)node resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(resumeRecording
                  : (nonnull NSNumber*)node resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(stopRecording
                  : (nonnull NSNumber*)node resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(takePhoto
                  : (nonnull NSNumber*)node options
                  : (NSDictionary*)options resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);
RCT_EXTERN_METHOD(focus
                  : (nonnull NSNumber*)node point
                  : (NSDictionary*)point resolve
                  : (RCTPromiseResolveBlock)resolve reject
                  : (RCTPromiseRejectBlock)reject);

// Static Methods
RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(installFrameProcessorBindings);

@end
166
package/ios/CameraViewManager.swift
Normal file
@@ -0,0 +1,166 @@
//
// CameraViewManager.swift
// mrousavy
//
// Created by Marc Rousavy on 09.11.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
  // pragma MARK: Properties

  override var methodQueue: DispatchQueue! {
    return DispatchQueue.main
  }

  override static func requiresMainQueueSetup() -> Bool {
    return true
  }

  override final func view() -> UIView! {
    return CameraView()
  }

  // pragma MARK: React Functions

  @objc
  final func installFrameProcessorBindings() -> NSNumber {
    #if VISION_CAMERA_ENABLE_FRAME_PROCESSORS
      // Called on JS Thread (blocking sync method)
      let result = VisionCameraInstaller.install(to: bridge)
      return NSNumber(value: result)
    #else
      return false as NSNumber
    #endif
  }

  @objc
  final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
    let component = getCameraView(withTag: node)
    component.startRecording(options: options, callback: onRecordCallback)
  }

  @objc
  final func pauseRecording(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.pauseRecording(promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func resumeRecording(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.resumeRecording(promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func stopRecording(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.stopRecording(promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func takePhoto(_ node: NSNumber, options: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.takePhoto(options: options, promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func focus(_ node: NSNumber, point: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let promise = Promise(resolver: resolve, rejecter: reject)
    guard let x = point["x"] as? NSNumber, let y = point["y"] as? NSNumber else {
      promise.reject(error: .parameter(.invalid(unionName: "point", receivedValue: point.description)))
      return
    }
    let component = getCameraView(withTag: node)
    component.focus(point: CGPoint(x: x.doubleValue, y: y.doubleValue), promise: promise)
  }

  @objc
  final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(),
                                                              mediaType: .video,
                                                              position: .unspecified)
      return discoverySession.devices.map {
        return [
          "id": $0.uniqueID,
          "devices": $0.physicalDevices.map(\.deviceType.descriptor),
          "position": $0.position.descriptor,
          "name": $0.localizedName,
          "hasFlash": $0.hasFlash,
          "hasTorch": $0.hasTorch,
          "minZoom": $0.minAvailableVideoZoomFactor,
          "neutralZoom": $0.neutralZoomFactor,
          "maxZoom": $0.maxAvailableVideoZoomFactor,
          "isMultiCam": $0.isMultiCam,
          "supportsDepthCapture": false, // TODO: supportsDepthCapture
          "supportsRawCapture": false, // TODO: supportsRawCapture
          "supportsLowLightBoost": $0.isLowLightBoostSupported,
          "supportsFocus": $0.isFocusPointOfInterestSupported,
          "hardwareLevel": "full",
          "sensorOrientation": "portrait", // TODO: Sensor Orientation?
          "formats": $0.formats.map { format -> [String: Any] in
            format.toDictionary()
          },
        ]
      }
    }
  }

  @objc
  final func getCameraPermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .video)
      return status.descriptor
    }
  }

  @objc
  final func getMicrophonePermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .audio)
      return status.descriptor
    }
  }

  @objc
  final func requestCameraPermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .video) { granted in
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }

  @objc
  final func requestMicrophonePermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .audio) { granted in
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }

  // MARK: Private

  private func getCameraView(withTag tag: NSNumber) -> CameraView {
    // swiftlint:disable force_cast
    return bridge.uiManager.view(forReactTag: tag) as! CameraView
    // swiftlint:enable force_cast
  }

  private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
    var deviceTypes: [AVCaptureDevice.DeviceType] = []
    if #available(iOS 13.0, *) {
      deviceTypes.append(.builtInTripleCamera)
      deviceTypes.append(.builtInDualWideCamera)
      deviceTypes.append(.builtInUltraWideCamera)
    }
    deviceTypes.append(.builtInDualCamera)
    deviceTypes.append(.builtInWideAngleCamera)
    deviceTypes.append(.builtInTelephotoCamera)
    return deviceTypes
  }
}
31
package/ios/Extensions/AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift
Normal file
@@ -0,0 +1,31 @@
//
// AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift
// VisionCamera
//
// Created by Marc Rousavy on 05.05.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAssetWriterInputPixelBufferAdaptor {
  /**
   Convenience initializer to extract correct attributes from the given videoSettings.
   */
  convenience init(assetWriterInput: AVAssetWriterInput,
                   withVideoSettings videoSettings: [String: Any],
                   pixelFormat: OSType) {
    var attributes: [String: Any] = [:]

    if let width = videoSettings[AVVideoWidthKey] as? NSNumber,
       let height = videoSettings[AVVideoHeightKey] as? NSNumber {
      attributes[kCVPixelBufferWidthKey as String] = width as CFNumber
      attributes[kCVPixelBufferHeightKey as String] = height as CFNumber
    }

    attributes[kCVPixelBufferPixelFormatTypeKey as String] = pixelFormat

    self.init(assetWriterInput: assetWriterInput, sourcePixelBufferAttributes: attributes)
  }
}
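A usage sketch for the convenience initializer above; the writer settings are hypothetical values, not taken from this commit:

import AVFoundation

let videoSettings: [String: Any] = [
  AVVideoCodecKey: AVVideoCodecType.h264,
  AVVideoWidthKey: 1920,
  AVVideoHeightKey: 1080,
]
let writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
// The adaptor's pixel buffer pool now matches the writer's dimensions and pixel format.
let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput,
                                                   withVideoSettings: videoSettings,
                                                   pixelFormat: kCVPixelFormatType_32BGRA)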
23
package/ios/Extensions/AVAudioSession+trySetAllowHaptics.swift
Normal file
@@ -0,0 +1,23 @@
//
// AVAudioSession+trySetAllowHaptics.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAudioSession {
  /**
   Tries to set allowHapticsAndSystemSoundsDuringRecording, ignoring any errors.
   */
  func trySetAllowHaptics(_ allowHaptics: Bool) {
    if #available(iOS 13.0, *) {
      if !self.allowHapticsAndSystemSoundsDuringRecording {
        try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
      }
    }
  }
}
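Usage sketch: keep system haptics alive while the audio session records, without caring about failures:

AVAudioSession.sharedInstance().trySetAllowHaptics(true)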
23
package/ios/Extensions/AVAudioSession+updateCategory.swift
Normal file
@@ -0,0 +1,23 @@
//
// AVAudioSession+updateCategory.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAudioSession {
  /**
   Calls `setCategory` only if the given category or options differ from the currently set category and options.
   */
  func updateCategory(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
    if self.category != category || categoryOptions.rawValue != options.rawValue {
      ReactLogger.log(level: .info,
                      message: "Changing AVAudioSession category from \(self.category.rawValue) -> \(category.rawValue)")
      try setCategory(category, options: options)
    }
  }
}
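Usage sketch (the category and options shown are illustrative); because the setter is skipped when nothing changed, this is cheap to call on every configure pass:

try? AVAudioSession.sharedInstance().updateCategory(.playAndRecord,
                                                    options: [.mixWithOthers, .allowBluetoothA2DP])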
34
package/ios/Extensions/AVCaptureConnection+setInterfaceOrientation.swift
Normal file
@@ -0,0 +1,34 @@
//
// AVCaptureConnection+setInterfaceOrientation.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.07.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureConnection {
  /**
   Sets the `videoOrientation` to the given `orientation` if video orientation setting is supported.
   */
  func setInterfaceOrientation(_ orientation: UIInterfaceOrientation) {
    if isVideoOrientationSupported {
      switch orientation {
      case .portrait:
        videoOrientation = .portrait
      case .portraitUpsideDown:
        videoOrientation = .portraitUpsideDown
      case .landscapeLeft:
        videoOrientation = .landscapeLeft
      case .landscapeRight:
        videoOrientation = .landscapeRight
      case .unknown:
        fallthrough
      @unknown default:
        videoOrientation = .portrait
      }
    }
  }
}
22
package/ios/Extensions/AVCaptureDevice+isMultiCam.swift
Normal file
@@ -0,0 +1,22 @@
//
// AVCaptureDevice+isMultiCam.swift
// mrousavy
//
// Created by Marc Rousavy on 07.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   Returns true if the device is a virtual multi-cam, false otherwise.
   */
  var isMultiCam: Bool {
    if #available(iOS 13.0, *) {
      return self.isVirtualDevice
    } else {
      return false
    }
  }
}
28
package/ios/Extensions/AVCaptureDevice+neutralZoom.swift
Normal file
@@ -0,0 +1,28 @@
//
// AVCaptureDevice+neutralZoom.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   Get the value at which the Zoom factor is neutral.

   For normal wide-angle devices, this is always going to be 1.0, since this is the default scale.
   For devices with an ultra-wide-angle camera, this value is going to be the value where the wide-angle device will switch over.
   */
  var neutralZoomFactor: CGFloat {
    if #available(iOS 13.0, *) {
      if let indexOfWideAngle = self.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) {
        if let zoomFactor = self.virtualDeviceSwitchOverVideoZoomFactors[safe: indexOfWideAngle - 1] {
          return CGFloat(zoomFactor.doubleValue)
        }
      }
    }
    return 1.0
  }
}
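A sketch of why this matters, assuming `device` is an `AVCaptureDevice`: on a virtual multi-cam the wide-angle lens does not sit at `videoZoomFactor == 1.0`, so a UI that wants to display "1x" at the wide-angle lens divides by the neutral factor:

let displayZoom = device.videoZoomFactor / device.neutralZoomFactor // 1.0 at the wide-angle lens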
22
package/ios/Extensions/AVCaptureDevice+physicalDevices.swift
Normal file
@@ -0,0 +1,22 @@
//
// AVCaptureDevice+physicalDevices.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   If the device is a virtual multi-cam, this returns `constituentDevices`, otherwise this returns an array of a single element, `self`.
   */
  var physicalDevices: [AVCaptureDevice] {
    if #available(iOS 13.0, *), isVirtualDevice {
      return self.constituentDevices
    } else {
      return [self]
    }
  }
}
39
package/ios/Extensions/AVCaptureDevice.Format+isBetterThan.swift
Normal file
@@ -0,0 +1,39 @@
//
// AVCaptureDevice.Format+isBetterThan.swift
// mrousavy
//
// Created by Marc Rousavy on 19.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /** Compares the current Format to the given format and returns true if the current format has either:
   * 1. Higher still image capture dimensions
   * 2. Higher video format dimensions (iOS 13.0)
   * 3. Higher FPS
   */
  func isBetterThan(_ other: AVCaptureDevice.Format) -> Bool {
    // compare still image dimensions
    let leftDimensions = highResolutionStillImageDimensions
    let rightDimensions = other.highResolutionStillImageDimensions
    if leftDimensions.height * leftDimensions.width > rightDimensions.height * rightDimensions.width {
      return true
    }

    // compare video dimensions
    let leftVideo = videoDimensions
    let rightVideo = other.videoDimensions
    if leftVideo.height * leftVideo.width > rightVideo.height * rightVideo.width {
      return true
    }

    // compare max fps
    if maxFrameRate > other.maxFrameRate {
      return true
    }

    return false
  }
}
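A sketch of picking the "best" format with this comparator, assuming `device` is an `AVCaptureDevice`:

let bestFormat = device.formats.reduce(nil as AVCaptureDevice.Format?) { best, format in
  guard let best = best else { return format }
  return format.isBetterThan(best) ? format : best
}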
83
package/ios/Extensions/AVCaptureDevice.Format+matchesFilter.swift
Normal file
@@ -0,0 +1,83 @@
//
// AVCaptureDevice.Format+matchesFilter.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /**
   * Checks whether the given filter (NSDictionary, JSON Object) matches the given AVCaptureDevice Format.
   * The given `filter` dictionary must be of type `CameraDeviceFormat` (from `CameraDevice.d.ts`).
   */
  func matchesFilter(_ filter: NSDictionary) -> Bool {
    if let photoHeight = filter.value(forKey: "photoHeight") as? NSNumber {
      if highResolutionStillImageDimensions.height != photoHeight.intValue {
        return false
      }
    }
    if let photoWidth = filter.value(forKey: "photoWidth") as? NSNumber {
      if highResolutionStillImageDimensions.width != photoWidth.intValue {
        return false
      }
    }
    if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber {
      if videoDimensions.height != CGFloat(videoHeight.doubleValue) {
        return false
      }
    }
    if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber {
      if videoDimensions.width != CGFloat(videoWidth.doubleValue) {
        return false
      }
    }
    if let maxISO = filter.value(forKey: "maxISO") as? NSNumber {
      if self.maxISO != maxISO.floatValue {
        return false
      }
    }
    if let minISO = filter.value(forKey: "minISO") as? NSNumber {
      if self.minISO != minISO.floatValue {
        return false
      }
    }
    if let fieldOfView = filter.value(forKey: "fieldOfView") as? NSNumber {
      if videoFieldOfView != fieldOfView.floatValue {
        return false
      }
    }
    if let maxZoom = filter.value(forKey: "maxZoom") as? NSNumber {
      if videoMaxZoomFactor != CGFloat(maxZoom.doubleValue) {
        return false
      }
    }
    if let minFps = filter.value(forKey: "minFps") as? NSNumber {
      if minFrameRate != Float64(minFps.doubleValue) {
        return false
      }
    }
    if let maxFps = filter.value(forKey: "maxFps") as? NSNumber {
      if maxFrameRate != Float64(maxFps.doubleValue) {
        return false
      }
    }
    if let autoFocusSystem = filter.value(forKey: "autoFocusSystem") as? String,
       let avAutoFocusSystem = try? AVCaptureDevice.Format.AutoFocusSystem(withString: autoFocusSystem) {
      if self.autoFocusSystem != avAutoFocusSystem {
        return false
      }
    }
    if let videoStabilizationModes = filter.value(forKey: "videoStabilizationModes") as? [String] {
      let avVideoStabilizationModes = videoStabilizationModes.map { try? AVCaptureVideoStabilizationMode(withString: $0) }
      let allStabilizationModesIncluded = self.videoStabilizationModes.allSatisfy { avVideoStabilizationModes.contains($0) }
      if !allStabilizationModesIncluded {
        return false
      }
    }

    return true
  }
}
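A sketch of matching against a JS-side format, with a hypothetical filter dictionary and assuming `device` is an `AVCaptureDevice`; note the comparisons above require exact equality:

let filter: NSDictionary = ["photoWidth": 4032, "photoHeight": 3024, "maxFps": 30]
let matchingFormats = device.formats.filter { $0.matchesFilter(filter) }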
60
package/ios/Extensions/AVCaptureDevice.Format+toDictionary.swift
Normal file
@@ -0,0 +1,60 @@
//
// AVCaptureDevice.Format+toDictionary.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

private func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
  var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
  if #available(iOS 13, *) {
    modes.append(.cinematicExtended)
  }
  return modes
}

extension AVCaptureDevice.Format {
  var videoStabilizationModes: [AVCaptureVideoStabilizationMode] {
    return getAllVideoStabilizationModes().filter { self.isVideoStabilizationModeSupported($0) }
  }

  var minFrameRate: Float64 {
    let minRange = videoSupportedFrameRateRanges.min { l, r in
      return l.minFrameRate < r.minFrameRate
    }
    return minRange?.minFrameRate ?? 0
  }

  var maxFrameRate: Float64 {
    let maxRange = videoSupportedFrameRateRanges.max { l, r in
      return l.maxFrameRate < r.maxFrameRate
    }
    return maxRange?.maxFrameRate ?? 0
  }

  func toDictionary() -> [String: Any] {
    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
    let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }

    return [
      "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
      "autoFocusSystem": autoFocusSystem.descriptor,
      "photoHeight": highResolutionStillImageDimensions.height,
      "photoWidth": highResolutionStillImageDimensions.width,
      "videoHeight": videoDimensions.height,
      "videoWidth": videoDimensions.width,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": videoFieldOfView,
      "maxZoom": videoMaxZoomFactor,
      "supportsVideoHDR": isVideoHDRSupported,
      "supportsPhotoHDR": false,
      "minFps": minFrameRate,
      "maxFps": maxFrameRate,
      "pixelFormats": pixelFormats.map(\.unionValue),
    ]
  }
}
24
package/ios/Extensions/AVCaptureDevice.Format+videoDimensions.swift
Normal file
@@ -0,0 +1,24 @@
//
// AVCaptureDevice.Format+videoDimensions.swift
// VisionCamera
//
// Created by Marc Rousavy on 03.08.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.Format {
  /**
   * Returns the video dimensions, adjusted to take pixel aspect ratio and/or clean
   * aperture into account.
   *
   * Pixel aspect ratio is used to adjust the width, leaving the height alone.
   */
  var videoDimensions: CGSize {
    return CMVideoFormatDescriptionGetPresentationDimensions(formatDescription,
                                                             usePixelAspectRatio: true,
                                                             useCleanAperture: true)
  }
}
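For contrast, a sketch of the unadjusted dimensions, assuming `format` is an `AVCaptureDevice.Format`:

let raw = CMVideoFormatDescriptionGetDimensions(format.formatDescription) // encoded size
let presented = format.videoDimensions // CGSize, corrected for pixel aspect ratio / clean aperture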
20
package/ios/Extensions/AVCapturePhotoOutput+mirror.swift
Normal file
@@ -0,0 +1,20 @@
//
// AVCapturePhotoOutput+mirror.swift
// mrousavy
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCapturePhotoOutput {
  func mirror() {
    connections.forEach { connection in
      if connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = true
      }
    }
  }
}
28
package/ios/Extensions/AVCaptureSession+setVideoStabilizationMode.swift
Normal file
@@ -0,0 +1,28 @@
//
// AVCaptureSession+setVideoStabilizationMode.swift
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 Facebook. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureSession {
  /**
   Set the given video stabilization mode for all capture connections.
   */
  func setVideoStabilizationMode(_ mode: String) {
    if #available(iOS 13.0, *) {
      guard let mode = try? AVCaptureVideoStabilizationMode(withString: mode) else {
        return
      }
      connections.forEach { connection in
        if connection.isVideoStabilizationSupported {
          connection.preferredVideoStabilizationMode = mode
        }
      }
    }
  }
}
18
package/ios/Extensions/AVFrameRateRange+includes.swift
Normal file
@@ -0,0 +1,18 @@
//
// AVFrameRateRange+includes.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVFrameRateRange {
  /**
   * Returns true if this `AVFrameRateRange` contains the given `fps`.
   */
  func includes(fps: Double) -> Bool {
    return fps >= minFrameRate && fps <= maxFrameRate
  }
}
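A sketch of using this to find a format that can run at 60 FPS, assuming `device` is an `AVCaptureDevice`:

let format60 = device.formats.first { format in
  format.videoSupportedFrameRateRanges.contains { $0.includes(fps: 60) }
}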
18
package/ios/Extensions/Collection+safe.swift
Normal file
@@ -0,0 +1,18 @@
//
// Collection+safe.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

extension Collection {
  /**
   Returns the element at the specified index if it is within bounds, otherwise nil.
   */
  subscript(safe index: Index) -> Element? {
    return indices.contains(index) ? self[index] : nil
  }
}
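Usage sketch; this is what `neutralZoomFactor` above relies on when indexing `virtualDeviceSwitchOverVideoZoomFactors` with `indexOfWideAngle - 1`:

let factors = [2.0, 6.0]
let first = factors[safe: 0]   // Optional(2.0)
let missing = factors[safe: -1] // nil instead of a crash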
17
package/ios/Extensions/FourCharCode+toString.swift
Normal file
@@ -0,0 +1,17 @@
//
// FourCharCode+toString.swift
// VisionCamera
//
// Created by Thomas Coldwell on 28/10/2021.
// Based off this SO answer: https://stackoverflow.com/a/25625744
//

extension FourCharCode {
  func toString() -> String {
    var s = String(UnicodeScalar((self >> 24) & 255)!)
    s.append(String(UnicodeScalar((self >> 16) & 255)!))
    s.append(String(UnicodeScalar((self >> 8) & 255)!))
    s.append(String(UnicodeScalar(self & 255)!))
    return s
  }
}
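Usage sketch: render a CoreVideo pixel format code as its four-character string:

import CoreVideo

let code: FourCharCode = kCVPixelFormatType_32BGRA
print(code.toString()) // "BGRA"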
23
package/ios/Frame Processor/Frame.h
Normal file
@@ -0,0 +1,23 @@
//
// Frame.h
// VisionCamera
//
// Created by Marc Rousavy on 15.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#pragma once

#import <CoreMedia/CMSampleBuffer.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIImage.h>

@interface Frame : NSObject

- (instancetype _Nonnull)initWithBuffer:(CMSampleBufferRef _Nonnull)buffer
                            orientation:(UIImageOrientation)orientation;

@property(nonatomic, readonly) CMSampleBufferRef _Nonnull buffer;
@property(nonatomic, readonly) UIImageOrientation orientation;

@end
31
package/ios/Frame Processor/Frame.m
Normal file
@@ -0,0 +1,31 @@
//
// Frame.m
// VisionCamera
//
// Created by Marc Rousavy on 08.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#import "Frame.h"
#import <CoreMedia/CMSampleBuffer.h>
#import <Foundation/Foundation.h>

@implementation Frame {
  CMSampleBufferRef _Nonnull buffer;
  UIImageOrientation orientation;
}

- (instancetype)initWithBuffer:(CMSampleBufferRef _Nonnull)buffer
                   orientation:(UIImageOrientation)orientation {
  self = [super init];
  if (self) {
    _buffer = buffer;
    _orientation = orientation;
  }
  return self;
}

@synthesize buffer = _buffer;
@synthesize orientation = _orientation;

@end
28
package/ios/Frame Processor/FrameHostObject.h
Normal file
@@ -0,0 +1,28 @@
//
// FrameHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 22.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#pragma once

#import <CoreMedia/CMSampleBuffer.h>
#import <jsi/jsi.h>

#import "Frame.h"

using namespace facebook;

class JSI_EXPORT FrameHostObject : public jsi::HostObject {
public:
  explicit FrameHostObject(Frame* frame) : frame(frame) {}

public:
  jsi::Value get(jsi::Runtime&, const jsi::PropNameID& name) override;
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& rt) override;

public:
  Frame* frame;
};
188
package/ios/Frame Processor/FrameHostObject.mm
Normal file
@@ -0,0 +1,188 @@
//
// FrameHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 22.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#import "FrameHostObject.h"
#import "WKTJsiHostObject.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>

#import "../../cpp/JSITypedArray.h"

std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt) {
  std::vector<jsi::PropNameID> result;
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("width")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("height")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("bytesPerRow")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("planesCount")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("orientation")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isMirrored")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("timestamp")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("pixelFormat")));
  // Conversion
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toString")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("toArrayBuffer")));
  // Ref Management
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("isValid")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("incrementRefCount")));
  result.push_back(jsi::PropNameID::forUtf8(rt, std::string("decrementRefCount")));

  return result;
}

jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);

  if (name == "toString") {
    auto toString = JSI_HOST_FUNCTION_LAMBDA {
      if (this->frame == nil) {
        return jsi::String::createFromUtf8(runtime, "[closed frame]");
      }
      auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
      auto width = CVPixelBufferGetWidth(imageBuffer);
      auto height = CVPixelBufferGetHeight(imageBuffer);

      NSMutableString* string = [NSMutableString stringWithFormat:@"%lu x %lu Frame", width, height];
      return jsi::String::createFromUtf8(runtime, string.UTF8String);
    };
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "toString"), 0, toString);
  }
  if (name == "incrementRefCount") {
    auto incrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
      // Increment retain count by one so ARC doesn't destroy the Frame Buffer.
      CFRetain(frame.buffer);
      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "incrementRefCount"), 0, incrementRefCount);
  }
  if (name == "decrementRefCount") {
    auto decrementRefCount = JSI_HOST_FUNCTION_LAMBDA {
      // Decrement retain count by one. If the retain count is zero, ARC will destroy the Frame
      // Buffer.
      CFRelease(frame.buffer);
      return jsi::Value::undefined();
    };
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "decrementRefCount"), 0, decrementRefCount);
  }
  if (name == "toArrayBuffer") {
    auto toArrayBuffer = JSI_HOST_FUNCTION_LAMBDA {
      auto pixelBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
      auto bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
      auto height = CVPixelBufferGetHeight(pixelBuffer);

      auto arraySize = bytesPerRow * height;

      static constexpr auto ARRAYBUFFER_CACHE_PROP_NAME = "__frameArrayBufferCache";
      if (!runtime.global().hasProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME)) {
        vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray> arrayBuffer(runtime,
                                                                                  arraySize);
        runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
      }

      auto arrayBufferCache =
          runtime.global().getPropertyAsObject(runtime, ARRAYBUFFER_CACHE_PROP_NAME);
      auto arrayBuffer = vision::getTypedArray(runtime, arrayBufferCache)
                             .get<vision::TypedArrayKind::Uint8ClampedArray>(runtime);

      if (arrayBuffer.size(runtime) != arraySize) {
        arrayBuffer =
            vision::TypedArray<vision::TypedArrayKind::Uint8ClampedArray>(runtime, arraySize);
        runtime.global().setProperty(runtime, ARRAYBUFFER_CACHE_PROP_NAME, arrayBuffer);
      }

      CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
      auto buffer = (uint8_t*)CVPixelBufferGetBaseAddress(pixelBuffer);
      arrayBuffer.updateUnsafe(runtime, buffer, arraySize);
      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

      return arrayBuffer;
    };
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "toArrayBuffer"), 0, toArrayBuffer);
  }

  if (name == "isValid") {
    auto isValid = frame != nil && frame.buffer != nil && CFGetRetainCount(frame.buffer) > 0 &&
                   CMSampleBufferIsValid(frame.buffer);
    return jsi::Value(isValid);
  }
  if (name == "width") {
    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
    auto width = CVPixelBufferGetWidth(imageBuffer);
    return jsi::Value((double)width);
  }
  if (name == "height") {
    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
    auto height = CVPixelBufferGetHeight(imageBuffer);
    return jsi::Value((double)height);
  }
  if (name == "orientation") {
    switch (frame.orientation) {
      case UIImageOrientationUp:
      case UIImageOrientationUpMirrored:
        return jsi::String::createFromUtf8(runtime, "portrait");
      case UIImageOrientationDown:
      case UIImageOrientationDownMirrored:
        return jsi::String::createFromUtf8(runtime, "portrait-upside-down");
      case UIImageOrientationLeft:
      case UIImageOrientationLeftMirrored:
        return jsi::String::createFromUtf8(runtime, "landscape-left");
      case UIImageOrientationRight:
      case UIImageOrientationRightMirrored:
        return jsi::String::createFromUtf8(runtime, "landscape-right");
    }
  }
  if (name == "isMirrored") {
    switch (frame.orientation) {
      case UIImageOrientationUp:
      case UIImageOrientationDown:
      case UIImageOrientationLeft:
      case UIImageOrientationRight:
        return jsi::Value(false);
      case UIImageOrientationDownMirrored:
      case UIImageOrientationUpMirrored:
      case UIImageOrientationLeftMirrored:
      case UIImageOrientationRightMirrored:
        return jsi::Value(true);
    }
  }
  if (name == "timestamp") {
    auto timestamp = CMSampleBufferGetPresentationTimeStamp(frame.buffer);
    auto seconds = static_cast<double>(CMTimeGetSeconds(timestamp));
    return jsi::Value(seconds * 1000.0);
  }
  if (name == "pixelFormat") {
    auto format = CMSampleBufferGetFormatDescription(frame.buffer);
    auto mediaType = CMFormatDescriptionGetMediaSubType(format);
    switch (mediaType) {
      case kCVPixelFormatType_32BGRA:
        return jsi::String::createFromUtf8(runtime, "rgb");
      case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
      case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
        return jsi::String::createFromUtf8(runtime, "yuv");
      default:
        return jsi::String::createFromUtf8(runtime, "unknown");
    }
  }
  if (name == "bytesPerRow") {
    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
    auto bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    return jsi::Value((double)bytesPerRow);
  }
  if (name == "planesCount") {
    auto imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer);
    auto planesCount = CVPixelBufferGetPlaneCount(imageBuffer);
    return jsi::Value((double)planesCount);
  }

  // fallback to base implementation
  return HostObject::get(runtime, propName);
}
33
package/ios/Frame Processor/FrameProcessor.h
Normal file
@@ -0,0 +1,33 @@
//
// FrameProcessor.h
// VisionCamera
//
// Created by Marc Rousavy on 13.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#pragma once

#import "Frame.h"
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

#ifdef __cplusplus
#import "FrameHostObject.h"
#import "WKTJsiWorklet.h"
#import <jsi/jsi.h>
#import <memory>
#endif

@interface FrameProcessor : NSObject

#ifdef __cplusplus
- (instancetype _Nonnull)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
                                 context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context;

- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject;
#endif

- (void)call:(Frame* _Nonnull)frame;

@end
64
package/ios/Frame Processor/FrameProcessor.mm
Normal file
@@ -0,0 +1,64 @@
//
// FrameProcessor.mm
// VisionCamera
//
// Created by Marc Rousavy on 13.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#import "FrameProcessor.h"
#import <Foundation/Foundation.h>

#import "FrameHostObject.h"
#import "WKTJsiWorklet.h"
#import <jsi/jsi.h>
#import <memory>

using namespace facebook;

@implementation FrameProcessor {
  std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
  std::shared_ptr<RNWorklet::WorkletInvoker> _workletInvoker;
}

- (instancetype)initWithWorklet:(std::shared_ptr<RNWorklet::JsiWorklet>)worklet
                        context:(std::shared_ptr<RNWorklet::JsiWorkletContext>)context {
  if (self = [super init]) {
    _workletInvoker = std::make_shared<RNWorklet::WorkletInvoker>(worklet);
    _workletContext = context;
  }
  return self;
}

- (void)callWithFrameHostObject:(std::shared_ptr<FrameHostObject>)frameHostObject {
  // Call the Frame Processor on the Worklet Runtime
  jsi::Runtime& runtime = _workletContext->getWorkletRuntime();

  try {
    // Wrap HostObject as JSI Value
    auto argument = jsi::Object::createFromHostObject(runtime, frameHostObject);
    jsi::Value jsValue(std::move(argument));

    // Call the Worklet with the Frame JS Host Object as an argument
    _workletInvoker->call(runtime, jsi::Value::undefined(), &jsValue, 1);
  } catch (jsi::JSError& jsError) {
    // A JS error occurred, print it to the console.
    auto message = jsError.getMessage();

    _workletContext->invokeOnJsThread([message](jsi::Runtime& jsRuntime) {
      auto logFn = jsRuntime.global()
                       .getPropertyAsObject(jsRuntime, "console")
                       .getPropertyAsFunction(jsRuntime, "error");
      logFn.call(jsRuntime, jsi::String::createFromUtf8(
                                jsRuntime, "Frame Processor threw an error: " + message));
    });
  }
}

- (void)call:(Frame* _Nonnull)frame {
  // Create the Frame Host Object wrapping the internal Frame
  auto frameHostObject = std::make_shared<FrameHostObject>(frame);
  [self callWithFrameHostObject:frameHostObject];
}

@end
28
package/ios/Frame Processor/FrameProcessorPlugin.h
Normal file
@@ -0,0 +1,28 @@
//
// FrameProcessorPlugin.h
// VisionCamera
//
// Created by Marc Rousavy on 01.05.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#pragma once

#import "Frame.h"
#import <Foundation/Foundation.h>

/// The base class for a Frame Processor Plugin which can be called synchronously from a JS Frame
/// Processor.
///
/// Subclass this class in a Swift or Objective-C class and override the `callback:withArguments:`
/// method, and implement your Frame Processing there.
///
/// Use `[FrameProcessorPluginRegistry addFrameProcessorPlugin:]` to register the Plugin to the
/// VisionCamera Runtime.
@interface FrameProcessorPlugin : NSObject

/// The actual callback when calling this plugin. Any Frame Processing should be handled there.
/// Make sure your code is optimized, as this is a hot path.
- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments;

@end
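A minimal Swift sketch of such a subclass (the class name and return value are illustrative, and the exact bridged Swift signature may differ slightly from what is shown here):

import CoreMedia
import CoreVideo

class ExamplePlugin: FrameProcessorPlugin {
  override func callback(_ frame: Frame, withArguments arguments: [AnyHashable: Any]?) -> Any? {
    // Runs synchronously on the Frame Processor thread, so keep it fast.
    guard let imageBuffer = CMSampleBufferGetImageBuffer(frame.buffer) else { return nil }
    return ["width": CVPixelBufferGetWidth(imageBuffer),
            "height": CVPixelBufferGetHeight(imageBuffer)]
  }
}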
22
package/ios/Frame Processor/FrameProcessorPlugin.m
Normal file
@@ -0,0 +1,22 @@
//
// FrameProcessorPlugin.m
// VisionCamera
//
// Created by Marc Rousavy on 31.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#import "FrameProcessorPlugin.h"

// Base implementation (empty)
@implementation FrameProcessorPlugin

- (id _Nullable)callback:(Frame* _Nonnull)frame withArguments:(NSDictionary* _Nullable)arguments {
  [NSException
       raise:NSInternalInconsistencyException
      format:@"Frame Processor Plugin does not override the `callback(frame:withArguments:)` method!"];
  return nil;
}

@end
32
package/ios/Frame Processor/FrameProcessorPluginHostObject.h
Normal file
@@ -0,0 +1,32 @@
//
// FrameProcessorPluginHostObject.h
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#pragma once

#import "FrameProcessorPlugin.h"
#import <ReactCommon/CallInvoker.h>
#import <jsi/jsi.h>
#import <memory>

using namespace facebook;

class FrameProcessorPluginHostObject : public jsi::HostObject {
public:
  explicit FrameProcessorPluginHostObject(FrameProcessorPlugin* plugin,
                                          std::shared_ptr<react::CallInvoker> callInvoker)
      : _plugin(plugin), _callInvoker(callInvoker) {}
  ~FrameProcessorPluginHostObject() {}

public:
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
  jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;

private:
  FrameProcessorPlugin* _plugin;
  std::shared_ptr<react::CallInvoker> _callInvoker;
};
55
package/ios/Frame Processor/FrameProcessorPluginHostObject.mm
Normal file
@@ -0,0 +1,55 @@
//
// FrameProcessorPluginHostObject.mm
// VisionCamera
//
// Created by Marc Rousavy on 21.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#import "FrameProcessorPluginHostObject.h"
#import "FrameHostObject.h"
#import "JSINSObjectConversion.h"
#import <Foundation/Foundation.h>
#import <vector>

using namespace facebook;

std::vector<jsi::PropNameID>
FrameProcessorPluginHostObject::getPropertyNames(jsi::Runtime& runtime) {
  std::vector<jsi::PropNameID> result;
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("call")));
  return result;
}

jsi::Value FrameProcessorPluginHostObject::get(jsi::Runtime& runtime,
                                               const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);

  if (name == "call") {
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "call"), 2,
        [=](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
            size_t count) -> jsi::Value {
          // Frame is first argument
          auto frameHostObject =
              arguments[0].asObject(runtime).asHostObject<FrameHostObject>(runtime);
          Frame* frame = frameHostObject->frame;

          // Options are second argument (possibly undefined)
          NSDictionary* options = nil;
          if (count > 1) {
            auto optionsObject = arguments[1].asObject(runtime);
            options = JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, optionsObject,
                                                                            _callInvoker);
          }

          // Call actual Frame Processor Plugin with the converted options
          id result = [_plugin callback:frame withArguments:options];

          // Convert result value to jsi::Value (possibly undefined)
          return JSINSObjectConversion::convertObjCObjectToJSIValue(runtime, result);
        });
  }

  return jsi::Value::undefined();
}
|
26
package/ios/Frame Processor/FrameProcessorPluginRegistry.h
Normal file
@@ -0,0 +1,26 @@
//
// FrameProcessorPluginRegistry.h
// VisionCamera
//
// Created by Marc Rousavy on 24.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#pragma once

#import "Frame.h"
#import "FrameProcessorPlugin.h"
#import <Foundation/Foundation.h>

@interface FrameProcessorPluginRegistry : NSObject

typedef FrameProcessorPlugin* _Nonnull (^PluginInitializerFunction)(
    NSDictionary* _Nullable options);

+ (void)addFrameProcessorPlugin:(NSString* _Nonnull)name
                withInitializer:(PluginInitializerFunction _Nonnull)pluginInitializer;

+ (FrameProcessorPlugin* _Nullable)getPlugin:(NSString* _Nonnull)name
                                 withOptions:(NSDictionary* _Nullable)options;

@end
46
package/ios/Frame Processor/FrameProcessorPluginRegistry.m
Normal file
@@ -0,0 +1,46 @@
//
// FrameProcessorPluginRegistry.m
// VisionCamera
//
// Created by Marc Rousavy on 24.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#import "FrameProcessorPluginRegistry.h"
#import <Foundation/Foundation.h>

@implementation FrameProcessorPluginRegistry

+ (NSMutableDictionary<NSString*, PluginInitializerFunction>*)frameProcessorPlugins {
  static NSMutableDictionary<NSString*, PluginInitializerFunction>* plugins = nil;
  if (plugins == nil) {
    plugins = [[NSMutableDictionary alloc] init];
  }
  return plugins;
}

+ (void)addFrameProcessorPlugin:(NSString*)name
                withInitializer:(PluginInitializerFunction)pluginInitializer {
  BOOL alreadyExists =
      [[FrameProcessorPluginRegistry frameProcessorPlugins] valueForKey:name] != nil;
  NSAssert(!alreadyExists,
           @"Tried to add a Frame Processor Plugin with a name that already exists! Either choose "
           @"unique names, or remove the unused plugin. Name: %@",
           name);

  [[FrameProcessorPluginRegistry frameProcessorPlugins] setValue:pluginInitializer forKey:name];
}

+ (FrameProcessorPlugin*)getPlugin:(NSString* _Nonnull)name
                       withOptions:(NSDictionary* _Nullable)options {
  PluginInitializerFunction initializer =
      [[FrameProcessorPluginRegistry frameProcessorPlugins] objectForKey:name];
  if (initializer == nil) {
    return nil;
  }

  return initializer(options);
}

@end
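To make a plugin discoverable from JavaScript (via `getFrameProcessorPlugin`), its initializer must be registered before any Frame Processor runs, typically at app startup. A hedged Swift sketch — the plugin name is hypothetical and the Swift-imported method name is an assumption based on the Objective-C declaration above:

```swift
// Register the hypothetical ExamplePlugin under a name JS can look up.
FrameProcessorPluginRegistry.addFrameProcessorPlugin("example_plugin") { options in
  // `options` is the NSDictionary passed from getFrameProcessorPlugin(name, options).
  return ExamplePlugin()
}
```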
65
package/ios/Frame Processor/JSINSObjectConversion.h
Normal file
@@ -0,0 +1,65 @@
//
// JSINSObjectConversion.h
// VisionCamera
//
// Created by Marc Rousavy on 30.04.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

#pragma once

#import <React/RCTBridgeModule.h>
#import <ReactCommon/CallInvoker.h>
#import <jsi/jsi.h>

namespace JSINSObjectConversion {

using namespace facebook;
using namespace facebook::react;

// NSNumber -> boolean
jsi::Value convertNSNumberToJSIBoolean(jsi::Runtime& runtime, NSNumber* value);

// NSNumber -> number
jsi::Value convertNSNumberToJSINumber(jsi::Runtime& runtime, NSNumber* value);

// NSString -> string
jsi::String convertNSStringToJSIString(jsi::Runtime& runtime, NSString* value);

// NSDictionary -> {}
jsi::Object convertNSDictionaryToJSIObject(jsi::Runtime& runtime, NSDictionary* value);

// NSArray -> []
jsi::Array convertNSArrayToJSIArray(jsi::Runtime& runtime, NSArray* value);

// id -> ???
jsi::Value convertObjCObjectToJSIValue(jsi::Runtime& runtime, id value);

// string -> NSString
NSString* convertJSIStringToNSString(jsi::Runtime& runtime, const jsi::String& value);

// any... -> NSArray
NSArray* convertJSICStyleArrayToNSArray(jsi::Runtime& runtime, const jsi::Value* array,
                                        size_t length, std::shared_ptr<CallInvoker> jsInvoker);

// NSArray -> any...
jsi::Value* convertNSArrayToJSICStyleArray(jsi::Runtime& runtime, NSArray* array);

// [] -> NSArray
NSArray* convertJSIArrayToNSArray(jsi::Runtime& runtime, const jsi::Array& value,
                                  std::shared_ptr<CallInvoker> jsInvoker);

// {} -> NSDictionary
NSDictionary* convertJSIObjectToNSDictionary(jsi::Runtime& runtime, const jsi::Object& value,
                                             std::shared_ptr<CallInvoker> jsInvoker);

// any -> id
id convertJSIValueToObjCObject(jsi::Runtime& runtime, const jsi::Value& value,
                               std::shared_ptr<CallInvoker> jsInvoker);

// (any...) => any -> (void)(id, id)
RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime& runtime,
                                                    const jsi::Function& value,
                                                    std::shared_ptr<CallInvoker> jsInvoker);

} // namespace JSINSObjectConversion
216
package/ios/Frame Processor/JSINSObjectConversion.mm
Normal file
@@ -0,0 +1,216 @@
//
// JSINSObjectConversion.mm
// VisionCamera
//
// Forked and Adjusted by Marc Rousavy on 02.05.21.
// Copyright © 2021 mrousavy & Facebook. All rights reserved.
//
// Forked and adjusted from:
// https://github.com/facebook/react-native/blob/900210cacc4abca0079e3903781bc223c80c8ac7/ReactCommon/react/nativemodule/core/platform/ios/RCTTurboModule.mm
// Original Copyright Notice:
//
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
//

#import "JSINSObjectConversion.h"
#import "../Frame Processor/Frame.h"
#import "../Frame Processor/FrameHostObject.h"
#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>
#import <ReactCommon/CallInvoker.h>
#import <ReactCommon/RCTBlockGuard.h>
#import <ReactCommon/TurboModuleUtils.h>
#import <jsi/jsi.h>

using namespace facebook;
using namespace facebook::react;

namespace JSINSObjectConversion {

jsi::Value convertNSNumberToJSIBoolean(jsi::Runtime& runtime, NSNumber* value) {
  return jsi::Value((bool)[value boolValue]);
}

jsi::Value convertNSNumberToJSINumber(jsi::Runtime& runtime, NSNumber* value) {
  return jsi::Value([value doubleValue]);
}

jsi::String convertNSStringToJSIString(jsi::Runtime& runtime, NSString* value) {
  return jsi::String::createFromUtf8(runtime, [value UTF8String] ?: "");
}

jsi::Object convertNSDictionaryToJSIObject(jsi::Runtime& runtime, NSDictionary* value) {
  jsi::Object result = jsi::Object(runtime);
  for (NSString* k in value) {
    result.setProperty(runtime, [k UTF8String], convertObjCObjectToJSIValue(runtime, value[k]));
  }
  return result;
}

jsi::Array convertNSArrayToJSIArray(jsi::Runtime& runtime, NSArray* value) {
  jsi::Array result = jsi::Array(runtime, value.count);
  for (size_t i = 0; i < value.count; i++) {
    result.setValueAtIndex(runtime, i, convertObjCObjectToJSIValue(runtime, value[i]));
  }
  return result;
}

jsi::Value convertObjCObjectToJSIValue(jsi::Runtime& runtime, id value) {
  if (value == nil) {
    return jsi::Value::undefined();
  } else if ([value isKindOfClass:[NSString class]]) {
    return convertNSStringToJSIString(runtime, (NSString*)value);
  } else if ([value isKindOfClass:[NSNumber class]]) {
    if ([value isKindOfClass:[@YES class]]) {
      return convertNSNumberToJSIBoolean(runtime, (NSNumber*)value);
    }
    return convertNSNumberToJSINumber(runtime, (NSNumber*)value);
  } else if ([value isKindOfClass:[NSDictionary class]]) {
    return convertNSDictionaryToJSIObject(runtime, (NSDictionary*)value);
  } else if ([value isKindOfClass:[NSArray class]]) {
    return convertNSArrayToJSIArray(runtime, (NSArray*)value);
  } else if (value == (id)kCFNull) {
    return jsi::Value::null();
  } else if ([value isKindOfClass:[Frame class]]) {
    auto frameHostObject = std::make_shared<FrameHostObject>((Frame*)value);
    return jsi::Object::createFromHostObject(runtime, frameHostObject);
  }
  return jsi::Value::undefined();
}

NSString* convertJSIStringToNSString(jsi::Runtime& runtime, const jsi::String& value) {
  return [NSString stringWithUTF8String:value.utf8(runtime).c_str()];
}

NSArray* convertJSICStyleArrayToNSArray(jsi::Runtime& runtime, const jsi::Value* array,
                                        size_t length, std::shared_ptr<CallInvoker> jsInvoker) {
  if (length < 1)
    return @[];
  NSMutableArray* result = [NSMutableArray new];
  for (size_t i = 0; i < length; i++) {
    // Insert kCFNull for an `undefined` value to preserve the indices.
    [result addObject:convertJSIValueToObjCObject(runtime, array[i], jsInvoker) ?: (id)kCFNull];
  }
  return [result copy];
}

jsi::Value* convertNSArrayToJSICStyleArray(jsi::Runtime& runtime, NSArray* array) {
  auto result = new jsi::Value[array.count];
  for (size_t i = 0; i < array.count; i++) {
    result[i] = convertObjCObjectToJSIValue(runtime, array[i]);
  }
  return result;
}

NSArray* convertJSIArrayToNSArray(jsi::Runtime& runtime, const jsi::Array& value,
                                  std::shared_ptr<CallInvoker> jsInvoker) {
  size_t size = value.size(runtime);
  NSMutableArray* result = [NSMutableArray new];
  for (size_t i = 0; i < size; i++) {
    // Insert kCFNull for an `undefined` value to preserve the indices.
    [result
        addObject:convertJSIValueToObjCObject(runtime, value.getValueAtIndex(runtime, i), jsInvoker)
                      ?: (id)kCFNull];
  }
  return [result copy];
}

NSDictionary* convertJSIObjectToNSDictionary(jsi::Runtime& runtime, const jsi::Object& value,
                                             std::shared_ptr<CallInvoker> jsInvoker) {
  jsi::Array propertyNames = value.getPropertyNames(runtime);
  size_t size = propertyNames.size(runtime);
  NSMutableDictionary* result = [NSMutableDictionary new];
  for (size_t i = 0; i < size; i++) {
    jsi::String name = propertyNames.getValueAtIndex(runtime, i).getString(runtime);
    NSString* k = convertJSIStringToNSString(runtime, name);
    id v = convertJSIValueToObjCObject(runtime, value.getProperty(runtime, name), jsInvoker);
    if (v) {
      result[k] = v;
    }
  }
  return [result copy];
}

id convertJSIValueToObjCObject(jsi::Runtime& runtime, const jsi::Value& value,
                               std::shared_ptr<CallInvoker> jsInvoker) {
  if (value.isUndefined() || value.isNull()) {
    return nil;
  }
  if (value.isBool()) {
    return @(value.getBool());
  }
  if (value.isNumber()) {
    return @(value.getNumber());
  }
  if (value.isString()) {
    return convertJSIStringToNSString(runtime, value.getString(runtime));
  }
  if (value.isObject()) {
    jsi::Object o = value.getObject(runtime);
    if (o.isArray(runtime)) {
      return convertJSIArrayToNSArray(runtime, o.getArray(runtime), jsInvoker);
    }
    if (o.isFunction(runtime)) {
      return convertJSIFunctionToCallback(runtime, std::move(o.getFunction(runtime)), jsInvoker);
    }
    if (o.isHostObject(runtime)) {
      auto hostObject = o.asHostObject(runtime);
      auto frame = dynamic_cast<FrameHostObject*>(hostObject.get());
      if (frame != nullptr) {
        return frame->frame;
      }
    }
    return convertJSIObjectToNSDictionary(runtime, o, jsInvoker);
  }

  throw std::runtime_error("Unsupported jsi::Value kind");
}

RCTResponseSenderBlock convertJSIFunctionToCallback(jsi::Runtime& runtime,
                                                    const jsi::Function& value,
                                                    std::shared_ptr<CallInvoker> jsInvoker) {
  auto weakWrapper = CallbackWrapper::createWeak(value.getFunction(runtime), runtime, jsInvoker);
  RCTBlockGuard* blockGuard = [[RCTBlockGuard alloc] initWithCleanup:^() {
    auto strongWrapper = weakWrapper.lock();
    if (strongWrapper) {
      strongWrapper->destroy();
    }
  }];

  BOOL __block wrapperWasCalled = NO;
  RCTResponseSenderBlock callback = ^(NSArray* responses) {
    if (wrapperWasCalled) {
      throw std::runtime_error("callback arg cannot be called more than once");
    }

    auto strongWrapper = weakWrapper.lock();
    if (!strongWrapper) {
      return;
    }

    strongWrapper->jsInvoker().invokeAsync([weakWrapper, responses, blockGuard]() {
      auto strongWrapper2 = weakWrapper.lock();
      if (!strongWrapper2) {
        return;
      }

      const jsi::Value* args = convertNSArrayToJSICStyleArray(strongWrapper2->runtime(), responses);
      strongWrapper2->callback().call(strongWrapper2->runtime(), args,
                                      static_cast<size_t>(responses.count));
      strongWrapper2->destroy();
      delete[] args;

      // Capturing blockGuard keeps the CallbackWrapper alive; it deletes the
      // wrapper if the block gets deallocated without being invoked.
      (void)blockGuard;
    });

    wrapperWasCalled = YES;
  };

  return [callback copy];
}

} // namespace JSINSObjectConversion
45
package/ios/Frame Processor/VisionCameraProxy.h
Normal file
@@ -0,0 +1,45 @@
//
// VisionCameraProxy.h
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#pragma once

#import <Foundation/Foundation.h>
#import <React/RCTBridge.h>

#ifdef __cplusplus
#import "WKTJsiWorkletContext.h"
#import <ReactCommon/CallInvoker.h>
#import <jsi/jsi.h>

using namespace facebook;

class VisionCameraProxy : public jsi::HostObject {
public:
  explicit VisionCameraProxy(jsi::Runtime& runtime,
                             std::shared_ptr<react::CallInvoker> callInvoker);
  ~VisionCameraProxy();

public:
  std::vector<jsi::PropNameID> getPropertyNames(jsi::Runtime& runtime) override;
  jsi::Value get(jsi::Runtime& runtime, const jsi::PropNameID& name) override;

private:
  void setFrameProcessor(jsi::Runtime& runtime, int viewTag, const jsi::Object& frameProcessor);
  void removeFrameProcessor(jsi::Runtime& runtime, int viewTag);
  jsi::Value getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name,
                                     const jsi::Object& options);

private:
  std::shared_ptr<RNWorklet::JsiWorkletContext> _workletContext;
  std::shared_ptr<react::CallInvoker> _callInvoker;
};
#endif

@interface VisionCameraInstaller : NSObject
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge;
@end
178
package/ios/Frame Processor/VisionCameraProxy.mm
Normal file
@@ -0,0 +1,178 @@
//
// VisionCameraProxy.mm
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

#import "VisionCameraProxy.h"
#import <Foundation/Foundation.h>
#import <jsi/jsi.h>

#import "../../cpp/JSITypedArray.h"
#import "FrameHostObject.h"
#import "FrameProcessor.h"
#import "FrameProcessorPluginHostObject.h"
#import "FrameProcessorPluginRegistry.h"
#import "JSINSObjectConversion.h"
#import "WKTJsiWorklet.h"

#import <React/RCTBridge+Private.h>
#import <React/RCTBridge.h>
#import <React/RCTUIManager.h>
#import <React/RCTUtils.h>
#import <ReactCommon/RCTTurboModuleManager.h>

// Swift forward-declarations
__attribute__((objc_runtime_name("_TtC12VisionCamera12CameraQueues")))
@interface CameraQueues : NSObject
@property(nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;
@end

__attribute__((objc_runtime_name("_TtC12VisionCamera10CameraView")))
@interface CameraView : UIView
@property(nonatomic, copy) FrameProcessor* _Nullable frameProcessor;
@end

using namespace facebook;

VisionCameraProxy::VisionCameraProxy(jsi::Runtime& runtime,
                                     std::shared_ptr<react::CallInvoker> callInvoker) {
  _callInvoker = callInvoker;

  NSLog(@"VisionCameraProxy: Creating Worklet Context...");
  auto runOnJS = [callInvoker](std::function<void()>&& f) {
    // Run on the React JS Runtime
    callInvoker->invokeAsync(std::move(f));
  };
  auto runOnWorklet = [](std::function<void()>&& f) {
    // Run on the Frame Processor Worklet Runtime
    dispatch_async(CameraQueues.videoQueue, [f = std::move(f)]() { f(); });
  };

  _workletContext = std::make_shared<RNWorklet::JsiWorkletContext>("VisionCamera", &runtime,
                                                                   runOnJS, runOnWorklet);
  NSLog(@"VisionCameraProxy: Worklet Context Created!");
}

VisionCameraProxy::~VisionCameraProxy() {
  NSLog(@"VisionCameraProxy: Destroying context...");
  // Destroy the ArrayBuffer cache for both the JS and the Worklet Runtime.
  vision::invalidateArrayBufferCache(*_workletContext->getJsRuntime());
  vision::invalidateArrayBufferCache(_workletContext->getWorkletRuntime());
}

std::vector<jsi::PropNameID> VisionCameraProxy::getPropertyNames(jsi::Runtime& runtime) {
  std::vector<jsi::PropNameID> result;
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("setFrameProcessor")));
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("removeFrameProcessor")));
  result.push_back(jsi::PropNameID::forUtf8(runtime, std::string("getFrameProcessorPlugin")));
  return result;
}

void VisionCameraProxy::setFrameProcessor(jsi::Runtime& runtime, int viewTag,
                                          const jsi::Object& object) {
  auto frameProcessorType = object.getProperty(runtime, "type").asString(runtime).utf8(runtime);
  auto worklet = std::make_shared<RNWorklet::JsiWorklet>(
      runtime, object.getProperty(runtime, "frameProcessor"));

  RCTExecuteOnMainQueue(^{
    auto currentBridge = [RCTBridge currentBridge];
    auto anonymousView =
        [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
    auto view = static_cast<CameraView*>(anonymousView);
    if (frameProcessorType == "frame-processor") {
      view.frameProcessor = [[FrameProcessor alloc] initWithWorklet:worklet
                                                            context:_workletContext];
    } else {
      throw std::runtime_error("Unknown FrameProcessor.type passed! Received: " +
                               frameProcessorType);
    }
  });
}

void VisionCameraProxy::removeFrameProcessor(jsi::Runtime& runtime, int viewTag) {
  RCTExecuteOnMainQueue(^{
    auto currentBridge = [RCTBridge currentBridge];
    auto anonymousView =
        [currentBridge.uiManager viewForReactTag:[NSNumber numberWithDouble:viewTag]];
    auto view = static_cast<CameraView*>(anonymousView);
    view.frameProcessor = nil;
  });
}

jsi::Value VisionCameraProxy::getFrameProcessorPlugin(jsi::Runtime& runtime, std::string name,
                                                      const jsi::Object& options) {
  NSString* key = [NSString stringWithUTF8String:name.c_str()];
  NSDictionary* optionsObjc =
      JSINSObjectConversion::convertJSIObjectToNSDictionary(runtime, options, _callInvoker);
  FrameProcessorPlugin* plugin = [FrameProcessorPluginRegistry getPlugin:key
                                                             withOptions:optionsObjc];
  if (plugin == nil) {
    return jsi::Value::undefined();
  }

  auto pluginHostObject = std::make_shared<FrameProcessorPluginHostObject>(plugin, _callInvoker);
  return jsi::Object::createFromHostObject(runtime, pluginHostObject);
}

jsi::Value VisionCameraProxy::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
  auto name = propName.utf8(runtime);

  if (name == "setFrameProcessor") {
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "setFrameProcessor"), 1,
        [this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
               size_t count) -> jsi::Value {
          auto viewTag = arguments[0].asNumber();
          auto object = arguments[1].asObject(runtime);
          this->setFrameProcessor(runtime, static_cast<int>(viewTag), object);
          return jsi::Value::undefined();
        });
  }
  if (name == "removeFrameProcessor") {
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "removeFrameProcessor"), 1,
        [this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
               size_t count) -> jsi::Value {
          auto viewTag = arguments[0].asNumber();
          this->removeFrameProcessor(runtime, static_cast<int>(viewTag));
          return jsi::Value::undefined();
        });
  }
  if (name == "getFrameProcessorPlugin") {
    return jsi::Function::createFromHostFunction(
        runtime, jsi::PropNameID::forUtf8(runtime, "getFrameProcessorPlugin"), 1,
        [this](jsi::Runtime& runtime, const jsi::Value& thisValue, const jsi::Value* arguments,
               size_t count) -> jsi::Value {
          if (count < 1 || !arguments[0].isString()) {
            throw jsi::JSError(runtime, "First argument needs to be a string (pluginName)!");
          }
          auto pluginName = arguments[0].asString(runtime).utf8(runtime);
          auto options = count > 1 ? arguments[1].asObject(runtime) : jsi::Object(runtime);

          return this->getFrameProcessorPlugin(runtime, pluginName, options);
        });
  }

  return jsi::Value::undefined();
}

@implementation VisionCameraInstaller
+ (BOOL)installToBridge:(RCTBridge* _Nonnull)bridge {
  RCTCxxBridge* cxxBridge = (RCTCxxBridge*)[RCTBridge currentBridge];
  if (!cxxBridge.runtime) {
    return NO;
  }

  jsi::Runtime& runtime = *(jsi::Runtime*)cxxBridge.runtime;

  // global.VisionCameraProxy
  auto visionCameraProxy = std::make_shared<VisionCameraProxy>(runtime, bridge.jsCallInvoker);
  runtime.global().setProperty(runtime, "VisionCameraProxy",
                               jsi::Object::createFromHostObject(runtime, visionCameraProxy));

  return YES;
}
@end
28
package/ios/Parsers/AVAssetWriter.Status+descriptor.swift
Normal file
@@ -0,0 +1,28 @@
//
// AVAssetWriter.Status+descriptor.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.05.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVAssetWriter.Status {
  var descriptor: String {
    switch self {
    case .cancelled:
      return "cancelled"
    case .completed:
      return "completed"
    case .failed:
      return "failed"
    case .unknown:
      return "unknown"
    case .writing:
      return "writing"
    @unknown default:
      fatalError("Unknown AVAssetWriter.Status value! \(rawValue)")
    }
  }
}
26
package/ios/Parsers/AVAuthorizationStatus+descriptor.swift
Normal file
@@ -0,0 +1,26 @@
//
// AVAuthorizationStatus+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVAuthorizationStatus {
  var descriptor: String {
    switch self {
    case .authorized:
      return "granted"
    case .denied:
      return "denied"
    case .notDetermined:
      return "not-determined"
    case .restricted:
      return "restricted"
    @unknown default:
      fatalError("AVAuthorizationStatus has unknown state.")
    }
  }
}
38
package/ios/Parsers/AVCaptureDevice.DeviceType+descriptor.swift
Normal file
@@ -0,0 +1,38 @@
//
// AVCaptureDevice.DeviceType+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.DeviceType {
  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .builtInDualWideCamera:
        return "dual-wide-camera"
      case .builtInTripleCamera:
        return "triple-camera"
      case .builtInUltraWideCamera:
        return "ultra-wide-angle-camera"
      default:
        break
      }
    }
    switch self {
    case .builtInDualCamera:
      return "dual-camera"
    case .builtInTelephotoCamera:
      return "telephoto-camera"
    case .builtInWideAngleCamera:
      return "wide-angle-camera"
    default:
      // e.g. `.builtInTrueDepthCamera`
      fatalError("AVCaptureDevice.DeviceType has unknown state.")
    }
  }
}
27
package/ios/Parsers/AVCaptureDevice.FlashMode+descriptor.swift
Normal file
@@ -0,0 +1,27 @@
//
// AVCaptureDevice.FlashMode+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.FlashMode {
  init?(withString string: String) {
    switch string {
    case "on":
      self = .on
      return
    case "off":
      self = .off
      return
    case "auto":
      self = .auto
      return
    default:
      return nil
    }
  }
}
40
package/ios/Parsers/AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift
Normal file
@@ -0,0 +1,40 @@
//
// AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format.AutoFocusSystem {
  init(withString string: String) throws {
    switch string {
    case "contrast-detection":
      self = .contrastDetection
      return
    case "phase-detection":
      self = .phaseDetection
      return
    case "none":
      self = .none
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  var descriptor: String {
    switch self {
    case .contrastDetection:
      return "contrast-detection"
    case .phaseDetection:
      return "phase-detection"
    case .none:
      return "none"
    @unknown default:
      fatalError("AVCaptureDevice.Format.AutoFocusSystem has unknown state.")
    }
  }
}
25
package/ios/Parsers/AVCaptureDevice.Position+descriptor.swift
Normal file
@@ -0,0 +1,25 @@
//
// AVCaptureDevice.Position+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.Position {
  var descriptor: String {
    switch self {
    case .back:
      return "back"
    case .front:
      return "front"
    case .unspecified:
      return "unspecified"
    @unknown default:
      fatalError("AVCaptureDevice.Position has unknown state.")
    }
  }
}
27
package/ios/Parsers/AVCaptureDevice.TorchMode+descriptor.swift
Normal file
@@ -0,0 +1,27 @@
//
// AVCaptureDevice.TorchMode+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 18.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.TorchMode {
  init?(withString string: String) {
    switch string {
    case "on":
      self = .on
      return
    case "off":
      self = .off
      return
    case "auto":
      self = .auto
      return
    default:
      return nil
    }
  }
}
29
package/ios/Parsers/AVCapturePhotoOutput.QualityPrioritization+descriptor.swift
Normal file
@@ -0,0 +1,29 @@
//
// AVCapturePhotoOutput.QualityPrioritization+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

@available(iOS 13.0, *)
extension AVCapturePhotoOutput.QualityPrioritization {
  init?(withString string: String) {
    switch string {
    case "speed":
      self = .speed
      return
    case "quality":
      self = .quality
      return
    case "balanced":
      self = .balanced
      return
    default:
      return nil
    }
  }
}
61
package/ios/Parsers/AVCaptureVideoStabilizationMode+descriptor.swift
Normal file
@@ -0,0 +1,61 @@
//
// AVCaptureVideoStabilizationMode+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 29.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureVideoStabilizationMode {
  init(withString string: String) throws {
    switch string {
    case "auto":
      self = .auto
      return
    case "cinematic":
      self = .cinematic
      return
    case "cinematic-extended":
      if #available(iOS 13.0, *) {
        self = .cinematicExtended
        return
      } else {
        throw EnumParserError.unsupportedOS(supportedOnOS: "iOS 13.0")
      }
    case "off":
      self = .off
      return
    case "standard":
      self = .standard
      return
    default:
      throw EnumParserError.invalidValue
    }
  }

  var descriptor: String {
    if #available(iOS 13.0, *) {
      switch self {
      case .cinematicExtended:
        return "cinematic-extended"
      default:
        break
      }
    }

    switch self {
    case .auto:
      return "auto"
    case .cinematic:
      return "cinematic"
    case .off:
      return "off"
    case .standard:
      return "standard"
    default:
      fatalError("AVCaptureVideoStabilizationMode has unknown state.")
    }
  }
}
42
package/ios/Parsers/AVFileType+descriptor.swift
Normal file
@@ -0,0 +1,42 @@
//
// AVFileType+descriptor.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.05.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVFileType {
  init(withString string: String) throws {
    switch string {
    case "mov":
      self = .mov
    case "mp4":
      self = .mp4
    case "avci":
      self = .avci
    case "m4v":
      self = .m4v
    default:
      throw EnumParserError.invalidValue
    }
  }

  var descriptor: String? {
    switch self {
    case .mov:
      return "mov"
    case .mp4:
      return "mp4"
    case .avci:
      return "avci"
    case .m4v:
      return "m4v"
    default:
      return nil
    }
  }
}
25
package/ios/Parsers/AVVideoCodecType+descriptor.swift
Normal file
@@ -0,0 +1,25 @@
//
// AVVideoCodecType+descriptor.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVVideoCodecType {
  init?(withString string: String) {
    switch string {
    case "h264":
      self = .h264
      return
    case "h265":
      self = .hevc
      return
    default:
      return nil
    }
  }
}
23
package/ios/Parsers/EnumParserError.swift
Normal file
@@ -0,0 +1,23 @@
//
// EnumParserError.swift
// mrousavy
//
// Created by Marc Rousavy on 18.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import Foundation

/**
 An error raised when the given descriptor (TypeScript string union type) cannot be parsed and converted to a Swift enum.
 */
enum EnumParserError: Error {
  /**
   Raised when the descriptor is not supported on the current OS.
   */
  case unsupportedOS(supportedOnOS: String)
  /**
   Raised when the descriptor does not match any of the possible values.
   */
  case invalidValue
}
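A short sketch of how the throwing descriptor initializers above surface `EnumParserError`, using the `AVCaptureVideoStabilizationMode` parser defined earlier (the input string is just an example value):

```swift
import AVFoundation

do {
  let mode = try AVCaptureVideoStabilizationMode(withString: "cinematic-extended")
  print("Parsed mode: \(mode.descriptor)")
} catch EnumParserError.unsupportedOS(let supportedOnOS) {
  // e.g. "cinematic-extended" on a pre-iOS-13 device
  print("Value requires \(supportedOnOS) or newer")
} catch EnumParserError.invalidValue {
  print("Unknown descriptor string")
}
```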
63
package/ios/Parsers/PixelFormat.swift
Normal file
@@ -0,0 +1,63 @@
//
// PixelFormat.swift
// VisionCamera
//
// Created by Marc Rousavy on 17.08.23.
// Copyright © 2023 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

enum PixelFormat {
  case yuv
  case rgb
  case dng
  case native
  case unknown

  var unionValue: String {
    switch self {
    case .yuv:
      return "yuv"
    case .rgb:
      return "rgb"
    case .dng:
      return "dng"
    case .native:
      return "native"
    case .unknown:
      return "unknown"
    }
  }

  init(unionValue: String) throws {
    switch unionValue {
    case "yuv":
      self = .yuv
    case "rgb":
      self = .rgb
    case "dng":
      self = .dng
    case "native":
      self = .native
    case "unknown":
      self = .unknown
    default:
      throw CameraError.parameter(.invalid(unionName: "pixelFormat", receivedValue: unionValue))
    }
  }

  init(mediaSubType: OSType) {
    switch mediaSubType {
    case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
      self = .yuv
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      self = .yuv
    case kCVPixelFormatType_32BGRA:
      self = .rgb
    default:
      self = .unknown
    }
  }
}
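A minimal sketch of deriving a `PixelFormat` from a `CMSampleBuffer` via the `mediaSubType` initializer above, e.g. inside a capture output callback; the `sampleBuffer` is assumed to come from the caller:

```swift
import AVFoundation

func pixelFormat(of sampleBuffer: CMSampleBuffer) -> PixelFormat {
  guard let description = CMSampleBufferGetFormatDescription(sampleBuffer) else {
    return .unknown
  }
  // The four-character media subtype (an OSType) identifies the pixel format.
  let mediaSubType = CMFormatDescriptionGetMediaSubType(description)
  return PixelFormat(mediaSubType: mediaSubType)
}
```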
31
package/ios/Parsers/UIInterfaceOrientation+descriptor.swift
Normal file
@@ -0,0 +1,31 @@
//
// UIInterfaceOrientation+descriptor.swift
// VisionCamera
//
// Created by Marc Rousavy on 04.01.22.
// Copyright © 2022 mrousavy. All rights reserved.
//

import Foundation
import UIKit

extension UIInterfaceOrientation {
  init(withString string: String) throws {
    switch string {
    case "portrait":
      self = .portrait
      return
    case "portrait-upside-down":
      self = .portraitUpsideDown
      return
    case "landscape-left":
      self = .landscapeLeft
      return
    case "landscape-right":
      self = .landscapeRight
      return
    default:
      throw EnumParserError.invalidValue
    }
  }
}
112
package/ios/PhotoCaptureDelegate.swift
Normal file
@@ -0,0 +1,112 @@
//
// PhotoCaptureDelegate.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

private var delegatesReferences: [NSObject] = []

// MARK: - PhotoCaptureDelegate

class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
  private let promise: Promise
  private let enableShutterSound: Bool

  required init(promise: Promise, enableShutterSound: Bool) {
    self.promise = promise
    self.enableShutterSound = enableShutterSound
    super.init()
    delegatesReferences.append(self)
  }

  func photoOutput(_: AVCapturePhotoOutput, willCapturePhotoFor _: AVCaptureResolvedPhotoSettings) {
    if !enableShutterSound {
      // disable the system shutter sound (see https://stackoverflow.com/a/55235949/5281431)
      AudioServicesDisposeSystemSoundID(1108)
    }
  }

  func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    defer {
      delegatesReferences.removeAll(where: { $0 == self })
    }
    if let error = error as NSError? {
      promise.reject(error: .capture(.unknown(message: error.description)), cause: error)
      return
    }

    let error = ErrorPointer(nilLiteral: ())
    guard let tempFilePath = RCTTempFilePath("jpeg", error)
    else {
      promise.reject(error: .capture(.createTempFileError), cause: error?.pointee)
      return
    }
    let url = URL(string: "file://\(tempFilePath)")!

    guard let data = photo.fileDataRepresentation() else {
      promise.reject(error: .capture(.fileError))
      return
    }

    do {
      try data.write(to: url)
      let exif = photo.metadata["{Exif}"] as? [String: Any]
      let width = exif?["PixelXDimension"]
      let height = exif?["PixelYDimension"]
      let exifOrientation = photo.metadata[kCGImagePropertyOrientation as String] as? Int ?? 0
      let orientation = getOrientation(forExifOrientation: exifOrientation)
      let isMirrored = getIsMirrored(forExifOrientation: exifOrientation)

      promise.resolve([
        "path": tempFilePath,
        "width": width as Any,
        "height": height as Any,
        "orientation": orientation,
        "isMirrored": isMirrored,
        "isRawPhoto": photo.isRawPhoto,
        "metadata": photo.metadata,
        "thumbnail": photo.embeddedThumbnailPhotoFormat as Any,
      ])
    } catch {
      promise.reject(error: .capture(.fileError), cause: error as NSError)
    }
  }

  func photoOutput(_: AVCapturePhotoOutput, didFinishCaptureFor _: AVCaptureResolvedPhotoSettings, error: Error?) {
    defer {
      delegatesReferences.removeAll(where: { $0 == self })
    }
    if let error = error as NSError? {
      promise.reject(error: .capture(.unknown(message: error.description)), cause: error)
      return
    }
  }

  private func getOrientation(forExifOrientation exifOrientation: Int) -> String {
    switch exifOrientation {
    case 1, 2:
      return "portrait"
    case 3, 4:
      return "portrait-upside-down"
    case 5, 6:
      return "landscape-left"
    case 7, 8:
      return "landscape-right"
    default:
      return "portrait"
    }
  }

  private func getIsMirrored(forExifOrientation exifOrientation: Int) -> Bool {
    switch exifOrientation {
    case 2, 4, 5, 7:
      return true
    default:
      return false
    }
  }
}
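A sketch of how a capture call might wire up this delegate; `photoOutput`, `settings` and `promise` are assumed to exist in the calling code. The delegate keeps itself alive via the `delegatesReferences` array until the capture finishes:

```swift
// AVCapturePhotoOutput retains its delegate only weakly, hence the
// delegatesReferences bookkeeping inside PhotoCaptureDelegate.
let delegate = PhotoCaptureDelegate(promise: promise, enableShutterSound: false)
photoOutput.capturePhoto(with: settings, delegate: delegate)
```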
35
package/ios/PreviewView.swift
Normal file
@@ -0,0 +1,35 @@
//
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation
import UIKit

class PreviewView: UIView {
  /// Convenience wrapper to get the layer as its statically known type.
  var videoPreviewLayer: AVCaptureVideoPreviewLayer {
    // swiftlint:disable force_cast
    return layer as! AVCaptureVideoPreviewLayer
    // swiftlint:enable force_cast
  }

  override public class var layerClass: AnyClass {
    return AVCaptureVideoPreviewLayer.self
  }

  init(frame: CGRect, session: AVCaptureSession) {
    super.init(frame: frame)
    videoPreviewLayer.session = session
    videoPreviewLayer.videoGravity = .resizeAspectFill
  }

  @available(*, unavailable)
  required init?(coder _: NSCoder) {
    fatalError("init(coder:) is not implemented!")
  }
}
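Embedding the preview is a one-liner thanks to the `layerClass` override above; `captureSession` is assumed to be a configured `AVCaptureSession` owned by the caller:

```swift
// The view's backing layer is an AVCaptureVideoPreviewLayer, so attaching the
// session in init is all that's needed to start rendering preview frames.
let previewView = PreviewView(frame: view.bounds, session: captureSession)
view.addSubview(previewView)
```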
30
package/ios/README.md
Normal file
@@ -0,0 +1,30 @@
# ios

This folder contains the iOS-platform-specific code for react-native-vision-camera.

## Prerequisites

1. Install the Xcode command-line tools
   ```sh
   xcode-select --install
   ```
2. Install [SwiftFormat](https://github.com/nicklockwood/SwiftFormat) and [SwiftLint](https://github.com/realm/SwiftLint)
   ```sh
   brew install swiftformat swiftlint
   ```

## Getting Started

It is recommended that you work on the code using the Example project (`example/ios/VisionCameraExample.xcworkspace`), since that always includes the React Native header files and you can easily test changes that way.

You can however still edit the library project here by opening `VisionCamera.xcodeproj`; this has the advantage of **automatically formatting your code** (swiftformat) and **showing you linter errors** (swiftlint) when trying to build (<kbd>⌘</kbd>+<kbd>B</kbd>).

## Committing

Before committing, make sure that you're not violating the Swift or C++ code styles. To do that, run the following command:

```bash
yarn check-ios
```

This will also try to automatically fix any errors by re-formatting the Swift code.
package/ios/React Utils/Callback.swift
Normal file
49
package/ios/React Utils/Callback.swift
Normal file
@@ -0,0 +1,49 @@
|
||||
//
|
||||
// Callback.swift
|
||||
// VisionCamera
|
||||
//
|
||||
// Created by Marc Rousavy on 07.06.21.
|
||||
// Copyright © 2021 mrousavy. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
|
||||
/**
|
||||
Represents a callback to JavaScript. Syntax is the same as with Promise.
|
||||
*/
|
||||
class Callback {
|
||||
private var hasCalled = false
|
||||
private let callback: RCTResponseSenderBlock
|
||||
|
||||
init(_ callback: @escaping RCTResponseSenderBlock) {
|
||||
self.callback = callback
|
||||
}
|
||||
|
||||
func reject(error: CameraError, cause: NSError?) {
|
||||
guard !hasCalled else { return }
|
||||
|
||||
callback([NSNull(), makeReactError(error, cause: cause)])
|
||||
hasCalled = true
|
||||
}
|
||||
|
||||
func reject(error: CameraError) {
|
||||
guard !hasCalled else { return }
|
||||
|
||||
reject(error: error, cause: nil)
|
||||
hasCalled = true
|
||||
}
|
||||
|
||||
func resolve(_ value: Any) {
|
||||
guard !hasCalled else { return }
|
||||
|
||||
callback([value, NSNull()])
|
||||
hasCalled = true
|
||||
}
|
||||
|
||||
func resolve() {
|
||||
guard !hasCalled else { return }
|
||||
|
||||
resolve(NSNull())
|
||||
hasCalled = true
|
||||
}
|
||||
}
|
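A short sketch of wrapping a React Native callback so that double-invocation is silently ignored; `jsCallback` is assumed to be an `RCTResponseSenderBlock` passed into a native module method:

```swift
let callback = Callback(jsCallback)
callback.resolve(["status": "ok"])
callback.resolve() // ignored: the callback has already been called once
```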
29
package/ios/React Utils/MakeReactError.swift
Normal file
@@ -0,0 +1,29 @@
//
// MakeReactError.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

func makeReactError(_ cameraError: CameraError, cause: NSError?) -> [String: Any] {
  var causeDictionary: [String: Any]?
  if let cause = cause {
    causeDictionary = RCTMakeError("\(cause.domain): \(cause.code) \(cause.description)", nil, cause.userInfo)
  }
  return RCTMakeError(
    "\(cameraError.code): \(cameraError.message)",
    nil,
    [
      "code": cameraError.code,
      "message": cameraError.message,
      "cause": causeDictionary ?? NSNull(),
    ]
  )
}

func makeReactError(_ cameraError: CameraError) -> [String: Any] {
  return makeReactError(cameraError, cause: nil)
}
69
package/ios/React Utils/Promise.swift
Normal file
@@ -0,0 +1,69 @@
//
// Promise.swift
// mrousavy
//
// Created by Marc Rousavy on 14.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

// MARK: - Promise

/**
 * Represents a JavaScript Promise instance. `reject()` and `resolve()` should only be called once.
 */
class Promise {
  init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
    self.resolver = resolver
    self.rejecter = rejecter
  }

  func reject(error: CameraError, cause: NSError?) {
    rejecter(error.code, error.message, cause)
  }

  func reject(error: CameraError) {
    reject(error: error, cause: nil)
  }

  func resolve(_ value: Any?) {
    resolver(value)
  }

  func resolve() {
    resolve(nil)
  }

  // MARK: Private

  private let resolver: RCTPromiseResolveBlock
  private let rejecter: RCTPromiseRejectBlock
}

/**
 * Wrap a block with an automatic promise resolver and rejecter.
 *
 * The value returned by the `block` must be serializable by the React Native bridge, or `nil`.
 * The error thrown by the `block` should be a `CameraError`.
 */
func withPromise(_ promise: Promise, _ block: () throws -> Any?) {
  do {
    let result = try block()
    promise.resolve(result)
  } catch let error as CameraError {
    promise.reject(error: error)
  } catch let error as NSError {
    promise.reject(error: CameraError.unknown(message: error.description), cause: error)
  }
}

/**
 * Wrap a block with an automatic promise resolver and rejecter.
 *
 * The value returned by the `block` must be serializable by the React Native bridge, or `nil`.
 * The error thrown by the `block` should be a `CameraError`.
 */
func withPromise(resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock, _ block: () throws -> Any?) {
  return withPromise(Promise(resolver: resolve, rejecter: reject), block)
}
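A sketch of a native module method built on `withPromise`; the method name `getAvailableVideoCodecs` and its body are hypothetical:

```swift
@objc
func getAvailableVideoCodecs(_ resolve: @escaping RCTPromiseResolveBlock,
                             reject: @escaping RCTPromiseRejectBlock) {
  withPromise(resolve: resolve, reject: reject) {
    // Throw a CameraError to reject; return a bridge-serializable value to resolve.
    return ["h264", "h265"]
  }
}
```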
32
package/ios/React Utils/ReactLogger.swift
Normal file
@@ -0,0 +1,32 @@
//
// ReactLogger.swift
// mrousavy
//
// Created by Marc Rousavy on 15.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import Foundation

// MARK: - ReactLogger

enum ReactLogger {
  /**
   Log a message to the console in the format of `VisionCamera.[caller-function-name]: [message]`

   @discussion
   If the global ConsoleLogFunction is set, this function also logs to the JavaScript console (console.log, console.trace, console.warn or console.error).
   This function also always logs to [RCTDefaultLogFunction].
   In non-DEBUG builds, this function is a no-op.
   */
  @inlinable
  static func log(level: RCTLogLevel,
                  message: String,
                  _ file: String = #file,
                  _ lineNumber: Int = #line,
                  _ function: String = #function) {
    #if DEBUG
      RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "VisionCamera.\(function): \(message)")
    #endif
  }
}
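Typical call sites pass only a level and a message; file, line and function are filled in by the default arguments shown above:

```swift
ReactLogger.log(level: .info, message: "Session configured.")
```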
217
package/ios/RecordingSession.swift
Normal file
217
package/ios/RecordingSession.swift
Normal file
@@ -0,0 +1,217 @@
|
||||
//
|
||||
// RecordingSession.swift
|
||||
// VisionCamera
|
||||
//
|
||||
// Created by Marc Rousavy on 01.05.21.
|
||||
// Copyright © 2021 mrousavy. All rights reserved.
|
||||
//
|
||||
|
||||
import AVFoundation
|
||||
import Foundation
|
||||
|
||||
// MARK: - BufferType
|
||||
|
||||
enum BufferType {
|
||||
case audio
|
||||
case video
|
||||
}
|
||||
|
||||
// MARK: - RecordingSessionError
|
||||
|
||||
enum RecordingSessionError: Error {
|
||||
case failedToStartSession
|
||||
}
|
||||
|
||||
// MARK: - RecordingSession
|
||||
|
||||
class RecordingSession {
|
||||
private let assetWriter: AVAssetWriter
|
||||
private var audioWriter: AVAssetWriterInput?
|
||||
private var bufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
|
||||
private let completionHandler: (RecordingSession, AVAssetWriter.Status, Error?) -> Void
|
||||
|
||||
private var initialTimestamp: CMTime?
|
||||
private var latestTimestamp: CMTime?
|
||||
private var hasStartedWritingSession = false
|
||||
private var hasWrittenFirstVideoFrame = false
|
||||
private var isFinishing = false
|
||||
|
||||
var url: URL {
|
||||
return assetWriter.outputURL
|
||||
}
|
||||
|
||||
var duration: Double {
|
||||
guard let latestTimestamp = latestTimestamp,
|
||||
let initialTimestamp = initialTimestamp else {
|
||||
return 0.0
|
||||
}
|
||||
return (latestTimestamp - initialTimestamp).seconds
|
||||
}
|
||||
|
||||
init(url: URL,
|
||||
fileType: AVFileType,
|
||||
completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
|
||||
completionHandler = completion
|
||||
|
||||
do {
|
||||
assetWriter = try AVAssetWriter(outputURL: url, fileType: fileType)
|
||||
} catch let error as NSError {
|
||||
throw CameraError.capture(.createRecorderError(message: error.description))
|
||||
}
|
||||
}
|
||||
|
||||
deinit {
|
||||
if assetWriter.status == .writing {
|
||||
ReactLogger.log(level: .info, message: "Cancelling AssetWriter...")
|
||||
assetWriter.cancelWriting()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
Initializes an AssetWriter for video frames (CMSampleBuffers).
|
||||
*/
|
||||
func initializeVideoWriter(withSettings settings: [String: Any], pixelFormat: OSType) {
|
||||
guard !settings.isEmpty else {
|
||||
ReactLogger.log(level: .error, message: "Tried to initialize Video Writer with empty settings!")
|
||||
return
|
||||
}
|
||||
guard bufferAdaptor == nil else {
|
||||
ReactLogger.log(level: .error, message: "Tried to add Video Writer twice!")
|
||||
return
|
||||
}
|
||||
|
||||
let videoWriter = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
|
||||
videoWriter.expectsMediaDataInRealTime = true
|
||||
|
||||
assetWriter.add(videoWriter)
|
||||
bufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriter,
|
||||
withVideoSettings: settings,
|
||||
pixelFormat: pixelFormat)
|
||||
ReactLogger.log(level: .info, message: "Initialized Video AssetWriter.")
|
||||
}
|
||||
|
||||
/**
|
||||
Initializes an AssetWriter for audio frames (CMSampleBuffers).
|
||||
*/
|
||||
func initializeAudioWriter(withSettings settings: [String: Any]) {
|
||||
guard !settings.isEmpty else {
|
||||
ReactLogger.log(level: .error, message: "Tried to initialize Audio Writer with empty settings!")
|
||||
return
|
||||
}
|
||||
guard audioWriter == nil else {
|
||||
ReactLogger.log(level: .error, message: "Tried to add Audio Writer twice!")
|
||||
return
|
||||
}
|
||||
|
||||
audioWriter = AVAssetWriterInput(mediaType: .audio, outputSettings: settings)
|
||||
audioWriter!.expectsMediaDataInRealTime = true
|
||||
assetWriter.add(audioWriter!)
|
||||
ReactLogger.log(level: .info, message: "Initialized Audio AssetWriter.")
|
||||
}
|
||||
|
||||
/**
|
||||
Start the Asset Writer(s). If the AssetWriter failed to start, an error will be thrown.
|
||||
*/
|
||||
func startAssetWriter() throws {
|
||||
ReactLogger.log(level: .info, message: "Starting Asset Writer(s)...")
|
||||
|
||||
let success = assetWriter.startWriting()
|
||||
if !success {
|
||||
ReactLogger.log(level: .error, message: "Failed to start Asset Writer(s)!")
|
||||
throw RecordingSessionError.failedToStartSession
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
Appends a new CMSampleBuffer to the Asset Writer. Use bufferType to specify if this is a video or audio frame.
|
||||
The timestamp parameter represents the presentation timestamp of the buffer, which should be synchronized across video and audio frames.
|
||||
*/
|
||||
  func appendBuffer(_ buffer: CMSampleBuffer, type bufferType: BufferType, timestamp: CMTime) {
    guard assetWriter.status == .writing else {
      ReactLogger.log(level: .error, message: "Frame arrived, but AssetWriter status is \(assetWriter.status.descriptor)!")
      return
    }
    if !CMSampleBufferDataIsReady(buffer) {
      ReactLogger.log(level: .error, message: "Frame arrived, but sample buffer is not ready!")
      return
    }

    latestTimestamp = timestamp

    switch bufferType {
    case .video:
      guard let bufferAdaptor = bufferAdaptor else {
        ReactLogger.log(level: .error, message: "Video Frame arrived but VideoWriter was nil!")
        return
      }
      if !bufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
        ReactLogger.log(level: .warning,
                        message: "The Video AVAssetWriterInput was not ready for more data! Is your frame rate too high?")
        return
      }
      guard let imageBuffer = CMSampleBufferGetImageBuffer(buffer) else {
        ReactLogger.log(level: .error, message: "Failed to get the CVImageBuffer!")
        return
      }
      // Start the writing session before we write the first video frame
      if !hasStartedWritingSession {
        initialTimestamp = timestamp
        assetWriter.startSession(atSourceTime: timestamp)
        ReactLogger.log(level: .info, message: "Started RecordingSession at \(timestamp.seconds) seconds.")
        hasStartedWritingSession = true
      }
      bufferAdaptor.append(imageBuffer, withPresentationTime: timestamp)
      if !hasWrittenFirstVideoFrame {
        hasWrittenFirstVideoFrame = true
      }
    case .audio:
      guard let audioWriter = audioWriter else {
        ReactLogger.log(level: .error, message: "Audio Frame arrived but AudioWriter was nil!")
        return
      }
      if !audioWriter.isReadyForMoreMediaData {
        return
      }
      if !hasWrittenFirstVideoFrame || !hasStartedWritingSession {
        // first video frame has not been written yet, so skip this audio frame.
        return
      }
      audioWriter.append(buffer)
    }

    if assetWriter.status == .failed {
      ReactLogger.log(level: .error,
                      message: "AssetWriter failed to write buffer! Error: \(assetWriter.error?.localizedDescription ?? "none")")
      finish()
    }
  }

  /**
   Marks the AssetWriters as finished and stops writing frames. The callback will be invoked either with an error or the status "success".
   */
  func finish() {
    ReactLogger.log(level: .info, message: "Finishing Recording with AssetWriter status \"\(assetWriter.status.descriptor)\"...")

    if isFinishing {
      ReactLogger.log(level: .warning, message: "Tried calling finish() twice while AssetWriter is still writing!")
      return
    }

    if !hasWrittenFirstVideoFrame {
      let error = NSError(domain: "capture/aborted",
                          code: 1,
                          userInfo: [NSLocalizedDescriptionKey: "Stopped Recording Session too early, no frames have been recorded!"])
      completionHandler(self, .failed, error)
    } else if assetWriter.status == .writing {
      isFinishing = true
      bufferAdaptor?.assetWriterInput.markAsFinished()
      audioWriter?.markAsFinished()
      assetWriter.finishWriting {
        self.isFinishing = false
        self.completionHandler(self, self.assetWriter.status, self.assetWriter.error)
      }
    } else {
      completionHandler(self, assetWriter.status, assetWriter.error)
    }
  }
}
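For context, appendBuffer(_:type:timestamp:) and finish() are meant to be driven from AVFoundation capture-output delegate callbacks; the library's actual call site lives in CameraView+RecordVideo.swift (below in the project file). The following is a minimal, hypothetical sketch only: the RecordingDriver type, its recordingSession property, and the assumption that a RecordingSession was created when recording started are all illustrative, not the library's code.

import AVFoundation

// Hypothetical driver (sketch, not VisionCamera's actual implementation):
final class RecordingDriver: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate,
                             AVCaptureAudioDataOutputSampleBufferDelegate {
  var recordingSession: RecordingSession? // assumed to be created when recording starts

  func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
    guard let session = recordingSession else { return }
    // Forward every buffer with its presentation timestamp; appendBuffer() itself
    // starts the writing session lazily and drops audio until the first video frame.
    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    let type: BufferType = output is AVCaptureVideoDataOutput ? .video : .audio
    session.appendBuffer(sampleBuffer, type: type, timestamp: timestamp)
  }

  func stopRecording() {
    // finish() guards against double invocation; the completion handler then
    // receives the session, the AVAssetWriter.Status, and an optional Error.
    recordingSession?.finish()
  }
}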
593
package/ios/VisionCamera.xcodeproj/project.pbxproj
Normal file
@@ -0,0 +1,593 @@
// !$*UTF8*$!
{
  archiveVersion = 1;
  classes = {
  };
  objectVersion = 46;
  objects = {

/* Begin PBXBuildFile section */
    B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
    B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
    B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
    B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
    B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
    B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
    B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
    B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */; };
    B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86400512784A23400E9D2CA /* CameraView+Orientation.swift */; };
    B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */; };
    B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */; };
    B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
    B87B11BF2A8E63B700732EBF /* PixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87B11BE2A8E63B700732EBF /* PixelFormat.swift */; };
    B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */; };
    B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
    B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
    B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */ = {isa = PBXBuildFile; fileRef = B887515F25E0102000DB86D6 /* CameraViewManager.m */; };
    B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516225E0102000DB86D6 /* Collection+safe.swift */; };
    B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */; };
    B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */; };
    B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; };
    B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; };
    B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; };
    B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */; };
    B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */; };
    B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
    B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516E25E0102000DB86D6 /* MakeReactError.swift */; };
    B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
    B887519625E0102000DB86D6 /* Promise.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517025E0102000DB86D6 /* Promise.swift */; };
    B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */; };
    B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; };
    B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */; };
    B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; };
    B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; };
    B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; };
    B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */; };
    B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */; };
    B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */; };
    B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */; };
    B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */; };
    B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518025E0102000DB86D6 /* CameraView+Focus.swift */; };
    B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518125E0102000DB86D6 /* CameraViewManager.swift */; };
    B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
    B88751A825E0102000DB86D6 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
    B88751A925E0102000DB86D6 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
    B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */; };
    B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */ = {isa = PBXBuildFile; fileRef = B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */; };
    B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
    B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
    B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
    B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
    B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
    B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
/* End PBXBuildFile section */

/* Begin PBXCopyFilesBuildPhase section */
    58B511D91A9E6C8500147676 /* CopyFiles */ = {
      isa = PBXCopyFilesBuildPhase;
      buildActionMask = 2147483647;
      dstPath = "include/$(PRODUCT_NAME)";
      dstSubfolderSpec = 16;
      files = (
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXCopyFilesBuildPhase section */

/* Begin PBXFileReference section */
    134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
    B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
    B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
    B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginRegistry.h; sourceTree = "<group>"; };
    B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPluginRegistry.m; sourceTree = "<group>"; };
    B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+updateCategory.swift"; sourceTree = "<group>"; };
    B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
    B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
    B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
    B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
    B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
    B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
    B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
    B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FrameProcessorPlugin.m; sourceTree = "<group>"; };
    B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+descriptor.swift"; sourceTree = "<group>"; };
    B86400512784A23400E9D2CA /* CameraView+Orientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Orientation.swift"; sourceTree = "<group>"; };
    B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+trySetAllowHaptics.swift"; sourceTree = "<group>"; };
    B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVAudioSession.swift"; sourceTree = "<group>"; };
    B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
    B87B11BE2A8E63B700732EBF /* PixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PixelFormat.swift; sourceTree = "<group>"; };
    B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
    B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
    B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
    B887515E25E0102000DB86D6 /* CameraBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraBridge.h; sourceTree = "<group>"; };
    B887515F25E0102000DB86D6 /* CameraViewManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraViewManager.m; sourceTree = "<group>"; };
    B887516225E0102000DB86D6 /* Collection+safe.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Collection+safe.swift"; sourceTree = "<group>"; };
    B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+neutralZoom.swift"; sourceTree = "<group>"; };
    B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+isBetterThan.swift"; sourceTree = "<group>"; };
    B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+isMultiCam.swift"; sourceTree = "<group>"; };
    B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+physicalDevices.swift"; sourceTree = "<group>"; };
    B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVFrameRateRange+includes.swift"; sourceTree = "<group>"; };
    B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput+mirror.swift"; sourceTree = "<group>"; };
    B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+matchesFilter.swift"; sourceTree = "<group>"; };
    B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+toDictionary.swift"; sourceTree = "<group>"; };
    B887516E25E0102000DB86D6 /* MakeReactError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MakeReactError.swift; sourceTree = "<group>"; };
    B887516F25E0102000DB86D6 /* ReactLogger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactLogger.swift; sourceTree = "<group>"; };
    B887517025E0102000DB86D6 /* Promise.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Promise.swift; sourceTree = "<group>"; };
    B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+TakePhoto.swift"; sourceTree = "<group>"; };
    B887517325E0102000DB86D6 /* EnumParserError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EnumParserError.swift; sourceTree = "<group>"; };
    B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoStabilizationMode+descriptor.swift"; sourceTree = "<group>"; };
    B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVVideoCodecType+descriptor.swift"; sourceTree = "<group>"; };
    B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.TorchMode+descriptor.swift"; sourceTree = "<group>"; };
    B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCapturePhotoOutput.QualityPrioritization+descriptor.swift"; sourceTree = "<group>"; };
    B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.DeviceType+descriptor.swift"; sourceTree = "<group>"; };
    B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVAuthorizationStatus+descriptor.swift"; sourceTree = "<group>"; };
    B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Position+descriptor.swift"; sourceTree = "<group>"; };
    B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.FlashMode+descriptor.swift"; sourceTree = "<group>"; };
    B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift"; sourceTree = "<group>"; };
    B887518025E0102000DB86D6 /* CameraView+Focus.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Focus.swift"; sourceTree = "<group>"; };
    B887518125E0102000DB86D6 /* CameraViewManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraViewManager.swift; sourceTree = "<group>"; };
    B887518225E0102000DB86D6 /* CameraView+Zoom.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+Zoom.swift"; sourceTree = "<group>"; };
    B887518325E0102000DB86D6 /* CameraError.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraError.swift; sourceTree = "<group>"; };
    B887518425E0102000DB86D6 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
    B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPlugin.h; sourceTree = "<group>"; };
    B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession+setVideoStabilizationMode.swift"; sourceTree = "<group>"; };
    B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = JSINSObjectConversion.mm; sourceTree = "<group>"; };
    B8BD3BA1266E22D2006C80A2 /* Callback.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Callback.swift; sourceTree = "<group>"; };
    B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift"; sourceTree = "<group>"; };
    B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAssetWriter.Status+descriptor.swift"; sourceTree = "<group>"; };
    B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingSession.swift; sourceTree = "<group>"; };
    B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
    B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
    B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
    B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
    B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
    B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
    B8F7DDD1266F715D00120533 /* Frame.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Frame.m; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
    58B511D81A9E6C8500147676 /* Frameworks */ = {
      isa = PBXFrameworksBuildPhase;
      buildActionMask = 2147483647;
      files = (
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
    134814211AA4EA7D00B7C361 /* Products */ = {
      isa = PBXGroup;
      children = (
        134814201AA4EA6300B7C361 /* libVisionCamera.a */,
      );
      name = Products;
      sourceTree = "<group>";
    };
    58B511D21A9E6C8500147676 = {
      isa = PBXGroup;
      children = (
        B8DCF2D725EA940700EA5C72 /* Frame Processor */,
        B887515E25E0102000DB86D6 /* CameraBridge.h */,
        B84760DE2608F57D004C3180 /* CameraQueues.swift */,
        B887518325E0102000DB86D6 /* CameraError.swift */,
        B887518425E0102000DB86D6 /* CameraView.swift */,
        B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */,
        B86DC973260E310600FB17B2 /* CameraView+AVAudioSession.swift */,
        B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */,
        B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
        B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
        B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
        B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
        B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
        B887515F25E0102000DB86D6 /* CameraViewManager.m */,
        B887518125E0102000DB86D6 /* CameraViewManager.swift */,
        B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
        B83D5EE629377117000AFD2F /* PreviewView.swift */,
        B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
        B887516125E0102000DB86D6 /* Extensions */,
        B887517225E0102000DB86D6 /* Parsers */,
        B887516D25E0102000DB86D6 /* React Utils */,
        134814211AA4EA7D00B7C361 /* Products */,
      );
      sourceTree = "<group>";
    };
    B887516125E0102000DB86D6 /* Extensions */ = {
      isa = PBXGroup;
      children = (
        B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */,
        B86DC970260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift */,
        B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */,
        B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */,
        B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */,
        B887516425E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift */,
        B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */,
        B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */,
        B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */,
        B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */,
        B887516825E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift */,
        B887516925E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift */,
        B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
        B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */,
        B887516225E0102000DB86D6 /* Collection+safe.swift */,
      );
      path = Extensions;
      sourceTree = "<group>";
    };
    B887516D25E0102000DB86D6 /* React Utils */ = {
      isa = PBXGroup;
      children = (
        B887516E25E0102000DB86D6 /* MakeReactError.swift */,
        B887516F25E0102000DB86D6 /* ReactLogger.swift */,
        B887517025E0102000DB86D6 /* Promise.swift */,
        B8BD3BA1266E22D2006C80A2 /* Callback.swift */,
      );
      path = "React Utils";
      sourceTree = "<group>";
    };
    B887517225E0102000DB86D6 /* Parsers */ = {
      isa = PBXGroup;
      children = (
        B887517325E0102000DB86D6 /* EnumParserError.swift */,
        B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */,
        B887517425E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift */,
        B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */,
        B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */,
        B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */,
        B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift */,
        B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */,
        B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */,
        B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */,
        B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */,
        B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */,
        B864004F27849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift */,
        B87B11BE2A8E63B700732EBF /* PixelFormat.swift */,
      );
      path = Parsers;
      sourceTree = "<group>";
    };
    B8DCF2D725EA940700EA5C72 /* Frame Processor */ = {
      isa = PBXGroup;
      children = (
        B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */,
        B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */,
        B8103E5725FF56F0007A1684 /* Frame.h */,
        B8F7DDD1266F715D00120533 /* Frame.m */,
        B84760A22608EE38004C3180 /* FrameHostObject.h */,
        B84760A52608EE7C004C3180 /* FrameHostObject.mm */,
        B80C0DFE260BDD97001699AB /* FrameProcessorPluginRegistry.h */,
        B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */,
        B88873E5263D46C7008B1D0E /* FrameProcessorPlugin.h */,
        B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */,
        B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */,
        B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */,
        B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */,
        B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */,
        B8994E6B263F03E100069589 /* JSINSObjectConversion.mm */,
        B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */,
      );
      path = "Frame Processor";
      sourceTree = "<group>";
    };
/* End PBXGroup section */

/* Begin PBXNativeTarget section */
    58B511DA1A9E6C8500147676 /* VisionCamera */ = {
      isa = PBXNativeTarget;
      buildConfigurationList = 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "VisionCamera" */;
      buildPhases = (
        B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */,
        B81F6C7625E515810008974A /* Run SwiftLint */,
        58B511D71A9E6C8500147676 /* Sources */,
        58B511D81A9E6C8500147676 /* Frameworks */,
        58B511D91A9E6C8500147676 /* CopyFiles */,
      );
      buildRules = (
      );
      dependencies = (
      );
      name = VisionCamera;
      productName = RCTDataManager;
      productReference = 134814201AA4EA6300B7C361 /* libVisionCamera.a */;
      productType = "com.apple.product-type.library.static";
    };
/* End PBXNativeTarget section */

/* Begin PBXProject section */
    58B511D31A9E6C8500147676 /* Project object */ = {
      isa = PBXProject;
      attributes = {
        LastUpgradeCheck = 1240;
        ORGANIZATIONNAME = mrousavy;
        TargetAttributes = {
          58B511DA1A9E6C8500147676 = {
            CreatedOnToolsVersion = 6.1.1;
          };
        };
      };
      buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "VisionCamera" */;
      compatibilityVersion = "Xcode 3.2";
      developmentRegion = English;
      hasScannedForEncodings = 0;
      knownRegions = (
        English,
        en,
      );
      mainGroup = 58B511D21A9E6C8500147676;
      productRefGroup = 58B511D21A9E6C8500147676;
      projectDirPath = "";
      projectRoot = "";
      targets = (
        58B511DA1A9E6C8500147676 /* VisionCamera */,
      );
    };
/* End PBXProject section */

/* Begin PBXShellScriptBuildPhase section */
    B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = {
      isa = PBXShellScriptBuildPhase;
      buildActionMask = 2147483647;
      files = (
      );
      inputFileListPaths = (
      );
      inputPaths = (
      );
      name = "Run SwiftFormat";
      outputFileListPaths = (
      );
      outputPaths = (
      );
      runOnlyForDeploymentPostprocessing = 0;
      shellPath = /bin/sh;
      shellScript = "if which swiftformat >/dev/null; then\n swiftformat .\nelse\n echo \"warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat\"\nfi\n";
    };
    B81F6C7625E515810008974A /* Run SwiftLint */ = {
      isa = PBXShellScriptBuildPhase;
      buildActionMask = 2147483647;
      files = (
      );
      inputFileListPaths = (
      );
      inputPaths = (
      );
      name = "Run SwiftLint";
      outputFileListPaths = (
      );
      outputPaths = (
      );
      runOnlyForDeploymentPostprocessing = 0;
      shellPath = /bin/sh;
      shellScript = "if which swiftlint >/dev/null; then\n swiftlint --fix && swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
    };
/* End PBXShellScriptBuildPhase section */

/* Begin PBXSourcesBuildPhase section */
    58B511D71A9E6C8500147676 /* Sources */ = {
      isa = PBXSourcesBuildPhase;
      buildActionMask = 2147483647;
      files = (
        B86DC974260E310600FB17B2 /* CameraView+AVAudioSession.swift in Sources */,
        B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
        B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
        B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
        B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
        B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
        B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
        B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
        B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
        B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
        B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
        B887519625E0102000DB86D6 /* Promise.swift in Sources */,
        B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
        B887518725E0102000DB86D6 /* CameraViewManager.m in Sources */,
        B88751A925E0102000DB86D6 /* CameraView.swift in Sources */,
        B887519925E0102000DB86D6 /* AVCaptureVideoStabilizationMode+descriptor.swift in Sources */,
        B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */,
        B887519425E0102000DB86D6 /* MakeReactError.swift in Sources */,
        B887519525E0102000DB86D6 /* ReactLogger.swift in Sources */,
        B86400522784A23400E9D2CA /* CameraView+Orientation.swift in Sources */,
        B88751A725E0102000DB86D6 /* CameraView+Zoom.swift in Sources */,
        B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */,
        B887518B25E0102000DB86D6 /* AVCaptureDevice.Format+isBetterThan.swift in Sources */,
        B8BD3BA2266E22D2006C80A2 /* Callback.swift in Sources */,
        B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */,
        B864005027849A2400E9D2CA /* UIInterfaceOrientation+descriptor.swift in Sources */,
        B887518E25E0102000DB86D6 /* AVFrameRateRange+includes.swift in Sources */,
        B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
        B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */,
        B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */,
        B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
        B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
        B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
        B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
        B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
        B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */,
        B87B11BF2A8E63B700732EBF /* PixelFormat.swift in Sources */,
        B88751A625E0102000DB86D6 /* CameraViewManager.swift in Sources */,
        B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+descriptor.swift in Sources */,
        B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
        B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */,
        B887519025E0102000DB86D6 /* AVCaptureDevice.Format+matchesFilter.swift in Sources */,
        B887518F25E0102000DB86D6 /* AVCapturePhotoOutput+mirror.swift in Sources */,
        B88751A425E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
        B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */,
        B88751A025E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift in Sources */,
        B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */,
        B887519C25E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
        B8994E6C263F03E100069589 /* JSINSObjectConversion.mm in Sources */,
        B88751A525E0102000DB86D6 /* CameraView+Focus.swift in Sources */,
        B86DC971260E2D5200FB17B2 /* AVAudioSession+trySetAllowHaptics.swift in Sources */,
        B88B47472667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift in Sources */,
        B887519E25E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
      );
      runOnlyForDeploymentPostprocessing = 0;
    };
/* End PBXSourcesBuildPhase section */

/* Begin XCBuildConfiguration section */
    58B511ED1A9E6C8500147676 /* Debug */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ALWAYS_SEARCH_USER_PATHS = NO;
        CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
        CLANG_CXX_LIBRARY = "libc++";
        CLANG_ENABLE_MODULES = YES;
        CLANG_ENABLE_OBJC_ARC = YES;
        CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
        CLANG_WARN_BOOL_CONVERSION = YES;
        CLANG_WARN_COMMA = YES;
        CLANG_WARN_CONSTANT_CONVERSION = YES;
        CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
        CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
        CLANG_WARN_EMPTY_BODY = YES;
        CLANG_WARN_ENUM_CONVERSION = YES;
        CLANG_WARN_INFINITE_RECURSION = YES;
        CLANG_WARN_INT_CONVERSION = YES;
        CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
        CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
        CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
        CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
        CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
        CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
        CLANG_WARN_STRICT_PROTOTYPES = YES;
        CLANG_WARN_SUSPICIOUS_MOVE = YES;
        CLANG_WARN_UNREACHABLE_CODE = YES;
        CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
        COPY_PHASE_STRIP = NO;
        ENABLE_STRICT_OBJC_MSGSEND = YES;
        ENABLE_TESTABILITY = YES;
        GCC_C_LANGUAGE_STANDARD = gnu99;
        GCC_DYNAMIC_NO_PIC = NO;
        GCC_NO_COMMON_BLOCKS = YES;
        GCC_OPTIMIZATION_LEVEL = 0;
        GCC_PREPROCESSOR_DEFINITIONS = (
          "DEBUG=1",
          "$(inherited)",
        );
        GCC_SYMBOLS_PRIVATE_EXTERN = NO;
        GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
        GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
        GCC_WARN_UNDECLARED_SELECTOR = YES;
        GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
        GCC_WARN_UNUSED_FUNCTION = YES;
        GCC_WARN_UNUSED_VARIABLE = YES;
        IPHONEOS_DEPLOYMENT_TARGET = 11.0;
        MTL_ENABLE_DEBUG_INFO = YES;
        ONLY_ACTIVE_ARCH = YES;
        SDKROOT = iphoneos;
      };
      name = Debug;
    };
    58B511EE1A9E6C8500147676 /* Release */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        ALWAYS_SEARCH_USER_PATHS = NO;
        CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
        CLANG_CXX_LIBRARY = "libc++";
        CLANG_ENABLE_MODULES = YES;
        CLANG_ENABLE_OBJC_ARC = YES;
        CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
        CLANG_WARN_BOOL_CONVERSION = YES;
        CLANG_WARN_COMMA = YES;
        CLANG_WARN_CONSTANT_CONVERSION = YES;
        CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
        CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
        CLANG_WARN_EMPTY_BODY = YES;
        CLANG_WARN_ENUM_CONVERSION = YES;
        CLANG_WARN_INFINITE_RECURSION = YES;
        CLANG_WARN_INT_CONVERSION = YES;
        CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
        CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
        CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
        CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
        CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
        CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
        CLANG_WARN_STRICT_PROTOTYPES = YES;
        CLANG_WARN_SUSPICIOUS_MOVE = YES;
        CLANG_WARN_UNREACHABLE_CODE = YES;
        CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
        COPY_PHASE_STRIP = YES;
        ENABLE_NS_ASSERTIONS = NO;
        ENABLE_STRICT_OBJC_MSGSEND = YES;
        GCC_C_LANGUAGE_STANDARD = gnu99;
        GCC_NO_COMMON_BLOCKS = YES;
        GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
        GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
        GCC_WARN_UNDECLARED_SELECTOR = YES;
        GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
        GCC_WARN_UNUSED_FUNCTION = YES;
        GCC_WARN_UNUSED_VARIABLE = YES;
        IPHONEOS_DEPLOYMENT_TARGET = 11.0;
        MTL_ENABLE_DEBUG_INFO = NO;
        SDKROOT = iphoneos;
        SWIFT_COMPILATION_MODE = wholemodule;
        VALIDATE_PRODUCT = YES;
      };
      name = Release;
    };
    58B511F01A9E6C8500147676 /* Debug */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        DEFINES_MODULE = YES;
        HEADER_SEARCH_PATHS = (
          "$(inherited)",
          /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
          "$(SRCROOT)/../../../React/**",
          "$(SRCROOT)/../../react-native/React/**",
        );
        LIBRARY_SEARCH_PATHS = "$(inherited)";
        OTHER_LDFLAGS = "-ObjC";
        PRODUCT_NAME = VisionCamera;
        SKIP_INSTALL = YES;
        SWIFT_OBJC_BRIDGING_HEADER = CameraBridge.h;
        SWIFT_OPTIMIZATION_LEVEL = "-Onone";
        SWIFT_VERSION = 5.2;
        USER_HEADER_SEARCH_PATHS = "\"$(SRCROOT)/../cpp\"/**";
      };
      name = Debug;
    };
    58B511F11A9E6C8500147676 /* Release */ = {
      isa = XCBuildConfiguration;
      buildSettings = {
        DEFINES_MODULE = YES;
        HEADER_SEARCH_PATHS = (
          "$(inherited)",
          /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
          "$(SRCROOT)/../../../React/**",
          "$(SRCROOT)/../../react-native/React/**",
        );
        LIBRARY_SEARCH_PATHS = "$(inherited)";
        OTHER_LDFLAGS = "-ObjC";
        PRODUCT_NAME = VisionCamera;
        SKIP_INSTALL = YES;
        SWIFT_OBJC_BRIDGING_HEADER = CameraBridge.h;
        SWIFT_VERSION = 5.2;
        USER_HEADER_SEARCH_PATHS = "\"$(SRCROOT)/../cpp\"/**";
      };
      name = Release;
    };
/* End XCBuildConfiguration section */

/* Begin XCConfigurationList section */
    58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "VisionCamera" */ = {
      isa = XCConfigurationList;
      buildConfigurations = (
        58B511ED1A9E6C8500147676 /* Debug */,
        58B511EE1A9E6C8500147676 /* Release */,
      );
      defaultConfigurationIsVisible = 0;
      defaultConfigurationName = Release;
    };
    58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "VisionCamera" */ = {
      isa = XCConfigurationList;
      buildConfigurations = (
        58B511F01A9E6C8500147676 /* Debug */,
        58B511F11A9E6C8500147676 /* Release */,
      );
      defaultConfigurationIsVisible = 0;
      defaultConfigurationName = Release;
    };
/* End XCConfigurationList section */
  };
  rootObject = 58B511D31A9E6C8500147676 /* Project object */;
}