Run SwiftFormat in GH Action (#28)

Marc Rousavy, 2021-03-09 10:53:29 +01:00 (committed by GitHub)
parent 168ba054d1 · commit 64bb6f6a2a
22 changed files with 383 additions and 234 deletions

ios/.swift-version — new file (+1)
View File

@@ -0,0 +1 @@
5.2

View File

@@ -1,3 +1,14 @@
--allman false
--indent 2
--exclude Pods,Generated
--disable andOperator
--disable redundantReturn
--disable wrapMultilineStatementBraces
--enable organizeDeclarations
--lifecycle didSetProps,requiresMainQueueSetup,view,methodQueue,getCameraView
--enable markTypes
--enable isEmpty
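
The two newly enabled organization rules account for most of the churn in the Swift diffs below: `markTypes` inserts a `// MARK: - TypeName` banner above each type, and `organizeDeclarations` groups members under headers such as `// MARK: Lifecycle`, `// MARK: Internal`, and `// MARK: Private` (the `--lifecycle` option extends which member names count as lifecycle). A minimal before/after sketch, not taken from this repo:

```swift
// Before formatting:
struct CameraBefore {
  func configure() {}
  private let retainCount = 42
  init() {}
}

// After `swiftformat --enable organizeDeclarations --enable markTypes`:

// MARK: - CameraAfter

struct CameraAfter {
  // MARK: Lifecycle

  init() {}

  // MARK: Internal

  func configure() {}

  // MARK: Private

  private let retainCount = 42
}
```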

View File

@@ -18,7 +18,6 @@ opt_in_rules:
- last_where
- reduce_boolean
- reduce_into
- sorted_first_last
- yoda_condition
- vertical_whitespace_opening_braces
- vertical_whitespace_closing_braces
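
For reference, two of these opt-in rules sketched on toy code (illustrative only, not from this repo):

```swift
func lintExamples(count: Int, names: [String]) {
  // yoda_condition: flags a constant on the left-hand side of a comparison.
  if 0 == count {} // triggers yoda_condition
  if count == 0 {} // preferred

  // reduce_boolean: flags reducing into a Bool; prefer contains/allSatisfy.
  _ = names.reduce(false) { $0 || $1.isEmpty } // triggers reduce_boolean
  _ = names.contains { $0.isEmpty }            // preferred
}
```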

View File

@@ -8,10 +8,14 @@
import Foundation
// MARK: - PermissionError
enum PermissionError: String {
case microphone = "microphone-permission-denied"
case camera = "camera-permission-denied"
// MARK: Internal
var code: String {
return rawValue
}
@@ -26,6 +30,8 @@ enum PermissionError: String {
}
}
// MARK: - ParameterError
enum ParameterError {
case invalid(unionName: String, receivedValue: String)
case unsupportedOS(unionName: String, receivedValue: String, supportedOnOs: String)
@@ -33,6 +39,8 @@ enum ParameterError {
case unsupportedInput(inputDescriptor: String)
case invalidCombination(provided: String, missing: String)
// MARK: Internal
var code: String {
switch self {
case .invalid:
@@ -64,6 +72,8 @@ enum ParameterError {
}
}
// MARK: - DeviceError
enum DeviceError: String {
case configureError = "configuration-error"
case noDevice = "no-device"
@@ -74,6 +84,8 @@ enum DeviceError: String {
case focusNotSupported = "focus-not-supported"
case notAvailableOnSimulator = "camera-not-available-on-simulator"
// MARK: Internal
var code: String {
return rawValue
}
@@ -100,12 +112,16 @@ enum DeviceError: String {
}
}
// MARK: - FormatError
enum FormatError {
case invalidFps(fps: Int)
case invalidHdr
case invalidFormat
case invalidPreset(preset: String)
// MARK: Internal
var code: String {
switch self {
case .invalidFormat:
@@ -133,10 +149,14 @@ enum FormatError {
}
}
// MARK: - SessionError
enum SessionError {
case cameraNotReady
case audioSessionSetupFailed(reason: String)
// MARK: Internal
var code: String {
switch self {
case .cameraNotReady:
@@ -156,6 +176,8 @@ enum SessionError {
}
}
// MARK: - CaptureError
enum CaptureError {
case invalidPhotoFormat
case recordingInProgress
@@ -165,6 +187,8 @@ enum CaptureError {
case invalidPhotoCodec
case unknown(message: String? = nil)
// MARK: Internal
var code: String {
switch self {
case .invalidPhotoFormat:
@@ -204,9 +228,13 @@ enum CaptureError {
}
}
// MARK: - SystemError
enum SystemError: String {
case noManager = "no-camera-manager"
// MARK: Internal
var code: String {
return rawValue
}
@@ -219,6 +247,8 @@ enum SystemError: String {
}
}
// MARK: - CameraError
enum CameraError: Error {
case permission(_ id: PermissionError)
case parameter(_ id: ParameterError)
@@ -229,6 +259,8 @@ enum CameraError: Error {
case system(_ id: SystemError)
case unknown(message: String? = nil)
// MARK: Internal
var code: String {
switch self {
case let .permission(id: id):
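
The pattern is uniform across these error enums: every case maps to a string `code` that is surfaced to JavaScript, via `rawValue` for the string-backed enums or via a `switch` for the associated-value ones. A usage sketch derived only from the declarations visible above:

```swift
// String-backed enums return their rawValue directly:
let denied = PermissionError.camera
print(denied.code) // "camera-permission-denied"

// CameraError wraps the specific enums, so call sites stay uniform:
let error = CameraError.device(.torchUnavailable)
// error.code switches over the wrapped case (the switch body is truncated in this diff).
```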

View File

@@ -8,9 +8,10 @@
import AVFoundation
// MARK: - TakePhotoOptions
struct TakePhotoOptions {
var videoCodec: AVVideoCodecType?
var qualityPrioritization: String?
// MARK: Lifecycle
init(fromDictionary dictionary: NSDictionary) {
if let videoCodec = dictionary.value(forKey: "videoCodec") as? String {
@@ -18,6 +19,11 @@ struct TakePhotoOptions {
}
qualityPrioritization = dictionary.value(forKey: "qualityPrioritization") as? String
}
// MARK: Internal
var videoCodec: AVVideoCodecType?
var qualityPrioritization: String?
}
extension CameraView {

View File

@@ -29,72 +29,10 @@ import UIKit
private let propsThatRequireReconfiguration = ["cameraId", "enableDepthData", "enableHighResolutionCapture", "enablePortraitEffectsMatteDelivery", "preset", "onCodeScanned", "scannableCodes"]
private let propsThatRequireDeviceReconfiguration = ["fps", "hdr", "lowLightBoost", "colorSpace"]
// MARK: - CameraView
final class CameraView: UIView {
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
@objc var enableZoomGesture: Bool = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isReady: Bool = false
var isRunning: Bool {
return captureSession.isRunning
}
// pragma MARK: Private Properties
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
private let captureSession = AVCaptureSession()
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
// pragma MARK: Setup
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
// MARK: Lifecycle
override init(frame: CGRect) {
super.init(frame: frame)
@@ -114,26 +52,6 @@ final class CameraView: UIView {
object: captureSession)
}
override func removeFromSuperview() {
captureSession.stopRunning()
super.removeFromSuperview()
}
@objc
func sessionRuntimeError(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
invokeOnError(.unknown(message: error.localizedDescription), cause: error as NSError)
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented.")
@@ -188,6 +106,159 @@ final class CameraView: UIView {
}
}
// MARK: Internal
// pragma MARK: Setup
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
var isReady = false
// pragma MARK: Private Properties
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
@objc var enableZoomGesture = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override func removeFromSuperview() {
captureSession.stopRunning()
super.removeFromSuperview()
}
@objc
func sessionRuntimeError(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
invokeOnError(.unknown(message: error.localizedDescription), cause: error as NSError)
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
return invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it; when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
return invokeOnError(.device(.torchUnavailable))
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
// pragma MARK: Event Invokers
internal final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
ReactLogger.log(level: .error, message: error.localizedDescription, alsoLogToJS: true)
guard let onError = self.onError else { return }
var causeDictionary: [String: Any]?
if let cause = cause {
causeDictionary = [
"code": cause.code,
"domain": cause.domain,
"message": cause.localizedDescription,
"details": cause.userInfo,
]
}
onError([
"code": error.code,
"message": error.message,
"cause": causeDictionary ?? NSNull(),
])
}
internal final func invokeOnInitialized() {
ReactLogger.log(level: .info, message: "Camera onInitialized()", alsoLogToJS: true)
guard let onInitialized = self.onInitialized else { return }
onInitialized([String: Any]())
}
// MARK: Private
private let captureSession = AVCaptureSession()
// pragma MARK: Session, Device and Format Configuration
/**
Configures the Capture Session.
@@ -196,7 +267,7 @@ final class CameraView: UIView {
isReady = false
#if targetEnvironment(simulator)
return invokeOnError(.device(.notAvailableOnSimulator))
#endif
guard cameraId != nil else {
@@ -295,7 +366,7 @@ final class CameraView: UIView {
}
captureSession.addOutput(photoOutput!)
if videoDeviceInput!.device.position == .front {
photoOutput!.mirror()
}
// Video Output
@@ -308,7 +379,7 @@ final class CameraView: UIView {
}
captureSession.addOutput(movieOutput!)
if videoDeviceInput!.device.position == .front {
movieOutput!.mirror()
}
// Barcode Scanning
@@ -422,66 +493,4 @@ final class CameraView: UIView {
return invokeOnError(.device(.configureError), cause: error)
}
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
return invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it; when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
return invokeOnError(.device(.torchUnavailable))
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
// pragma MARK: Event Invokers
internal final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
ReactLogger.log(level: .error, message: error.localizedDescription, alsoLogToJS: true)
guard let onError = self.onError else { return }
var causeDictionary: [String: Any]?
if let cause = cause {
causeDictionary = [
"code": cause.code,
"domain": cause.domain,
"message": cause.localizedDescription,
"details": cause.userInfo
]
}
onError([
"code": error.code,
"message": error.message,
"cause": causeDictionary ?? NSNull(),
])
}
internal final func invokeOnInitialized() {
ReactLogger.log(level: .info, message: "Camera onInitialized()", alsoLogToJS: true)
guard let onInitialized = self.onInitialized else { return }
onInitialized([String: Any]())
}
}

View File

@@ -11,17 +11,15 @@ import Foundation
@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
// pragma MARK: Setup
override final func view() -> UIView! {
return CameraView()
}
// MARK: Lifecycle
override static func requiresMainQueueSetup() -> Bool {
return true
}
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
// pragma MARK: Setup
override final func view() -> UIView! {
return CameraView()
}
private func getCameraView(withTag tag: NSNumber) -> CameraView {
@@ -29,6 +27,12 @@ final class CameraViewManager: RCTViewManager {
return bridge.uiManager.view(forReactTag: tag) as! CameraView
}
// MARK: Internal
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
// pragma MARK: Exported Functions
@objc
final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
@@ -65,7 +69,7 @@ final class CameraViewManager: RCTViewManager {
guard let movieOutput = component.movieOutput else {
throw CameraError.session(SessionError.cameraNotReady)
}
return movieOutput.availableVideoCodecTypes.map { $0.descriptor }
return movieOutput.availableVideoCodecTypes.map(\.descriptor)
}
}
@@ -76,26 +80,10 @@ final class CameraViewManager: RCTViewManager {
guard let photoOutput = component.photoOutput else {
throw CameraError.session(SessionError.cameraNotReady)
}
return photoOutput.availablePhotoCodecTypes.map { $0.descriptor }
return photoOutput.availablePhotoCodecTypes.map(\.descriptor)
}
}
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
if #available(iOS 11.1, *) {
deviceTypes.append(.builtInTrueDepthCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
// pragma MARK: View Manager funcs
@objc
final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
@@ -104,7 +92,7 @@ final class CameraViewManager: RCTViewManager {
return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map { $0.deviceType.descriptor },
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
"position": $0.position.descriptor,
"name": $0.localizedName,
"hasFlash": $0.hasFlash,
@@ -155,4 +143,22 @@ final class CameraViewManager: RCTViewManager {
resolve(result.descriptor)
}
}
// MARK: Private
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
if #available(iOS 11.1, *) {
deviceTypes.append(.builtInTrueDepthCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
}
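
Several `.map { $0.foo }` closures above were rewritten to key-path form. Passing a key path where a function is expected (SE-0249) requires Swift 5.2, which is presumably why this commit also pins `ios/.swift-version` to `5.2`. A standalone sketch:

```swift
struct Codec { let descriptor: String }

let codecs = [Codec(descriptor: "h264"), Codec(descriptor: "hevc")]
let byClosure = codecs.map { $0.descriptor } // pre-5.2 closure style
let byKeyPath = codecs.map(\.descriptor)     // Swift 5.2+ key-path-as-function
assert(byClosure == byKeyPath)
```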

View File

@@ -23,7 +23,7 @@ extension AVCaptureDevice.Format {
func toDictionary() -> [String: Any] {
var dict: [String: Any] = [
"videoStabilizationModes": videoStabilizationModes.map { $0.descriptor },
"videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
"autoFocusSystem": autoFocusSystem.descriptor,
"photoHeight": highResolutionStillImageDimensions.height,
"photoWidth": highResolutionStillImageDimensions.width,
@@ -31,7 +31,7 @@ extension AVCaptureDevice.Format {
"minISO": minISO,
"fieldOfView": videoFieldOfView,
"maxZoom": videoMaxZoomFactor,
"colorSpaces": supportedColorSpaces.map { $0.descriptor },
"colorSpaces": supportedColorSpaces.map(\.descriptor),
"supportsVideoHDR": isVideoHDRSupported,
"supportsPhotoHDR": false,
"frameRateRanges": videoSupportedFrameRateRanges.map {

View File

@@ -9,11 +9,11 @@
import AVFoundation
extension AVCaptureMovieFileOutput {
func mirror() {
connections.forEach { (connection) in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}
}
func mirror() {
connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}
}
}
}

View File

@@ -9,11 +9,11 @@
import AVFoundation
extension AVCapturePhotoOutput {
func mirror() {
connections.forEach { (connection) in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}
}
func mirror() {
connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}
}
}
}
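
A note on these twin extensions: AVFoundation mirrors the front-camera preview automatically, but photo and movie outputs are not mirrored unless each connection is flipped explicitly, which is what `mirror()` does. Since both bodies are identical, they could also be collapsed onto the common superclass — a sketch of that alternative (not what the commit does):

```swift
import AVFoundation

extension AVCaptureOutput {
  /// Mirror every connection that supports it (photo and movie outputs alike).
  func mirrorConnections() {
    connections.forEach { connection in
      if connection.isVideoMirroringSupported {
        connection.isVideoMirrored = true
      }
    }
  }
}
```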

View File

@@ -10,8 +10,10 @@ import AVFoundation
private var delegatesReferences: [NSObject] = []
// MARK: - PhotoCaptureDelegate
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
private let promise: Promise
// MARK: Lifecycle
required init(promise: Promise) {
self.promise = promise
@@ -19,6 +21,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
delegatesReferences.append(self)
}
// MARK: Internal
func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
defer {
delegatesReferences.removeAll(where: { $0 == self })
@@ -66,4 +70,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
return promise.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError)
}
}
// MARK: Private
private let promise: Promise
}

View File

@@ -17,4 +17,4 @@ This folder contains the iOS-platform-specific code for react-native-vision-came
It is recommended that you work on the code using the Example project (`example/ios/VisionCameraExample.xcworkspace`), since that always includes the React Native header files, plus you can easily test changes that way.
You can however still edit the library project here by opening `VisionCamera.xcodeproj`.
You can however still edit the library project here by opening `VisionCamera.xcodeproj`; this has the advantage of **automatically formatting your code** (swiftformat) and **showing you linter errors** (swiftlint) when trying to build (<kbd>⌘</kbd>+<kbd>B</kbd>).

View File

@@ -8,18 +8,21 @@
import Foundation
// MARK: - Promise
/**
* Represents a JavaScript Promise instance. `reject()` and `resolve()` should only be called once.
*/
class Promise {
private let resolver: RCTPromiseResolveBlock
private let rejecter: RCTPromiseRejectBlock
// MARK: Lifecycle
init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
self.resolver = resolver
self.rejecter = rejecter
}
// MARK: Internal
func reject(error: CameraError, cause: NSError?) {
rejecter(error.code, error.message, cause)
}
@@ -35,6 +38,11 @@ class Promise {
func resolve() {
resolve(nil)
}
// MARK: Private
private let resolver: RCTPromiseResolveBlock
private let rejecter: RCTPromiseRejectBlock
}
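
A call-site sketch for `Promise` (the function and its `isReady` parameter are hypothetical; only the Promise API shown above is assumed):

```swift
func getStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock, isReady: Bool) {
  let promise = Promise(resolver: resolve, rejecter: reject)
  guard isReady else {
    return promise.reject(error: .session(.cameraNotReady), cause: nil)
  }
  promise.resolve() // settle exactly once, per the doc comment above
}
```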
/**

View File

@@ -10,6 +10,8 @@ import Foundation
let context = "Camera"
// MARK: - ReactLogger
enum ReactLogger {
static func log(level: RCTLogLevel, message: String, alsoLogToJS: Bool = false, file: String = #file, lineNumber: Int = #line) {
RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "\(context): \(message)")
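
A call-site sketch using only the signature visible above; the `#file`/`#line` default arguments capture the caller's location automatically:

```swift
ReactLogger.log(level: .info, message: "capture session configured")
ReactLogger.log(level: .error, message: "torch unavailable", alsoLogToJS: true)
```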

View File

@@ -14,9 +14,10 @@ import AVFoundation
// once the delegate has been triggered once.
private var delegateReferences: [NSObject] = []
// MARK: - RecordingDelegateWithCallback
class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDelegate {
private let callback: RCTResponseSenderBlock // (video?, error?) => void
private let resetTorchMode: () -> Void
// MARK: Lifecycle
init(callback: @escaping RCTResponseSenderBlock, resetTorchMode: @escaping () -> Void) {
self.callback = callback
@@ -25,6 +26,8 @@ class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDeleg
delegateReferences.append(self)
}
// MARK: Internal
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from _: [AVCaptureConnection], error: Error?) {
defer {
self.resetTorchMode()
@@ -37,4 +40,9 @@ class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDeleg
let seconds = CMTimeGetSeconds(output.recordedDuration)
return callback([["path": outputFileURL.absoluteString, "duration": seconds, "size": output.recordedFileSize], NSNull()])
}
// MARK: Private
private let callback: RCTResponseSenderBlock // (video?, error?) => void
private let resetTorchMode: () -> Void
}

View File

@@ -196,6 +196,7 @@
buildConfigurationList = 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "VisionCamera" */;
buildPhases = (
B81F6C7625E515810008974A /* Run SwiftLint */,
B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */,
58B511D71A9E6C8500147676 /* Sources */,
58B511D81A9E6C8500147676 /* Frameworks */,
58B511D91A9E6C8500147676 /* CopyFiles */,
@@ -242,6 +243,24 @@
/* End PBXProject section */
/* Begin PBXShellScriptBuildPhase section */
B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
);
name = "Run SwiftFormat";
outputFileListPaths = (
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftformat >/dev/null; then\n swiftformat .\nelse\n echo \"warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat\"\nfi\n";
};
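
For readability, the escaped `shellScript` string above unescapes to:

```sh
if which swiftformat >/dev/null; then
  swiftformat .
else
  echo "warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat"
fi
```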
B81F6C7625E515810008974A /* Run SwiftLint */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
@@ -258,7 +277,7 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftlint >/dev/null; then\n swiftlint autocorrect && swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
shellScript = "if which swiftlint >/dev/null; then\n swiftlint --fix && swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
};
/* End PBXShellScriptBuildPhase section */