Run SwiftFormat in GH Action (#28)

Marc Rousavy 2021-03-09 10:53:29 +01:00 committed by GitHub
parent 168ba054d1
commit 64bb6f6a2a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 383 additions and 234 deletions

View File

@ -13,33 +13,30 @@ on:
- 'ios/**'
jobs:
lint:
name: SwiftLint
SwiftLint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run SwiftLint GitHub Action (--strict)
uses: norio-nomura/action-swiftlint@3.2.1
uses: norio-nomura/action-swiftlint@master
with:
args: --strict
env:
# DIFF_BASE: ${{ github.base_ref }}
WORKING_DIRECTORY: ios
# TODO: Figure out how to run SwiftFormat in a GitHub action
# SwiftFormat:
# name: SwiftFormat
# description: 'https://github.com/nicklockwood/SwiftFormat'
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: ./ios
# steps:
# - uses: actions/checkout@v2
SwiftFormat:
runs-on: macOS-latest
defaults:
run:
working-directory: ./ios
steps:
- uses: actions/checkout@v2
# - name: Format Swift code
# run: swiftformat --verbose .
# working-directory: ${{env.working-directory}}
- name: Install SwiftFormat
run: brew install swiftformat
# - name: Verify formatted code is unchanged
# run: git diff --exit-code HEAD
# working-directory: ${{env.working-directory}}
- name: Format Swift code
run: swiftformat --verbose .
- name: Verify formatted code is unchanged
run: git diff --exit-code HEAD
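For reference, the same checks can be reproduced locally with roughly the following commands (a sketch assuming Homebrew and a checkout of the repository; the CI job runs the last two steps from the ios working directory):

  # Install the formatter (the workflow does this via Homebrew on macOS)
  brew install swiftformat

  # Format in place, matching the "Format Swift code" step
  cd ios && swiftformat --verbose .

  # Fail if formatting changed anything, matching "Verify formatted code is unchanged"
  git diff --exit-code HEAD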

ios/.swift-version Normal file
View File

@ -0,0 +1 @@
5.2

View File

@ -1,3 +1,14 @@
--allman false
--indent 2
--exclude Pods,Generated
--disable andOperator
--disable redundantReturn
--disable wrapMultilineStatementBraces
--enable organizeDeclarations
--lifecycle didSetProps,requiresMainQueueSetup,view,methodQueue,getCameraView
--enable markTypes
--enable isEmpty

View File

@ -18,7 +18,6 @@ opt_in_rules:
- last_where
- reduce_boolean
- reduce_into
- sorted_first_last
- yoda_condition
- vertical_whitespace_opening_braces
- vertical_whitespace_closing_braces

View File

@ -8,10 +8,14 @@
import Foundation
// MARK: - PermissionError
enum PermissionError: String {
case microphone = "microphone-permission-denied"
case camera = "camera-permission-denied"
// MARK: Internal
var code: String {
return rawValue
}
@ -26,6 +30,8 @@ enum PermissionError: String {
}
}
// MARK: - ParameterError
enum ParameterError {
case invalid(unionName: String, receivedValue: String)
case unsupportedOS(unionName: String, receivedValue: String, supportedOnOs: String)
@ -33,6 +39,8 @@ enum ParameterError {
case unsupportedInput(inputDescriptor: String)
case invalidCombination(provided: String, missing: String)
// MARK: Internal
var code: String {
switch self {
case .invalid:
@ -64,6 +72,8 @@ enum ParameterError {
}
}
// MARK: - DeviceError
enum DeviceError: String {
case configureError = "configuration-error"
case noDevice = "no-device"
@ -74,6 +84,8 @@ enum DeviceError: String {
case focusNotSupported = "focus-not-supported"
case notAvailableOnSimulator = "camera-not-available-on-simulator"
// MARK: Internal
var code: String {
return rawValue
}
@ -100,12 +112,16 @@ enum DeviceError: String {
}
}
// MARK: - FormatError
enum FormatError {
case invalidFps(fps: Int)
case invalidHdr
case invalidFormat
case invalidPreset(preset: String)
// MARK: Internal
var code: String {
switch self {
case .invalidFormat:
@ -133,10 +149,14 @@ enum FormatError {
}
}
// MARK: - SessionError
enum SessionError {
case cameraNotReady
case audioSessionSetupFailed(reason: String)
// MARK: Internal
var code: String {
switch self {
case .cameraNotReady:
@ -156,6 +176,8 @@ enum SessionError {
}
}
// MARK: - CaptureError
enum CaptureError {
case invalidPhotoFormat
case recordingInProgress
@ -165,6 +187,8 @@ enum CaptureError {
case invalidPhotoCodec
case unknown(message: String? = nil)
// MARK: Internal
var code: String {
switch self {
case .invalidPhotoFormat:
@ -204,9 +228,13 @@ enum CaptureError {
}
}
// MARK: - SystemError
enum SystemError: String {
case noManager = "no-camera-manager"
// MARK: Internal
var code: String {
return rawValue
}
@ -219,6 +247,8 @@ enum SystemError: String {
}
}
// MARK: - CameraError
enum CameraError: Error {
case permission(_ id: PermissionError)
case parameter(_ id: ParameterError)
@ -229,6 +259,8 @@ enum CameraError: Error {
case system(_ id: SystemError)
case unknown(message: String? = nil)
// MARK: Internal
var code: String {
switch self {
case let .permission(id: id):

View File

@ -8,9 +8,10 @@
import AVFoundation
// MARK: - TakePhotoOptions
struct TakePhotoOptions {
var videoCodec: AVVideoCodecType?
var qualityPrioritization: String?
// MARK: Lifecycle
init(fromDictionary dictionary: NSDictionary) {
if let videoCodec = dictionary.value(forKey: "videoCodec") as? String {
@ -18,6 +19,11 @@ struct TakePhotoOptions {
}
qualityPrioritization = dictionary.value(forKey: "qualityPrioritization") as? String
}
// MARK: Internal
var videoCodec: AVVideoCodecType?
var qualityPrioritization: String?
}
extension CameraView {

View File

@ -29,72 +29,10 @@ import UIKit
private let propsThatRequireReconfiguration = ["cameraId", "enableDepthData", "enableHighResolutionCapture", "enablePortraitEffectsMatteDelivery", "preset", "onCodeScanned", "scannableCodes"]
private let propsThatRequireDeviceReconfiguration = ["fps", "hdr", "lowLightBoost", "colorSpace"]
// MARK: - CameraView
final class CameraView: UIView {
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
@objc var enableZoomGesture: Bool = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isReady: Bool = false
var isRunning: Bool {
return captureSession.isRunning
}
// pragma MARK: Private Properties
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
private let captureSession = AVCaptureSession()
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
// pragma MARK: Setup
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
// MARK: Lifecycle
override init(frame: CGRect) {
super.init(frame: frame)
@ -114,26 +52,6 @@ final class CameraView: UIView {
object: captureSession)
}
override func removeFromSuperview() {
captureSession.stopRunning()
super.removeFromSuperview()
}
@objc
func sessionRuntimeError(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
invokeOnError(.unknown(message: error.localizedDescription), cause: error as NSError)
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented.")
@ -188,6 +106,159 @@ final class CameraView: UIView {
}
}
// MARK: Internal
// pragma MARK: Setup
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
// pragma MARK: Exported Properties
// props that require reconfiguring
@objc var cameraId: NSString?
@objc var enableDepthData = false
@objc var enableHighResolutionCapture: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var preset: String?
@objc var scannableCodes: [String]?
// props that require format reconfiguring
@objc var format: NSDictionary?
@objc var fps: NSNumber?
@objc var hdr: NSNumber? // nullable bool
@objc var lowLightBoost: NSNumber? // nullable bool
@objc var colorSpace: NSString?
// other props
@objc var isActive = false
@objc var torch = "off"
@objc var zoom: NSNumber = 0.0 // in percent
// events
@objc var onInitialized: RCTDirectEventBlock?
@objc var onError: RCTDirectEventBlock?
@objc var onCodeScanned: RCTBubblingEventBlock?
var isReady = false
// pragma MARK: Private Properties
/// The serial execution queue for the camera preview layer (input stream) as well as output processing (take photo, record video, process metadata/barcodes)
internal let queue = DispatchQueue(label: "com.mrousavy.camera-queue", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
internal var videoDeviceInput: AVCaptureDeviceInput?
internal var audioDeviceInput: AVCaptureDeviceInput?
internal var photoOutput: AVCapturePhotoOutput?
internal var movieOutput: AVCaptureMovieFileOutput?
internal var metadataOutput: AVCaptureMetadataOutput?
// CameraView+TakePhoto
internal var photoCaptureDelegates: [PhotoCaptureDelegate] = []
// CameraView+RecordVideo
internal var recordingDelegateResolver: RCTPromiseResolveBlock?
internal var recordingDelegateRejecter: RCTPromiseRejectBlock?
// CameraView+Zoom
internal var pinchGestureRecognizer: UIPinchGestureRecognizer?
internal var pinchScaleOffset: CGFloat = 1.0
@objc var enableZoomGesture = false {
didSet {
if enableZoomGesture {
addPinchGestureRecognizer()
} else {
removePinchGestureRecognizer()
}
}
}
var isRunning: Bool {
return captureSession.isRunning
}
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
}
override func removeFromSuperview() {
captureSession.stopRunning()
super.removeFromSuperview()
}
@objc
func sessionRuntimeError(notification: Notification) {
guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else {
return
}
if isActive {
// restart capture session after an error occurred
queue.async {
self.captureSession.startRunning()
}
}
invokeOnError(.unknown(message: error.localizedDescription), cause: error as NSError)
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
return invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it, when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
return invokeOnError(.device(.torchUnavailable))
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
// pragma MARK: Event Invokers
internal final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
ReactLogger.log(level: .error, message: error.localizedDescription, alsoLogToJS: true)
guard let onError = self.onError else { return }
var causeDictionary: [String: Any]?
if let cause = cause {
causeDictionary = [
"code": cause.code,
"domain": cause.domain,
"message": cause.localizedDescription,
"details": cause.userInfo,
]
}
onError([
"code": error.code,
"message": error.message,
"cause": causeDictionary ?? NSNull(),
])
}
internal final func invokeOnInitialized() {
ReactLogger.log(level: .info, message: "Camera onInitialized()", alsoLogToJS: true)
guard let onInitialized = self.onInitialized else { return }
onInitialized([String: Any]())
}
// MARK: Private
private let captureSession = AVCaptureSession()
// pragma MARK: Session, Device and Format Configuration
/**
Configures the Capture Session.
@ -422,66 +493,4 @@ final class CameraView: UIView {
return invokeOnError(.device(.configureError), cause: error)
}
}
internal final func setTorchMode(_ torchMode: String) {
guard let device = videoDeviceInput?.device else {
return invokeOnError(.session(.cameraNotReady))
}
guard var torchMode = AVCaptureDevice.TorchMode(withString: torchMode) else {
return invokeOnError(.parameter(.invalid(unionName: "TorchMode", receivedValue: torch)))
}
if !captureSession.isRunning {
torchMode = .off
}
if device.torchMode == torchMode {
// no need to run the whole lock/unlock bs
return
}
if !device.hasTorch || !device.isTorchAvailable {
if torchMode == .off {
// ignore it, when it's off and not supported, it's off.
return
} else {
// torch mode is .auto or .on, but no torch is available.
return invokeOnError(.device(.torchUnavailable))
}
}
do {
try device.lockForConfiguration()
device.torchMode = torchMode
if torchMode == .on {
try device.setTorchModeOn(level: 1.0)
}
device.unlockForConfiguration()
} catch let error as NSError {
return invokeOnError(.device(.configureError), cause: error)
}
}
// pragma MARK: Event Invokers
internal final func invokeOnError(_ error: CameraError, cause: NSError? = nil) {
ReactLogger.log(level: .error, message: error.localizedDescription, alsoLogToJS: true)
guard let onError = self.onError else { return }
var causeDictionary: [String: Any]?
if let cause = cause {
causeDictionary = [
"code": cause.code,
"domain": cause.domain,
"message": cause.localizedDescription,
"details": cause.userInfo
]
}
onError([
"code": error.code,
"message": error.message,
"cause": causeDictionary ?? NSNull(),
])
}
internal final func invokeOnInitialized() {
ReactLogger.log(level: .info, message: "Camera onInitialized()", alsoLogToJS: true)
guard let onInitialized = self.onInitialized else { return }
onInitialized([String: Any]())
}
}

View File

@ -11,17 +11,15 @@ import Foundation
@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
// pragma MARK: Setup
override final func view() -> UIView! {
return CameraView()
}
// MARK: Lifecycle
override static func requiresMainQueueSetup() -> Bool {
return true
}
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
// pragma MARK: Setup
override final func view() -> UIView! {
return CameraView()
}
private func getCameraView(withTag tag: NSNumber) -> CameraView {
@ -29,6 +27,12 @@ final class CameraViewManager: RCTViewManager {
return bridge.uiManager.view(forReactTag: tag) as! CameraView
}
// MARK: Internal
override var methodQueue: DispatchQueue! {
return DispatchQueue.main
}
// pragma MARK: Exported Functions
@objc
final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
@ -65,7 +69,7 @@ final class CameraViewManager: RCTViewManager {
guard let movieOutput = component.movieOutput else {
throw CameraError.session(SessionError.cameraNotReady)
}
return movieOutput.availableVideoCodecTypes.map { $0.descriptor }
return movieOutput.availableVideoCodecTypes.map(\.descriptor)
}
}
@ -76,26 +80,10 @@ final class CameraViewManager: RCTViewManager {
guard let photoOutput = component.photoOutput else {
throw CameraError.session(SessionError.cameraNotReady)
}
return photoOutput.availablePhotoCodecTypes.map { $0.descriptor }
return photoOutput.availablePhotoCodecTypes.map(\.descriptor)
}
}
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
if #available(iOS 11.1, *) {
deviceTypes.append(.builtInTrueDepthCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
// pragma MARK: View Manager funcs
@objc
final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
@ -104,7 +92,7 @@ final class CameraViewManager: RCTViewManager {
return discoverySession.devices.map {
return [
"id": $0.uniqueID,
"devices": $0.physicalDevices.map { $0.deviceType.descriptor },
"devices": $0.physicalDevices.map(\.deviceType.descriptor),
"position": $0.position.descriptor,
"name": $0.localizedName,
"hasFlash": $0.hasFlash,
@ -155,4 +143,22 @@ final class CameraViewManager: RCTViewManager {
resolve(result.descriptor)
}
}
// MARK: Private
private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
var deviceTypes: [AVCaptureDevice.DeviceType] = []
if #available(iOS 13.0, *) {
deviceTypes.append(.builtInTripleCamera)
deviceTypes.append(.builtInDualWideCamera)
deviceTypes.append(.builtInUltraWideCamera)
}
if #available(iOS 11.1, *) {
deviceTypes.append(.builtInTrueDepthCamera)
}
deviceTypes.append(.builtInDualCamera)
deviceTypes.append(.builtInWideAngleCamera)
deviceTypes.append(.builtInTelephotoCamera)
return deviceTypes
}
}

View File

@ -23,7 +23,7 @@ extension AVCaptureDevice.Format {
func toDictionary() -> [String: Any] {
var dict: [String: Any] = [
"videoStabilizationModes": videoStabilizationModes.map { $0.descriptor },
"videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
"autoFocusSystem": autoFocusSystem.descriptor,
"photoHeight": highResolutionStillImageDimensions.height,
"photoWidth": highResolutionStillImageDimensions.width,
@ -31,7 +31,7 @@ extension AVCaptureDevice.Format {
"minISO": minISO,
"fieldOfView": videoFieldOfView,
"maxZoom": videoMaxZoomFactor,
"colorSpaces": supportedColorSpaces.map { $0.descriptor },
"colorSpaces": supportedColorSpaces.map(\.descriptor),
"supportsVideoHDR": isVideoHDRSupported,
"supportsPhotoHDR": false,
"frameRateRanges": videoSupportedFrameRateRanges.map {

View File

@ -10,7 +10,7 @@ import AVFoundation
extension AVCaptureMovieFileOutput {
func mirror() {
connections.forEach { (connection) in
connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}

View File

@ -10,7 +10,7 @@ import AVFoundation
extension AVCapturePhotoOutput {
func mirror() {
connections.forEach { (connection) in
connections.forEach { connection in
if connection.isVideoMirroringSupported {
connection.isVideoMirrored = true
}

View File

@ -10,8 +10,10 @@ import AVFoundation
private var delegatesReferences: [NSObject] = []
// MARK: - PhotoCaptureDelegate
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
private let promise: Promise
// MARK: Lifecycle
required init(promise: Promise) {
self.promise = promise
@ -19,6 +21,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
delegatesReferences.append(self)
}
// MARK: Internal
func photoOutput(_: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
defer {
delegatesReferences.removeAll(where: { $0 == self })
@ -66,4 +70,8 @@ class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
return promise.reject(error: .capture(.unknown(message: error.localizedDescription)), cause: error as NSError)
}
}
// MARK: Private
private let promise: Promise
}

View File

@ -17,4 +17,4 @@ This folder contains the iOS-platform-specific code for react-native-vision-came
It is recommended that you work on the code using the Example project (`example/ios/VisionCameraExample.xcworkspace`), since that always includes the React Native header files, plus you can easily test changes that way.
You can however still edit the library project here by opening `VisionCamera.xcodeproj`.
You can however still edit the library project here by opening `VisionCamera.xcodeproj`; this has the advantage of **automatically formatting your code** (swiftformat) and **showing you linter errors** (swiftlint) when trying to build (<kbd>⌘</kbd>+<kbd>B</kbd>).
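Outside of Xcode, the same tools can be run from the repository root via the package scripts added in this commit (a sketch assuming yarn and the tools are installed; each script prints a warning if its tool is missing):

  yarn swiftformat   # scripts/swiftformat.sh: cd ios && swiftformat .
  yarn swiftlint     # scripts/swiftlint.sh:   cd ios && swiftlint --fix && swiftlint
  yarn ktlint        # scripts/ktlint.sh:      cd android && ktlint -F ./**/*.kt*

  # The "pre-commit" hook added in package.json runs the swiftformat script
  # automatically before every commit.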

View File

@ -8,18 +8,21 @@
import Foundation
// MARK: - Promise
/**
* Represents a JavaScript Promise instance. `reject()` and `resolve()` should only be called once.
*/
class Promise {
private let resolver: RCTPromiseResolveBlock
private let rejecter: RCTPromiseRejectBlock
// MARK: Lifecycle
init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
self.resolver = resolver
self.rejecter = rejecter
}
// MARK: Internal
func reject(error: CameraError, cause: NSError?) {
rejecter(error.code, error.message, cause)
}
@ -35,6 +38,11 @@ class Promise {
func resolve() {
resolve(nil)
}
// MARK: Private
private let resolver: RCTPromiseResolveBlock
private let rejecter: RCTPromiseRejectBlock
}
/**

View File

@ -10,6 +10,8 @@ import Foundation
let context = "Camera"
// MARK: - ReactLogger
enum ReactLogger {
static func log(level: RCTLogLevel, message: String, alsoLogToJS: Bool = false, file: String = #file, lineNumber: Int = #line) {
RCTDefaultLogFunction(level, RCTLogSource.native, file, lineNumber as NSNumber, "\(context): \(message)")

View File

@ -14,9 +14,10 @@ import AVFoundation
// once the delegate has been triggered once.
private var delegateReferences: [NSObject] = []
// MARK: - RecordingDelegateWithCallback
class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDelegate {
private let callback: RCTResponseSenderBlock // (video?, error?) => void
private let resetTorchMode: () -> Void
// MARK: Lifecycle
init(callback: @escaping RCTResponseSenderBlock, resetTorchMode: @escaping () -> Void) {
self.callback = callback
@ -25,6 +26,8 @@ class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDeleg
delegateReferences.append(self)
}
// MARK: Internal
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from _: [AVCaptureConnection], error: Error?) {
defer {
self.resetTorchMode()
@ -37,4 +40,9 @@ class RecordingDelegateWithCallback: NSObject, AVCaptureFileOutputRecordingDeleg
let seconds = CMTimeGetSeconds(output.recordedDuration)
return callback([["path": outputFileURL.absoluteString, "duration": seconds, "size": output.recordedFileSize], NSNull()])
}
// MARK: Private
private let callback: RCTResponseSenderBlock // (video?, error?) => void
private let resetTorchMode: () -> Void
}

View File

@ -196,6 +196,7 @@
buildConfigurationList = 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "VisionCamera" */;
buildPhases = (
B81F6C7625E515810008974A /* Run SwiftLint */,
B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */,
58B511D71A9E6C8500147676 /* Sources */,
58B511D81A9E6C8500147676 /* Frameworks */,
58B511D91A9E6C8500147676 /* CopyFiles */,
@ -242,6 +243,24 @@
/* End PBXProject section */
/* Begin PBXShellScriptBuildPhase section */
B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
);
name = "Run SwiftFormat";
outputFileListPaths = (
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftformat >/dev/null; then\n swiftformat .\nelse\n echo \"warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat\"\nfi\n";
};
B81F6C7625E515810008974A /* Run SwiftLint */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
@ -258,7 +277,7 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "if which swiftlint >/dev/null; then\n swiftlint autocorrect && swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
shellScript = "if which swiftlint >/dev/null; then\n swiftlint --fix && swiftlint\nelse\n echo \"warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint\"\nfi\n";
};
/* End PBXShellScriptBuildPhase section */
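For readability, the embedded "Run SwiftFormat" build phase script above, unescaped (a direct transcription of the shellScript value; it runs from the project directory on every build):

  if which swiftformat >/dev/null; then
    swiftformat .
  else
    echo "warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat"
  fi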

View File

@ -35,10 +35,14 @@
"example": "yarn --cwd example",
"pods": "cd example && pod-install --quiet",
"bootstrap": "yarn example && yarn && yarn pods",
"ktlint-fix": "ktlint -F android/**/*.kt*",
"swiftlint-fix": "cd ios && swiftlint autocorrect",
"ktlint": "scripts/ktlint.sh",
"swiftlint": "scripts/swiftlint.sh",
"swiftformat": "scripts/swiftformat.sh",
"docs": "cd docs && yarn build"
},
"pre-commit": [
"swiftformat"
],
"keywords": [
"react-native",
"ios",
@ -81,6 +85,7 @@
"eslint-plugin-react-native": "^3.10.0",
"jest": "^26.0.1",
"pod-install": "^0.1.0",
"pre-commit": "^1.2.2",
"prettier": "^2.2.1",
"react": "17.0.1",
"react-native": "0.63.4",

scripts/ktlint.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
if which ktlint >/dev/null; then
cd android && ktlint -F ./**/*.kt*
else
echo "warning: KTLint not installed, download from https://github.com/pinterest/ktlint"
fi

scripts/swiftformat.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
if which swiftformat >/dev/null; then
cd ios && swiftformat .
else
echo "warning: SwiftFormat not installed, download from https://github.com/nicklockwood/SwiftFormat"
fi

scripts/swiftlint.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
if which swiftlint >/dev/null; then
cd ios && swiftlint --fix && swiftlint
else
echo "warning: SwiftLint not installed, download from https://github.com/realm/SwiftLint"
fi

View File

@ -2,11 +2,6 @@
# yarn lockfile v1
"@actions/core@^1.2.0":
version "1.2.6"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==
"@babel/code-frame@7.12.11":
version "7.12.11"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f"
@ -1043,13 +1038,6 @@
minimatch "^3.0.4"
strip-json-comments "^3.1.1"
"@firmnav/eslint-github-actions-formatter@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@firmnav/eslint-github-actions-formatter/-/eslint-github-actions-formatter-1.0.1.tgz#dbedcc4d8a799faf9b709417981039819980aab0"
integrity sha512-KbhZwNPFuwoRWspUfoJISOeGZHGSm7tvdOC+uOUlbcY9LNmusRHHmBcq3KaorvW9WmmiOS/2EOo0nafFZ0gpEQ==
dependencies:
"@actions/core" "^1.2.0"
"@hapi/address@2.x.x":
version "2.1.4"
resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5"
@ -3035,7 +3023,7 @@ concat-map@0.0.1:
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
concat-stream@^1.6.0:
concat-stream@^1.4.7, concat-stream@^1.6.0:
version "1.6.2"
resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
@ -3289,7 +3277,7 @@ cosmiconfig@^5.0.5, cosmiconfig@^5.1.0:
js-yaml "^3.13.1"
parse-json "^4.0.0"
cross-spawn@^5.1.0:
cross-spawn@^5.0.1, cross-spawn@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449"
integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=
@ -7112,6 +7100,11 @@ os-name@4.0.0:
macos-release "^2.2.0"
windows-release "^4.0.0"
os-shim@^0.1.2:
version "0.1.3"
resolved "https://registry.yarnpkg.com/os-shim/-/os-shim-0.1.3.tgz#6b62c3791cf7909ea35ed46e17658bb417cb3917"
integrity sha1-a2LDeRz3kJ6jXtRuF2WLtBfLORc=
os-tmpdir@^1.0.0, os-tmpdir@~1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@ -7436,6 +7429,15 @@ posix-character-classes@^0.1.0:
resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
pre-commit@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/pre-commit/-/pre-commit-1.2.2.tgz#dbcee0ee9de7235e57f79c56d7ce94641a69eec6"
integrity sha1-287g7p3nI15X95xW186UZBpp7sY=
dependencies:
cross-spawn "^5.0.1"
spawn-sync "^1.0.15"
which "1.2.x"
prelude-ls@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
@ -8480,6 +8482,14 @@ source-map@^0.7.3:
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383"
integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==
spawn-sync@^1.0.15:
version "1.0.15"
resolved "https://registry.yarnpkg.com/spawn-sync/-/spawn-sync-1.0.15.tgz#b00799557eb7fb0c8376c29d44e8a1ea67e57476"
integrity sha1-sAeZVX63+wyDdsKdROih6mfldHY=
dependencies:
concat-stream "^1.4.7"
os-shim "^0.1.2"
spdx-correct@^3.0.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
@ -9381,6 +9391,13 @@ which-module@^2.0.0:
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which@1.2.x:
version "1.2.14"
resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5"
integrity sha1-mofEN48D6CfOyvGs31bHNsAcFOU=
dependencies:
isexe "^2.0.0"
which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"