diff --git a/package/ios/.swiftlint.yml b/package/ios/.swiftlint.yml
index 5ff2f00..6999c33 100644
--- a/package/ios/.swiftlint.yml
+++ b/package/ios/.swiftlint.yml
@@ -5,6 +5,7 @@ disabled_rules:
   - type_body_length
   - cyclomatic_complexity
   - function_body_length
+  - for_where
 opt_in_rules:
   - contains_over_filter_count
   - contains_over_filter_is_empty
diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift
index 3f837b9..d7c67ba 100644
--- a/package/ios/Core/CameraSession+Configuration.swift
+++ b/package/ios/Core/CameraSession+Configuration.swift
@@ -19,7 +19,7 @@ extension CameraSession {
     ReactLogger.log(level: .info, message: "Configuring Input Device...")
 
     // Remove all inputs
-    captureSession.inputs.forEach { input in
+    for input in captureSession.inputs {
       captureSession.removeInput(input)
     }
     videoDeviceInput = nil
@@ -57,7 +57,7 @@ extension CameraSession {
     ReactLogger.log(level: .info, message: "Configuring Outputs...")
 
     // Remove all outputs
-    captureSession.outputs.forEach { output in
+    for output in captureSession.outputs {
       captureSession.removeOutput(output)
     }
     photoOutput = nil
@@ -130,7 +130,7 @@ extension CameraSession {
     // 2. Configure
     let options = codeScanner.options
     codeScannerOutput.setMetadataObjectsDelegate(self, queue: CameraQueues.codeScannerQueue)
-    try codeScanner.options.codeTypes.forEach { type in
+    for type in codeScanner.options.codeTypes {
       // CodeScanner::availableMetadataObjectTypes depends on the connection to the
       // AVCaptureSession, so this list is only available after we add the output to the session.
       if !codeScannerOutput.availableMetadataObjectTypes.contains(type) {
@@ -151,8 +151,8 @@ extension CameraSession {
 
   // pragma MARK: Video Stabilization
   func configureVideoStabilization(configuration: CameraConfiguration) {
-    captureSession.outputs.forEach { output in
-      output.connections.forEach { connection in
+    for output in captureSession.outputs {
+      for connection in output.connections {
         if connection.isVideoStabilizationSupported {
           connection.preferredVideoStabilizationMode = configuration.videoStabilizationMode.toAVCaptureVideoStabilizationMode()
         }
@@ -166,7 +166,7 @@ extension CameraSession {
     // Set up orientation and mirroring for all outputs.
     // Note: Photos are only rotated through EXIF tags, and Preview through view transforms
     let isMirrored = videoDeviceInput?.device.position == .front
-    captureSession.outputs.forEach { output in
+    for output in captureSession.outputs {
       if isMirrored {
         output.mirror()
       }
@@ -320,7 +320,7 @@ extension CameraSession {
     }
 
     // Remove all current inputs
-    audioCaptureSession.inputs.forEach { input in
+    for input in audioCaptureSession.inputs {
       audioCaptureSession.removeInput(input)
     }
     audioDeviceInput = nil
@@ -340,7 +340,7 @@ extension CameraSession {
     }
 
     // Remove all current outputs
-    audioCaptureSession.outputs.forEach { output in
+    for output in audioCaptureSession.outputs {
       audioCaptureSession.removeOutput(output)
     }
     audioOutput = nil
diff --git a/package/ios/Extensions/AVCaptureOutput+mirror.swift b/package/ios/Extensions/AVCaptureOutput+mirror.swift
index fb196e3..63910cd 100644
--- a/package/ios/Extensions/AVCaptureOutput+mirror.swift
+++ b/package/ios/Extensions/AVCaptureOutput+mirror.swift
@@ -13,7 +13,7 @@ extension AVCaptureOutput {
    Mirrors the video output if possible.
    */
   func mirror() {
-    connections.forEach { connection in
+    for connection in connections {
       if connection.isVideoMirroringSupported {
         connection.automaticallyAdjustsVideoMirroring = false
         connection.isVideoMirrored = true
@@ -31,7 +31,7 @@ extension AVCaptureOutput {
    */
   func setOrientation(_ orientation: Orientation) {
     // Set orientation for each connection
-    connections.forEach { connection in
+    for connection in connections {
       #if swift(>=5.9)
       if #available(iOS 17.0, *) {
         // Camera Sensors are always in landscape rotation (90deg).
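Note on the first hunk: SwiftLint's `for_where` rule flags a `for` loop whose entire body is a single `if` statement and suggests a `for ... in ... where` clause instead. Several of the loops produced by this `forEach`-to-`for` conversion have exactly that shape (e.g. the video-stabilization loop), so the rule is disabled rather than forcing a second rewrite. A minimal sketch of what the rule reports and what it would suggest; the `disableStabilization` helpers are illustrative, not part of this PR:

```swift
import AVFoundation

// Triggers `for_where`: the loop body is a single `if`.
func disableStabilization(in output: AVCaptureOutput) {
  for connection in output.connections {
    if connection.isVideoStabilizationSupported {
      connection.preferredVideoStabilizationMode = .off
    }
  }
}

// The rewrite the rule would suggest, using a `where` clause.
func disableStabilizationWithWhere(in output: AVCaptureOutput) {
  for connection in output.connections where connection.isVideoStabilizationSupported {
    connection.preferredVideoStabilizationMode = .off
  }
}
```

Keeping the plain `if` form preserves loop bodies that match the original `forEach` closures line for line.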
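One semantic detail in the `@@ -130,7 +130,7` hunk: `Sequence.forEach` is declared `rethrows`, so calling it with a throwing closure requires `try` at the call site, while a plain `for` loop propagates a `throw` from its body directly. That is why the leading `try` disappears in the rewrite even though the body still throws. A self-contained sketch, with hypothetical names (`ScanError`, `validateWithForEach`) standing in for the code-scanner types:

```swift
enum ScanError: Error {
  case unsupportedType(String)
}

// `forEach` is `rethrows`, so the throwing closure forces `try` on the call.
func validateWithForEach(_ types: [String], supported: Set<String>) throws {
  try types.forEach { type in
    if !supported.contains(type) {
      throw ScanError.unsupportedType(type)
    }
  }
}

// A `for` loop propagates the `throw` itself; no `try` on the loop.
func validateWithFor(_ types: [String], supported: Set<String>) throws {
  for type in types {
    if !supported.contains(type) {
      throw ScanError.unsupportedType(type)
    }
  }
}
```

A `for` loop also allows early exit with `break`, which `forEach` cannot express.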