chore: Restructure codebase a bit (#1742)

* Move core Camera stuff into `/core/`

* `NativePreviewView` -> `PreviewView`
Marc Rousavy 2023-09-01 13:08:33 +02:00 committed by GitHub
parent 03b57a7d27
commit 01a79d63ef
24 changed files with 88 additions and 129 deletions
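
On the Android side this commit boils down to a package move (plus one class rename). Purely as an illustrative summary, not part of the commit itself, the Kotlin sketch below maps the old fully-qualified names to their new homes; every path is read from the removed/added lines in the hunks that follow.

```kotlin
// Illustrative only, not part of the diff: old fully-qualified names (left)
// and their new locations (right), as shown by the removed/added lines below.
//
//   com.mrousavy.camera.utils.VideoPipeline         -> com.mrousavy.camera.core.VideoPipeline
//   com.mrousavy.camera.utils.RecordingSession      -> com.mrousavy.camera.core.RecordingSession
//   com.mrousavy.camera.utils.outputs.CameraOutputs -> com.mrousavy.camera.core.outputs.CameraOutputs
//   com.mrousavy.camera.CameraSession               -> com.mrousavy.camera.core.CameraSession
//   com.mrousavy.camera.NativePreviewView           -> com.mrousavy.camera.core.PreviewView (renamed)
//
// Code outside the new `core` package updates its imports accordingly, e.g.:
import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.core.PreviewView
import com.mrousavy.camera.core.outputs.CameraOutputs
```

The JNI descriptor changes with the package, so the Kotlin class and the C++ `kJavaDescriptor` (`Lcom/mrousavy/camera/core/VideoPipeline;`) must stay in sync, as the first hunk shows.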

View File

@@ -20,7 +20,7 @@ using namespace facebook;
class VideoPipeline : public jni::HybridClass<VideoPipeline> {
public:
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/utils/VideoPipeline;";
static auto constexpr kJavaDescriptor = "Lcom/mrousavy/camera/core/VideoPipeline;";
static jni::local_ref<jhybriddata> initHybrid(jni::alias_ref<jhybridobject> jThis, int width,
int height);
static void registerNatives();

View File

@@ -11,7 +11,6 @@ class CameraQueues {
companion object {
val cameraQueue = CameraQueue("mrousavy/VisionCamera.main")
val videoQueue = CameraQueue("mrousavy/VisionCamera.video")
val previewQueue = CameraQueue("mrousavy/VisionCamera.preview")
}
class CameraQueue(name: String) {

View File

@@ -8,7 +8,7 @@ import com.facebook.react.bridge.*
import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType
import com.mrousavy.camera.utils.RecordingSession
import com.mrousavy.camera.core.RecordingSession
import com.mrousavy.camera.utils.makeErrorMap
import java.util.*

View File

@@ -11,6 +11,7 @@ import android.util.Log
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.WritableMap
import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.parsers.Flash
import com.mrousavy.camera.parsers.QualityPrioritization
import com.mrousavy.camera.utils.*

View File

@@ -14,6 +14,8 @@ import android.view.View
import android.widget.FrameLayout
import androidx.core.content.ContextCompat
import com.facebook.react.bridge.ReadableMap
import com.mrousavy.camera.core.CameraSession
import com.mrousavy.camera.core.PreviewView
import com.mrousavy.camera.extensions.containsAny
import com.mrousavy.camera.extensions.installHierarchyFitter
import com.mrousavy.camera.frameprocessor.FrameProcessor
@@ -21,19 +23,16 @@ import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.PixelFormat
import com.mrousavy.camera.parsers.Torch
import com.mrousavy.camera.parsers.VideoStabilizationMode
import com.mrousavy.camera.utils.outputs.CameraOutputs
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import java.io.Closeable
//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
//
// CameraView
// TODO: High-speed video recordings (export in CameraViewModule::getAvailableVideoDevices(), and set in CameraView::configurePreview()) (120FPS+)
// TODO: configureSession() enableDepthData
// TODO: configureSession() enablePortraitEffectsMatteDelivery
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
@@ -41,7 +40,6 @@ import java.io.Closeable
// CameraView+TakePhoto
// TODO: takePhoto() depth data
// TODO: takePhoto() raw capture
// TODO: takePhoto() photoCodec ("hevc" | "jpeg" | "raw")
// TODO: takePhoto() return with jsi::Value Image reference for faster capture
@SuppressLint("ClickableViewAccessibility", "ViewConstructor", "MissingPermission")
@@ -129,7 +127,7 @@ class CameraView(context: Context) : FrameLayout(context) {
this.previewSurface = null
val cameraId = cameraId ?: return
val previewView = NativePreviewView(context, cameraManager, cameraId) { surface ->
val previewView = PreviewView(context, cameraManager, cameraId) { surface ->
previewSurface = surface
configureSession()
}
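
Only the type name changes at this call site. As a hedged fragment for illustration (not new code in the commit; `previewSurface` and `configureSession()` are members of the surrounding CameraView, as shown in the hunk above), the caller-side pattern now reads:

```kotlin
// Fragment from CameraView's configuration path, shown for illustration.
// Before: NativePreviewView(context, cameraManager, cameraId) { ... }
// After: the same constructor shape under the new name.
val previewView = PreviewView(context, cameraManager, cameraId) { surface ->
    previewSurface = surface // cache the Surface handed back by the SurfaceView
    configureSession()       // rebuild the session now that a Surface exists
}
```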

View File

@@ -4,7 +4,6 @@ import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import android.hardware.camera2.CameraManager
import android.os.Build
import android.util.Log
import androidx.core.content.ContextCompat
import com.facebook.react.bridge.*
@@ -12,6 +11,7 @@ import com.facebook.react.module.annotations.ReactModule
import com.facebook.react.modules.core.PermissionAwareActivity
import com.facebook.react.modules.core.PermissionListener
import com.facebook.react.uimanager.UIManagerHelper
import com.mrousavy.camera.core.CameraDeviceDetails
import com.mrousavy.camera.frameprocessor.VisionCameraInstaller
import com.mrousavy.camera.frameprocessor.VisionCameraProxy
import com.mrousavy.camera.parsers.*

View File

@@ -1,7 +1,7 @@
package com.mrousavy.camera
import com.mrousavy.camera.parsers.CameraDeviceError
import com.mrousavy.camera.utils.outputs.CameraOutputs
import com.mrousavy.camera.core.outputs.CameraOutputs
abstract class CameraError(
/**

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils
package com.mrousavy.camera.core
import android.graphics.ImageFormat
import android.hardware.camera2.CameraCharacteristics

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera
package com.mrousavy.camera.core
import android.content.Context
import android.graphics.Point
@@ -16,6 +16,15 @@ import android.os.Build
import android.util.Log
import android.util.Range
import android.util.Size
import com.mrousavy.camera.CameraNotReadyError
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CameraView
import com.mrousavy.camera.CaptureAbortedError
import com.mrousavy.camera.NoRecordingInProgressError
import com.mrousavy.camera.PhotoNotEnabledError
import com.mrousavy.camera.RecorderError
import com.mrousavy.camera.RecordingInProgressError
import com.mrousavy.camera.VideoNotEnabledError
import com.mrousavy.camera.extensions.SessionType
import com.mrousavy.camera.extensions.capture
import com.mrousavy.camera.extensions.createCaptureSession
@@ -30,9 +39,7 @@ import com.mrousavy.camera.parsers.QualityPrioritization
import com.mrousavy.camera.parsers.VideoCodec
import com.mrousavy.camera.parsers.VideoFileType
import com.mrousavy.camera.parsers.VideoStabilizationMode
import com.mrousavy.camera.utils.PhotoOutputSynchronizer
import com.mrousavy.camera.utils.RecordingSession
import com.mrousavy.camera.utils.outputs.CameraOutputs
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils;
package com.mrousavy.camera.core;
import android.media.Image
import kotlinx.coroutines.CompletableDeferred

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera
package com.mrousavy.camera.core
import android.annotation.SuppressLint
import android.content.Context
@@ -11,12 +11,8 @@ import android.view.SurfaceView
import com.mrousavy.camera.extensions.getPreviewSize
import kotlin.math.roundToInt
/**
* A [SurfaceView] that can be adjusted to a specified aspect ratio and
* performs center-crop transformation of input frames.
*/
@SuppressLint("ViewConstructor")
class NativePreviewView(context: Context,
class PreviewView(context: Context,
cameraManager: CameraManager,
cameraId: String,
private val onSurfaceChanged: (surface: Surface?) -> Unit): SurfaceView(context) {

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils
package com.mrousavy.camera.core
import android.content.Context
import android.media.ImageWriter

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils
package com.mrousavy.camera.core
import android.graphics.ImageFormat
import android.graphics.SurfaceTexture

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils.outputs
package com.mrousavy.camera.core.outputs
import android.graphics.ImageFormat
import android.hardware.camera2.CameraManager
@@ -12,7 +12,7 @@ import com.mrousavy.camera.extensions.closestToOrMax
import com.mrousavy.camera.extensions.getPhotoSizes
import com.mrousavy.camera.extensions.getPreviewSize
import com.mrousavy.camera.extensions.getVideoSizes
import com.mrousavy.camera.utils.VideoPipeline
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable
class CameraOutputs(val cameraId: String,
@@ -21,7 +21,8 @@ class CameraOutputs(val cameraId: String,
val photo: PhotoOutput? = null,
val video: VideoOutput? = null,
val enableHdr: Boolean? = false,
val callback: Callback): Closeable {
val callback: Callback
): Closeable {
companion object {
private const val TAG = "CameraOutputs"
const val PHOTO_OUTPUT_BUFFER_SIZE = 3

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils.outputs
package com.mrousavy.camera.core.outputs
import android.media.ImageReader
import android.util.Log

View File

@@ -1,4 +1,4 @@
package com.mrousavy.camera.utils.outputs
package com.mrousavy.camera.core.outputs
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraMetadata

View File

@@ -1,8 +1,8 @@
package com.mrousavy.camera.utils.outputs
package com.mrousavy.camera.core.outputs
import android.util.Log
import android.util.Size
import com.mrousavy.camera.utils.VideoPipeline
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable
/**

View File

@@ -11,7 +11,7 @@ import android.util.Log
import androidx.annotation.RequiresApi
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
import com.mrousavy.camera.utils.outputs.CameraOutputs
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException

View File

@@ -24,10 +24,6 @@ extension CameraView {
/// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
private func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
guard let previewView = previewView else {
invokeOnError(.session(.cameraNotReady))
return .zero
}
guard let videoDeviceInput = videoDeviceInput else {
invokeOnError(.session(.cameraNotReady))
return .zero

View File

@@ -1,32 +0,0 @@
//
// CameraView+Preview.swift
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
extension CameraView {
public func setupPreviewView() {
previewView?.removeFromSuperview()
previewView = NativePreviewView(frame: frame, session: captureSession)
addSubview(previewView!)
}
func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
}

View File

@@ -95,7 +95,7 @@ public final class CameraView: UIView {
var pinchGestureRecognizer: UIPinchGestureRecognizer?
var pinchScaleOffset: CGFloat = 1.0
var previewView: PreviewView?
var previewView: PreviewView
#if DEBUG
var fpsGraph: RCTFPSGraph?
#endif
@@ -126,7 +126,8 @@
name: UIDevice.orientationDidChangeNotification,
object: nil)
setupPreviewView()
previewView = PreviewView(frame: frame, session: captureSession)
addSubview(previewView)
}
@available(*, unavailable)
@@ -161,11 +162,9 @@
}
override public func layoutSubviews() {
if let previewView = previewView {
previewView.frame = frame
previewView.bounds = bounds
}
}
// pragma MARK: Props updating
override public final func didSetProps(_ changedProps: [String]!) {
@@ -252,6 +251,20 @@
}
}
func setupFpsGraph() {
#if DEBUG
if enableFpsGraph {
if fpsGraph != nil { return }
fpsGraph = RCTFPSGraph(frame: CGRect(x: 10, y: 54, width: 75, height: 45), color: .red)
fpsGraph!.layer.zPosition = 9999.0
addSubview(fpsGraph!)
} else {
fpsGraph?.removeFromSuperview()
fpsGraph = nil
}
#endif
}
@objc
func onOrientationChanged() {
updateOrientation()

View File

@@ -1,35 +0,0 @@
//
// NativePreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
class NativePreviewView: PreviewView {
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
// swiftlint:enable force_cast
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
init(frame: CGRect, session: AVCaptureSession) {
super.init(frame: frame)
videoPreviewLayer.session = session
videoPreviewLayer.videoGravity = .resizeAspectFill
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented!")
}
}

View File

@@ -2,11 +2,34 @@
// PreviewView.swift
// VisionCamera
//
// Created by Marc Rousavy on 20.07.23.
// Copyright © 2023 mrousavy. All rights reserved.
// Created by Marc Rousavy on 30.11.22.
// Copyright © 2022 mrousavy. All rights reserved.
//
import AVFoundation
import Foundation
import UIKit
class PreviewView: UIView {}
class PreviewView: UIView {
/// Convenience wrapper to get layer as its statically known type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
// swiftlint:disable force_cast
return layer as! AVCaptureVideoPreviewLayer
// swiftlint:enable force_cast
}
override public class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
init(frame: CGRect, session: AVCaptureSession) {
super.init(frame: frame)
videoPreviewLayer.session = session
videoPreviewLayer.videoGravity = .resizeAspectFill
}
@available(*, unavailable)
required init?(coder _: NSCoder) {
fatalError("init(coder:) is not implemented!")
}
}

View File

@@ -10,8 +10,7 @@
B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */; };
B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B82F3A0A2A6896E3002BB804 /* PreviewView.swift */; };
B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* NativePreviewView.swift */; };
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
B84760A62608EE7C004C3180 /* FrameHostObject.mm in Sources */ = {isa = PBXBuildFile; fileRef = B84760A52608EE7C004C3180 /* FrameHostObject.mm */; };
B84760DF2608F57D004C3180 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */ = {isa = PBXBuildFile; fileRef = B85F7AE82A77BB680089C539 /* FrameProcessorPlugin.m */; };
@@ -60,7 +59,6 @@
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
B8DB3BCC263DC97E004C18D7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */; };
B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */; };
/* End PBXBuildFile section */
@@ -86,8 +84,7 @@
B8103E5725FF56F0007A1684 /* Frame.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Frame.h; sourceTree = "<group>"; };
B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice.Format+videoDimensions.swift"; sourceTree = "<group>"; };
B81D41EF263C86F900B041FD /* JSINSObjectConversion.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = JSINSObjectConversion.h; sourceTree = "<group>"; };
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* NativePreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NativePreviewView.swift; sourceTree = "<group>"; };
B83D5EE629377117000AFD2F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
B84760A22608EE38004C3180 /* FrameHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameHostObject.h; sourceTree = "<group>"; };
B84760A52608EE7C004C3180 /* FrameHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameHostObject.mm; sourceTree = "<group>"; };
B84760DE2608F57D004C3180 /* CameraQueues.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraQueues.swift; sourceTree = "<group>"; };
@@ -141,7 +138,6 @@
B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVFileType+descriptor.swift"; sourceTree = "<group>"; };
B8E8467D2A696F44000D6A11 /* VisionCameraProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraProxy.h; sourceTree = "<group>"; };
B8E8467E2A696F4D000D6A11 /* VisionCameraProxy.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VisionCameraProxy.mm; sourceTree = "<group>"; };
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Preview.swift"; sourceTree = "<group>"; };
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+Torch.swift"; sourceTree = "<group>"; };
B8F0825E2A6046FC00C17EB6 /* FrameProcessor.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessor.h; sourceTree = "<group>"; };
B8F0825F2A60491900C17EB6 /* FrameProcessor.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessor.mm; sourceTree = "<group>"; };
@@ -180,15 +176,13 @@
B8E957CF2A693AD2008F5480 /* CameraView+Torch.swift */,
B887518025E0102000DB86D6 /* CameraView+Focus.swift */,
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */,
B8E957CD2A6939A6008F5480 /* CameraView+Preview.swift */,
B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */,
B887518225E0102000DB86D6 /* CameraView+Zoom.swift */,
B86400512784A23400E9D2CA /* CameraView+Orientation.swift */,
B887515F25E0102000DB86D6 /* CameraViewManager.m */,
B887518125E0102000DB86D6 /* CameraViewManager.swift */,
B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
B82F3A0A2A6896E3002BB804 /* PreviewView.swift */,
B83D5EE629377117000AFD2F /* NativePreviewView.swift */,
B83D5EE629377117000AFD2F /* PreviewView.swift */,
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
B887516125E0102000DB86D6 /* Extensions */,
B887517225E0102000DB86D6 /* Parsers */,
@@ -376,13 +370,12 @@
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */,
B81BE1BF26B936FF002696CC /* AVCaptureDevice.Format+videoDimensions.swift in Sources */,
B8DB3BCA263DC4D8004C18D7 /* RecordingSession.swift in Sources */,
B83D5EE729377117000AFD2F /* NativePreviewView.swift in Sources */,
B83D5EE729377117000AFD2F /* PreviewView.swift in Sources */,
B887518925E0102000DB86D6 /* Collection+safe.swift in Sources */,
B887519125E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
B887519725E0102000DB86D6 /* CameraView+TakePhoto.swift in Sources */,
B887519825E0102000DB86D6 /* EnumParserError.swift in Sources */,
B887518C25E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift in Sources */,
B82F3A0B2A6896E3002BB804 /* PreviewView.swift in Sources */,
B887518D25E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift in Sources */,
B887519625E0102000DB86D6 /* Promise.swift in Sources */,
B8DB3BC8263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
@@ -406,7 +399,6 @@
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
B8E957CE2A6939A6008F5480 /* CameraView+Preview.swift in Sources */,
B887519A25E0102000DB86D6 /* AVVideoCodecType+descriptor.swift in Sources */,
B88751A825E0102000DB86D6 /* CameraError.swift in Sources */,
B85F7AE92A77BB680089C539 /* FrameProcessorPlugin.m in Sources */,