chore: Move everything into package/
(#1745)
* Move everything into package
* Remove .DS_Store
* Move scripts and eslintrc to package
* Create CODE_OF_CONDUCT.md
* fix some links
* Update all links (I think)
* Update generated docs
* Update notice-yarn-changes.yml
* Update validate-android.yml
* Update validate-cpp.yml
* Delete notice-yarn-changes.yml
* Update validate-cpp.yml
* Update validate-cpp.yml
* Update validate-js.yml
* Update validate-cpp.yml
* Update validate-cpp.yml
* wrong c++ style
* Revert "wrong c++ style"

  This reverts commit 55a3575589c6f13f8b05134d83384f55e0601ab2.
@@ -0,0 +1,31 @@
//
// AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift
// VisionCamera
//
// Created by Marc Rousavy on 05.05.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAssetWriterInputPixelBufferAdaptor {
  /**
   Convenience initializer to extract correct attributes from the given videoSettings.
   */
  convenience init(assetWriterInput: AVAssetWriterInput,
                   withVideoSettings videoSettings: [String: Any],
                   pixelFormat: OSType) {
    var attributes: [String: Any] = [:]

    if let width = videoSettings[AVVideoWidthKey] as? NSNumber,
       let height = videoSettings[AVVideoHeightKey] as? NSNumber {
      attributes[kCVPixelBufferWidthKey as String] = width as CFNumber
      attributes[kCVPixelBufferHeightKey as String] = height as CFNumber
    }

    attributes[kCVPixelBufferPixelFormatTypeKey as String] = pixelFormat

    self.init(assetWriterInput: assetWriterInput, sourcePixelBufferAttributes: attributes)
  }
}
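For context, a minimal sketch of how this convenience initializer can be used when setting up a recording pipeline; the output URL, codec and 1920x1080 dimensions are illustrative assumptions, not values from this commit:

import AVFoundation

// Hypothetical output location and H.264 video settings.
func makeAdaptor() throws -> AVAssetWriterInputPixelBufferAdaptor {
  let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("recording.mov")
  let writer = try AVAssetWriter(outputURL: url, fileType: .mov)

  let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoWidthKey: 1920,
    AVVideoHeightKey: 1080,
  ]
  let input = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
  writer.add(input)

  // Width, height and pixel format flow into sourcePixelBufferAttributes automatically.
  return AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,
                                              withVideoSettings: videoSettings,
                                              pixelFormat: kCVPixelFormatType_32BGRA)
}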
@@ -0,0 +1,23 @@
//
// AVAudioSession+trySetAllowHaptics.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.03.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAudioSession {
  /**
   Tries to set allowHapticsAndSystemSoundsDuringRecording and ignore errors.
   */
  func trySetAllowHaptics(_ allowHaptics: Bool) {
    if #available(iOS 13.0, *) {
      if !self.allowHapticsAndSystemSoundsDuringRecording {
        try? self.setAllowHapticsAndSystemSoundsDuringRecording(allowHaptics)
      }
    }
  }
}
package/ios/Extensions/AVAudioSession+updateCategory.swift (Normal file, 23 lines added)
@@ -0,0 +1,23 @@
//
// AVAudioSession+updateCategory.swift
// VisionCamera
//
// Created by Marc Rousavy on 01.06.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVAudioSession {
  /**
   Calls [setCategory] if the given category or options are not equal to the currently set category and options.
   */
  func updateCategory(_ category: AVAudioSession.Category, options: AVAudioSession.CategoryOptions = []) throws {
    if self.category != category || categoryOptions.rawValue != options.rawValue {
      ReactLogger.log(level: .info,
                      message: "Changing AVAudioSession category from \(self.category.rawValue) -> \(category.rawValue)")
      try setCategory(category, options: options)
    }
  }
}
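A minimal usage sketch of updateCategory, assuming the shared audio session and a play-and-record use case; the chosen category and options are illustrative:

import AVFoundation

// Only touches the session when the category or options actually differ,
// avoiding redundant (and potentially disruptive) reconfiguration.
let session = AVAudioSession.sharedInstance()
try? session.updateCategory(.playAndRecord, options: [.mixWithOthers, .allowBluetoothA2DP])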
@@ -0,0 +1,34 @@
//
// AVCaptureConnection+setInterfaceOrientation.swift
// VisionCamera
//
// Created by Marc Rousavy on 26.07.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureConnection {
  /**
   Sets the `videoOrientation` to the given `orientation` if video orientation setting is supported.
   */
  func setInterfaceOrientation(_ orientation: UIInterfaceOrientation) {
    if isVideoOrientationSupported {
      switch orientation {
      case .portrait:
        videoOrientation = .portrait
      case .portraitUpsideDown:
        videoOrientation = .portraitUpsideDown
      case .landscapeLeft:
        videoOrientation = .landscapeLeft
      case .landscapeRight:
        videoOrientation = .landscapeRight
      case .unknown:
        fallthrough
      @unknown default:
        videoOrientation = .portrait
      }
    }
  }
}
package/ios/Extensions/AVCaptureDevice+isMultiCam.swift (Normal file, 22 lines added)
@@ -0,0 +1,22 @@
//
// AVCaptureDevice+isMultiCam.swift
// mrousavy
//
// Created by Marc Rousavy on 07.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   Returns true if the device is a virtual multi-cam, false otherwise.
   */
  var isMultiCam: Bool {
    if #available(iOS 13.0, *) {
      return self.isVirtualDevice
    } else {
      return false
    }
  }
}
package/ios/Extensions/AVCaptureDevice+neutralZoom.swift (Normal file, 28 lines added)
@@ -0,0 +1,28 @@
//
// AVCaptureDevice+neutralZoom.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   Get the value at which the Zoom factor is neutral.

   For normal wide-angle devices, this is always going to be 1.0, since this is the default scale.
   For devices with an ultra-wide-angle camera, this value is going to be the value where the wide-angle device will switch over.
   */
  var neutralZoomFactor: CGFloat {
    if #available(iOS 13.0, *) {
      if let indexOfWideAngle = self.constituentDevices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }) {
        if let zoomFactor = self.virtualDeviceSwitchOverVideoZoomFactors[safe: indexOfWideAngle - 1] {
          return CGFloat(zoomFactor.doubleValue)
        }
      }
    }
    return 1.0
  }
}
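A sketch of how the neutral zoom factor might be applied, assuming a back dual-wide virtual device is available (on such hardware the wide-angle switch-over factor is typically around 2.0, so the neutral zoom is not 1.0):

import AVFoundation

if #available(iOS 13.0, *),
   let device = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) {
  // Start the camera at the wide-angle lens instead of the ultra-wide "zoomed out" view.
  try? device.lockForConfiguration()
  device.videoZoomFactor = device.neutralZoomFactor
  device.unlockForConfiguration()
}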
package/ios/Extensions/AVCaptureDevice+physicalDevices.swift (Normal file, 22 lines added)
@@ -0,0 +1,22 @@
//
// AVCaptureDevice+physicalDevices.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice {
  /**
   If the device is a virtual multi-cam, this returns `constituentDevices`, otherwise this returns an array of a single element, `self`
   */
  var physicalDevices: [AVCaptureDevice] {
    if #available(iOS 13.0, *), isVirtualDevice {
      return self.constituentDevices
    } else {
      return [self]
    }
  }
}
@@ -0,0 +1,39 @@
//
// AVCaptureDevice.Format+isBetterThan.swift
// mrousavy
//
// Created by Marc Rousavy on 19.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /** Compares the current Format to the given format and returns true if the current format has either:
   * 1. Higher still image capture dimensions
   * 2. Higher video format dimensions (iOS 13.0)
   * 3. Higher FPS
   */
  func isBetterThan(_ other: AVCaptureDevice.Format) -> Bool {
    // compare still image dimensions
    let leftDimensions = highResolutionStillImageDimensions
    let rightDimensions = other.highResolutionStillImageDimensions
    if leftDimensions.height * leftDimensions.width > rightDimensions.height * rightDimensions.width {
      return true
    }

    // compare video dimensions
    let leftVideo = videoDimensions
    let rightVideo = other.videoDimensions
    if leftVideo.height * leftVideo.width > rightVideo.height * rightVideo.width {
      return true
    }

    // compare max fps
    if maxFrameRate > other.maxFrameRate {
      return true
    }

    return false
  }
}
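A sketch of picking a device's "best" format with this comparator; using the default video device here is an assumption for illustration:

import AVFoundation

if let device = AVCaptureDevice.default(for: .video) {
  // Fold over all formats, keeping whichever one compares as better.
  var best = device.activeFormat
  for format in device.formats where format.isBetterThan(best) {
    best = format
  }
  try? device.lockForConfiguration()
  device.activeFormat = best
  device.unlockForConfiguration()
}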
@@ -0,0 +1,83 @@
//
// AVCaptureDevice.Format+matchesFilter.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCaptureDevice.Format {
  /**
   * Checks whether the given filter (NSDictionary, JSON Object) matches the given AVCaptureDevice Format.
   * The given `filter` dictionary must be of type `CameraDeviceFormat` (from `CameraDevice.d.ts`)
   */
  func matchesFilter(_ filter: NSDictionary) -> Bool {
    if let photoHeight = filter.value(forKey: "photoHeight") as? NSNumber {
      if highResolutionStillImageDimensions.height != photoHeight.intValue {
        return false
      }
    }
    if let photoWidth = filter.value(forKey: "photoWidth") as? NSNumber {
      if highResolutionStillImageDimensions.width != photoWidth.intValue {
        return false
      }
    }
    if let videoHeight = filter.value(forKey: "videoHeight") as? NSNumber {
      if videoDimensions.height != CGFloat(videoHeight.doubleValue) {
        return false
      }
    }
    if let videoWidth = filter.value(forKey: "videoWidth") as? NSNumber {
      if videoDimensions.width != CGFloat(videoWidth.doubleValue) {
        return false
      }
    }
    if let maxISO = filter.value(forKey: "maxISO") as? NSNumber {
      if self.maxISO != maxISO.floatValue {
        return false
      }
    }
    if let minISO = filter.value(forKey: "minISO") as? NSNumber {
      if self.minISO != minISO.floatValue {
        return false
      }
    }
    if let fieldOfView = filter.value(forKey: "fieldOfView") as? NSNumber {
      if videoFieldOfView != fieldOfView.floatValue {
        return false
      }
    }
    if let maxZoom = filter.value(forKey: "maxZoom") as? NSNumber {
      if videoMaxZoomFactor != CGFloat(maxZoom.doubleValue) {
        return false
      }
    }
    if let minFps = filter.value(forKey: "minFps") as? NSNumber {
      if minFrameRate != Float64(minFps.doubleValue) {
        return false
      }
    }
    if let maxFps = filter.value(forKey: "maxFps") as? NSNumber {
      if maxFrameRate != Float64(maxFps.doubleValue) {
        return false
      }
    }
    if let autoFocusSystem = filter.value(forKey: "autoFocusSystem") as? String,
       let avAutoFocusSystem = try? AVCaptureDevice.Format.AutoFocusSystem(withString: autoFocusSystem) {
      if self.autoFocusSystem != avAutoFocusSystem {
        return false
      }
    }
    if let videoStabilizationModes = filter.value(forKey: "videoStabilizationModes") as? [String] {
      let avVideoStabilizationModes = videoStabilizationModes.map { try? AVCaptureVideoStabilizationMode(withString: $0) }
      let allStabilizationModesIncluded = self.videoStabilizationModes.allSatisfy { avVideoStabilizationModes.contains($0) }
      if !allStabilizationModesIncluded {
        return false
      }
    }

    return true
  }
}
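A sketch of filtering a device's formats with such a dictionary; the keys mirror the ones checked above, while the concrete values and the default-device lookup are illustrative assumptions:

import AVFoundation

let filter: NSDictionary = [
  "photoWidth": 4032,
  "photoHeight": 3024,
  "maxFps": 60,
]
if let device = AVCaptureDevice.default(for: .video) {
  // Keep only formats whose properties match every key present in the filter.
  let matching = device.formats.filter { $0.matchesFilter(filter) }
  print("Found \(matching.count) matching format(s)")
}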
@@ -0,0 +1,60 @@
//
// AVCaptureDevice.Format+toDictionary.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

private func getAllVideoStabilizationModes() -> [AVCaptureVideoStabilizationMode] {
  var modes: [AVCaptureVideoStabilizationMode] = [.auto, .cinematic, .off, .standard]
  if #available(iOS 13, *) {
    modes.append(.cinematicExtended)
  }
  return modes
}

extension AVCaptureDevice.Format {
  var videoStabilizationModes: [AVCaptureVideoStabilizationMode] {
    return getAllVideoStabilizationModes().filter { self.isVideoStabilizationModeSupported($0) }
  }

  var minFrameRate: Float64 {
    // Lowest frame rate supported across all frame-rate ranges of this format.
    let minRange = videoSupportedFrameRateRanges.min { l, r in
      return l.minFrameRate < r.minFrameRate
    }
    return minRange?.minFrameRate ?? 0
  }

  var maxFrameRate: Float64 {
    // Highest frame rate supported across all frame-rate ranges of this format.
    let maxRange = videoSupportedFrameRateRanges.max { l, r in
      return l.maxFrameRate < r.maxFrameRate
    }
    return maxRange?.maxFrameRate ?? 0
  }

  func toDictionary() -> [String: Any] {
    let availablePixelFormats = AVCaptureVideoDataOutput().availableVideoPixelFormatTypes
    let pixelFormats = availablePixelFormats.map { format in PixelFormat(mediaSubType: format) }

    return [
      "videoStabilizationModes": videoStabilizationModes.map(\.descriptor),
      "autoFocusSystem": autoFocusSystem.descriptor,
      "photoHeight": highResolutionStillImageDimensions.height,
      "photoWidth": highResolutionStillImageDimensions.width,
      "videoHeight": videoDimensions.height,
      "videoWidth": videoDimensions.width,
      "maxISO": maxISO,
      "minISO": minISO,
      "fieldOfView": videoFieldOfView,
      "maxZoom": videoMaxZoomFactor,
      "supportsVideoHDR": isVideoHDRSupported,
      "supportsPhotoHDR": false,
      "minFps": minFrameRate,
      "maxFps": maxFrameRate,
      "pixelFormats": pixelFormats.map(\.unionValue),
    ]
  }
}
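These dictionaries are what gets handed across the React Native bridge; a sketch of serializing a device's formats, where the default-device lookup is an assumption:

import AVFoundation

if let device = AVCaptureDevice.default(for: .video) {
  // One JS-friendly dictionary per supported capture format.
  let formats = device.formats.map { $0.toDictionary() }
  print(formats.first ?? [:])
}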
@@ -0,0 +1,24 @@
//
// AVCaptureDevice.Format+videoDimensions.swift
// VisionCamera
//
// Created by Marc Rousavy on 03.08.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureDevice.Format {
  /**
   * Returns the video dimensions, adjusted to take pixel aspect ratio and/or clean
   * aperture into account.
   *
   * Pixel aspect ratio is used to adjust the width, leaving the height alone.
   */
  var videoDimensions: CGSize {
    return CMVideoFormatDescriptionGetPresentationDimensions(formatDescription,
                                                             usePixelAspectRatio: true,
                                                             useCleanAperture: true)
  }
}
package/ios/Extensions/AVCapturePhotoOutput+mirror.swift (Normal file, 20 lines added)
@@ -0,0 +1,20 @@
//
// AVCapturePhotoOutput+mirror.swift
// mrousavy
//
// Created by Marc Rousavy on 18.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVCapturePhotoOutput {
  func mirror() {
    connections.forEach { connection in
      if connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = true
      }
    }
  }
}
@@ -0,0 +1,28 @@
//
// AVCaptureSession+setVideoStabilizationMode.swift
// VisionCamera
//
// Created by Marc Rousavy on 02.06.21.
// Copyright © 2021 Facebook. All rights reserved.
//

import AVFoundation
import Foundation

extension AVCaptureSession {
  /**
   Set the given video stabilization mode for all capture connections.
   */
  func setVideoStabilizationMode(_ mode: String) {
    if #available(iOS 13.0, *) {
      guard let mode = try? AVCaptureVideoStabilizationMode(withString: mode) else {
        return
      }
      connections.forEach { connection in
        if connection.isVideoStabilizationSupported {
          connection.preferredVideoStabilizationMode = mode
        }
      }
    }
  }
}
package/ios/Extensions/AVFrameRateRange+includes.swift (Normal file, 18 lines added)
@@ -0,0 +1,18 @@
//
// AVFrameRateRange+includes.swift
// mrousavy
//
// Created by Marc Rousavy on 15.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation

extension AVFrameRateRange {
  /**
   * Returns true if this [AVFrameRateRange] contains the given [fps]
   */
  func includes(fps: Double) -> Bool {
    return fps >= minFrameRate && fps <= maxFrameRate
  }
}
package/ios/Extensions/Collection+safe.swift (Normal file, 18 lines added)
@@ -0,0 +1,18 @@
//
// Collection+safe.swift
// mrousavy
//
// Created by Marc Rousavy on 10.01.21.
// Copyright © 2021 mrousavy. All rights reserved.
//

import Foundation

extension Collection {
  /**
   Returns the element at the specified index if it is within bounds, otherwise nil.
   */
  subscript(safe index: Index) -> Element? {
    return indices.contains(index) ? self[index] : nil
  }
}
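A quick illustration of the safe subscript (the array values are arbitrary):

// In-bounds access returns the element, out-of-bounds access returns nil instead of crashing.
let zoomFactors: [Double] = [2.0, 6.0]
print(zoomFactors[safe: 0] ?? -1) // 2.0
print(zoomFactors[safe: 5] ?? -1) // -1 (index 5 is out of bounds, subscript returned nil)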
package/ios/Extensions/FourCharCode+toString.swift (Normal file, 17 lines added)
@@ -0,0 +1,17 @@
//
// FourCharCode+toString.swift
// VisionCamera
//
// Created by Thomas Coldwell on 28/10/2021.
// Based off this SO answer: https://stackoverflow.com/a/25625744
//

extension FourCharCode {
  func toString() -> String {
    var s = String(UnicodeScalar((self >> 24) & 255)!)
    s.append(String(UnicodeScalar((self >> 16) & 255)!))
    s.append(String(UnicodeScalar((self >> 8) & 255)!))
    s.append(String(UnicodeScalar(self & 255)!))
    return s
  }
}
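A quick illustration, assuming a CoreVideo pixel format constant as the input code:

import CoreVideo

// kCVPixelFormatType_32BGRA is the four-character code 'BGRA'.
let name = kCVPixelFormatType_32BGRA.toString()
print(name) // "BGRA"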