react-native-vision-camera/ios/CameraViewManager.swift
Marc Rousavy 87e6bb710e · feat: Frame Processors for Android (#196) · 2021-06-27
Swift · 171 lines · 6.0 KiB

//
//  CameraViewManager.swift
//  mrousavy
//
//  Created by Marc Rousavy on 09.11.20.
//  Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation

@objc(CameraViewManager)
final class CameraViewManager: RCTViewManager {
  // MARK: Properties

  private var runtimeManager: FrameProcessorRuntimeManager?

  override var bridge: RCTBridge! {
    didSet {
      #if DEBUG
        // Install console.log bindings
        ReactLogger.ConsoleLogFunction = JSConsoleHelper.getLogFunction(for: bridge)
      #endif
      // Install Frame Processor bindings and setup Runtime
      if enableFrameProcessors {
        CameraQueues.frameProcessorQueue.async {
          self.runtimeManager = FrameProcessorRuntimeManager(bridge: self.bridge)
          self.bridge.runOnJS {
            self.runtimeManager!.installFrameProcessorBindings()
          }
        }
      }
    }
  }

  override var methodQueue: DispatchQueue! {
    return DispatchQueue.main
  }

  override static func requiresMainQueueSetup() -> Bool {
    return true
  }

  override final func view() -> UIView! {
    return CameraView()
  }
  // MARK: React Functions

  @objc
  final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
    let component = getCameraView(withTag: node)
    component.startRecording(options: options, callback: onRecordCallback)
  }

  @objc
  final func stopRecording(_ node: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.stopRecording(promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func takePhoto(_ node: NSNumber, options: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let component = getCameraView(withTag: node)
    component.takePhoto(options: options, promise: Promise(resolver: resolve, rejecter: reject))
  }

  @objc
  final func focus(_ node: NSNumber, point: NSDictionary, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    let promise = Promise(resolver: resolve, rejecter: reject)
    guard let x = point["x"] as? NSNumber, let y = point["y"] as? NSNumber else {
      promise.reject(error: .parameter(.invalid(unionName: "point", receivedValue: point.description)))
      return
    }
    let component = getCameraView(withTag: node)
    component.focus(point: CGPoint(x: x.doubleValue, y: y.doubleValue), promise: promise)
  }
  @objc
  final func getAvailableCameraDevices(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: getAllDeviceTypes(), mediaType: .video, position: .unspecified)
      let devices = discoverySession.devices.filter {
        if #available(iOS 11.1, *) {
          // Exclude the True-Depth camera: it only provides YUV and Infrared outputs, so it can't take photos.
          return $0.deviceType != .builtInTrueDepthCamera
        }
        return true
      }
      return devices.map {
        return [
          "id": $0.uniqueID,
          "devices": $0.physicalDevices.map(\.deviceType.descriptor),
          "position": $0.position.descriptor,
          "name": $0.localizedName,
          "hasFlash": $0.hasFlash,
          "hasTorch": $0.hasTorch,
          "minZoom": $0.minAvailableVideoZoomFactor,
          "neutralZoom": $0.neutralZoomFactor,
          "maxZoom": $0.maxAvailableVideoZoomFactor,
          "isMultiCam": $0.isMultiCam,
          "supportsParallelVideoProcessing": true,
          "supportsDepthCapture": false, // TODO: supportsDepthCapture
          "supportsRawCapture": false, // TODO: supportsRawCapture
          "supportsLowLightBoost": $0.isLowLightBoostSupported,
          "supportsFocus": $0.isFocusPointOfInterestSupported,
          "formats": $0.formats.map { format -> [String: Any] in
            format.toDictionary()
          },
        ]
      }
    }
  }
  @objc
  final func getCameraPermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .video)
      return status.descriptor
    }
  }

  @objc
  final func getMicrophonePermissionStatus(_ resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
    withPromise(resolve: resolve, reject: reject) {
      let status = AVCaptureDevice.authorizationStatus(for: .audio)
      return status.descriptor
    }
  }

  @objc
  final func requestCameraPermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .video) { granted in
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }

  @objc
  final func requestMicrophonePermission(_ resolve: @escaping RCTPromiseResolveBlock, reject _: @escaping RCTPromiseRejectBlock) {
    AVCaptureDevice.requestAccess(for: .audio) { granted in
      let result: AVAuthorizationStatus = granted ? .authorized : .denied
      resolve(result.descriptor)
    }
  }
  // MARK: Private

  private func getCameraView(withTag tag: NSNumber) -> CameraView {
    // swiftlint:disable force_cast
    return bridge.uiManager.view(forReactTag: tag) as! CameraView
  }

  private final func getAllDeviceTypes() -> [AVCaptureDevice.DeviceType] {
    var deviceTypes: [AVCaptureDevice.DeviceType] = []
    if #available(iOS 13.0, *) {
      deviceTypes.append(.builtInTripleCamera)
      deviceTypes.append(.builtInDualWideCamera)
      deviceTypes.append(.builtInUltraWideCamera)
    }
    if #available(iOS 11.1, *) {
      deviceTypes.append(.builtInTrueDepthCamera)
    }
    deviceTypes.append(.builtInDualCamera)
    deviceTypes.append(.builtInWideAngleCamera)
    deviceTypes.append(.builtInTelephotoCamera)
    return deviceTypes
  }
}
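
The `Promise` wrapper and the `withPromise` helper used throughout this file are defined elsewhere in the repository, not in this file. As a rough sketch of the shape this file assumes — the exact signatures and error cases below are illustrative assumptions, not the library's actual code:

// Sketch only: the real Promise/withPromise live in the repo's other iOS
// sources; `CameraError.unknown(message:)` and the code/message fields are
// assumptions for illustration.
class Promise {
  private let resolver: RCTPromiseResolveBlock
  private let rejecter: RCTPromiseRejectBlock

  init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
    self.resolver = resolver
    self.rejecter = rejecter
  }

  func resolve(_ value: Any?) {
    resolver(value)
  }

  func reject(error: CameraError, cause: NSError? = nil) {
    rejecter(error.code, error.message, cause)
  }
}

// Runs a throwing block and maps its return value (or thrown error) onto the
// React Native promise, which keeps each exported method body to a few lines.
func withPromise(resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock, _ block: () throws -> Any?) {
  let promise = Promise(resolver: resolve, rejecter: reject)
  do {
    promise.resolve(try block())
  } catch let error as CameraError {
    promise.reject(error: error)
  } catch {
    promise.reject(error: .unknown(message: error.localizedDescription), cause: error as NSError)
  }
}

With a helper of this shape, an exported method like `getCameraPermissionStatus` above only needs to return a value (or throw a `CameraError`) and the promise resolution/rejection is handled in one place.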