react-native-vision-camera/ios/CameraView+Focus.swift
Marc Rousavy 12f850c8e1
feat: Draw onto Frame as if it was a Skia Canvas (#1479)
* Create Shaders.ts

* Add `previewType` and `enableFpsGraph`

* Add RN Skia native dependency

* Add Skia Preview View on iOS

* Pass 1

* Update FrameHostObject.mm

* Wrap Canvas

* Lockfiles

* fix: Fix stuff

* chore: Upgrade RNWorklets

* Add `previewType` to set the Preview

* feat: Add Example

* Update project.pbxproj

* `enableFpsGraph`

* Cache the `std::shared_ptr<FrameHostObject>`

* Update CameraView+RecordVideo.swift

* Update SkiaMetalCanvasProvider.mm

* Android: Integrate Skia Dependency

* fix: Use new Prefix

* Add example for rendering shader

* chore: Upgrade CameraX

* Remove KTX

* Enable `viewBinding`

* Revert "Enable `viewBinding`"

This reverts commit f2a603f53b33ea4311a296422ffd1a910ce03f9e.

* Revert "chore: Upgrade CameraX"

This reverts commit 8dc832cf8754490d31a6192e6c1a1f11cdcd94fe.

* Remove unneeded `ProcessCameraProvider.getInstance()` call

* fix: Add REA hotfix patch

* fix: Fix FrameHostObject being dead in runAsync

* fix: Make `runAsync` run truly async by dropping new Frames while executing

* chore: Upgrade RN Worklets to latest

* chore: Upgrade RN Skia

* Revert "Remove KTX"

This reverts commit 253f586633f7af2da992d2279fc206dc62597129.

* Make Skia optional in CMake

* Fix import

* Update CMakeLists.txt

* Update build.gradle

* Update CameraView.kt

* Update CameraView.kt

* Update CameraView.kt

* Update Shaders.ts

* Center Blur

* chore: Upgrade RN Worklets

* feat: Add `toByteArray()`, `orientation`, `isMirrored` and `timestamp` to `Frame` (#1487)

* feat: Implement `orientation` and `isMirrored` on Frame

* feat: Add `toArrayBuffer()` func

* perf: Do faster buffer copy

* feat: Implement `toArrayBuffer()` on Android

* feat: Add `orientation` and `isMirrored` to Android

* feat: Add `timestamp` to Frame

* Update Frame.ts

* Update JImageProxy.h

* Update FrameHostObject.cpp

* Update FrameHostObject.cpp

* Update CameraPage.tsx

* fix: Format Swift
2023-02-21 15:00:48 +01:00
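
The #1487 commits listed above add `timestamp`, `orientation`, `isMirrored` and `toArrayBuffer()` to the frame processor's `Frame` object. As a rough, self-contained sketch of where such values typically come from on iOS (the helper below and its name are hypothetical; the actual implementation lives in the Frame and FrameHostObject sources changed by this PR, not in the file shown here):

import AVFoundation
import CoreMedia

// Hypothetical sketch, not part of this PR's sources: derives metadata like the
// `timestamp` and `isMirrored` values mentioned in the commit log from standard
// AVFoundation capture objects.
func frameMetadata(for sampleBuffer: CMSampleBuffer,
                   connection: AVCaptureConnection) -> (timestampMs: Double, isMirrored: Bool) {
  // Presentation timestamp of this video buffer, converted to milliseconds.
  let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
  let timestampMs = CMTimeGetSeconds(pts) * 1000.0
  // Whether the capture connection mirrors the video output (commonly true for the front camera).
  let isMirrored = connection.isVideoMirrored
  return (timestampMs, isMirrored)
}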


//
//  CameraView+focus.swift
//  mrousavy
//
//  Created by Marc Rousavy on 19.02.21.
//  Copyright © 2021 mrousavy. All rights reserved.
//

import AVFoundation
import Foundation
import UIKit

extension CameraView {
  private func rotateFrameSize(frameSize: CGSize, orientation: UIInterfaceOrientation) -> CGSize {
    switch orientation {
    case .portrait, .portraitUpsideDown, .unknown:
      // swap width and height since the input orientation is rotated
      return CGSize(width: frameSize.height, height: frameSize.width)
    case .landscapeLeft, .landscapeRight:
      // is same as camera sensor orientation
      return frameSize
    @unknown default:
      return frameSize
    }
  }

  /// Converts a Point in the UI View Layer to a Point in the Camera Frame coordinate system
  func convertLayerPointToFramePoint(layerPoint point: CGPoint) -> CGPoint {
    guard let previewView = previewView else {
      invokeOnError(.session(.cameraNotReady))
      return .zero
    }
    guard let videoDeviceInput = videoDeviceInput else {
      invokeOnError(.session(.cameraNotReady))
      return .zero
    }
    guard let viewScale = window?.screen.scale else {
      invokeOnError(.unknown(message: "View has no parent Window!"))
      return .zero
    }

    // Camera frame size, rotated to match the current output orientation
    let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
                                    orientation: outputOrientation)
    // Preview size in pixels (bounds are in points)
    let viewSize = CGSize(width: previewView.bounds.width * viewScale,
                          height: previewView.bounds.height * viewScale)
    // Scale factor that maps view pixels into frame pixels
    let scale = min(frameSize.width / viewSize.width, frameSize.height / viewSize.height)
    let scaledViewSize = CGSize(width: viewSize.width * scale, height: viewSize.height * scale)

    // Zero or negative: how much of the frame lies outside the preview on each axis
    let overlapX = scaledViewSize.width - frameSize.width
    let overlapY = scaledViewSize.height - frameSize.height

    let scaledPoint = CGPoint(x: point.x * scale, y: point.y * scale)

    // Offset by half the overlap to center the preview's visible region inside the frame
    return CGPoint(x: scaledPoint.x - (overlapX / 2), y: scaledPoint.y - (overlapY / 2))
  }

  /// Converts a Point in the UI View Layer to a Point in the Camera Device Sensor coordinate system (x: [0..1], y: [0..1])
  func captureDevicePointConverted(fromLayerPoint pointInLayer: CGPoint) -> CGPoint {
    guard let videoDeviceInput = videoDeviceInput else {
      invokeOnError(.session(.cameraNotReady))
      return .zero
    }
    let frameSize = rotateFrameSize(frameSize: videoDeviceInput.device.activeFormat.videoDimensions,
                                    orientation: outputOrientation)
    let pointInFrame = convertLayerPointToFramePoint(layerPoint: pointInLayer)
    return CGPoint(x: pointInFrame.x / frameSize.width, y: pointInFrame.y / frameSize.height)
  }

  func focus(point: CGPoint, promise: Promise) {
    withPromise(promise) {
      guard let device = self.videoDeviceInput?.device else {
        throw CameraError.session(SessionError.cameraNotReady)
      }
      if !device.isFocusPointOfInterestSupported {
        throw CameraError.device(DeviceError.focusNotSupported)
      }

      // in {0..1} system
      let normalizedPoint = captureDevicePointConverted(fromLayerPoint: point)

      do {
        // Lock the device so focus and exposure changes are applied atomically
        try device.lockForConfiguration()
        device.focusPointOfInterest = normalizedPoint
        device.focusMode = .continuousAutoFocus

        if device.isExposurePointOfInterestSupported {
          device.exposurePointOfInterest = normalizedPoint
          device.exposureMode = .continuousAutoExposure
        }

        device.unlockForConfiguration()
        return nil
      } catch {
        throw CameraError.device(DeviceError.configureError)
      }
    }
  }
}
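
To make the coordinate conversion above easier to sanity-check, here is a small, self-contained sketch that reproduces the same scaling and centering math outside of CameraView. The helper name and the sample sizes below are made up for illustration; the real method additionally reports errors and reads the sizes from the device's active format and the preview view.

import CoreGraphics

// Hypothetical standalone helper mirroring the math of convertLayerPointToFramePoint:
// `framePixels` is the (already rotated) camera frame size, `viewPixels` is the preview
// size in pixels (view points multiplied by the screen scale).
func layerPointToFramePoint(_ point: CGPoint, framePixels: CGSize, viewPixels: CGSize) -> CGPoint {
  // Scale factor that maps view pixels into frame pixels.
  let scale = min(framePixels.width / viewPixels.width, framePixels.height / viewPixels.height)
  let scaledView = CGSize(width: viewPixels.width * scale, height: viewPixels.height * scale)
  // Zero or negative: how much of the frame lies outside the preview on each axis.
  let overlapX = scaledView.width - framePixels.width
  let overlapY = scaledView.height - framePixels.height
  let scaled = CGPoint(x: point.x * scale, y: point.y * scale)
  // Subtracting half the overlap centers the preview's visible region inside the frame.
  return CGPoint(x: scaled.x - overlapX / 2, y: scaled.y - overlapY / 2)
}

// Example: a 1080x1920 portrait frame previewed in a 960x1920 pixel view crops 60 px
// on each horizontal side, so the view center (480, 960) maps to the frame center (540, 960).
let framePoint = layerPointToFramePoint(CGPoint(x: 480, y: 960),
                                        framePixels: CGSize(width: 1080, height: 1920),
                                        viewPixels: CGSize(width: 960, height: 1920))
// framePoint == (540.0, 960.0); dividing by the frame size gives the normalized
// (0.5, 0.5) that captureDevicePointConverted would return for the same tap.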