375e894038
* Make Frame Processors an extra subspec * Update VisionCamera.podspec * Make optional * Make VisionCamera compile without Skia * Fix * Add skia again * Update VisionCamera.podspec * Make VisionCamera build without Frame Processors * Rename error to `system/frame-processors-unavailable` * Fix Frame Processor returning early * Remove `preset`, FP partial rewrite * Only warn on frame drop * Fix wrong queue * fix: Run on CameraQueue again * Update CameraView.swift * fix: Activate audio session asynchronously on audio queue * Update CameraView+RecordVideo.swift * Update PreviewView.h * Cleanups * Cleanup * fix cast * feat: Add LiDAR Depth Camera support * Upgrade Ruby * Add vector icons type * Update Gemfile.lock * fix: Stop queues on deinit * Also load `builtInTrueDepthCamera` * Update CameraViewManager.swift * Update SkImageHelpers.mm * Extract FrameProcessorCallback to FrameProcessor Holds more context now :) * Rename to .m * fix: Add `RCTLog` import * Create SkiaFrameProcessor * Update CameraBridge.h * Call Frame Processor * Fix defines * fix: Allow deleting callback funcs * fix Skia build * batch * Just call `setSkiaFrameProcessor` * Rewrite in Swift * Pass `SkiaRenderer` * Fix Import * Move `PreviewView` to Swift * Fix Layer * Set Skia Canvas to Frame Host Object * Make `DrawableFrameHostObject` subclass * Fix TS types * Use same MTLDevice and apply scale * Make getter * Extract `setTorch` and `Preview` * fix: Fix nil metal device * Don't wait for session stop in deinit * Use main pixel ratio * Use unique_ptr for Render Contexts * fix: Fix SkiaPreviewDisplayLink broken after deinit * inline `getTextureCache` * Update CameraPage.tsx * chore: Format iOS * perf: Allow MTLLayer to be optimized for only frame buffers * Add RN Video types * fix: Fix Frame Processors if guard * Find nodeModules recursively * Create `Frame.isDrawable` * Add `cocoapods-check` dependency
99 lines
3.7 KiB
Swift
99 lines
3.7 KiB
Swift
//
// CameraView+TakePhoto.swift
// mrousavy
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation
|
|
|
|
extension CameraView {
  /// Initiates a still-photo capture configured by the given `options` dictionary.
  ///
  /// Runs asynchronously on the camera queue. The `promise` is rejected immediately for
  /// invalid options or an unready session; otherwise resolution is handed off to a
  /// `PhotoCaptureDelegate` which settles the promise when the capture completes.
  func takePhoto(options: NSDictionary, promise: Promise) {
    CameraQueues.cameraQueue.async {
      // The session must have both a photo output and a video input to capture anything.
      guard let output = self.photoOutput,
            let input = self.videoDeviceInput else {
        if self.photo?.boolValue == true {
          promise.reject(error: .session(.cameraNotReady))
        } else {
          promise.reject(error: .capture(.photoNotEnabled))
        }
        return
      }

      ReactLogger.log(level: .info, message: "Capturing photo...")

      // Resolve the requested photo codec (if any) into an AVFoundation format dictionary.
      var format: [String: Any]?
      if let codecString = options["photoCodec"] as? String {
        guard let codec = AVVideoCodecType(withString: codecString) else {
          promise.reject(error: .parameter(.invalid(unionName: "PhotoCodec", receivedValue: codecString)))
          return
        }
        guard output.availablePhotoCodecTypes.contains(codec) else {
          promise.reject(error: .capture(.invalidPhotoCodec))
          return
        }
        format = [AVVideoCodecKey: codec]
      }

      // Create photo settings
      let settings = AVCapturePhotoSettings(format: format)

      // Default, overridable settings if high quality capture was enabled.
      if self.enableHighQualityPhotos?.boolValue == true {
        settings.isHighResolutionPhotoEnabled = true
        if #available(iOS 13.0, *) {
          settings.photoQualityPrioritization = .quality
        }
      }

      // Flash
      if input.device.isFlashAvailable, let flashOption = options["flash"] as? String {
        guard let flashMode = AVCaptureDevice.FlashMode(withString: flashOption) else {
          promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flashOption)))
          return
        }
        settings.flashMode = flashMode
      }

      // Depth data — mirror whatever delivery modes the output currently has enabled.
      settings.isDepthDataDeliveryEnabled = output.isDepthDataDeliveryEnabled
      if #available(iOS 12.0, *) {
        settings.isPortraitEffectsMatteDeliveryEnabled = output.isPortraitEffectsMatteDeliveryEnabled
      }

      // Quality prioritization (overrides the high-quality default set above).
      if #available(iOS 13.0, *), let prioritizationString = options["qualityPrioritization"] as? String {
        guard let prioritization = AVCapturePhotoOutput.QualityPrioritization(withString: prioritizationString) else {
          promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: prioritizationString)))
          return
        }
        settings.photoQualityPrioritization = prioritization
      }

      // Red-eye reduction
      if #available(iOS 12.0, *), let redEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
        settings.isAutoRedEyeReductionEnabled = redEyeReduction
      }

      // Stabilization
      if let stabilization = options["enableAutoStabilization"] as? Bool {
        settings.isAutoStillImageStabilizationEnabled = stabilization
      }

      // Distortion correction
      if #available(iOS 14.1, *), let distortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
        settings.isAutoContentAwareDistortionCorrectionEnabled = distortionCorrection
      }

      output.capturePhoto(with: settings, delegate: PhotoCaptureDelegate(promise: promise))

      // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too.
      output.setPreparedPhotoSettingsArray([settings], completionHandler: nil)
    }
  }
}
|