react-native-vision-camera/ios/CameraView+TakePhoto.swift

//
// CameraView+TakePhoto.swift
// mrousavy
//
// Created by Marc Rousavy on 16.12.20.
// Copyright © 2020 mrousavy. All rights reserved.
//

import AVFoundation

extension CameraView {
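  /// Takes a photo with the given `options` and resolves or rejects the given `promise`.
  ///
  /// Illustrative only: the keys below are the ones this function reads from `options`
  /// (passed in from the JS side); the example values are assumptions, not an exhaustive contract.
  ///
  ///     let options: NSDictionary = [
  ///       "photoCodec": "jpeg",
  ///       "flash": "auto",
  ///       "qualityPrioritization": "balanced",
  ///       "enableAutoRedEyeReduction": true,
  ///       "enableAutoStabilization": true,
  ///       "enableAutoDistortionCorrection": true,
  ///     ]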
  func takePhoto(options: NSDictionary, promise: Promise) {
    CameraQueues.cameraQueue.async {
      guard let photoOutput = self.photoOutput,
            let videoDeviceInput = self.videoDeviceInput else {
        if self.photo?.boolValue == true {
          promise.reject(error: .session(.cameraNotReady))
          return
        } else {
          promise.reject(error: .capture(.photoNotEnabled))
          return
        }
      }

      ReactLogger.log(level: .info, message: "Capturing photo...")

      var format: [String: Any]?
      // photo codec
      if let photoCodecString = options["photoCodec"] as? String {
        guard let photoCodec = AVVideoCodecType(withString: photoCodecString) else {
          promise.reject(error: .parameter(.invalid(unionName: "PhotoCodec", receivedValue: photoCodecString)))
          return
        }
        if photoOutput.availablePhotoCodecTypes.contains(photoCodec) {
          format = [AVVideoCodecKey: photoCodec]
        } else {
          promise.reject(error: .capture(.invalidPhotoCodec))
          return
        }
      }

      // Create photo settings
      let photoSettings = AVCapturePhotoSettings(format: format)

      // default, overridable settings if high quality capture was enabled
      if self.enableHighQualityPhotos?.boolValue == true {
        photoSettings.isHighResolutionPhotoEnabled = true
        if #available(iOS 13.0, *) {
          photoSettings.photoQualityPrioritization = .quality
        }
      }

      // flash
      if videoDeviceInput.device.isFlashAvailable, let flash = options["flash"] as? String {
        guard let flashMode = AVCaptureDevice.FlashMode(withString: flash) else {
          promise.reject(error: .parameter(.invalid(unionName: "FlashMode", receivedValue: flash)))
          return
        }
        photoSettings.flashMode = flashMode
      }

      // depth data
      photoSettings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled
      if #available(iOS 12.0, *) {
        photoSettings.isPortraitEffectsMatteDeliveryEnabled = photoOutput.isPortraitEffectsMatteDeliveryEnabled
      }

      // quality prioritization
      if #available(iOS 13.0, *), let qualityPrioritization = options["qualityPrioritization"] as? String {
        guard let photoQualityPrioritization = AVCapturePhotoOutput.QualityPrioritization(withString: qualityPrioritization) else {
          promise.reject(error: .parameter(.invalid(unionName: "QualityPrioritization", receivedValue: qualityPrioritization)))
          return
        }
        photoSettings.photoQualityPrioritization = photoQualityPrioritization
      }

      // red-eye reduction
      if #available(iOS 12.0, *), let autoRedEyeReduction = options["enableAutoRedEyeReduction"] as? Bool {
        photoSettings.isAutoRedEyeReductionEnabled = autoRedEyeReduction
      }

      // stabilization
      if let enableAutoStabilization = options["enableAutoStabilization"] as? Bool {
        photoSettings.isAutoStillImageStabilizationEnabled = enableAutoStabilization
      }

      // distortion correction
      if #available(iOS 14.1, *), let enableAutoDistortionCorrection = options["enableAutoDistortionCorrection"] as? Bool {
        photoSettings.isAutoContentAwareDistortionCorrectionEnabled = enableAutoDistortionCorrection
      }

      photoOutput.capturePhoto(with: photoSettings, delegate: PhotoCaptureDelegate(promise: promise))

      // Assume that `takePhoto` is always called with the same parameters, so prepare the next call too.
      photoOutput.setPreparedPhotoSettingsArray([photoSettings], completionHandler: nil)
    }
  }
}