import type { TemporaryFile } from './TemporaryFile';
import type { TemporaryFile } from './TemporaryFile';
import { CameraPhotoCodec } from './VideoFile';
export interface TakePhotoOptions {
/**
* Indicates how photo quality should be prioritized against speed.
*
* * `"quality"` Indicates that photo quality is paramount, even at the expense of shot-to-shot time
2021-02-19 08:07:53 -07:00
* * `"balanced"` Indicates that photo quality and speed of delivery are balanced in priority
* * `"speed"` Indicates that speed of photo delivery is most important, even at the expense of quality
2021-02-19 08:07:53 -07:00
*
* @platform iOS 13.0+
* @default "balanced"
*/
2021-02-20 09:07:10 -07:00
qualityPrioritization?: 'quality' | 'balanced' | 'speed';
2021-02-19 08:07:53 -07:00
/**
* Whether the Flash should be enabled or disabled
*
* @default "auto"
*/
2021-02-20 09:07:10 -07:00
flash?: 'on' | 'off' | 'auto';
2021-02-19 08:07:53 -07:00
/**
* Specifies whether red-eye reduction should be applied automatically on flash captures.
*
* @default false
*/
enableAutoRedEyeReduction?: boolean;
/**
* Indicates whether still image stabilization will be employed when capturing the photo
*
* @default false
*/
enableAutoStabilization?: boolean;
/**
* Specifies whether the photo output should use content aware distortion correction on this photo request (at its discretion).
*
* @default false
*/
enableAutoDistortionCorrection?: boolean;
feat: Complete iOS Codebase rewrite (#1647) * Make Frame Processors an extra subspec * Update VisionCamera.podspec * Make optional * Make VisionCamera compile without Skia * Fix * Add skia again * Update VisionCamera.podspec * Make VisionCamera build without Frame Processors * Rename error to `system/frame-processors-unavailable` * Fix Frame Processor returning early * Remove `preset`, FP partial rewrite * Only warn on frame drop * Fix wrong queue * fix: Run on CameraQueue again * Update CameraView.swift * fix: Activate audio session asynchronously on audio queue * Update CameraView+RecordVideo.swift * Update PreviewView.h * Cleanups * Cleanup * fix cast * feat: Add LiDAR Depth Camera support * Upgrade Ruby * Add vector icons type * Update Gemfile.lock * fix: Stop queues on deinit * Also load `builtInTrueDepthCamera` * Update CameraViewManager.swift * Update SkImageHelpers.mm * Extract FrameProcessorCallback to FrameProcessor Holds more context now :) * Rename to .m * fix: Add `RCTLog` import * Create SkiaFrameProcessor * Update CameraBridge.h * Call Frame Processor * Fix defines * fix: Allow deleting callback funcs * fix Skia build * batch * Just call `setSkiaFrameProcessor` * Rewrite in Swift * Pass `SkiaRenderer` * Fix Import * Move `PreviewView` to Swift * Fix Layer * Set Skia Canvas to Frame Host Object * Make `DrawableFrameHostObject` subclass * Fix TS types * Use same MTLDevice and apply scale * Make getter * Extract `setTorch` and `Preview` * fix: Fix nil metal device * Don't wait for session stop in deinit * Use main pixel ratio * Use unique_ptr for Render Contexts * fix: Fix SkiaPreviewDisplayLink broken after deinit * inline `getTextureCache` * Update CameraPage.tsx * chore: Format iOS * perf: Allow MTLLayer to be optimized for only frame buffers * Add RN Video types * fix: Fix Frame Processors if guard * Find nodeModules recursively * Create `Frame.isDrawable` * Add `cocoapods-check` dependency
2023-07-20 07:30:04 -06:00
/**
* Specifies the photo codec to use for this capture. The provided photo codec has to be supported by the session.
*/
photoCodec?: CameraPhotoCodec;
2021-02-19 08:07:53 -07:00
/**
2021-03-08 10:51:53 -07:00
* When set to `true`, metadata reading and mapping will be skipped. ({@linkcode PhotoFile.metadata} will be null)
2021-02-19 08:07:53 -07:00
*
* This might result in a faster capture, as metadata reading and mapping requires File IO.
*
* @default false
*
* @platform Android
*/
skipMetadata?: boolean;
}
/**
* Represents a Photo taken by the Camera written to the local filesystem.
2021-03-08 10:21:30 -07:00
*
2021-03-08 10:51:53 -07:00
* Related: {@linkcode Camera.takePhoto | Camera.takePhoto()}, {@linkcode Camera.takeSnapshot | Camera.takeSnapshot()}
2021-02-19 08:07:53 -07:00
*/
export interface PhotoFile extends TemporaryFile {
width: number;
height: number;
isRawPhoto: boolean;
thumbnail?: Record<string, unknown>;
2021-03-09 04:02:10 -07:00
/**
* Metadata information describing the captured image.
*
* @see [AVCapturePhoto.metadata](https://developer.apple.com/documentation/avfoundation/avcapturephoto/2873982-metadata)
* @see [AndroidX ExifInterface](https://developer.android.com/reference/androidx/exifinterface/media/ExifInterface)
*/
metadata: {
Orientation: number;
/**
* @platform iOS
*/
DPIHeight: number;
/**
* @platform iOS
*/
DPIWidth: number;
/**
* Represents any data Apple cameras write to the metadata
*
* @platform iOS
*/
'{MakerApple}'?: Record<string, unknown>;
'{TIFF}': {
ResolutionUnit: number;
Software: string;
Make: string;
DateTime: string;
XResolution: number;
2021-02-19 08:07:53 -07:00
/**
* @platform iOS
*/
HostComputer?: string;
Model: string;
YResolution: number;
2021-02-19 08:07:53 -07:00
};
'{Exif}': {
DateTimeOriginal: string;
ExposureTime: number;
FNumber: number;
LensSpecification: number[];
ExposureBiasValue: number;
ColorSpace: number;
FocalLenIn35mmFilm: number;
BrightnessValue: number;
ExposureMode: number;
LensModel: string;
SceneType: number;
PixelXDimension: number;
ShutterSpeedValue: number;
SensingMethod: number;
SubjectArea: number[];
ApertureValue: number;
SubsecTimeDigitized: string;
FocalLength: number;
LensMake: string;
SubsecTimeOriginal: string;
OffsetTimeDigitized: string;
PixelYDimension: number;
ISOSpeedRatings: number[];
WhiteBalance: number;
DateTimeDigitized: string;
OffsetTimeOriginal: string;
ExifVersion: string;
OffsetTime: string;
Flash: number;
ExposureProgram: number;
MeteringMode: number;
};
};
}