chore: Remove semicolons (#1846)
* chore: Disable `semi` in Prettier
* chore: Format w/o semi
* Remove more `;`
* Lint example
* More ;
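The first bullet is the configuration change that drives the rest of the diff: Prettier's `semi` option is turned off and the TypeScript sources are reformatted accordingly. A minimal sketch of that option, written here as an ESM-style Prettier config (the repository's actual config file name and its other options are not part of this excerpt):

```ts
// Hypothetical minimal Prettier config (e.g. prettier.config.mjs) — only the option
// this commit flips is shown; the project's real config contains more settings.
export default {
  semi: false, // never print semicolons at the ends of statements
}
```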
@@ -1,13 +1,13 @@
-import { Dimensions } from 'react-native';
-import { FormatFilter } from './getCameraFormat';
+import { Dimensions } from 'react-native'
+import { FormatFilter } from './getCameraFormat'

 type TTemplates = {
-  [key: string]: FormatFilter[];
-};
+  [key: string]: FormatFilter[]
+}

-const SnapchatResolution = { width: 1920, height: 1080 };
-const InstagramResolution = { width: 3840, height: 2160 };
-const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('window').width;
+const SnapchatResolution = { width: 1920, height: 1080 }
+const InstagramResolution = { width: 3840, height: 2160 }
+const ScreenAspectRatio = Dimensions.get('window').height / Dimensions.get('window').width

 /**
  * Predefined templates for use in `useCameraFormat`/`getCameraFormat`.
@@ -69,4 +69,4 @@ export const Templates: TTemplates = {
     { photoAspectRatio: ScreenAspectRatio },
     { photoResolution: InstagramResolution },
   ],
-};
+}

@@ -1,4 +1,4 @@
-import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice';
+import { CameraDevice, CameraPosition, PhysicalCameraDeviceType } from '../CameraDevice'

 export interface DeviceFilter {
   /**
@@ -19,7 +19,7 @@ export interface DeviceFilter {
    * getCameraDevice({ physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'] })
    * ```
    */
-  physicalDevices?: PhysicalCameraDeviceType[];
+  physicalDevices?: PhysicalCameraDeviceType[]
 }

 /**
@@ -36,26 +36,26 @@ export interface DeviceFilter {
  * ```
  */
 export function getCameraDevice(devices: CameraDevice[], position: CameraPosition, filter: DeviceFilter = {}): CameraDevice | undefined {
-  const explicitlyWantsNonWideAngle = filter.physicalDevices != null && !filter.physicalDevices.includes('wide-angle-camera');
+  const explicitlyWantsNonWideAngle = filter.physicalDevices != null && !filter.physicalDevices.includes('wide-angle-camera')

-  const filtered = devices.filter((d) => d.position === position);
+  const filtered = devices.filter((d) => d.position === position)

-  let bestDevice = filtered[0];
-  if (bestDevice == null) return undefined;
+  let bestDevice = filtered[0]
+  if (bestDevice == null) return undefined

   // Compare each device using a point scoring system
   for (const device of devices) {
-    let leftPoints = 0;
-    let rightPoints = 0;
+    let leftPoints = 0
+    let rightPoints = 0

     // prefer higher hardware-level
-    if (bestDevice.hardwareLevel === 'full') leftPoints += 4;
-    if (device.hardwareLevel === 'full') rightPoints += 4;
+    if (bestDevice.hardwareLevel === 'full') leftPoints += 4
+    if (device.hardwareLevel === 'full') rightPoints += 4

     if (!explicitlyWantsNonWideAngle) {
       // prefer wide-angle-camera as a default
-      if (bestDevice.physicalDevices.includes('wide-angle-camera')) leftPoints += 1;
-      if (device.physicalDevices.includes('wide-angle-camera')) rightPoints += 1;
+      if (bestDevice.physicalDevices.includes('wide-angle-camera')) leftPoints += 1
+      if (device.physicalDevices.includes('wide-angle-camera')) rightPoints += 1
     }

     // compare devices. two possible scenarios:
@@ -63,17 +63,17 @@ export function getCameraDevice(devices: CameraDevice[], position: CameraPositio
     // 2. user wants only one ([wide]) for faster performance. prefer those devices that only have one camera, if they have more, we rank them lower.
     if (filter.physicalDevices != null) {
       for (const d of bestDevice.physicalDevices) {
-        if (filter.physicalDevices.includes(d)) leftPoints += 1;
-        else leftPoints -= 1;
+        if (filter.physicalDevices.includes(d)) leftPoints += 1
+        else leftPoints -= 1
       }
       for (const d of device.physicalDevices) {
-        if (filter.physicalDevices.includes(d)) rightPoints += 1;
-        else rightPoints -= 1;
+        if (filter.physicalDevices.includes(d)) rightPoints += 1
+        else rightPoints -= 1
       }
     }

-    if (rightPoints > leftPoints) bestDevice = device;
+    if (rightPoints > leftPoints) bestDevice = device
   }

-  return bestDevice;
+  return bestDevice
 }

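For orientation (not part of the commit), a hedged usage sketch of `getCameraDevice` as typed above. The device list is declared rather than fetched, since device enumeration is outside this diff, and the import path from the package root is an assumption:

```ts
import type { CameraDevice } from 'react-native-vision-camera'
// Assumption: the helper is re-exported from the package root; adjust the path if not.
import { getCameraDevice } from 'react-native-vision-camera'

declare const devices: CameraDevice[] // e.g. the result of the library's device enumeration

// Ask for a back camera; listing all three physical devices (taken from the doc
// comment above) makes the scoring loop prefer combined multi-cam devices.
const device = getCameraDevice(devices, 'back', {
  physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
})
if (device == null) throw new Error('No matching back camera found!')
```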
@@ -1,10 +1,10 @@
-import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice';
-import { CameraRuntimeError } from '../CameraError';
-import { PixelFormat } from '../PixelFormat';
+import type { CameraDevice, CameraDeviceFormat, VideoStabilizationMode } from '../CameraDevice'
+import { CameraRuntimeError } from '../CameraError'
+import { PixelFormat } from '../PixelFormat'

 interface Size {
-  width: number;
-  height: number;
+  width: number
+  height: number
 }

 export interface FormatFilter {
@@ -12,12 +12,12 @@ export interface FormatFilter {
    * The target resolution of the video (and frame processor) output pipeline.
    * If no format supports the given resolution, the format closest to this value will be used.
    */
-  videoResolution?: Size | 'max';
+  videoResolution?: Size | 'max'
   /**
    * The target resolution of the photo output pipeline.
    * If no format supports the given resolution, the format closest to this value will be used.
    */
-  photoResolution?: Size | 'max';
+  photoResolution?: Size | 'max'
   /**
    * The target aspect ratio of the video (and preview) output, expressed as a factor: `width / height`.
    * (Note: Cameras are in landscape orientation)
@@ -30,7 +30,7 @@ export interface FormatFilter {
    * targetVideoAspectRatio: screen.height / screen.width
    * ```
    */
-  videoAspectRatio?: number;
+  videoAspectRatio?: number
   /**
    * The target aspect ratio of the photo output, expressed as a factor: `width / height`.
    * (Note: Cameras are in landscape orientation)
@@ -44,39 +44,39 @@ export interface FormatFilter {
    * targetPhotoAspectRatio: screen.height / screen.width
    * ```
    */
-  photoAspectRatio?: number;
+  photoAspectRatio?: number
   /**
    * The target FPS you want to record video at.
    * If the FPS requirements can not be met, the format closest to this value will be used.
    */
-  fps?: number;
+  fps?: number
   /**
    * The target video stabilization mode you want to use.
    * If no format supports the target video stabilization mode, the best other matching format will be used.
    */
-  videoStabilizationMode?: VideoStabilizationMode;
+  videoStabilizationMode?: VideoStabilizationMode
   /**
    * The target pixel format you want to use.
    * If no format supports the target pixel format, the best other matching format will be used.
    */
-  pixelFormat?: PixelFormat;
+  pixelFormat?: PixelFormat
   /**
    * Whether you want to find a format that supports Photo HDR.
    */
-  photoHDR?: boolean;
+  photoHDR?: boolean
   /**
    * Whether you want to find a format that supports Photo HDR.
    */
-  videoHDR?: boolean;
+  videoHDR?: boolean
 }

 type FilterWithPriority<T> = {
-  target: Exclude<T, null | undefined>;
-  priority: number;
-};
+  target: Exclude<T, null | undefined>
+  priority: number
+}
 type FilterMap = {
-  [K in keyof FormatFilter]: FilterWithPriority<FormatFilter[K]>;
-};
+  [K in keyof FormatFilter]: FilterWithPriority<FormatFilter[K]>
+}
 function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
   return filters.reduce<FilterMap>((map, curr, index) => {
     for (const key in curr) {
@@ -86,10 +86,10 @@ function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
         // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
         target: curr[key],
         priority: filters.length - index,
-      };
+      }
     }
-    return map;
-  }, {});
+    return map
+  }, {})
 }

 /**
@@ -112,103 +112,103 @@ function filtersToFilterMap(filters: FormatFilter[]): FilterMap {
  */
 export function getCameraFormat(device: CameraDevice, filters: FormatFilter[]): CameraDeviceFormat {
   // Combine filters into a single filter map for constant-time lookup
-  const filter = filtersToFilterMap(filters);
+  const filter = filtersToFilterMap(filters)

-  let bestFormat = device.formats[0];
+  let bestFormat = device.formats[0]
   if (bestFormat == null)
-    throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`);
+    throw new CameraRuntimeError('device/invalid-device', `The given Camera Device (${device.id}) does not have any formats!`)

   // Compare each format using a point scoring system
   for (const format of device.formats) {
-    let leftPoints = 0;
-    let rightPoints = 0;
+    let leftPoints = 0
+    let rightPoints = 0

-    const leftVideoResolution = bestFormat.videoWidth * bestFormat.videoHeight;
-    const rightVideoResolution = format.videoWidth * format.videoHeight;
+    const leftVideoResolution = bestFormat.videoWidth * bestFormat.videoHeight
+    const rightVideoResolution = format.videoWidth * format.videoHeight
     if (filter.videoResolution != null) {
       if (filter.videoResolution.target === 'max') {
         // We just want the maximum resolution
-        if (leftVideoResolution > rightVideoResolution) leftPoints += filter.videoResolution.priority;
-        if (rightVideoResolution > leftVideoResolution) rightPoints += filter.videoResolution.priority;
+        if (leftVideoResolution > rightVideoResolution) leftPoints += filter.videoResolution.priority
+        if (rightVideoResolution > leftVideoResolution) rightPoints += filter.videoResolution.priority
       } else {
         // Find video resolution closest to the filter (ignoring orientation)
-        const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height;
-        const leftDiff = Math.abs(leftVideoResolution - targetResolution);
-        const rightDiff = Math.abs(rightVideoResolution - targetResolution);
-        if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority;
-        if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority;
+        const targetResolution = filter.videoResolution.target.width * filter.videoResolution.target.height
+        const leftDiff = Math.abs(leftVideoResolution - targetResolution)
+        const rightDiff = Math.abs(rightVideoResolution - targetResolution)
+        if (leftDiff < rightDiff) leftPoints += filter.videoResolution.priority
+        if (rightDiff < leftDiff) rightPoints += filter.videoResolution.priority
       }
     }

-    const leftPhotoResolution = bestFormat.photoWidth * bestFormat.photoHeight;
-    const rightPhotoResolution = format.photoWidth * format.photoHeight;
+    const leftPhotoResolution = bestFormat.photoWidth * bestFormat.photoHeight
+    const rightPhotoResolution = format.photoWidth * format.photoHeight
     if (filter.photoResolution != null) {
       if (filter.photoResolution.target === 'max') {
         // We just want the maximum resolution
-        if (leftPhotoResolution > rightPhotoResolution) leftPoints += filter.photoResolution.priority;
-        if (rightPhotoResolution > leftPhotoResolution) rightPoints += filter.photoResolution.priority;
+        if (leftPhotoResolution > rightPhotoResolution) leftPoints += filter.photoResolution.priority
+        if (rightPhotoResolution > leftPhotoResolution) rightPoints += filter.photoResolution.priority
       } else {
         // Find closest photo resolution to the filter (ignoring orientation)
-        const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height;
-        const leftDiff = Math.abs(leftPhotoResolution - targetResolution);
-        const rightDiff = Math.abs(rightPhotoResolution - targetResolution);
-        if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority;
-        if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority;
+        const targetResolution = filter.photoResolution.target.width * filter.photoResolution.target.height
+        const leftDiff = Math.abs(leftPhotoResolution - targetResolution)
+        const rightDiff = Math.abs(rightPhotoResolution - targetResolution)
+        if (leftDiff < rightDiff) leftPoints += filter.photoResolution.priority
+        if (rightDiff < leftDiff) rightPoints += filter.photoResolution.priority
       }
     }

     // Find closest aspect ratio (video)
     if (filter.videoAspectRatio != null) {
-      const leftAspect = bestFormat.videoWidth / bestFormat.videoHeight;
-      const rightAspect = format.videoWidth / format.videoHeight;
-      const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target);
-      const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target);
-      if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority;
-      if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority;
+      const leftAspect = bestFormat.videoWidth / bestFormat.videoHeight
+      const rightAspect = format.videoWidth / format.videoHeight
+      const leftDiff = Math.abs(leftAspect - filter.videoAspectRatio.target)
+      const rightDiff = Math.abs(rightAspect - filter.videoAspectRatio.target)
+      if (leftDiff < rightDiff) leftPoints += filter.videoAspectRatio.priority
+      if (rightDiff < leftDiff) rightPoints += filter.videoAspectRatio.priority
     }

     // Find closest aspect ratio (photo)
     if (filter.photoAspectRatio != null) {
-      const leftAspect = bestFormat.photoWidth / bestFormat.photoHeight;
-      const rightAspect = format.photoWidth / format.photoHeight;
-      const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target);
-      const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target);
-      if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority;
-      if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority;
+      const leftAspect = bestFormat.photoWidth / bestFormat.photoHeight
+      const rightAspect = format.photoWidth / format.photoHeight
+      const leftDiff = Math.abs(leftAspect - filter.photoAspectRatio.target)
+      const rightDiff = Math.abs(rightAspect - filter.photoAspectRatio.target)
+      if (leftDiff < rightDiff) leftPoints += filter.photoAspectRatio.priority
+      if (rightDiff < leftDiff) rightPoints += filter.photoAspectRatio.priority
     }

     // Find closest max FPS
     if (filter.fps != null) {
-      if (bestFormat.maxFps >= filter.fps.target) leftPoints += filter.fps.priority;
-      if (format.maxFps >= filter.fps.target) rightPoints += filter.fps.priority;
+      if (bestFormat.maxFps >= filter.fps.target) leftPoints += filter.fps.priority
+      if (format.maxFps >= filter.fps.target) rightPoints += filter.fps.priority
     }

     // Find video stabilization mode
     if (filter.videoStabilizationMode != null) {
-      if (bestFormat.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++;
-      if (format.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++;
+      if (bestFormat.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) leftPoints++
+      if (format.videoStabilizationModes.includes(filter.videoStabilizationMode.target)) rightPoints++
     }

     // Find pixel format
     if (filter.pixelFormat != null) {
-      if (bestFormat.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++;
-      if (format.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++;
+      if (bestFormat.pixelFormats.includes(filter.pixelFormat.target)) leftPoints++
+      if (format.pixelFormats.includes(filter.pixelFormat.target)) rightPoints++
     }

     // Find Photo HDR formats
     if (filter.photoHDR != null) {
-      if (bestFormat.supportsPhotoHDR === filter.photoHDR.target) leftPoints++;
-      if (format.supportsPhotoHDR === filter.photoHDR.target) rightPoints++;
+      if (bestFormat.supportsPhotoHDR === filter.photoHDR.target) leftPoints++
+      if (format.supportsPhotoHDR === filter.photoHDR.target) rightPoints++
     }

     // Find Video HDR formats
     if (filter.videoHDR != null) {
-      if (bestFormat.supportsVideoHDR === filter.videoHDR.target) leftPoints++;
-      if (format.supportsVideoHDR === filter.videoHDR.target) rightPoints++;
+      if (bestFormat.supportsVideoHDR === filter.videoHDR.target) leftPoints++
+      if (format.supportsVideoHDR === filter.videoHDR.target) rightPoints++
     }

-    if (rightPoints > leftPoints) bestFormat = format;
+    if (rightPoints > leftPoints) bestFormat = format
   }

-  return bestFormat;
+  return bestFormat
 }
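Likewise, a hedged sketch of how the filters above compose with `getCameraFormat`. Per `filtersToFilterMap`, earlier entries receive higher priority (`filters.length - index`), so the array is ordered most- to least-important; the `Templates` object in the first file bundles such filter arrays for common use cases. Only fields from the `FormatFilter` interface shown in this diff are used, and the package-root import path is an assumption:

```ts
import { Dimensions } from 'react-native'
import type { CameraDevice } from 'react-native-vision-camera'
// Assumption: the helper is re-exported from the package root; adjust the path if not.
import { getCameraFormat } from 'react-native-vision-camera'

declare const device: CameraDevice // e.g. the device selected in the previous sketch

const screen = Dimensions.get('window')
// Ordered most-important first: aspect ratio, then resolution, then FPS.
const format = getCameraFormat(device, [
  { videoAspectRatio: screen.height / screen.width },
  { videoResolution: { width: 1920, height: 1080 } },
  { fps: 60 },
])
```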