feat: Add support for Buffer Compression for a more efficient Video Pipeline (enableBufferCompression) (#1828)

Marc Rousavy 2023-09-21 17:18:54 +02:00 committed by GitHub
parent aafffa60f6
commit fffefa9d12
9 changed files with 96 additions and 20 deletions

@@ -139,9 +139,8 @@ extension CameraView {
If HDR is disabled, this will return whatever the user specified as a pixelFormat, or the most efficient format as a fallback.
*/
private func getPixelFormat(videoOutput: AVCaptureVideoDataOutput) -> OSType {
let supportedPixelFormats = videoOutput.availableVideoPixelFormatTypes
// as per documentation, the first value is always the most efficient format
let defaultFormat = supportedPixelFormats.first!
let defaultFormat = videoOutput.availableVideoPixelFormatTypes.first!
// If the user enabled HDR, we can only use the YUV 4:2:0 10-bit pixel format.
if hdr == true {
@@ -149,12 +148,21 @@
invokeOnError(.format(.incompatiblePixelFormatWithHDR))
return defaultFormat
}
guard supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange) else {
var targetFormats = [kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange]
if enableBufferCompression {
// If we enable buffer compression, try to use a lossless compressed YUV format first, otherwise fall back to the others.
targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, at: 0)
}
// Find the best matching format
guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
invokeOnError(.format(.invalidHdr))
return defaultFormat
}
// YUV 4:2:0 10-bit
return kCVPixelFormatType_420YpCbCr10BiPlanarFullRange
// YUV 4:2:0 10-bit (compressed/uncompressed)
return format
}
// If the user didn't specify a custom pixelFormat, just return the default one.
@@ -165,24 +173,31 @@ extension CameraView {
// If we don't use HDR, we can use any other custom pixel format.
switch pixelFormat {
case "yuv":
if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
// YUV 4:2:0 8-bit (full video colors)
return kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
} else if supportedPixelFormats.contains(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
// YUV 4:2:0 8-bit (limited video colors)
return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
} else {
// YUV 4:2:0 8-bit (full/limited video colors; uncompressed)
var targetFormats = [kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
if enableBufferCompression {
// YUV 4:2:0 8-bit (full/limited video colors; compressed)
targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0)
targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0)
}
guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
invokeOnError(.device(.pixelFormatNotSupported))
return defaultFormat
}
return format
case "rgb":
if supportedPixelFormats.contains(kCVPixelFormatType_32BGRA) {
// RGBA 8-bit
return kCVPixelFormatType_32BGRA
} else {
// RGBA 8-bit (uncompressed)
var targetFormats = [kCVPixelFormatType_32BGRA]
if enableBufferCompression {
// RGBA 8-bit (compressed)
targetFormats.insert(kCVPixelFormatType_Lossless_32BGRA, at: 0)
}
guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
invokeOnError(.device(.pixelFormatNotSupported))
return defaultFormat
}
return format
case "native":
return defaultFormat
default:

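The selection logic above follows the same pattern in every branch: build a preference-ordered list of pixel formats, put the lossless-compressed variants at the front when `enableBufferCompression` is set, and take the first entry the output actually supports. Below is a minimal, self-contained sketch of that pattern; the helper name and the fallback wiring are illustrative assumptions, not code from this commit.

```swift
import AVFoundation

// Sketch of the preference-list pattern used by getPixelFormat above:
// compressed formats are inserted at the front of the list when buffer
// compression is enabled, and the first format the output supports wins.
func pickYuv8PixelFormat(for videoOutput: AVCaptureVideoDataOutput,
                         enableBufferCompression: Bool) -> OSType {
  var targetFormats: [OSType] = [kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
  if enableBufferCompression {
    // Prefer the lossless-compressed variants if the device offers them.
    targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0)
    targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0)
  }
  // Fall back to the output's default (most efficient) format if nothing matches.
  let bestMatch = targetFormats.first { videoOutput.availableVideoPixelFormatTypes.contains($0) }
  return bestMatch ?? videoOutput.availableVideoPixelFormatTypes.first!
}
```

If the device does not offer the compressed formats, the compressed entries simply never match and the uncompressed formats are used, which is consistent with the "no error will be thrown" behavior documented for the `enableBufferCompression` prop further down.
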
@@ -40,6 +40,7 @@ public final class CameraView: UIView {
@objc var enableDepthData = false
@objc var enableHighQualityPhotos: NSNumber? // nullable bool
@objc var enablePortraitEffectsMatteDelivery = false
@objc var enableBufferCompression = false
// use cases
@objc var photo: NSNumber? // nullable bool
@objc var video: NSNumber? // nullable bool

@@ -28,6 +28,7 @@ RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString);
RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableHighQualityPhotos, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL);
RCT_EXPORT_VIEW_PROPERTY(enableBufferCompression, BOOL);
// use cases
RCT_EXPORT_VIEW_PROPERTY(photo, NSNumber); // nullable bool
RCT_EXPORT_VIEW_PROPERTY(video, NSNumber); // nullable bool

@@ -0,0 +1,21 @@
//
// AVCaptureVideoDataOutput+findPixelFormat.swift
// VisionCamera
//
// Created by Marc Rousavy on 21.09.23.
// Copyright © 2023 mrousavy. All rights reserved.
//
import AVFoundation
extension AVCaptureVideoDataOutput {
/**
Of the given list, find the first that is available on this video data output.
If none are supported, this returns nil.
*/
func findPixelFormat(firstOf pixelFormats: [OSType]) -> OSType? {
return pixelFormats.first { format in
availableVideoPixelFormatTypes.contains(format)
}
}
}
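
A hedged usage sketch for this new helper; the preference list here is only an example:

```swift
import AVFoundation

let videoOutput = AVCaptureVideoDataOutput()
// Prefer the lossless-compressed BGRA format, fall back to plain BGRA.
let preferredFormats: [OSType] = [kCVPixelFormatType_Lossless_32BGRA,
                                  kCVPixelFormatType_32BGRA]
if let format = videoOutput.findPixelFormat(firstOf: preferredFormats) {
  print("Selected pixel format: \(format)")
} else {
  print("None of the preferred pixel formats are available on this output.")
}
```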

@@ -154,9 +154,15 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
auto mediaType = CMFormatDescriptionGetMediaSubType(format);
switch (mediaType) {
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_Lossless_32BGRA:
return jsi::String::createFromUtf8(runtime, "rgb");
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange:
return jsi::String::createFromUtf8(runtime, "yuv");
default:
return jsi::String::createFromUtf8(runtime, "unknown");

@@ -50,11 +50,15 @@ enum PixelFormat {
init(mediaSubType: OSType) {
switch mediaSubType {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange,
kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange,
kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange:
self = .yuv
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
self = .yuv
case kCVPixelFormatType_32BGRA:
case kCVPixelFormatType_32BGRA, kCVPixelFormatType_Lossless_32BGRA:
self = .rgb
default:
self = .unknown

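With the extended initializer, the compressed variants map to the same case as their uncompressed counterparts, so buffers keep being classified as `yuv` or `rgb` regardless of compression, mirroring the grouping in the frame processor `pixelFormat` getter above. A small sketch (assuming `PixelFormat` stays equatable, as a Swift enum without associated values is):

```swift
import CoreVideo

let rgb = PixelFormat(mediaSubType: kCVPixelFormatType_32BGRA)                    // .rgb
let rgbCompressed = PixelFormat(mediaSubType: kCVPixelFormatType_Lossless_32BGRA) // .rgb
let yuv = PixelFormat(mediaSubType: kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange) // .yuv

// Compression does not change how the buffer is classified.
print(rgb == rgbCompressed) // true
print(yuv)                  // yuv
```
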
@@ -23,6 +23,7 @@
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */; };
B87B11BF2A8E63B700732EBF /* PixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87B11BE2A8E63B700732EBF /* PixelFormat.swift */; };
B881D35E2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */; };
B881D3602ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */; };
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */; };
B887518525E0102000DB86D6 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
B887518625E0102000DB86D6 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
@@ -101,6 +102,7 @@
B86DC976260E315100FB17B2 /* CameraView+AVCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CameraView+AVCaptureSession.swift"; sourceTree = "<group>"; };
B87B11BE2A8E63B700732EBF /* PixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PixelFormat.swift; sourceTree = "<group>"; };
B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+toDictionary.swift"; sourceTree = "<group>"; };
B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureVideoDataOutput+findPixelFormat.swift"; sourceTree = "<group>"; };
B882720F26AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureConnection+setInterfaceOrientation.swift"; sourceTree = "<group>"; };
B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CameraView+RecordVideo.swift"; sourceTree = "<group>"; };
@@ -218,6 +220,7 @@
B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */,
B88B47462667C8E00091F538 /* AVCaptureSession+setVideoStabilizationMode.swift */,
B887516225E0102000DB86D6 /* Collection+safe.swift */,
B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */,
);
path = Extensions;
sourceTree = "<group>";
@@ -405,6 +408,7 @@
B88751A125E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
B882721026AEB1A100B14107 /* AVCaptureConnection+setInterfaceOrientation.swift in Sources */,
B8E957D02A693AD2008F5480 /* CameraView+Torch.swift in Sources */,
B881D3602ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */,
B86DC977260E315100FB17B2 /* CameraView+AVCaptureSession.swift in Sources */,
B887518A25E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift in Sources */,
B88751A325E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,

@@ -403,6 +403,8 @@ export class Camera extends React.PureComponent<CameraProps> {
);
}
const shouldEnableBufferCompression = props.video === true && frameProcessor == null;
return (
<NativeCameraView
{...props}
@@ -412,6 +414,7 @@ export class Camera extends React.PureComponent<CameraProps> {
onInitialized={this.onInitialized}
onError={this.onError}
enableFrameProcessor={frameProcessor != null}
enableBufferCompression={props.enableBufferCompression ?? shouldEnableBufferCompression}
/>
);
}

@@ -123,6 +123,27 @@ export interface CameraProps extends ViewProps {
* Requires `format` to be set.
*/
hdr?: boolean;
/**
* Enables or disables lossless buffer compression for the video stream.
* If you only use {@linkcode video} or a {@linkcode frameProcessor}, this
* can increase the efficiency and lower memory usage of the Camera.
*
* If buffer compression is enabled, the video pipeline will try to use a
* lossless-compressed pixel format instead of the normal one.
*
* If you use a {@linkcode frameProcessor}, you might need to change how pixels
* are read inside your native frame processor function, as the compressed buffer
* layout differs from the usual `yuv` or `rgb` layout.
*
* If buffer compression is not available but this property is enabled, the normal
* pixel formats will be used and no error will be thrown.
*
* @platform iOS
* @default
* - true // if video={true} and frameProcessor={undefined}
* - false // otherwise
*/
enableBufferCompression?: boolean;
/**
* Enables or disables low-light boost on this camera device. Make sure the given `format` supports low-light boost.
*