perf: Use lossy compressed YUV buffers if available (#2195)

Marc Rousavy 2023-11-22 16:19:29 +01:00 committed by GitHub
parent deb3c41ee0
commit 49d58d0d0c
7 changed files with 23 additions and 23 deletions


@@ -11,7 +11,7 @@ import useBaseUrl from '@docusaurus/useBaseUrl'
## Performance of VisionCamera
VisionCamera is highly optimized to be **as fast as a native Camera app**, and is sometimes even faster than that.
-I am using highly efficient native GPU buffer formats (such as YUV 4:2:0, or lossless compressed YUV 4:2:0), running the video pipelines in parallel, using C++ for the Frame Processors implementation, and other tricks to make sure VisionCamera is as efficient as possible.
+I am using highly efficient native GPU buffer formats (such as YUV 4:2:0, or lossy compressed YUV 4:2:0), running the video pipelines in parallel, using C++ for the Frame Processors implementation, and other tricks to make sure VisionCamera is as efficient as possible.
## Making it faster
@@ -66,7 +66,7 @@ Video HDR uses 10-bit formats and/or additional processing steps that come with
### Buffer Compression
-Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/CameraProps#enablebuffercompression)) to use lossless-compressed buffers for the Camera's video pipeline. These buffers can use less memory and are more efficient.
+Enable Buffer Compression ([`enableBufferCompression`](/docs/api/interfaces/CameraProps#enablebuffercompression)) to use lossy-compressed buffers for the Camera's video pipeline. These buffers can use less memory and are more efficient.
Note: When not using a `frameProcessor`, buffer compression is automatically enabled.
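
For context (not part of this diff), the flag described above is a single prop on the `<Camera>` component. A minimal sketch of opting in while a frame processor is attached, assuming the usual `useCameraDevice` and `useFrameProcessor` hooks from VisionCamera:

```tsx
import * as React from 'react'
import { Camera, useCameraDevice, useFrameProcessor } from 'react-native-vision-camera'

export function CompressedCamera(): React.ReactElement | null {
  const device = useCameraDevice('back')

  // The frame processor must be able to read the lossy-compressed pixel format;
  // otherwise leave enableBufferCompression off (it is auto-enabled anyway when
  // no frameProcessor is attached).
  const frameProcessor = useFrameProcessor((frame) => {
    'worklet'
    console.log(`${frame.width}x${frame.height} ${frame.pixelFormat} frame`)
  }, [])

  if (device == null) return null
  return (
    <Camera
      style={{ flex: 1 }}
      device={device}
      isActive={true}
      video={true}
      frameProcessor={frameProcessor}
      // Opt in to lossy-compressed buffers despite the attached frameProcessor
      enableBufferCompression={true}
    />
  )
}
```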


@@ -77,4 +77,4 @@ Instead of [`kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`](https://developer.
[Buffer Compression](/docs/guides/performance#buffer-compression) is automatically enabled if you are not using a Frame Processor. If you are using a Frame Processor, buffer compression will be turned off, as it essentially uses a different format than YUV. See ["Understanding YpCbCr Image Formats"](https://developer.apple.com/documentation/accelerate/conversion/understanding_ypcbcr_image_formats) for more information.
-Instead of [`kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`](https://developer.apple.com/documentation/corevideo/kcvpixelformattype_420ypcbcr8biplanarvideorange), it uses [`kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange`](https://developer.apple.com/documentation/corevideo/3746862-anonymous/kcvpixelformattype_lossless_420ypcbcr8biplanarvideorange), same for full-range.
+Instead of [`kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange`](https://developer.apple.com/documentation/corevideo/kcvpixelformattype_420ypcbcr8biplanarvideorange), it uses [`kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange`](https://developer.apple.com/documentation/corevideo/3746862-anonymous/kcvpixelformattype_lossy_420ypcbcr8biplanarvideorange), same for full-range.


@@ -188,14 +188,14 @@ extension CameraConfiguration.Video {
    if enableBufferCompression {
      // use compressed format instead if we enabled buffer compression
      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange &&
-        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange) {
+        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange) {
        // YUV 4:2:0 8-bit (limited video colors; compressed)
-       defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange
+       defaultFormat = kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange
      }
      if defaultFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange &&
-        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange) {
+        videoOutput.availableVideoPixelFormatTypes.contains(kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange) {
        // YUV 4:2:0 8-bit (full video colors; compressed)
-       defaultFormat = kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange
+       defaultFormat = kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange
      }
    }
@@ -208,8 +208,8 @@ extension CameraConfiguration.Video {
    var targetFormats = [kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
                         kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange]
    if enableBufferCompression {
-     // If we enable buffer compression, try to use a lossless compressed YUV format first, otherwise fall back to the others.
+     // If we enable buffer compression, try to use a lossy compressed YUV format first, otherwise fall back to the others.
-     targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange, at: 0)
+     targetFormats.insert(kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange, at: 0)
    }
    // Find the best matching format
@@ -228,8 +228,8 @@ extension CameraConfiguration.Video {
                         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
    if enableBufferCompression {
      // YUV 4:2:0 8-bit (full/limited video colors; compressed)
-     targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange, at: 0)
+     targetFormats.insert(kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange, at: 0)
-     targetFormats.insert(kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange, at: 0)
+     targetFormats.insert(kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange, at: 0)
    }
    guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
      throw CameraError.device(.pixelFormatNotSupported)
@@ -240,7 +240,7 @@ extension CameraConfiguration.Video {
    var targetFormats = [kCVPixelFormatType_32BGRA]
    if enableBufferCompression {
      // RGBA 8-bit (compressed)
-     targetFormats.insert(kCVPixelFormatType_Lossless_32BGRA, at: 0)
+     targetFormats.insert(kCVPixelFormatType_Lossy_32BGRA, at: 0)
    }
    guard let format = videoOutput.findPixelFormat(firstOf: targetFormats) else {
      throw CameraError.device(.pixelFormatNotSupported)


@@ -33,7 +33,7 @@ extension AVCaptureDevice.Format {
    let hdrFormats = [
      kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
      kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
-     kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange,
+     kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange,
    ]
    return hdrFormats.contains(pixelFormat)
  }


@@ -154,15 +154,15 @@ jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& pr
    auto mediaType = CMFormatDescriptionGetMediaSubType(format);
    switch (mediaType) {
      case kCVPixelFormatType_32BGRA:
-     case kCVPixelFormatType_Lossless_32BGRA:
+     case kCVPixelFormatType_Lossy_32BGRA:
        return jsi::String::createFromUtf8(runtime, "rgb");
      case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
      case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
      case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
-     case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange:
+     case kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange:
-     case kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange:
+     case kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange:
-     case kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange:
+     case kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange:
        return jsi::String::createFromUtf8(runtime, "yuv");
      default:
        return jsi::String::createFromUtf8(runtime, "unknown");
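
Because the getter above folds the lossy-compressed CoreVideo formats into the same `"rgb"`/`"yuv"` strings, JS-side code can keep branching on `frame.pixelFormat` whether or not compression is active. A small illustrative sketch (the branch bodies are placeholders, not part of this diff):

```ts
import type { Frame } from 'react-native-vision-camera'

// Pass this worklet to useFrameProcessor(...) inside a component.
export function handleFrame(frame: Frame): void {
  'worklet'
  // "yuv" covers both kCVPixelFormatType_420YpCbCr8BiPlanar* and the
  // kCVPixelFormatType_Lossy_* variants, so no extra case is needed here.
  if (frame.pixelFormat === 'yuv') {
    // read the Y/CbCr planes
  } else if (frame.pixelFormat === 'rgb') {
    // read BGRA pixels
  }
}
```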


@@ -33,11 +33,11 @@ enum PixelFormat: String, JSUnionValue {
         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
         kCVPixelFormatType_420YpCbCr10BiPlanarFullRange,
         kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
-        kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarFullRange,
+        kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarFullRange,
-        kCVPixelFormatType_Lossless_420YpCbCr8BiPlanarVideoRange,
+        kCVPixelFormatType_Lossy_420YpCbCr8BiPlanarVideoRange,
-        kCVPixelFormatType_Lossless_420YpCbCr10PackedBiPlanarVideoRange:
+        kCVPixelFormatType_Lossy_420YpCbCr10PackedBiPlanarVideoRange:
      self = .yuv
-   case kCVPixelFormatType_32BGRA, kCVPixelFormatType_Lossless_32BGRA:
+   case kCVPixelFormatType_32BGRA, kCVPixelFormatType_Lossy_32BGRA:
      self = .rgb
    default:
      self = .unknown


@@ -165,12 +165,12 @@ export interface CameraProps extends ViewProps {
   */
  photoHdr?: boolean
  /**
-  * Enables or disables lossless buffer compression for the video stream.
+  * Enables or disables lossy buffer compression for the video stream.
   * If you only use {@linkcode video} or a {@linkcode frameProcessor}, this
   * can increase the efficiency and lower memory usage of the Camera.
   *
   * If buffer compression is enabled, the video pipeline will try to use a
-  * lossless-compressed pixel format instead of the normal one.
+  * lossy-compressed pixel format instead of the normal one.
   *
   * If you use a {@linkcode frameProcessor}, you might need to change how pixels
   * are read inside your native frame processor function as this is different