feat: New CameraDevice + CameraFormat detection using CameraX (#1495)

* Create CameraDevice.kt

* Create VideoStabilizationMode+String.kt

* Use CameraX Extensions

* Remove `system/no-camera-manager` error
Author: Marc Rousavy, 2023-03-13 14:23:19 +01:00 (committed by GitHub)
parent f791c6b4cd
commit 0d83a13196
5 changed files with 237 additions and 185 deletions

CameraViewModule.kt

@@ -163,177 +163,20 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
     }
   }
 
-  // TODO: This uses the Camera2 API to list all characteristics of a camera device and therefore doesn't work with Camera1. Find a way to use CameraX for this
-  // https://issuetracker.google.com/issues/179925896
   @ReactMethod
   fun getAvailableCameraDevices(promise: Promise) {
-    val startTime = System.currentTimeMillis()
     coroutineScope.launch {
       withPromise(promise) {
         val cameraProvider = ProcessCameraProvider.getInstance(reactApplicationContext).await()
         val extensionsManager = ExtensionsManager.getInstanceAsync(reactApplicationContext, cameraProvider).await()
-        val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as? CameraManager
-          ?: throw CameraManagerUnavailableError()
-
-        val cameraDevices: WritableArray = Arguments.createArray()
-        manager.cameraIdList.forEach loop@{ id ->
-          val cameraSelector = CameraSelector.Builder().byID(id).build()
-          val characteristics = manager.getCameraCharacteristics(id)
-          val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
-          val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!!
-          val isMultiCam = Build.VERSION.SDK_INT >= Build.VERSION_CODES.P &&
-            capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)
-          val deviceTypes = characteristics.getDeviceTypes()
-          val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
-          val lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING)!!
-          val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)!!
-          val maxScalerZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM)!!
-          val supportsDepthCapture = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
-            capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)
-          val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
-          val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE)
-          val digitalStabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)
-          val opticalStabilizationModes = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)
-          val zoomRange = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R)
-            characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
-          else null
-          val name = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P)
-            characteristics.get(CameraCharacteristics.INFO_VERSION)
-          else null
-          val fpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)!!
-          val supportsHdr = extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.HDR)
-          val supportsLowLightBoost = extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.NIGHT)
-          // see https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture
-          val supportsParallelVideoProcessing = hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY && hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
-          val fieldOfView = characteristics.getFieldOfView()
-
-          val map = Arguments.createMap()
-          map.putString("id", id)
-          map.putArray("devices", deviceTypes)
-          map.putString("position", parseLensFacing(lensFacing))
-          map.putString("name", name ?: "${parseLensFacing(lensFacing)} ($id)")
-          map.putBoolean("hasFlash", hasFlash)
-          map.putBoolean("hasTorch", hasFlash)
-          map.putBoolean("isMultiCam", isMultiCam)
-          map.putBoolean("supportsParallelVideoProcessing", supportsParallelVideoProcessing)
-          map.putBoolean("supportsRawCapture", supportsRawCapture)
-          map.putBoolean("supportsDepthCapture", supportsDepthCapture)
-          map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
-          map.putBoolean("supportsFocus", true) // I believe every device here supports focussing
-          if (zoomRange != null) {
-            map.putDouble("minZoom", zoomRange.lower.toDouble())
-            map.putDouble("maxZoom", zoomRange.upper.toDouble())
-          } else {
-            map.putDouble("minZoom", 1.0)
-            map.putDouble("maxZoom", maxScalerZoom.toDouble())
-          }
-          map.putDouble("neutralZoom", 1.0)
-
-          val supportedVideoResolutions: List<Size>
-          val cameraInfos = cameraSelector.filter(cameraProvider.availableCameraInfos)
-          if (cameraInfos.size > 0) {
-            supportedVideoResolutions = QualitySelector
-              .getSupportedQualities(cameraInfos[0])
-              .map { QualitySelector.getResolution(cameraInfos[0], it)!! }
-          } else {
-            supportedVideoResolutions = emptyList()
-          }
-
-          // TODO: Optimize?
-          val maxImageOutputSize = cameraConfig.outputFormats
-            .flatMap { cameraConfig.getOutputSizes(it).toList() }
-            .maxByOrNull { it.width * it.height }!!
-
-          val formats = Arguments.createArray()
-          cameraConfig.outputFormats.forEach { formatId ->
-            val formatName = parseImageFormat(formatId)
-            cameraConfig.getOutputSizes(formatId).forEach { size ->
-              val isHighestPhotoQualitySupported = areUltimatelyEqual(size, maxImageOutputSize)
-              // Get the number of seconds that each frame will take to process
-              val secondsPerFrame = try {
-                cameraConfig.getOutputMinFrameDuration(formatId, size) / 1_000_000_000.0
-              } catch (error: Throwable) {
-                Log.e(TAG, "Minimum Frame Duration for MediaRecorder Output cannot be calculated, format \"$formatName\" is not supported.")
-                null
-              }
-
-              val frameRateRanges = Arguments.createArray()
-              if (secondsPerFrame != null && secondsPerFrame > 0) {
-                val fps = (1.0 / secondsPerFrame).toInt()
-                val frameRateRange = Arguments.createMap()
-                frameRateRange.putInt("minFrameRate", 1)
-                frameRateRange.putInt("maxFrameRate", fps)
-                frameRateRanges.pushMap(frameRateRange)
-              }
-              fpsRanges.forEach { range ->
-                val frameRateRange = Arguments.createMap()
-                frameRateRange.putInt("minFrameRate", range.lower)
-                frameRateRange.putInt("maxFrameRate", range.upper)
-                frameRateRanges.pushMap(frameRateRange)
-              }
-
-              val colorSpaces = Arguments.createArray()
-              colorSpaces.pushString(formatName)
-
-              val videoStabilizationModes = Arguments.createArray()
-              videoStabilizationModes.pushString("off")
-              if (digitalStabilizationModes != null) {
-                if (digitalStabilizationModes.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON)) {
-                  videoStabilizationModes.pushString("auto")
-                  videoStabilizationModes.pushString("standard")
-                }
-              }
-              if (opticalStabilizationModes != null) {
-                if (opticalStabilizationModes.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON)) {
-                  videoStabilizationModes.pushString("cinematic")
-                }
-              }
-
-              // TODO: Get the pixel format programatically rather than assuming a default of 420v
-              val pixelFormat = "420v"
-
-              val format = Arguments.createMap()
-              format.putDouble("photoHeight", size.height.toDouble())
-              format.putDouble("photoWidth", size.width.toDouble())
-              // since supportedVideoResolutions is sorted from highest resolution to lowest,
-              // videoResolution will be the highest supported video resolution lower than or equal to photo resolution
-              // TODO: Somehow integrate with CamcorderProfileProxy?
-              val videoResolution = supportedVideoResolutions.find { it.width <= size.width && it.height <= size.height }
-              format.putDouble("videoHeight", videoResolution?.height?.toDouble())
-              format.putDouble("videoWidth", videoResolution?.width?.toDouble())
-              format.putBoolean("isHighestPhotoQualitySupported", isHighestPhotoQualitySupported)
-              format.putInt("maxISO", isoRange?.upper)
-              format.putInt("minISO", isoRange?.lower)
-              format.putDouble("fieldOfView", fieldOfView) // TODO: Revisit getAvailableCameraDevices (is fieldOfView accurate?)
-              format.putDouble("maxZoom", (zoomRange?.upper ?: maxScalerZoom).toDouble())
-              format.putArray("colorSpaces", colorSpaces)
-              format.putBoolean("supportsVideoHDR", false) // TODO: supportsVideoHDR
-              format.putBoolean("supportsPhotoHDR", supportsHdr)
-              format.putArray("frameRateRanges", frameRateRanges)
-              format.putString("autoFocusSystem", "none") // TODO: Revisit getAvailableCameraDevices (autoFocusSystem) (CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES or CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)
-              format.putArray("videoStabilizationModes", videoStabilizationModes)
-              format.putString("pixelFormat", pixelFormat)
-              formats.pushMap(format)
-            }
-          }
-
-          map.putArray("formats", formats)
-          cameraDevices.pushMap(map)
+        val manager = reactApplicationContext.getSystemService(Context.CAMERA_SERVICE) as CameraManager
+
+        val devices = Arguments.createArray()
+        manager.cameraIdList.forEach { cameraId ->
+          val device = CameraDevice(manager, extensionsManager, cameraId)
+          devices.pushMap(device.toMap())
         }
-
-        val difference = System.currentTimeMillis() - startTime
-        Log.w(TAG, "CameraViewModule::getAvailableCameraDevices took: $difference ms")
-        return@withPromise cameraDevices
+        promise.resolve(devices)
       }
     }
   }

CameraError.kt

@@ -106,7 +106,6 @@ class FileSizeLimitReachedError(cause: Throwable?) : CameraError("capture", "fil
 class NoRecordingInProgressError : CameraError("capture", "no-recording-in-progress", "No active recording in progress!")
-class CameraManagerUnavailableError : CameraError("system", "no-camera-manager", "The Camera manager instance was unavailable for the current Application!")
 class ViewNotFoundError(viewId: Int) : CameraError("system", "view-not-found", "The given view (ID $viewId) was not found in the view manager.")
 class UnknownCameraError(cause: Throwable?) : CameraError("unknown", "unknown", cause?.message ?: "An unknown camera error occured.", cause)

parsers/VideoStabilizationMode+String.kt (new file)

@@ -0,0 +1,12 @@
package com.mrousavy.camera.parsers

import android.hardware.camera2.CameraMetadata.*

fun parseVideoStabilizationMode(stabiliazionMode: Int): String {
  return when (stabiliazionMode) {
    CONTROL_VIDEO_STABILIZATION_MODE_OFF -> "off"
    CONTROL_VIDEO_STABILIZATION_MODE_ON -> "standard"
    CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION -> "cinematic"
    else -> "off"
  }
}
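CameraDevice.createStabilizationModes() below feeds Camera2 stabilization-mode constants through this parser. As a minimal standalone sketch (the mode array here is a hypothetical example of what CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES might report, not a value taken from this commit):

val modes = intArrayOf(CONTROL_VIDEO_STABILIZATION_MODE_OFF, CONTROL_VIDEO_STABILIZATION_MODE_ON)
val names = modes.map { parseVideoStabilizationMode(it) } // ["off", "standard"]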

utils/CameraDevice.kt (new file)

@@ -0,0 +1,219 @@
package com.mrousavy.camera.utils
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraExtensionCharacteristics
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CameraMetadata
import android.hardware.camera2.params.DynamicRangeProfiles
import android.os.Build
import android.util.Range
import android.util.Size
import androidx.camera.core.CameraSelector
import androidx.camera.extensions.ExtensionMode
import androidx.camera.extensions.ExtensionsManager
import com.facebook.react.bridge.Arguments
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import com.mrousavy.camera.parsers.bigger
import com.mrousavy.camera.parsers.parseImageFormat
import com.mrousavy.camera.parsers.parseLensFacing
import com.mrousavy.camera.parsers.parseVideoStabilizationMode
import kotlin.math.PI
import kotlin.math.atan

class CameraDevice(private val cameraManager: CameraManager, extensionsManager: ExtensionsManager, private val cameraId: String) {
  private val cameraSelector = CameraSelector.Builder().byID(cameraId).build()
  private val characteristics = cameraManager.getCameraCharacteristics(cameraId)
  private val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) ?: CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
  private val capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES) ?: IntArray(0)
  private val extensions = getSupportedExtensions()

  // device characteristics
  private val isMultiCam = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)
  private val supportsDepthCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)
  private val supportsRawCapture = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)
  private val supportsLowLightBoost = extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.NIGHT) || extensions.contains(CameraExtensionCharacteristics.EXTENSION_NIGHT)
  private val lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING)!!
  private val hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE) ?: false
  private val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS) ?: FloatArray(0)
  private val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
  private val name = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) characteristics.get(CameraCharacteristics.INFO_VERSION)
                      else null) ?: "${parseLensFacing(lensFacing)} (${cameraId})"

  // "formats" (all possible configurations for this device)
  private val zoomRange = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) characteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE)
                           else null) ?: Range(1f, characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) ?: 1f)
  private val minZoom = zoomRange.lower.toDouble()
  private val maxZoom = zoomRange.upper.toDouble()
  private val cameraConfig = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
  private val isoRange = characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE) ?: Range(0, 0)
  private val digitalStabilizationModes = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ?: IntArray(0)
  private val opticalStabilizationModes = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION) ?: IntArray(0)
  private val supportsPhotoHdr = extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.HDR) || extensions.contains(CameraExtensionCharacteristics.EXTENSION_HDR)
  private val supportsVideoHdr = getHasVideoHdr()

  // see https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture
  private val supportsParallelVideoProcessing = hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY && hardwareLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED

  // get extensions (HDR, Night Mode, ..)
  private fun getSupportedExtensions(): List<Int> {
    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
      val extensions = cameraManager.getCameraExtensionCharacteristics(cameraId)
      extensions.supportedExtensions
    } else {
      emptyList()
    }
  }

  private fun getHasVideoHdr(): Boolean {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
      if (capabilities.contains(CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT)) {
        val availableProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
          ?: DynamicRangeProfiles(LongArray(0))
        return availableProfiles.supportedProfiles.contains(DynamicRangeProfiles.HLG10)
      }
    }
    return false
  }

  private fun createFrameRateRanges(ranges: Array<Range<Int>>): ReadableArray {
    val array = Arguments.createArray()
    ranges.forEach { range ->
      val map = Arguments.createMap()
      map.putInt("minFrameRate", range.lower)
      map.putInt("maxFrameRate", range.upper)
      array.pushMap(map)
    }
    return array
  }

  private fun createFrameRateRanges(minFps: Int, maxFps: Int): ReadableArray {
    return createFrameRateRanges(arrayOf(Range(minFps, maxFps)))
  }

  private fun createColorSpaces(): ReadableArray {
    val array = Arguments.createArray()
    array.pushString("yuv")
    return array
  }

  private fun createStabilizationModes(): ReadableArray {
    val array = Arguments.createArray()
    val videoStabilizationModes = digitalStabilizationModes.plus(opticalStabilizationModes)
    videoStabilizationModes.forEach { videoStabilizationMode ->
      array.pushString(parseVideoStabilizationMode(videoStabilizationMode))
    }
    return array
  }

  // 35mm is 135 film format, a standard in which focal lengths are usually measured
  private val size35mm = Size(36, 24)

  private fun getDeviceTypes(): ReadableArray {
    // TODO: Check if getDeviceType() works correctly, even for logical multi-cameras
    // To get valid focal length standards we have to upscale to the 35mm measurement (film standard)
    val cropFactor = size35mm.bigger / sensorSize.bigger

    val deviceTypes = Arguments.createArray()

    // https://en.wikipedia.org/wiki/Telephoto_lens
    val containsTelephoto = focalLengths.any { l -> (l * cropFactor) > 35 } // TODO: Telephoto lenses are > 85mm, but we don't have anything between that range..
    // val containsNormalLens = focalLengths.any { l -> (l * cropFactor) > 35 && (l * cropFactor) <= 55 }
    // https://en.wikipedia.org/wiki/Wide-angle_lens
    val containsWideAngle = focalLengths.any { l -> (l * cropFactor) >= 24 && (l * cropFactor) <= 35 }
    // https://en.wikipedia.org/wiki/Ultra_wide_angle_lens
    val containsUltraWideAngle = focalLengths.any { l -> (l * cropFactor) < 24 }
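    // e.g. for a hypothetical 7.2 x 5.4 mm sensor, cropFactor = 36 / 7.2 = 5.0, so a 4.3 mm lens
    // scales to ~21.5 mm equivalent (ultra-wide), 5.6 mm to ~28 mm (wide-angle), and 8.0 mm to
    // ~40 mm, which this heuristic classifies as telephoto.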
    if (containsTelephoto)
      deviceTypes.pushString("telephoto-camera")
    if (containsWideAngle)
      deviceTypes.pushString("wide-angle-camera")
    if (containsUltraWideAngle)
      deviceTypes.pushString("ultra-wide-angle-camera")

    return deviceTypes
  }
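  // Horizontal field of view, computed from focalLengths[0] and the larger sensor dimension.
  // e.g. for a hypothetical 7.2 mm sensor width and a 5.6 mm focal length:
  //   2 * atan(7.2 / (5.6 * 2)) * (180 / PI) ≈ 65°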
  private fun getFieldOfView(): Double {
    return 2 * atan(sensorSize.bigger / (focalLengths[0] * 2)) * (180 / PI)
  }

  private fun buildFormatMap(outputSize: Size, outputFormat: Int, fpsRanges: ReadableArray): ReadableMap {
    val highResSizes = (if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) cameraConfig.getHighResolutionOutputSizes(outputFormat) else null) ?: emptyArray()

    val map = Arguments.createMap()
    map.putInt("photoHeight", outputSize.height)
    map.putInt("photoWidth", outputSize.width)
    map.putInt("videoHeight", outputSize.height)
    map.putInt("videoWidth", outputSize.width)
    map.putBoolean("isHighestPhotoQualitySupported", highResSizes.contains(outputSize))
    map.putInt("maxISO", isoRange.upper)
    map.putInt("minISO", isoRange.lower)
    map.putDouble("fieldOfView", getFieldOfView())
    map.putArray("colorSpaces", createColorSpaces())
    map.putBoolean("supportsVideoHDR", supportsVideoHdr)
    map.putBoolean("supportsPhotoHDR", supportsPhotoHdr)
    map.putString("autoFocusSystem", "contrast-detection") // TODO: Is this wrong?
    map.putArray("videoStabilizationModes", createStabilizationModes())
    map.putString("pixelFormat", parseImageFormat(outputFormat))
    map.putArray("frameRateRanges", fpsRanges)
    return map
  }

  private fun getFormats(): ReadableArray {
    val array = Arguments.createArray()

    val highSpeedSizes = cameraConfig.highSpeedVideoSizes
    val outputFormats = cameraConfig.outputFormats

    outputFormats.forEach { outputFormat ->
      // Normal Video/Photo Sizes
      val outputSizes = cameraConfig.getOutputSizes(outputFormat)
      outputSizes.forEach { outputSize ->
        val frameDuration = cameraConfig.getOutputMinFrameDuration(outputFormat, outputSize)
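        // getOutputMinFrameDuration() is in nanoseconds, e.g. 33,333,333 ns per frame -> 1 / 0.0333 s ≈ 30 fps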
        val maxFps = (1.0 / (frameDuration.toDouble() / 1000000000)).toInt()
        val minFps = 1
        val map = buildFormatMap(outputSize, outputFormat, createFrameRateRanges(minFps, maxFps))
        array.pushMap(map)
      }

      // High-Speed (Slow Motion) Video Sizes
      highSpeedSizes.forEach { outputSize ->
        val highSpeedRanges = cameraConfig.getHighSpeedVideoFpsRangesFor(outputSize)
        val map = buildFormatMap(outputSize, outputFormat, createFrameRateRanges(highSpeedRanges))
        array.pushMap(map)
      }
    }

    return array
  }
  // convert to React Native JS object (map)
  fun toMap(): ReadableMap {
    val map = Arguments.createMap()
    map.putString("id", cameraId)
    map.putArray("devices", getDeviceTypes())
    map.putString("position", parseLensFacing(lensFacing))
    map.putString("name", name)
    map.putBoolean("hasFlash", hasFlash)
    map.putBoolean("hasTorch", hasFlash)
    map.putBoolean("isMultiCam", isMultiCam)
    map.putBoolean("supportsParallelVideoProcessing", supportsParallelVideoProcessing)
    map.putBoolean("supportsRawCapture", supportsRawCapture)
    map.putBoolean("supportsDepthCapture", supportsDepthCapture)
    map.putBoolean("supportsLowLightBoost", supportsLowLightBoost)
    map.putBoolean("supportsFocus", true) // I believe every device here supports focussing
    map.putDouble("minZoom", minZoom)
    map.putDouble("maxZoom", maxZoom)
    map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android
    map.putArray("formats", getFormats())
    return map
  }
}

CameraError.swift (iOS)

@@ -252,23 +252,6 @@ enum CaptureError {
   }
 }
 
-// MARK: - SystemError
-
-enum SystemError: String {
-  case noManager = "no-camera-manager"
-
-  var code: String {
-    return rawValue
-  }
-
-  var message: String {
-    switch self {
-    case .noManager:
-      return "No Camera Manager was found."
-    }
-  }
-}
-
 // MARK: - CameraError
 
 enum CameraError: Error {
@@ -295,8 +278,6 @@ enum CameraError: Error {
       return "session/\(id.code)"
     case let .capture(id: id):
       return "capture/\(id.code)"
-    case let .system(id: id):
-      return "system/\(id.code)"
     case .unknown:
       return "unknown/unknown"
     }
@@ -316,8 +297,6 @@ enum CameraError: Error {
      return id.message
    case let .capture(id: id):
      return id.message
-    case let .system(id: id):
-      return id.message
    case let .unknown(message: message):
      return message ?? "An unexpected error occured."
    }