chore: Move everything into package/ (#1745)

* Move everything into package

* Remove .DS_Store

* Move scripts and eslintrc to package

* Create CODE_OF_CONDUCT.md

* fix some links

* Update all links (I think)

* Update generated docs

* Update notice-yarn-changes.yml

* Update validate-android.yml

* Update validate-cpp.yml

* Delete notice-yarn-changes.yml

* Update validate-cpp.yml

* Update validate-cpp.yml

* Update validate-js.yml

* Update validate-cpp.yml

* Update validate-cpp.yml

* wrong c++ style

* Revert "wrong c++ style"

This reverts commit 55a3575589c6f13f8b05134d83384f55e0601ab2.
Marc Rousavy
2023-09-01 18:15:28 +02:00
committed by GitHub
parent 2a5c33323b
commit 036856aed5
347 changed files with 3088 additions and 154 deletions

View File

@@ -0,0 +1,53 @@
package com.mrousavy.camera.extensions

import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CaptureFailure
import android.hardware.camera2.CaptureRequest
import android.hardware.camera2.TotalCaptureResult
import android.media.MediaActionSound
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CaptureAbortedError
import com.mrousavy.camera.UnknownCaptureError
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlin.coroutines.suspendCoroutine

suspend fun CameraCaptureSession.capture(captureRequest: CaptureRequest, enableShutterSound: Boolean): TotalCaptureResult {
  return suspendCoroutine { continuation ->
    this.capture(captureRequest, object : CameraCaptureSession.CaptureCallback() {
      override fun onCaptureCompleted(
        session: CameraCaptureSession,
        request: CaptureRequest,
        result: TotalCaptureResult
      ) {
        super.onCaptureCompleted(session, request, result)
        continuation.resume(result)
      }

      override fun onCaptureStarted(session: CameraCaptureSession, request: CaptureRequest, timestamp: Long, frameNumber: Long) {
        super.onCaptureStarted(session, request, timestamp, frameNumber)
        if (enableShutterSound) {
          val mediaActionSound = MediaActionSound()
          mediaActionSound.play(MediaActionSound.SHUTTER_CLICK)
        }
      }

      override fun onCaptureFailed(
        session: CameraCaptureSession,
        request: CaptureRequest,
        failure: CaptureFailure
      ) {
        super.onCaptureFailed(session, request, failure)
        val wasImageCaptured = failure.wasImageCaptured()
        val error = when (failure.reason) {
          CaptureFailure.REASON_ERROR -> UnknownCaptureError(wasImageCaptured)
          CaptureFailure.REASON_FLUSHED -> CaptureAbortedError(wasImageCaptured)
          else -> UnknownCaptureError(wasImageCaptured)
        }
        continuation.resumeWithException(error)
      }
    }, CameraQueues.cameraQueue.handler)
  }
}
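
For context, a minimal usage sketch of this suspending capture extension. The takePicture wrapper and its parameters are illustrative only and not part of this commit; it assumes a session and request that were already configured elsewhere in the camera pipeline.

// Hypothetical call site: `session` and `request` are assumed to exist already.
suspend fun takePicture(session: CameraCaptureSession, request: CaptureRequest): TotalCaptureResult {
  // Suspends until onCaptureCompleted fires, or throws CaptureAbortedError / UnknownCaptureError on failure.
  return session.capture(request, enableShutterSound = true)
}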

View File

@@ -0,0 +1,68 @@
package com.mrousavy.camera.extensions

import android.content.res.Resources
import android.hardware.camera2.CameraCharacteristics
import android.media.CamcorderProfile
import android.os.Build
import android.util.Log
import android.util.Size
import android.view.SurfaceHolder
import android.view.SurfaceView

private fun getMaximumPreviewSize(): Size {
  // See https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap
  // According to the Android Developer documentation, PREVIEW streams can have a resolution
  // of up to the phone's display's resolution, with a maximum of 1920x1080.
  val display1080p = Size(1920, 1080)
  val displaySize = Size(Resources.getSystem().displayMetrics.widthPixels, Resources.getSystem().displayMetrics.heightPixels)
  val isHighResScreen = displaySize.bigger >= display1080p.bigger || displaySize.smaller >= display1080p.smaller
  Log.i("PreviewSize", "Phone has a ${displaySize.width} x ${displaySize.height} screen.")
  return if (isHighResScreen) display1080p else displaySize
}

/**
 * Gets the maximum Preview Resolution this device is capable of streaming at. (For [SurfaceView])
 */
fun CameraCharacteristics.getPreviewSize(): Size {
  val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
  val previewSize = getMaximumPreviewSize()
  val outputSizes = config.getOutputSizes(SurfaceHolder::class.java).sortedByDescending { it.width * it.height }
  return outputSizes.first { it.bigger <= previewSize.bigger && it.smaller <= previewSize.smaller }
}

private fun getMaximumVideoSize(cameraId: String): Size? {
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
    val profiles = CamcorderProfile.getAll(cameraId, CamcorderProfile.QUALITY_HIGH)
    if (profiles != null) {
      val largestProfile = profiles.videoProfiles.filterNotNull().maxByOrNull { it.width * it.height }
      if (largestProfile != null) {
        return Size(largestProfile.width, largestProfile.height)
      }
    }
  }

  val cameraIdInt = cameraId.toIntOrNull()
  if (cameraIdInt != null) {
    val profile = CamcorderProfile.get(cameraIdInt, CamcorderProfile.QUALITY_HIGH)
    return Size(profile.videoFrameWidth, profile.videoFrameHeight)
  }

  return null
}

fun CameraCharacteristics.getVideoSizes(cameraId: String, format: Int): List<Size> {
  val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
  val sizes = config.getOutputSizes(format) ?: emptyArray()
  val maxVideoSize = getMaximumVideoSize(cameraId)
  if (maxVideoSize != null) {
    return sizes.filter { it.bigger <= maxVideoSize.bigger }
  }
  return sizes.toList()
}

fun CameraCharacteristics.getPhotoSizes(format: Int): List<Size> {
  val config = this.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
  val sizes = config.getOutputSizes(format) ?: emptyArray()
  val highResSizes = config.getHighResolutionOutputSizes(format) ?: emptyArray()
  return sizes.plus(highResSizes).toList()
}
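
A small sketch of how these size helpers might be queried. The logSupportedSizes function, the CameraManager/cameraId values and the ImageFormat constants are assumptions for illustration, not part of this commit.

// Hypothetical call site: cameraManager and cameraId come from the surrounding camera code.
fun logSupportedSizes(cameraManager: CameraManager, cameraId: String) {
  val characteristics = cameraManager.getCameraCharacteristics(cameraId)
  Log.i("Sizes", "Preview: ${characteristics.getPreviewSize()}")
  Log.i("Sizes", "Video: ${characteristics.getVideoSizes(cameraId, ImageFormat.YUV_420_888)}")
  Log.i("Sizes", "Photo: ${characteristics.getPhotoSizes(ImageFormat.JPEG)}")
}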

View File

@@ -0,0 +1,95 @@
package com.mrousavy.camera.extensions

import android.hardware.camera2.CameraCaptureSession
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.hardware.camera2.params.OutputConfiguration
import android.hardware.camera2.params.SessionConfiguration
import android.os.Build
import android.util.Log
import androidx.annotation.RequiresApi
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.CameraSessionCannotBeConfiguredError
import com.mrousavy.camera.core.outputs.CameraOutputs
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException

enum class SessionType {
  REGULAR,
  HIGH_SPEED;

  @RequiresApi(Build.VERSION_CODES.P)
  fun toSessionType(): Int {
    return when (this) {
      REGULAR -> SessionConfiguration.SESSION_REGULAR
      HIGH_SPEED -> SessionConfiguration.SESSION_HIGH_SPEED
    }
  }
}

private const val TAG = "CreateCaptureSession"
private var sessionId = 1000

suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
                                              sessionType: SessionType,
                                              outputs: CameraOutputs,
                                              onClosed: (session: CameraCaptureSession) -> Unit,
                                              queue: CameraQueues.CameraQueue): CameraCaptureSession {
  return suspendCancellableCoroutine { continuation ->
    val characteristics = cameraManager.getCameraCharacteristics(id)
    val hardwareLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
    val sessionId = sessionId++
    Log.i(TAG, "Camera $id: Creating Capture Session #$sessionId... " +
      "Hardware Level: $hardwareLevel | Outputs: $outputs")

    val callback = object : CameraCaptureSession.StateCallback() {
      override fun onConfigured(session: CameraCaptureSession) {
        Log.i(TAG, "Camera $id: Capture Session #$sessionId configured!")
        continuation.resume(session)
      }

      override fun onConfigureFailed(session: CameraCaptureSession) {
        Log.e(TAG, "Camera $id: Failed to configure Capture Session #$sessionId!")
        continuation.resumeWithException(CameraSessionCannotBeConfiguredError(id, outputs))
      }

      override fun onClosed(session: CameraCaptureSession) {
        super.onClosed(session)
        Log.i(TAG, "Camera $id: Capture Session #$sessionId closed!")
        onClosed(session)
      }
    }

    val outputConfigurations = arrayListOf<OutputConfiguration>()
    outputs.previewOutput?.let { output ->
      outputConfigurations.add(output.toOutputConfiguration(characteristics))
    }
    outputs.photoOutput?.let { output ->
      outputConfigurations.add(output.toOutputConfiguration(characteristics))
    }
    outputs.videoOutput?.let { output ->
      outputConfigurations.add(output.toOutputConfiguration(characteristics))
    }

    if (outputs.enableHdr == true && Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
      val supportedProfiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
      val hdrProfile = supportedProfiles?.bestProfile ?: supportedProfiles?.supportedProfiles?.firstOrNull()
      if (hdrProfile != null) {
        Log.i(TAG, "Camera $id: Using HDR Profile $hdrProfile...")
        outputConfigurations.forEach { it.dynamicRangeProfile = hdrProfile }
      } else {
        Log.w(TAG, "Camera $id: HDR was enabled, but the device does not support any matching HDR profile!")
      }
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
      Log.i(TAG, "Using new API (>=28)")
      val config = SessionConfiguration(sessionType.toSessionType(), outputConfigurations, queue.executor, callback)
      this.createCaptureSession(config)
    } else {
      Log.i(TAG, "Using legacy API (<28)")
      this.createCaptureSessionByOutputConfigurations(outputConfigurations, callback, queue.handler)
    }
  }
}
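
A hedged sketch of a call site for this suspending helper. The configureSession function is hypothetical; it assumes a CameraOutputs instance built elsewhere and the CameraQueues.cameraQueue used by the other extensions in this commit.

// Hypothetical call site inside a coroutine; `device` and `outputs` are assumed to exist already.
suspend fun configureSession(cameraManager: CameraManager, device: CameraDevice, outputs: CameraOutputs): CameraCaptureSession {
  return device.createCaptureSession(
    cameraManager,
    SessionType.REGULAR,
    outputs,
    onClosed = { session -> Log.i("CameraSession", "Session closed: $session") },
    queue = CameraQueues.cameraQueue
  )
}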

View File

@@ -0,0 +1,97 @@
package com.mrousavy.camera.extensions

import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.hardware.camera2.CaptureRequest
import android.os.Build
import android.view.Surface
import com.mrousavy.camera.parsers.Flash
import com.mrousavy.camera.parsers.Orientation
import com.mrousavy.camera.parsers.QualityPrioritization

private fun supportsSnapshotCapture(cameraCharacteristics: CameraCharacteristics): Boolean {
  // As per the CameraDevice.TEMPLATE_VIDEO_SNAPSHOT documentation:
  val hardwareLevel = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)!!
  if (hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) return false

  val capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)!!
  val hasDepth = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)
  val isBackwardsCompatible = capabilities.contains(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE)
  if (hasDepth && !isBackwardsCompatible) return false

  return true
}

fun CameraDevice.createPhotoCaptureRequest(cameraManager: CameraManager,
                                           surface: Surface,
                                           zoom: Float,
                                           qualityPrioritization: QualityPrioritization,
                                           flashMode: Flash,
                                           enableRedEyeReduction: Boolean,
                                           enableAutoStabilization: Boolean,
                                           orientation: Orientation): CaptureRequest {
  val cameraCharacteristics = cameraManager.getCameraCharacteristics(this.id)

  val template = if (qualityPrioritization == QualityPrioritization.SPEED && supportsSnapshotCapture(cameraCharacteristics)) {
    CameraDevice.TEMPLATE_VIDEO_SNAPSHOT
  } else {
    CameraDevice.TEMPLATE_STILL_CAPTURE
  }
  val captureRequest = this.createCaptureRequest(template)

  // TODO: Maybe we can even expose that prop directly?
  val jpegQuality = when (qualityPrioritization) {
    QualityPrioritization.SPEED -> 85
    QualityPrioritization.BALANCED -> 92
    QualityPrioritization.QUALITY -> 100
  }
  captureRequest.set(CaptureRequest.JPEG_QUALITY, jpegQuality.toByte())

  captureRequest.set(CaptureRequest.JPEG_ORIENTATION, orientation.toDegrees())

  // Set the Flash Mode
  when (flashMode) {
    Flash.OFF -> {
      captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON)
    }
    Flash.ON -> {
      captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH)
    }
    Flash.AUTO -> {
      if (enableRedEyeReduction) {
        captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE)
      } else {
        captureRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH)
      }
    }
  }

  if (enableAutoStabilization) {
    // Enable optical or digital image stabilization
    val digitalStabilization = cameraCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)
    val hasDigitalStabilization = digitalStabilization?.contains(CameraCharacteristics.CONTROL_VIDEO_STABILIZATION_MODE_ON) ?: false

    val opticalStabilization = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)
    val hasOpticalStabilization = opticalStabilization?.contains(CameraCharacteristics.LENS_OPTICAL_STABILIZATION_MODE_ON) ?: false

    if (hasOpticalStabilization) {
      captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF)
      captureRequest.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON)
    } else if (hasDigitalStabilization) {
      captureRequest.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON)
    } else {
      // no stabilization is supported. ignore it
    }
  }

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
    captureRequest.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoom)
  } else {
    val size = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)!!
    captureRequest.set(CaptureRequest.SCALER_CROP_REGION, size.zoomed(zoom))
  }

  captureRequest.addTarget(surface)
  return captureRequest.build()
}
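
A hedged sketch of how the request builder might be invoked. The buildPhotoRequest function is hypothetical, and Orientation.PORTRAIT is assumed to exist in the parsers package; Flash.AUTO and QualityPrioritization.BALANCED are confirmed by the when-branches above.

// Hypothetical call site: `device`, `cameraManager` and `photoSurface` are assumed to exist already.
fun buildPhotoRequest(device: CameraDevice, cameraManager: CameraManager, photoSurface: Surface): CaptureRequest {
  return device.createPhotoCaptureRequest(
    cameraManager,
    photoSurface,
    zoom = 1f,
    qualityPrioritization = QualityPrioritization.BALANCED,
    flashMode = Flash.AUTO,
    enableRedEyeReduction = false,
    enableAutoStabilization = true,
    orientation = Orientation.PORTRAIT // assumed enum value
  )
}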

View File

@@ -0,0 +1,68 @@
package com.mrousavy.camera.extensions

import android.annotation.SuppressLint
import android.hardware.camera2.CameraDevice
import android.hardware.camera2.CameraManager
import android.os.Build
import android.util.Log
import com.mrousavy.camera.CameraCannotBeOpenedError
import com.mrousavy.camera.CameraDisconnectedError
import com.mrousavy.camera.CameraQueues
import com.mrousavy.camera.parsers.CameraDeviceError
import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException

private const val TAG = "CameraManager"

@SuppressLint("MissingPermission")
suspend fun CameraManager.openCamera(cameraId: String,
                                     onDisconnected: (camera: CameraDevice, reason: Throwable) -> Unit,
                                     queue: CameraQueues.CameraQueue): CameraDevice {
  return suspendCancellableCoroutine { continuation ->
    Log.i(TAG, "Camera $cameraId: Opening...")

    val callback = object : CameraDevice.StateCallback() {
      override fun onOpened(camera: CameraDevice) {
        Log.i(TAG, "Camera $cameraId: Opened!")
        continuation.resume(camera)
      }

      override fun onDisconnected(camera: CameraDevice) {
        Log.i(TAG, "Camera $cameraId: Disconnected!")
        if (continuation.isActive) {
          continuation.resumeWithException(CameraCannotBeOpenedError(cameraId, CameraDeviceError.DISCONNECTED))
        } else {
          onDisconnected(camera, CameraDisconnectedError(cameraId, CameraDeviceError.DISCONNECTED))
        }
        camera.tryClose()
      }

      override fun onError(camera: CameraDevice, errorCode: Int) {
        Log.e(TAG, "Camera $cameraId: Error! $errorCode")
        val error = CameraDeviceError.fromCameraDeviceError(errorCode)
        if (continuation.isActive) {
          continuation.resumeWithException(CameraCannotBeOpenedError(cameraId, error))
        } else {
          onDisconnected(camera, CameraDisconnectedError(cameraId, error))
        }
        camera.tryClose()
      }
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
      this.openCamera(cameraId, queue.executor, callback)
    } else {
      this.openCamera(cameraId, callback, queue.handler)
    }
  }
}

fun CameraDevice.tryClose() {
  try {
    Log.i(TAG, "Camera $id: Closing...")
    this.close()
  } catch (e: Throwable) {
    Log.e(TAG, "Camera $id: Failed to close!", e)
  }
}
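
A minimal usage sketch for the suspending openCamera extension; openFirstCamera is hypothetical and simply picks the first ID reported by the system.

// Hypothetical call site inside a coroutine.
suspend fun openFirstCamera(cameraManager: CameraManager): CameraDevice {
  val cameraId = cameraManager.cameraIdList.first()
  return cameraManager.openCamera(
    cameraId,
    onDisconnected = { camera, reason -> Log.w("Camera", "Camera ${camera.id} disconnected: $reason") },
    queue = CameraQueues.cameraQueue
  )
}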

View File

@@ -0,0 +1,27 @@
package com.mrousavy.camera.extensions

import android.hardware.camera2.params.DynamicRangeProfiles
import android.os.Build
import androidx.annotation.RequiresApi

private fun Set<Long>.firstMatch(filter: Set<Long>): Long? {
  filter.forEach { f ->
    if (this.contains(f)) {
      return f
    }
  }
  return null
}

@RequiresApi(Build.VERSION_CODES.TIRAMISU)
private val bestProfiles = setOf(
  DynamicRangeProfiles.HDR10_PLUS,
  DynamicRangeProfiles.HDR10,
  DynamicRangeProfiles.HLG10
)

val DynamicRangeProfiles.bestProfile: Long?
  @RequiresApi(Build.VERSION_CODES.TIRAMISU)
  get() {
    return supportedProfiles.firstMatch(bestProfiles)
  }
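
A small sketch of how bestProfile is meant to be read on API 33+, mirroring the HDR branch in createCaptureSession above; pickDynamicRangeProfile and the SDR fallback are illustrative assumptions.

// Hypothetical helper for API 33+: prefer HDR10+/HDR10/HLG10, fall back to SDR.
@RequiresApi(Build.VERSION_CODES.TIRAMISU)
fun pickDynamicRangeProfile(characteristics: CameraCharacteristics): Long {
  val profiles = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES)
  return profiles?.bestProfile ?: DynamicRangeProfiles.STANDARD
}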

View File

@@ -0,0 +1,21 @@
package com.mrousavy.camera.extensions

import android.os.Handler
import java.util.concurrent.Semaphore

/**
 * Posts a Message to this Handler and blocks the calling Thread until the Handler finished executing the given job.
 */
fun Handler.postAndWait(job: () -> Unit) {
  val semaphore = Semaphore(0)

  this.post {
    try {
      job()
    } finally {
      semaphore.release()
    }
  }

  semaphore.acquire()
}
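
A minimal usage sketch; closeSessionBlocking is hypothetical. Because postAndWait blocks the caller, it must not be called from the Handler's own thread: the posted job could never run and acquire() would wait forever.

// Hypothetical usage: run close() on the camera thread and wait for it to finish.
fun closeSessionBlocking(handler: Handler, session: CameraCaptureSession) {
  handler.postAndWait {
    session.close()
  }
}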

View File

@@ -0,0 +1,5 @@
package com.mrousavy.camera.extensions

fun <T> List<T>.containsAny(elements: List<T>): Boolean {
  return elements.any { element -> this.contains(element) }
}
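
A trivial illustration of the helper, with made-up lists:

// true, because both lists share the element 2.
val overlaps = listOf(1, 2, 3).containsAny(listOf(2, 9))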

View File

@@ -0,0 +1,14 @@
package com.mrousavy.camera.extensions

import android.graphics.Rect

fun Rect.zoomed(zoomFactor: Float): Rect {
  val height = bottom - top
  val width = right - left
  val left = this.left + (width - width / zoomFactor) / 2
  val top = this.top + (height - height / zoomFactor) / 2
  val right = this.right - (width - width / zoomFactor) / 2
  val bottom = this.bottom - (height - height / zoomFactor) / 2
  return Rect(left.toInt(), top.toInt(), right.toInt(), bottom.toInt())
}
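
A small illustration with a made-up sensor region: cropping a 4000x3000 Rect for 2x zoom keeps the centered half-size region.

// Hypothetical example: yields the centered 2000x1500 region Rect(1000, 750, 3000, 2250).
val sensorRegion = Rect(0, 0, 4000, 3000)
val zoomedRegion = sensorRegion.zoomed(2f)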

View File

@@ -0,0 +1,44 @@
package com.mrousavy.camera.extensions

import android.util.Size
import android.util.SizeF
import android.view.Surface
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min

fun List<Size>.closestToOrMax(size: Size?): Size {
  return if (size != null) {
    this.minBy { abs(it.width - size.width) + abs(it.height - size.height) }
  } else {
    this.maxBy { it.width * it.height }
  }
}

/**
 * Rotate by a given Surface Rotation
 */
fun Size.rotated(surfaceRotation: Int): Size {
  return when (surfaceRotation) {
    Surface.ROTATION_0 -> Size(width, height)
    Surface.ROTATION_90 -> Size(height, width)
    Surface.ROTATION_180 -> Size(width, height)
    Surface.ROTATION_270 -> Size(height, width)
    else -> Size(width, height)
  }
}

val Size.bigger: Int
  get() = max(width, height)
val Size.smaller: Int
  get() = min(width, height)

val SizeF.bigger: Float
  get() = max(this.width, this.height)
val SizeF.smaller: Float
  get() = min(this.width, this.height)

operator fun Size.compareTo(other: Size): Int {
  return (this.width * this.height).compareTo(other.width * other.height)
}
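
A short illustration of the size helpers with made-up values:

// Picks the size closest to the 1920x1080 target, then swaps width/height for a 90-degree rotation.
val available = listOf(Size(640, 480), Size(1280, 720), Size(1920, 1080), Size(4032, 3024))
val chosen = available.closestToOrMax(Size(1920, 1080))   // -> 1920x1080
val portrait = chosen.rotated(Surface.ROTATION_90)        // -> 1080x1920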

View File

@@ -0,0 +1,20 @@
package com.mrousavy.camera.extensions

import android.view.View
import android.view.ViewGroup

// React does not trigger onLayout events for dynamically added views (`addView`).
// This fixes that.
// https://github.com/facebook/react-native/issues/17968#issuecomment-633308615
fun ViewGroup.installHierarchyFitter() {
  setOnHierarchyChangeListener(object : ViewGroup.OnHierarchyChangeListener {
    override fun onChildViewRemoved(parent: View?, child: View?) = Unit
    override fun onChildViewAdded(parent: View?, child: View?) {
      parent?.measure(
        View.MeasureSpec.makeMeasureSpec(measuredWidth, View.MeasureSpec.EXACTLY),
        View.MeasureSpec.makeMeasureSpec(measuredHeight, View.MeasureSpec.EXACTLY)
      )
      parent?.layout(0, 0, parent.measuredWidth, parent.measuredHeight)
    }
  })
}
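
A sketch of where such a helper is typically installed; PreviewContainer is hypothetical and assumes android.content.Context and android.widget.FrameLayout imports.

// Hypothetical React Native host view: install the fitter once so dynamically
// added children (e.g. a preview SurfaceView) get measured and laid out.
class PreviewContainer(context: Context) : FrameLayout(context) {
  init {
    installHierarchyFitter()
  }
}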

View File

@@ -0,0 +1,17 @@
package com.mrousavy.camera.extensions

import com.facebook.react.bridge.WritableMap

fun WritableMap.putInt(key: String, value: Int?) {
  if (value == null)
    this.putNull(key)
  else
    this.putInt(key, value)
}

fun WritableMap.putDouble(key: String, value: Double?) {
  if (value == null)
    this.putNull(key)
  else
    this.putDouble(key, value)
}
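
A minimal sketch of the nullable overloads in use; buildFormatMap and its keys are hypothetical, and Arguments.createMap() is the standard React Native bridge factory.

// Hypothetical usage: null values end up as explicit nulls in the resulting map.
fun buildFormatMap(iso: Int?, fieldOfView: Double?): WritableMap {
  val map = Arguments.createMap()
  map.putInt("iso", iso)                    // resolves to the nullable extension above
  map.putDouble("fieldOfView", fieldOfView)
  return map
}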