chore: Clean up Android codebase a bit (#1748)

<!--
                    ❤️ Thank you for your contribution! ❤️
              Make sure you have read the Contributing Guidelines:

https://github.com/mrousavy/react-native-vision-camera/blob/main/CONTRIBUTING.md
-->

## What

<!--
  Enter a short description on what this pull-request does.
  Examples:
    This PR adds support for the HEVC format.
    This PR fixes an "unsupported device" error on iPhone 8 and below.
    This PR fixes a typo in a CameraError.
    This PR adds support for Quadruple Cameras.
-->

## Changes

<!--
  Create a short list of logic-changes.
  Examples:
    * This PR changes the default value of X to Y.
    * This PR changes the configure() function to cache results.
-->

## Tested on

<!--
Create a short list of devices and operating systems you have tested
this change on, and verified that everything works as expected.
  Examples:
    * iPhone 11 Pro, iOS 14.3
    * Huawei P20, Android 10
-->

## Related issues

<!--
  Link related issues here.
  Examples:
    * Fixes #29
    * Closes #30
    * Resolves #5
-->
This commit is contained in:
Marc Rousavy 2023-09-02 01:04:36 +02:00 committed by GitHub
parent 272504f39d
commit f3fd3f15e3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 38 additions and 181 deletions

View File

@ -6,19 +6,19 @@ on:
- main
paths:
- '.github/workflows/build-android.yml'
- 'cpp/**'
- 'android/**'
- 'example/android/**'
- 'yarn.lock'
- 'example/yarn.lock'
- 'package/cpp/**'
- 'package/android/**'
- 'package/example/android/**'
- 'package/yarn.lock'
- 'package/example/yarn.lock'
pull_request:
paths:
- '.github/workflows/build-android.yml'
- 'cpp/**'
- 'android/**'
- 'example/android/**'
- 'yarn.lock'
- 'example/yarn.lock'
- 'package/cpp/**'
- 'package/android/**'
- 'package/example/android/**'
- 'package/yarn.lock'
- 'package/example/yarn.lock'
jobs:
build:

View File

@ -6,17 +6,17 @@ on:
- main
paths:
- '.github/workflows/build-ios.yml'
- 'cpp/**'
- 'ios/**'
- '*.podspec'
- 'example/ios/**'
- 'package/cpp/**'
- 'package/ios/**'
- 'package/*.podspec'
- 'package/example/ios/**'
pull_request:
paths:
- '.github/workflows/build-ios.yml'
- 'cpp/**'
- 'ios/**'
- '*.podspec'
- 'example/ios/**'
- 'package/cpp/**'
- 'package/ios/**'
- 'package/*.podspec'
- 'package/example/ios/**'
jobs:
build:

View File

@ -6,13 +6,13 @@ on:
- main
paths:
- '.github/workflows/validate-android.yml'
- 'android/**'
- '.editorconfig'
- 'package/android/**'
- 'package/.editorconfig'
pull_request:
paths:
- '.github/workflows/validate-android.yml'
- 'android/**'
- '.editorconfig'
- 'package/android/**'
- 'package/.editorconfig'
jobs:
lint:

View File

@ -6,11 +6,11 @@ on:
- main
paths:
- '.github/workflows/validate-ios.yml'
- 'ios/**'
- 'package/ios/**'
pull_request:
paths:
- '.github/workflows/validate-ios.yml'
- 'ios/**'
- 'package/ios/**'
jobs:
SwiftLint:

View File

@ -4,7 +4,6 @@ import android.Manifest
import android.annotation.SuppressLint
import android.content.Context
import android.content.pm.PackageManager
import android.content.res.Configuration
import android.hardware.camera2.CameraManager
import android.util.Log
import android.util.Size
@ -31,13 +30,8 @@ import kotlinx.coroutines.launch
//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
//
// CameraView
// TODO: High-speed video recordings (export in CameraViewModule::getAvailableVideoDevices(), and set in CameraView::configurePreview()) (120FPS+)
// CameraView+RecordVideo
// TODO: Better startRecording()/stopRecording() (promise + callback, wait for TurboModules/JSI)
// CameraView+TakePhoto
// TODO: takePhoto() depth data
// TODO: takePhoto() raw capture
// TODO: takePhoto() return with jsi::Value Image reference for faster capture
@ -103,11 +97,6 @@ class CameraView(context: Context) : FrameLayout(context) {
cameraSession = CameraSession(context, cameraManager, { invokeOnInitialized() }, { error -> invokeOnError(error) })
}
override fun onConfigurationChanged(newConfig: Configuration?) {
super.onConfigurationChanged(newConfig)
// TODO: updateOrientation()
}
override fun onAttachedToWindow() {
super.onAttachedToWindow()
if (!isMounted) {
@ -144,7 +133,6 @@ class CameraView(context: Context) : FrameLayout(context) {
val shouldReconfigureFormat = shouldReconfigureSession || changedProps.containsAny(propsThatRequireFormatReconfiguration)
val shouldReconfigureZoom = shouldReconfigureSession || changedProps.contains("zoom")
val shouldReconfigureTorch = shouldReconfigureSession || changedProps.contains("torch")
val shouldUpdateOrientation = /* TODO: When should we reconfigure this? */ shouldReconfigureSession || changedProps.contains("orientation")
val shouldCheckActive = shouldReconfigureFormat || changedProps.contains("isActive")
val shouldReconfigureZoomGesture = changedProps.contains("enableZoomGesture")
@ -167,9 +155,6 @@ class CameraView(context: Context) : FrameLayout(context) {
if (shouldReconfigureTorch) {
updateTorch()
}
if (shouldUpdateOrientation) {
// TODO: updateOrientation()
}
if (shouldReconfigureZoomGesture) {
updateZoomGesture()
}

View File

@ -12,7 +12,6 @@ import com.mrousavy.camera.parsers.VideoStabilizationMode
@Suppress("unused")
class CameraViewManager : ViewGroupManager<CameraView>() {
public override fun createViewInstance(context: ThemedReactContext): CameraView {
return CameraView(context)
}

View File

@ -37,15 +37,8 @@ class CameraPermissionError : CameraError("permission", "camera-permission-denie
class InvalidTypeScriptUnionError(unionName: String, unionValue: String) : CameraError("parameter", "invalid-parameter", "The given value for $unionName could not be parsed! (Received: $unionValue)")
class NoCameraDeviceError : CameraError("device", "no-device", "No device was set! Use `getAvailableCameraDevices()` to select a suitable Camera device.")
class NoFlashAvailableError : CameraError("device", "flash-unavailable", "The Camera Device does not have a flash unit! Make sure you select a device where `hasFlash`/`hasTorch` is true!")
class PixelFormatNotSupportedError(format: String) : CameraError("device", "pixel-format-not-supported", "The pixelFormat $format is not supported on the given Camera Device!")
class HdrNotContainedInFormatError : CameraError(
"format", "invalid-hdr",
"The currently selected format does not support HDR capture! " +
"Make sure you select a format which includes `supportsPhotoHDR`!"
)
class CameraNotReadyError : CameraError("session", "camera-not-ready", "The Camera is not ready yet! Wait for the onInitialized() callback!")
class CameraCannotBeOpenedError(cameraId: String, error: CameraDeviceError) : CameraError("session", "camera-cannot-be-opened", "The given Camera device (id: $cameraId) could not be opened! Error: $error")
class CameraSessionCannotBeConfiguredError(cameraId: String, outputs: CameraOutputs) : CameraError("session", "cannot-create-session", "Failed to create a Camera Session for Camera $cameraId! Outputs: $outputs")

View File

@ -91,7 +91,6 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
return array
}
// 35mm is 135 film format, a standard in which focal lengths are usually measured
private val size35mm = Size(36, 24)
@ -153,7 +152,6 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
return array
}
// Get available pixel formats for the given Size
private fun createPixelFormats(size: Size): ReadableArray {
val formats = cameraConfig.outputFormats
val array = Arguments.createArray()
@ -186,7 +184,6 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
return map
}
// convert to React Native JS object (map)
fun toMap(): ReadableMap {
val map = Arguments.createMap()
map.putString("id", cameraId)
@ -205,39 +202,7 @@ class CameraDeviceDetails(private val cameraManager: CameraManager, private val
map.putDouble("neutralZoom", 1.0) // Zoom is always relative to 1.0 on Android
map.putString("hardwareLevel", hardwareLevel.unionValue)
map.putString("sensorOrientation", Orientation.fromRotationDegrees(sensorOrientation).unionValue)
val array = Arguments.createArray()
cameraConfig.outputFormats.forEach { f ->
val str = when (f) {
ImageFormat.YUV_420_888 -> "YUV_420_888"
ImageFormat.YUV_422_888 -> "YUV_422_888"
ImageFormat.YUV_444_888 -> "YUV_444_888"
ImageFormat.JPEG -> "JPEG"
ImageFormat.DEPTH16 -> "DEPTH16"
ImageFormat.DEPTH_JPEG -> "DEPTH_JPEG"
ImageFormat.FLEX_RGBA_8888 -> "FLEX_RGBA_8888"
ImageFormat.FLEX_RGB_888 -> "FLEX_RGB_888"
ImageFormat.YUY2 -> "YUY2"
ImageFormat.Y8 -> "Y8"
ImageFormat.YV12 -> "YV12"
ImageFormat.HEIC -> "HEIC"
ImageFormat.PRIVATE -> "PRIVATE"
ImageFormat.RAW_PRIVATE -> "RAW_PRIVATE"
ImageFormat.RAW_SENSOR -> "RAW_SENSOR"
ImageFormat.RAW10 -> "RAW10"
ImageFormat.RAW12 -> "RAW12"
ImageFormat.NV16 -> "NV16"
ImageFormat.NV21 -> "NV21"
ImageFormat.UNKNOWN -> "UNKNOWN"
ImageFormat.YCBCR_P010 -> "YCBCR_P010"
else -> "unknown ($f)"
}
array.pushString(str)
}
map.putArray("pixelFormats", array)
map.putArray("formats", getFormats())
return map
}
}

View File

@ -25,12 +25,10 @@ import com.mrousavy.camera.PhotoNotEnabledError
import com.mrousavy.camera.RecorderError
import com.mrousavy.camera.RecordingInProgressError
import com.mrousavy.camera.VideoNotEnabledError
import com.mrousavy.camera.extensions.SessionType
import com.mrousavy.camera.extensions.capture
import com.mrousavy.camera.extensions.createCaptureSession
import com.mrousavy.camera.extensions.createPhotoCaptureRequest
import com.mrousavy.camera.extensions.openCamera
import com.mrousavy.camera.extensions.tryClose
import com.mrousavy.camera.extensions.zoomed
import com.mrousavy.camera.frameprocessor.FrameProcessor
import com.mrousavy.camera.parsers.Flash
@ -116,7 +114,7 @@ class CameraSession(private val context: Context,
cameraManager.unregisterAvailabilityCallback(this)
photoOutputSynchronizer.clear()
captureSession?.close()
cameraDevice?.tryClose()
cameraDevice?.close()
outputs?.close()
isRunning = false
}
@ -206,7 +204,6 @@ class CameraSession(private val context: Context,
private fun updateVideoOutputs() {
val videoPipeline = outputs?.videoOutput?.videoPipeline ?: return
val previewOutput = outputs?.previewOutput
videoPipeline.setRecordingSessionOutput(this.recording)
videoPipeline.setFrameProcessorOutput(this.frameProcessor)
}
@ -377,7 +374,7 @@ class CameraSession(private val context: Context,
return currentDevice
}
// Close previous device
cameraDevice?.tryClose()
cameraDevice?.close()
cameraDevice = null
val device = cameraManager.openCamera(cameraId, { camera, reason ->
@ -410,7 +407,7 @@ class CameraSession(private val context: Context,
captureSession?.close()
captureSession = null
val session = cameraDevice.createCaptureSession(cameraManager, SessionType.REGULAR, outputs, { session ->
val session = cameraDevice.createCaptureSession(cameraManager, outputs, { session ->
Log.d(TAG, "Capture Session Closed ($captureSession == $session)")
if (captureSession == session) {
// The current CameraCaptureSession has been closed, handle that!

View File

@ -66,6 +66,6 @@ class PreviewView(context: Context,
}
companion object {
private const val TAG = "NativePreviewView"
private const val TAG = "PreviewView"
}
}

View File

@ -108,10 +108,7 @@ class VideoPipeline(val width: Int,
}
/**
* Configures the Pipeline to also call the given [FrameProcessor].
* * If the [frameProcessor] is `null`, this output channel will be removed.
* * If the [frameProcessor] is not `null`, the [VideoPipeline] will create Frames
* using an [ImageWriter] and call the [FrameProcessor] with those Frames.
* Configures the Pipeline to also call the given [FrameProcessor] (or null).
*/
fun setFrameProcessorOutput(frameProcessor: FrameProcessor?) {
synchronized(this) {
@ -138,9 +135,7 @@ class VideoPipeline(val width: Int,
}
/**
* Configures the Pipeline to also write Frames to a Surface from a [MediaRecorder].
* * If the [surface] is `null`, this output channel will be removed.
* * If the [surface] is not `null`, the [VideoPipeline] will write Frames to this Surface.
* Configures the Pipeline to also write Frames to a Surface from a [MediaRecorder] (or null)
*/
fun setRecordingSessionOutput(recordingSession: RecordingSession?) {
synchronized(this) {

View File

@ -22,8 +22,7 @@ class CameraOutputs(val cameraId: String,
val photo: PhotoOutput? = null,
val video: VideoOutput? = null,
val enableHdr: Boolean? = false,
val callback: Callback
): Closeable {
val callback: Callback): Closeable {
companion object {
private const val TAG = "CameraOutputs"
const val PHOTO_OUTPUT_BUFFER_SIZE = 3

View File

@ -5,9 +5,6 @@ import android.util.Log
import android.util.Size
import java.io.Closeable
/**
* A [SurfaceOutput] that uses an [ImageReader] as it's surface.
*/
class ImageReaderOutput(private val imageReader: ImageReader,
outputType: OutputType,
dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(imageReader.surface, Size(imageReader.width, imageReader.height), outputType, dynamicRangeProfile) {

View File

@ -10,9 +10,6 @@ import android.view.Surface
import androidx.annotation.RequiresApi
import java.io.Closeable
/**
* A general-purpose Camera Output that writes to a [Surface]
*/
open class SurfaceOutput(val surface: Surface,
val size: Size,
val outputType: OutputType,

View File

@ -5,9 +5,6 @@ import android.util.Size
import com.mrousavy.camera.core.VideoPipeline
import java.io.Closeable
/**
* A [SurfaceOutput] that uses a [VideoPipeline] as it's surface.
*/
class VideoPipelineOutput(val videoPipeline: VideoPipeline,
outputType: OutputType,
dynamicRangeProfile: Long? = null): Closeable, SurfaceOutput(videoPipeline.surface, Size(videoPipeline.width, videoPipeline.height), outputType, dynamicRangeProfile) {

View File

@ -16,24 +16,10 @@ import kotlinx.coroutines.suspendCancellableCoroutine
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
enum class SessionType {
REGULAR,
HIGH_SPEED;
@RequiresApi(Build.VERSION_CODES.P)
fun toSessionType(): Int {
return when(this) {
REGULAR -> SessionConfiguration.SESSION_REGULAR
HIGH_SPEED -> SessionConfiguration.SESSION_HIGH_SPEED
}
}
}
private const val TAG = "CreateCaptureSession"
private var sessionId = 1000
suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
sessionType: SessionType,
outputs: CameraOutputs,
onClosed: (session: CameraCaptureSession) -> Unit,
queue: CameraQueues.CameraQueue): CameraCaptureSession {
@ -85,7 +71,7 @@ suspend fun CameraDevice.createCaptureSession(cameraManager: CameraManager,
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
Log.i(TAG, "Using new API (>=28)")
val config = SessionConfiguration(sessionType.toSessionType(), outputConfigurations, queue.executor, callback)
val config = SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputConfigurations, queue.executor, callback)
this.createCaptureSession(config)
} else {
Log.i(TAG, "Using legacy API (<28)")

View File

@ -35,7 +35,7 @@ suspend fun CameraManager.openCamera(cameraId: String,
} else {
onDisconnected(camera, CameraDisconnectedError(cameraId, CameraDeviceError.DISCONNECTED))
}
camera.tryClose()
camera.close()
}
override fun onError(camera: CameraDevice, errorCode: Int) {
@ -46,7 +46,7 @@ suspend fun CameraManager.openCamera(cameraId: String,
} else {
onDisconnected(camera, CameraDisconnectedError(cameraId, error))
}
camera.tryClose()
camera.close()
}
}
@ -57,12 +57,3 @@ suspend fun CameraManager.openCamera(cameraId: String,
}
}
}
fun CameraDevice.tryClose() {
try {
Log.i(TAG, "Camera $id: Closing...")
this.close()
} catch (e: Throwable) {
Log.e(TAG, "Camera $id: Failed to close!", e)
}
}

View File

@ -1,21 +0,0 @@
package com.mrousavy.camera.extensions
import android.os.Handler
import java.util.concurrent.Semaphore
/**
* Posts a Message to this Handler and blocks the calling Thread until the Handler finished executing the given job.
*/
fun Handler.postAndWait(job: () -> Unit) {
val semaphore = Semaphore(0)
this.post {
try {
job()
} finally {
semaphore.release()
}
}
semaphore.acquire()
}

View File

@ -15,9 +15,6 @@ fun List<Size>.closestToOrMax(size: Size?): Size {
}
}
/**
* Rotate by a given Surface Rotation
*/
fun Size.rotated(surfaceRotation: Int): Size {
return when (surfaceRotation) {
Surface.ROTATION_0 -> Size(width, height)

View File

@ -3,8 +3,7 @@ package com.mrousavy.camera.extensions
import android.view.View
import android.view.ViewGroup
// React does not trigger onLayout events for dynamically added views (`addView`).
// This fixes that.
// React does not trigger onLayout events for dynamically added views (`addView`). This fixes that.
// https://github.com/facebook/react-native/issues/17968#issuecomment-633308615
fun ViewGroup.installHierarchyFitter() {
setOnHierarchyChangeListener(object : ViewGroup.OnHierarchyChangeListener {

View File

@ -1,17 +0,0 @@
package com.mrousavy.camera.extensions
import com.facebook.react.bridge.WritableMap
fun WritableMap.putInt(key: String, value: Int?) {
if (value == null)
this.putNull(key)
else
this.putInt(key, value)
}
fun WritableMap.putDouble(key: String, value: Double?) {
if (value == null)
this.putNull(key)
else
this.putDouble(key, value)
}

View File

@ -74,9 +74,7 @@ class VisionCameraProxy(context: ReactApplicationContext) {
}
// private C++ funcs
private external fun initHybrid(
jsContext: Long,
jsCallInvokerHolder: CallInvokerHolderImpl,
scheduler: VisionCameraScheduler
): HybridData
private external fun initHybrid(jsContext: Long,
jsCallInvokerHolder: CallInvokerHolderImpl,
scheduler: VisionCameraScheduler): HybridData
}