7 Commits

10 changed files with 73 additions and 33 deletions

View File

@@ -11,6 +11,7 @@ import com.mrousavy.camera.core.CodeScannerFrame
import com.mrousavy.camera.core.UnknownCameraError
import com.mrousavy.camera.core.code
import com.mrousavy.camera.types.CodeType
+import java.io.File
fun CameraView.invokeOnInitialized() {
Log.i(CameraView.TAG, "invokeOnInitialized()")
@@ -33,6 +34,15 @@ fun CameraView.invokeOnStopped() {
reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null)
}
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
+Log.i(CameraView.TAG, "invokeOnChunkReady()")
+val event = Arguments.createMap()
+event.putInt("index", index)
+event.putString("filepath", filepath.toString())
+val reactContext = context as ReactContext
+reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
+}
fun CameraView.invokeOnError(error: Throwable) {
Log.e(CameraView.TAG, "invokeOnError(...):")
error.printStackTrace()

View File

@@ -13,7 +13,7 @@ import com.mrousavy.camera.types.RecordVideoOptions
import com.mrousavy.camera.utils.makeErrorMap
import java.util.*
-suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallback: Callback) {
+suspend fun CameraView.startRecording(options: RecordVideoOptions, filePath: String, onRecordCallback: Callback) {
// check audio permission
if (audio == true) {
if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
@@ -33,7 +33,7 @@ suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallb
val errorMap = makeErrorMap(error.code, error.message)
onRecordCallback(null, errorMap)
}
-cameraSession.startRecording(audio == true, options, callback, onError)
+cameraSession.startRecording(audio == true, options, filePath, callback, onError)
}
@SuppressLint("RestrictedApi")

View File

@@ -25,6 +25,7 @@ import com.mrousavy.camera.types.Torch
import com.mrousavy.camera.types.VideoStabilizationMode
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
+import java.io.File
//
// TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
@@ -265,6 +266,10 @@ class CameraView(context: Context) :
invokeOnStopped()
}
+override fun onVideoChunkReady(filepath: File, index: Int) {
+invokeOnChunkReady(filepath, index)
+}
override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {
invokeOnCodeScanned(codes, scannerFrame)
}

View File

@@ -29,6 +29,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
.put("cameraStopped", MapBuilder.of("registrationName", "onStopped"))
.put("cameraError", MapBuilder.of("registrationName", "onError"))
.put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
.put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
.build()
override fun getName(): String = TAG

View File

@@ -95,12 +95,12 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
// TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that
@ReactMethod
-fun startRecording(viewTag: Int, jsOptions: ReadableMap, onRecordCallback: Callback) {
+fun startRecording(viewTag: Int, jsOptions: ReadableMap, filePath: String, onRecordCallback: Callback) {
coroutineScope.launch {
val view = findCameraView(viewTag)
try {
val options = RecordVideoOptions(jsOptions)
-view.startRecording(options, onRecordCallback)
+view.startRecording(options, filePath, onRecordCallback)
} catch (error: CameraError) {
val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
onRecordCallback(null, map)

View File

@@ -54,6 +54,7 @@ import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
+import java.io.File
class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
CameraManager.AvailabilityCallback(),
@@ -620,6 +621,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
suspend fun startRecording(
enableAudio: Boolean,
options: RecordVideoOptions,
+filePath: String,
callback: (video: RecordingSession.Video) -> Unit,
onError: (error: CameraError) -> Unit
) {
@@ -639,8 +641,10 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
videoOutput.enableHdr,
orientation,
options,
+filePath,
callback,
-onError
+onError,
+this.callback,
)
recording.start()
this.recording = recording
@@ -724,6 +728,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
fun onInitialized()
fun onStarted()
fun onStopped()
+fun onVideoChunkReady(filepath: File, index: Int)
fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
}
}

View File

@@ -12,24 +12,31 @@ import com.mrousavy.camera.types.Orientation
import com.mrousavy.camera.types.RecordVideoOptions
import java.io.File
import java.nio.ByteBuffer
+import kotlin.math.ceil
-class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
+class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Float, private val callbacks: CameraSession.Callback) :
MediaCodec.Callback() {
companion object {
private const val TAG = "ChunkedRecorder"
+private fun roundIntervalLengthUp(fps: Float, interval: Float): Float {
+return ceil(fps * interval) / fps
+}
fun fromParams(
+callbacks: CameraSession.Callback,
size: Size,
enableAudio: Boolean,
-fps: Int? = null,
+fps: Float = 30.0f,
cameraOrientation: Orientation,
bitRate: Int,
options: RecordVideoOptions,
outputDirectory: File,
-iFrameInterval: Int = 3
+iFrameInterval: Float = 5.0f
): ChunkedRecordingManager {
val mimeType = options.videoCodec.toMimeType()
-val orientationDegrees = cameraOrientation.toDegrees()
+val cameraOrientationDegrees = cameraOrientation.toDegrees()
+val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees()
val (width, height) = if (cameraOrientation.isLandscape()) {
size.height to size.width
} else {
@@ -40,24 +47,26 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
val codec = MediaCodec.createEncoderByType(mimeType)
+val roundedInterval = roundIntervalLengthUp(fps, iFrameInterval)
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
-fps?.apply {
-format.setInteger(MediaFormat.KEY_FRAME_RATE, this)
-}
+format.setFloat(MediaFormat.KEY_FRAME_RATE, fps)
// TODO: Pull this out into configuration
-format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
+format.setFloat(MediaFormat.KEY_I_FRAME_INTERVAL, roundedInterval)
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
Log.i(TAG, "Video Format: $format, orientation $cameraOrientation")
Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees, Set fps: $fps")
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
-return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval)
+return ChunkedRecordingManager(
+codec, outputDirectory, recordingOrientationDegrees, roundedInterval, callbacks
+)
}
}
@@ -65,7 +74,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
private var currentFrameNumber: Int = 0
private var chunkIndex = -1
private var encodedFormat: MediaFormat? = null
-private var recording = false;
+private var recording = false
private val targetDurationUs = iFrameInterval * 1000000
@@ -79,15 +88,15 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
}
// Muxer specific
-private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
+private class MuxerContext(
+val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int,
+val startTimeUs: Long, encodedFormat: MediaFormat
+) {
val videoTrack: Int = muxer.addTrack(encodedFormat)
-val startTimeUs: Long = startTimeUs
init {
muxer.start()
}
fun finish() {
muxer.stop()
muxer.release()
@@ -97,7 +106,10 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
private var muxerContext: MuxerContext? = null
private fun createNextMuxer(bufferInfo: BufferInfo) {
-muxerContext?.finish()
+muxerContext?.let {
+it.finish()
+this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
+}
chunkIndex++
val newFileName = "$chunkIndex.mp4"
@@ -109,7 +121,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
)
muxer.setOrientationHint(orientationHint)
muxerContext = MuxerContext(
-muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
+muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
)
}
@@ -139,7 +151,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
-override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: MediaCodec.BufferInfo) {
+override fun onOutputBufferAvailable(codec: MediaCodec, index: Int, bufferInfo: BufferInfo) {
synchronized(this) {
if (!recording) {
return

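A note on the interval rounding introduced in fromParams above (the intent is inferred from the code, not stated in the change): roundIntervalLengthUp snaps the requested chunk length up to a whole number of frames, ceil(fps * interval) / fps, and that rounded value is what gets written to KEY_I_FRAME_INTERVAL and used for targetDurationUs. For example, with fps = 29.97 and iFrameInterval = 5.0 the result is ceil(149.85) / 29.97 = 150 / 29.97 ≈ 5.005 s, i.e. exactly 150 frames per chunk, which appears intended to let every chunk end on a frame boundary and start on an I-frame.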
View File

@@ -23,8 +23,10 @@ class RecordingSession(
private val hdr: Boolean = false,
private val cameraOrientation: Orientation,
private val options: RecordVideoOptions,
+private val filePath: String,
private val callback: (video: Video) -> Unit,
-private val onError: (error: CameraError) -> Unit
+private val onError: (error: CameraError) -> Unit,
+private val allCallbacks: CameraSession.Callback,
) {
companion object {
private const val TAG = "RecordingSession"
@@ -36,18 +38,14 @@ class RecordingSession(
data class Video(val path: String, val durationMs: Long, val size: Size)
-private val outputPath = run {
-val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
-val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US)
-val videoFileName = "VID_${sdf.format(Date())}"
-File(videoDir!!, videoFileName)
-}
+private val outputPath: File = File(filePath)
private val bitRate = getBitRate()
private val recorder = ChunkedRecordingManager.fromParams(
+allCallbacks,
size,
enableAudio,
-fps,
+(fps ?: 30).toFloat(),
cameraOrientation,
bitRate,
options,

View File

@@ -8,6 +8,7 @@ class RecordVideoOptions(map: ReadableMap) {
var videoCodec = VideoCodec.H264
var videoBitRateOverride: Double? = null
var videoBitRateMultiplier: Double? = null
+var orientation: Orientation? = null
init {
if (map.hasKey("fileType")) {
@@ -25,5 +26,8 @@ class RecordVideoOptions(map: ReadableMap) {
if (map.hasKey("videoBitRateMultiplier")) {
videoBitRateMultiplier = map.getDouble("videoBitRateMultiplier")
}
if (map.hasKey("orientation")) {
orientation = Orientation.fromUnionValue(map.getString("orientation"))
}
}
}

View File

@@ -26,6 +26,10 @@ interface OnErrorEvent {
message: string
cause?: ErrorWithCause
}
+interface OnVideoChunkReadyEvent {
+filepath: string
+index: number
+}
type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor' | 'codeScanner'> & {
cameraId: string
enableFrameProcessor: boolean
@@ -35,6 +39,7 @@ type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onE
onCodeScanned?: (event: NativeSyntheticEvent<OnCodeScannedEvent>) => void
onStarted?: (event: NativeSyntheticEvent<void>) => void
onStopped?: (event: NativeSyntheticEvent<void>) => void
+onVideoChunkReady?: (event: NativeSyntheticEvent<OnVideoChunkReadyEvent>) => void
onViewReady: () => void
}
type NativeRecordVideoOptions = Omit<RecordVideoOptions, 'onRecordingError' | 'onRecordingFinished' | 'videoBitRate'> & {
@@ -168,7 +173,7 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
* }, 5000)
* ```
*/
-public startRecording(options: RecordVideoOptions): void {
+public startRecording(options: RecordVideoOptions, filePath: string): void {
const { onRecordingError, onRecordingFinished, videoBitRate, ...passThruOptions } = options
if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!')
@@ -202,7 +207,7 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
}
try {
// TODO: Use TurboModules to make this awaitable.
-CameraModule.startRecording(this.handle, nativeOptions, onRecordCallback)
+CameraModule.startRecording(this.handle, nativeOptions, filePath, onRecordCallback)
} catch (e) {
throw tryParseNativeCameraError(e)
}
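
Below is a minimal sketch of how the changed JS API would be called once this lands. It assumes the usual package exports (Camera, RecordVideoOptions) and uses a hypothetical output path; the onVideoChunkReady payload ({ filepath, index }) is only wired to the native view in this diff, so exposing it on the public <Camera> props is an assumption rather than something shown here.

```tsx
import { Camera, type RecordVideoOptions } from 'react-native-vision-camera'

// Hypothetical output location for this example; on Android the chunked
// recorder writes numbered chunk files (0.mp4, 1.mp4, ...) under the path
// it is handed, and each finished chunk is reported via onVideoChunkReady.
const CHUNK_DIR = '/data/user/0/com.example.app/cache/recording-123'

export function startChunkedRecording(camera: Camera): void {
  const options: RecordVideoOptions = {
    onRecordingFinished: (video) => console.log('recording finished:', video.path),
    onRecordingError: (error) => console.error('recording failed:', error),
  }
  // filePath is the new second argument introduced by this change; it is
  // forwarded through CameraViewModule.startRecording and CameraView.startRecording
  // down to RecordingSession, which uses it as its output path.
  camera.startRecording(options, CHUNK_DIR)
}
```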