Compare commits
21 commits: fb42545890 ... eyenov/chu

b006b1e744
694d9cfa8c
91767e71c8
9f2c7906e5
621bfe333c
20f8fa2937
b03f9ea423
98d90a6442
0a43d7a160
a2ce4df663
89ecb35616
d9a1287b68
23459b2635
952e4a93e1
489171f6f3
19bf300bbe
1312c5be53
0e05fc314f
413be519d5
009838db75
1f035ce557
package/.gitignore (vendored, +3)
@@ -67,3 +67,6 @@ package-lock.json
 .cxx/
 
 example/ios/vendor
+
+#.direnv
+.direnv

@@ -11,6 +11,7 @@ import com.mrousavy.camera.core.CodeScannerFrame
 import com.mrousavy.camera.core.UnknownCameraError
 import com.mrousavy.camera.core.code
 import com.mrousavy.camera.types.CodeType
+import java.io.File
 
 fun CameraView.invokeOnInitialized() {
   Log.i(CameraView.TAG, "invokeOnInitialized()")
@@ -33,6 +34,15 @@ fun CameraView.invokeOnStopped() {
   reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "cameraStopped", null)
 }
 
+fun CameraView.invokeOnChunkReady(filepath: File, index: Int) {
+  Log.e(CameraView.TAG, "invokeOnError(...):")
+  val event = Arguments.createMap()
+  event.putInt("index", index)
+  event.putString("filepath", filepath.toString())
+  val reactContext = context as ReactContext
+  reactContext.getJSModule(RCTEventEmitter::class.java).receiveEvent(id, "onVideoChunkReady", event)
+}
+
 fun CameraView.invokeOnError(error: Throwable) {
   Log.e(CameraView.TAG, "invokeOnError(...):")
   error.printStackTrace()

@@ -13,7 +13,7 @@ import com.mrousavy.camera.types.RecordVideoOptions
 import com.mrousavy.camera.utils.makeErrorMap
 import java.util.*
 
-suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallback: Callback) {
+suspend fun CameraView.startRecording(options: RecordVideoOptions, filePath: String, onRecordCallback: Callback) {
   // check audio permission
   if (audio == true) {
     if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
@@ -33,7 +33,7 @@ suspend fun CameraView.startRecording(options: RecordVideoOptions, onRecordCallb
     val errorMap = makeErrorMap(error.code, error.message)
     onRecordCallback(null, errorMap)
   }
-  cameraSession.startRecording(audio == true, options, callback, onError)
+  cameraSession.startRecording(audio == true, options, filePath, callback, onError)
 }
 
 @SuppressLint("RestrictedApi")

@@ -25,6 +25,7 @@ import com.mrousavy.camera.types.Torch
 import com.mrousavy.camera.types.VideoStabilizationMode
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.launch
+import java.io.File
 
 //
 // TODOs for the CameraView which are currently too hard to implement either because of CameraX' limitations, or my brain capacity.
@@ -265,6 +266,10 @@ class CameraView(context: Context) :
     invokeOnStopped()
   }
 
+  override fun onVideoChunkReady(filepath: File, index: Int) {
+    invokeOnChunkReady(filepath, index)
+  }
+
   override fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame) {
     invokeOnCodeScanned(codes, scannerFrame)
   }

@@ -29,6 +29,7 @@ class CameraViewManager : ViewGroupManager<CameraView>() {
       .put("cameraStopped", MapBuilder.of("registrationName", "onStopped"))
       .put("cameraError", MapBuilder.of("registrationName", "onError"))
       .put("cameraCodeScanned", MapBuilder.of("registrationName", "onCodeScanned"))
+      .put("onVideoChunkReady", MapBuilder.of("registrationName", "onVideoChunkReady"))
       .build()
 
   override fun getName(): String = TAG

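The MapBuilder entry above exposes a new direct view event, onVideoChunkReady. Below is a minimal TypeScript sketch of receiving it on the JS side; the native view name 'CameraView' and the wrapper component are assumptions — only the event name and its payload fields (filepath, index, as set by invokeOnChunkReady) come from these commits.

```tsx
import * as React from 'react'
import { requireNativeComponent, ViewProps } from 'react-native'

// Payload shape taken from invokeOnChunkReady(): putString("filepath", ...), putInt("index", ...).
interface VideoChunkReadyEvent {
  filepath: string
  index: number
}

interface NativeCameraProps extends ViewProps {
  onVideoChunkReady?: (event: { nativeEvent: VideoChunkReadyEvent }) => void
}

// Assumption: the view manager is registered under the name 'CameraView'.
const NativeCameraView = requireNativeComponent<NativeCameraProps>('CameraView')

export function ChunkLogger(): React.ReactElement {
  return (
    <NativeCameraView
      style={{ flex: 1 }}
      onVideoChunkReady={(e) => console.log(`chunk ${e.nativeEvent.index}: ${e.nativeEvent.filepath}`)}
    />
  )
}
```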
@@ -95,12 +95,12 @@ class CameraViewModule(reactContext: ReactApplicationContext) : ReactContextBase
 
   // TODO: startRecording() cannot be awaited, because I can't have a Promise and a onRecordedCallback in the same function. Hopefully TurboModules allows that
   @ReactMethod
-  fun startRecording(viewTag: Int, jsOptions: ReadableMap, onRecordCallback: Callback) {
+  fun startRecording(viewTag: Int, jsOptions: ReadableMap, filePath: String, onRecordCallback: Callback) {
     coroutineScope.launch {
       val view = findCameraView(viewTag)
       try {
         val options = RecordVideoOptions(jsOptions)
-        view.startRecording(options, onRecordCallback)
+        view.startRecording(options, filePath, onRecordCallback)
       } catch (error: CameraError) {
         val map = makeErrorMap("${error.domain}/${error.id}", error.message, error)
         onRecordCallback(null, map)

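Since startRecording() now takes a filePath argument before the callback, any JS caller has to pass it through as well. A hedged sketch of invoking the patched method directly via NativeModules follows; the module name 'CameraView' and the option values are assumptions — the (viewTag, options, filePath, callback) argument order is what the @ReactMethod above declares.

```ts
import { NativeModules, findNodeHandle } from 'react-native'
import type { RefObject } from 'react'
import type { Camera } from 'react-native-vision-camera'

// Assumption: the module is exported as NativeModules.CameraView (getName() returns TAG).
function startChunkedRecording(cameraRef: RefObject<Camera>, filePath: string): void {
  const viewTag = findNodeHandle(cameraRef.current)
  if (viewTag == null) return

  NativeModules.CameraView.startRecording(
    viewTag,
    { fileType: 'mp4', videoCodec: 'h264' }, // parsed by RecordVideoOptions on the native side; illustrative values
    filePath, // destination the recording (and its chunks) should be written to
    (video: unknown, error: unknown) => {
      if (error != null) console.error('recording failed', error)
      else console.log('recording finished', video)
    },
  )
}
```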
@@ -54,6 +54,7 @@ import kotlinx.coroutines.launch
 import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.sync.Mutex
 import kotlinx.coroutines.sync.withLock
+import java.io.File
 
 class CameraSession(private val context: Context, private val cameraManager: CameraManager, private val callback: Callback) :
   CameraManager.AvailabilityCallback(),
@@ -620,6 +621,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
   suspend fun startRecording(
     enableAudio: Boolean,
     options: RecordVideoOptions,
+    filePath: String,
     callback: (video: RecordingSession.Video) -> Unit,
     onError: (error: CameraError) -> Unit
   ) {
@@ -639,8 +641,10 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
       videoOutput.enableHdr,
       orientation,
       options,
+      filePath,
       callback,
-      onError
+      onError,
+      this.callback,
     )
     recording.start()
     this.recording = recording
@@ -724,6 +728,7 @@ class CameraSession(private val context: Context, private val cameraManager: Cam
     fun onInitialized()
     fun onStarted()
     fun onStopped()
+    fun onVideoChunkReady(filepath: File, index: Int)
     fun onCodeScanned(codes: List<Barcode>, scannerFrame: CodeScannerFrame)
   }
 }

@@ -13,12 +13,13 @@ import com.mrousavy.camera.types.RecordVideoOptions
 import java.io.File
 import java.nio.ByteBuffer
 
-class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int) :
+class ChunkedRecordingManager(private val encoder: MediaCodec, private val outputDirectory: File, private val orientationHint: Int, private val iFrameInterval: Int, private val callbacks: CameraSession.Callback) :
   MediaCodec.Callback() {
   companion object {
     private const val TAG = "ChunkedRecorder"
 
     fun fromParams(
+      callbacks: CameraSession.Callback,
       size: Size,
       enableAudio: Boolean,
       fps: Int? = null,
@@ -26,10 +27,11 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       bitRate: Int,
       options: RecordVideoOptions,
       outputDirectory: File,
-      iFrameInterval: Int = 3
+      iFrameInterval: Int = 5
     ): ChunkedRecordingManager {
       val mimeType = options.videoCodec.toMimeType()
-      val orientationDegrees = cameraOrientation.toDegrees()
+      val cameraOrientationDegrees = cameraOrientation.toDegrees()
+      val recordingOrientationDegrees = (options.orientation ?: Orientation.PORTRAIT).toDegrees();
       val (width, height) = if (cameraOrientation.isLandscape()) {
         size.height to size.width
       } else {
@@ -53,11 +55,13 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval)
       format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate)
 
-      Log.i(TAG, "Video Format: $format, orientation $cameraOrientation")
+      Log.d(TAG, "Video Format: $format, camera orientation $cameraOrientationDegrees, recordingOrientation: $recordingOrientationDegrees")
       // Create a MediaCodec encoder, and configure it with our format. Get a Surface
       // we can use for input and wrap it with a class that handles the EGL work.
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
-      return ChunkedRecordingManager(codec, outputDirectory, 0, iFrameInterval)
+      return ChunkedRecordingManager(
+        codec, outputDirectory, recordingOrientationDegrees, iFrameInterval, callbacks
+      )
     }
   }
 
@@ -79,7 +83,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   }
 
   // Muxer specific
-  private class MuxerContext(val muxer: MediaMuxer, startTimeUs: Long, encodedFormat: MediaFormat) {
+  private class MuxerContext(val muxer: MediaMuxer, val filepath: File, val chunkIndex: Int, startTimeUs: Long, encodedFormat: MediaFormat) {
     val videoTrack: Int = muxer.addTrack(encodedFormat)
     val startTimeUs: Long = startTimeUs
 
@@ -97,7 +101,10 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
   private var muxerContext: MuxerContext? = null
 
   private fun createNextMuxer(bufferInfo: BufferInfo) {
-    muxerContext?.finish()
+    muxerContext?.let {
+      it.finish()
+      this.callbacks.onVideoChunkReady(it.filepath, it.chunkIndex)
+    }
     chunkIndex++
 
     val newFileName = "$chunkIndex.mp4"
@@ -109,7 +116,7 @@ class ChunkedRecordingManager(private val encoder: MediaCodec, private val outpu
     )
     muxer.setOrientationHint(orientationHint)
     muxerContext = MuxerContext(
-      muxer, bufferInfo.presentationTimeUs, this.encodedFormat!!
+      muxer, newOutputFile, chunkIndex, bufferInfo.presentationTimeUs, this.encodedFormat!!
     )
   }
 
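Per the createNextMuxer() change above, each finished chunk is muxed into "$chunkIndex.mp4" inside the output directory and then reported through onVideoChunkReady. A small, hedged TypeScript helper for building the matching URI on the JS side (the file:// scheme and the example path are assumptions):

```ts
// Each finished chunk lands at <outputDirectory>/<index>.mp4, per createNextMuxer() above.
const chunkUri = (outputDirectory: string, index: number): string =>
  `file://${outputDirectory}/${index}.mp4`

// Illustrative: chunkUri('/data/user/0/com.example/files/rec1', 0)
//   -> 'file:///data/user/0/com.example/files/rec1/0.mp4'
```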
@@ -23,8 +23,10 @@ class RecordingSession(
   private val hdr: Boolean = false,
   private val cameraOrientation: Orientation,
   private val options: RecordVideoOptions,
+  private val filePath: String,
   private val callback: (video: Video) -> Unit,
-  private val onError: (error: CameraError) -> Unit
+  private val onError: (error: CameraError) -> Unit,
+  private val allCallbacks: CameraSession.Callback,
 ) {
   companion object {
     private const val TAG = "RecordingSession"
@@ -36,15 +38,11 @@ class RecordingSession(
 
   data class Video(val path: String, val durationMs: Long, val size: Size)
 
-  private val outputPath = run {
-    val videoDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES)
-    val sdf = SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS", Locale.US)
-    val videoFileName = "VID_${sdf.format(Date())}"
-    File(videoDir!!, videoFileName)
-  }
+  private val outputPath: File = File(filePath)
 
   private val bitRate = getBitRate()
   private val recorder = ChunkedRecordingManager.fromParams(
+    allCallbacks,
     size,
     enableAudio,
     fps,

@@ -8,6 +8,7 @@ class RecordVideoOptions(map: ReadableMap) {
   var videoCodec = VideoCodec.H264
   var videoBitRateOverride: Double? = null
   var videoBitRateMultiplier: Double? = null
+  var orientation: Orientation? = null
 
   init {
     if (map.hasKey("fileType")) {
@@ -25,5 +26,8 @@ class RecordVideoOptions(map: ReadableMap) {
     if (map.hasKey("videoBitRateMultiplier")) {
       videoBitRateMultiplier = map.getDouble("videoBitRateMultiplier")
     }
+    if (map.hasKey("orientation")) {
+      orientation = Orientation.fromUnionValue(map.getString("orientation"))
+    }
   }
 }

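With the new orientation field, the options map coming from JS can carry an orientation union value alongside the existing keys. An illustrative sketch of such an options object (values are assumptions; fileType, videoBitRateMultiplier, and orientation are the keys parsed above):

```ts
// Keys match what RecordVideoOptions reads from the ReadableMap; values are illustrative.
const recordVideoOptions = {
  fileType: 'mp4',
  videoCodec: 'h264',
  videoBitRateMultiplier: 1.0,
  orientation: 'landscape-left', // new: parsed via Orientation.fromUnionValue(...)
}
```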
@@ -1,4 +1,4 @@
 {
   "name": "VisionCameraExample",
-  "displayName": "VisionCamera Example"
+  "displayName": "Railbird VisionCamera"
 }

@@ -1,55 +1,6 @@
-import { NavigationContainer } from '@react-navigation/native'
 import React from 'react'
-import { createNativeStackNavigator } from '@react-navigation/native-stack'
-import { PermissionsPage } from './PermissionsPage'
-import { MediaPage } from './MediaPage'
-import { CameraPage } from './CameraPage'
-import { CodeScannerPage } from './CodeScannerPage'
-import type { Routes } from './Routes'
-import { Camera } from 'react-native-vision-camera'
-import { GestureHandlerRootView } from 'react-native-gesture-handler'
-import { StyleSheet } from 'react-native'
-import { DevicesPage } from './DevicesPage'
-
-const Stack = createNativeStackNavigator<Routes>()
+import CameraScreen from './camera'
 
 export function App(): React.ReactElement | null {
-  const cameraPermission = Camera.getCameraPermissionStatus()
-  const microphonePermission = Camera.getMicrophonePermissionStatus()
-
-  console.log(`Re-rendering Navigator. Camera: ${cameraPermission} | Microphone: ${microphonePermission}`)
-
-  const showPermissionsPage = cameraPermission !== 'granted' || microphonePermission === 'not-determined'
-  return (
-    <NavigationContainer>
-      <GestureHandlerRootView style={styles.root}>
-        <Stack.Navigator
-          screenOptions={{
-            headerShown: false,
-            statusBarStyle: 'dark',
-            animationTypeForReplace: 'push',
-          }}
-          initialRouteName={showPermissionsPage ? 'PermissionsPage' : 'CameraPage'}>
-          <Stack.Screen name="PermissionsPage" component={PermissionsPage} />
-          <Stack.Screen name="CameraPage" component={CameraPage} />
-          <Stack.Screen name="CodeScannerPage" component={CodeScannerPage} />
-          <Stack.Screen
-            name="MediaPage"
-            component={MediaPage}
-            options={{
-              animation: 'none',
-              presentation: 'transparentModal',
-            }}
-          />
-          <Stack.Screen name="Devices" component={DevicesPage} />
-        </Stack.Navigator>
-      </GestureHandlerRootView>
-    </NavigationContainer>
-  )
+  return <CameraScreen />
 }
-
-const styles = StyleSheet.create({
-  root: {
-    flex: 1,
-  },
-})

@@ -1,280 +0,0 @@
(entire file deleted: the CameraPage example screen — Reanimated camera preview with pinch-to-zoom and double-tap-to-flip gestures, flash / HDR / 60 FPS / night-mode toggles, a frame-processor stub, a CaptureButton, and navigation to the MediaPage, Devices, and CodeScannerPage screens, plus its StyleSheet.)

@@ -1,120 +0,0 @@
(entire file deleted: the CodeScannerPage example screen — QR / EAN-13 scanner built on useCodeScanner, a torch toggle, an alert shown for each scanned code with an optional "Open URL" action, a back button, plus its StyleSheet.)

@@ -1,215 +0,0 @@
(entire file deleted: the DevicesPage example screen — a SectionList of all detected camera devices grouped as preferred / back / front / external, showing each device's physical lenses and maximum photo and video resolutions, with selection stored via usePreferredCameraDevice, plus its StyleSheet.)

@@ -1,151 +0,0 @@
(entire file deleted: the MediaPage example screen — photo preview via react-native-fast-image and video playback via react-native-video, save-to-camera-roll handling including the Android WRITE_EXTERNAL_STORAGE permission request, close and save buttons, plus its StyleSheet.)

@@ -1,96 +0,0 @@
(entire file deleted: the PermissionsPage example screen — camera and microphone permission prompts that open Settings on denial and navigate to CameraPage once both permissions are granted, plus its StyleSheet.)

@@ -1,10 +0,0 @@
-export type Routes = {
-  PermissionsPage: undefined
-  CameraPage: undefined
-  CodeScannerPage: undefined
-  MediaPage: {
-    path: string
-    type: 'video' | 'photo'
-  }
-  Devices: undefined
-}

112
package/example/src/camera.tsx
Normal file
112
package/example/src/camera.tsx
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
import React, { useCallback, useRef, useState } from 'react'
import { Button, StyleSheet, Text, View } from 'react-native'
import {
  Camera,
  useCameraPermission,
  useCameraDevice,
  useCameraFormat,
  PhotoFile,
  VideoFile,
  CameraRuntimeError,
  Orientation,
  CameraDevice,
} from 'react-native-vision-camera'
import { RecordingButton } from './capture-button'
import { useIsForeground } from './is-foreground'

export default function CameraScreen() {
  const camera = useRef<Camera>(null)
  const { hasPermission, requestPermission } = useCameraPermission()
  const [isCameraInitialized, setIsCameraInitialized] = useState<boolean>(false)

  const isForeground: boolean = useIsForeground()
  const isActive: boolean = isForeground // Should be combined with isFocused hook

  const onError = useCallback((error: CameraRuntimeError) => {
    console.error(error)
  }, [])

  const onInitialized = useCallback(() => {
    console.log('Camera initialized!')
    setIsCameraInitialized(true)
  }, [])

  const onMediaCaptured = useCallback((media: PhotoFile | VideoFile) => {
    console.log(`Media captured! ${JSON.stringify(media)}`)
  }, [])

  if (!hasPermission) requestPermission()
  // Error handling in case they refuse to give permission

  const device = useCameraDevice('back')
  const format = useCameraFormat(device, [{ videoResolution: { width: 3048, height: 2160 } }, { fps: 60 }]) // this sets as a target

  // Orientation detection
  const [orientation, setOrientation] = useState<Orientation>('portrait')

  const toggleOrientation = () => {
    setOrientation(
      (currentOrientation) => (currentOrientation === 'landscape-left' ? 'portrait' : 'landscape-left'), // Can adjust this and the type to match what we want
    )
  }

  // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
  if (device === null) return <Text>Camera not available. Does user have permissions: {hasPermission}</Text>

  return (
    hasPermission && (
      <View style={styles.container}>
        <Camera
          ref={camera}
          style={StyleSheet.absoluteFill}
          device={device as CameraDevice}
          format={format}
          onInitialized={onInitialized}
          onError={onError}
          video={true}
          orientation={orientation} // TODO: #60
          isActive={isActive}
        />
        <RecordingButton
          style={[styles.captureButton, orientation === 'portrait' ? styles.portrait : styles.landscape]}
          camera={camera}
          onMediaCaptured={onMediaCaptured}
          enabled={isCameraInitialized}
        />
        <View style={[styles.button, orientation === 'portrait' ? styles.togglePortrait : styles.toggleLandscape]}>
          <Button title="Toggle Orientation" onPress={toggleOrientation} color="#841584" accessibilityLabel="Toggle camera orientation" />
        </View>
      </View>
    )
  )
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: 'black',
  },
  captureButton: {
    position: 'absolute',
    alignSelf: 'center',
  },
  button: {
    position: 'absolute',
    alignSelf: 'center',
  },
  togglePortrait: {
    bottom: 110, // needs refined
  },
  toggleLandscape: {
    transform: [{ rotate: '90deg' }],
    bottom: '43%', // Should come from SafeAreaProvider, hardcoded right now, should roughly appear above the button
    left: 50, // needs refined
  },
  portrait: {
    bottom: 20, // needs refined
  },
  landscape: {
    bottom: '40%', // Should come from SafeAreaProvider
    left: 20, // needs refined
  },
})
96  package/example/src/capture-button.tsx  Normal file
@@ -0,0 +1,96 @@
import React, { useCallback, useRef, useState } from 'react'
import { TouchableOpacity, StyleSheet, View, StyleProp, ViewStyle } from 'react-native'
import { Camera, VideoFile } from 'react-native-vision-camera'

interface RecordingButtonProps {
  style: StyleProp<ViewStyle>
  camera: React.RefObject<Camera>
  onMediaCaptured: (media: VideoFile, mediaType: string) => void
  enabled: boolean
}

export const RecordingButton: React.FC<RecordingButtonProps> = ({ style, camera, onMediaCaptured, enabled }) => {
  const isRecording = useRef(false)
  // UseRef won't trigger a re-render
  const [, setRecordingState] = useState(false)

  const onStoppedRecording = useCallback(() => {
    isRecording.current = false
    setRecordingState(false)
    console.log('stopped recording video!')
  }, [])

  const stopRecording = useCallback(async () => {
    try {
      if (camera.current === null) throw new Error('Camera ref is null!') // Error handling could be more graceful

      console.log('calling stopRecording()...')
      await camera.current.stopRecording()
      console.log('called stopRecording()!')
    } catch (e) {
      console.error('failed to stop recording!', e)
    }
  }, [camera])

  const startRecording = useCallback(() => {
    console.log('press')
    try {
      if (camera.current === null) throw new Error('Camera ref is null!') // Error handling could be more graceful

      console.log('calling startRecording()...')
      camera.current.startRecording({
        onRecordingError: (error) => {
          console.error('Recording failed!', error)
          onStoppedRecording()
        },
        onRecordingFinished: (video) => {
          onMediaCaptured(video, 'video')
          onStoppedRecording()
        },
      })
      console.log('called startRecording()!')
      isRecording.current = true
      setRecordingState(true)
    } catch (e) {
      console.error('failed to start recording!', e, 'camera')
    }
  }, [camera, onMediaCaptured, onStoppedRecording])

  const handlePress = () => {
    if (isRecording.current) stopRecording()
    else startRecording()
  }

  return (
    <TouchableOpacity style={[styles.captureButton, style]} onPress={handlePress} disabled={!enabled}>
      <View style={isRecording.current ? styles.recordingSquare : styles.innerCircle} />
    </TouchableOpacity>
  )
}

const styles = StyleSheet.create({
  captureButton: {
    height: 80,
    width: 80,
    borderRadius: 40,
    borderWidth: 3,
    borderColor: 'white',
    backgroundColor: 'transparent',
    justifyContent: 'center',
    alignItems: 'center',
  },
  innerCircle: {
    height: 70,
    width: 70,
    borderRadius: 35,
    backgroundColor: '#FF3B30',
  },
  recordingSquare: {
    height: 40,
    width: 40,
    borderRadius: 10,
    backgroundColor: '#FF3B30',
  },
})

export default RecordingButton
@@ -1,17 +0,0 @@
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'

const plugin = VisionCameraProxy.initFrameProcessorPlugin('example_kotlin_swift_plugin', { foo: 'bar' })

export function exampleKotlinSwiftPlugin(frame: Frame): string[] {
  'worklet'

  if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "example_kotlin_swift_plugin"!')

  return plugin.call(frame, {
    someString: 'hello!',
    someBoolean: true,
    someNumber: 42,
    someObject: { test: 0, second: 'test' },
    someArray: ['another test', 5],
  }) as string[]
}
@@ -1,25 +0,0 @@
import { VisionCameraProxy, Frame } from 'react-native-vision-camera'

const plugin = VisionCameraProxy.initFrameProcessorPlugin('example_plugin')

interface Result {
  example_array: (string | number | boolean)[]
  example_array_buffer: ArrayBuffer
  example_str: string
  example_bool: boolean
  example_double: number
}

export function examplePlugin(frame: Frame): Result {
  'worklet'

  if (plugin == null) throw new Error('Failed to load Frame Processor Plugin "example_plugin"!')

  return plugin.call(frame, {
    someString: 'hello!',
    someBoolean: true,
    someNumber: 42,
    someObject: { test: 0, second: 'test' },
    someArray: ['another test', 5],
  }) as unknown as Result
}
@@ -1,20 +0,0 @@
import { useMMKVString } from 'react-native-mmkv'
import { CameraDevice } from '../../../src/CameraDevice'
import { useCallback, useMemo } from 'react'
import { useCameraDevices } from '../../../src/hooks/useCameraDevices'

export function usePreferredCameraDevice(): [CameraDevice | undefined, (device: CameraDevice) => void] {
  const [preferredDeviceId, setPreferredDeviceId] = useMMKVString('camera.preferredDeviceId')

  const set = useCallback(
    (device: CameraDevice) => {
      setPreferredDeviceId(device.id)
    },
    [setPreferredDeviceId],
  )

  const devices = useCameraDevices()
  const device = useMemo(() => devices.find((d) => d.id === preferredDeviceId), [devices, preferredDeviceId])

  return [device, set]
}
@@ -1,5 +1,4 @@
- import { useState } from 'react'
- import { useEffect } from 'react'
+ import { useState, useEffect } from 'react'
import { AppState, AppStateStatus } from 'react-native'

export const useIsForeground = (): boolean => {
@@ -1,307 +0,0 @@
|
|||||||
import React, { useCallback, useMemo, useRef } from 'react'
|
|
||||||
import { StyleSheet, View, ViewProps } from 'react-native'
|
|
||||||
import {
|
|
||||||
PanGestureHandler,
|
|
||||||
PanGestureHandlerGestureEvent,
|
|
||||||
State,
|
|
||||||
TapGestureHandler,
|
|
||||||
TapGestureHandlerStateChangeEvent,
|
|
||||||
} from 'react-native-gesture-handler'
|
|
||||||
import Reanimated, {
|
|
||||||
cancelAnimation,
|
|
||||||
Easing,
|
|
||||||
Extrapolate,
|
|
||||||
interpolate,
|
|
||||||
useAnimatedStyle,
|
|
||||||
withSpring,
|
|
||||||
withTiming,
|
|
||||||
useAnimatedGestureHandler,
|
|
||||||
useSharedValue,
|
|
||||||
withRepeat,
|
|
||||||
} from 'react-native-reanimated'
|
|
||||||
import type { Camera, PhotoFile, TakePhotoOptions, VideoFile } from 'react-native-vision-camera'
|
|
||||||
import { CAPTURE_BUTTON_SIZE, SCREEN_HEIGHT, SCREEN_WIDTH } from './../Constants'
|
|
||||||
|
|
||||||
const PAN_GESTURE_HANDLER_FAIL_X = [-SCREEN_WIDTH, SCREEN_WIDTH]
|
|
||||||
const PAN_GESTURE_HANDLER_ACTIVE_Y = [-2, 2]
|
|
||||||
|
|
||||||
const START_RECORDING_DELAY = 200
|
|
||||||
const BORDER_WIDTH = CAPTURE_BUTTON_SIZE * 0.1
|
|
||||||
|
|
||||||
interface Props extends ViewProps {
|
|
||||||
camera: React.RefObject<Camera>
|
|
||||||
onMediaCaptured: (media: PhotoFile | VideoFile, type: 'photo' | 'video') => void
|
|
||||||
|
|
||||||
minZoom: number
|
|
||||||
maxZoom: number
|
|
||||||
cameraZoom: Reanimated.SharedValue<number>
|
|
||||||
|
|
||||||
flash: 'off' | 'on'
|
|
||||||
|
|
||||||
enabled: boolean
|
|
||||||
|
|
||||||
setIsPressingButton: (isPressingButton: boolean) => void
|
|
||||||
}
|
|
||||||
|
|
||||||
const _CaptureButton: React.FC<Props> = ({
|
|
||||||
camera,
|
|
||||||
onMediaCaptured,
|
|
||||||
minZoom,
|
|
||||||
maxZoom,
|
|
||||||
cameraZoom,
|
|
||||||
flash,
|
|
||||||
enabled,
|
|
||||||
setIsPressingButton,
|
|
||||||
style,
|
|
||||||
...props
|
|
||||||
}): React.ReactElement => {
|
|
||||||
const pressDownDate = useRef<Date | undefined>(undefined)
|
|
||||||
const isRecording = useRef(false)
|
|
||||||
const recordingProgress = useSharedValue(0)
|
|
||||||
const takePhotoOptions = useMemo<TakePhotoOptions>(
|
|
||||||
() => ({
|
|
||||||
qualityPrioritization: 'speed',
|
|
||||||
flash: flash,
|
|
||||||
quality: 90,
|
|
||||||
enableShutterSound: false,
|
|
||||||
}),
|
|
||||||
[flash],
|
|
||||||
)
|
|
||||||
const isPressingButton = useSharedValue(false)
|
|
||||||
|
|
||||||
//#region Camera Capture
|
|
||||||
const takePhoto = useCallback(async () => {
|
|
||||||
try {
|
|
||||||
if (camera.current == null) throw new Error('Camera ref is null!')
|
|
||||||
|
|
||||||
console.log('Taking photo...')
|
|
||||||
const photo = await camera.current.takePhoto(takePhotoOptions)
|
|
||||||
onMediaCaptured(photo, 'photo')
|
|
||||||
} catch (e) {
|
|
||||||
console.error('Failed to take photo!', e)
|
|
||||||
}
|
|
||||||
}, [camera, onMediaCaptured, takePhotoOptions])
|
|
||||||
|
|
||||||
const onStoppedRecording = useCallback(() => {
|
|
||||||
isRecording.current = false
|
|
||||||
cancelAnimation(recordingProgress)
|
|
||||||
console.log('stopped recording video!')
|
|
||||||
}, [recordingProgress])
|
|
||||||
const stopRecording = useCallback(async () => {
|
|
||||||
try {
|
|
||||||
if (camera.current == null) throw new Error('Camera ref is null!')
|
|
||||||
|
|
||||||
console.log('calling stopRecording()...')
|
|
||||||
await camera.current.stopRecording()
|
|
||||||
console.log('called stopRecording()!')
|
|
||||||
} catch (e) {
|
|
||||||
console.error('failed to stop recording!', e)
|
|
||||||
}
|
|
||||||
}, [camera])
|
|
||||||
const startRecording = useCallback(() => {
|
|
||||||
try {
|
|
||||||
if (camera.current == null) throw new Error('Camera ref is null!')
|
|
||||||
|
|
||||||
console.log('calling startRecording()...')
|
|
||||||
camera.current.startRecording({
|
|
||||||
flash: flash,
|
|
||||||
onRecordingError: (error) => {
|
|
||||||
console.error('Recording failed!', error)
|
|
||||||
onStoppedRecording()
|
|
||||||
},
|
|
||||||
onRecordingFinished: (video) => {
|
|
||||||
console.log(`Recording successfully finished! ${video.path}`)
|
|
||||||
onMediaCaptured(video, 'video')
|
|
||||||
onStoppedRecording()
|
|
||||||
},
|
|
||||||
})
|
|
||||||
// TODO: wait until startRecording returns to actually find out if the recording has successfully started
|
|
||||||
console.log('called startRecording()!')
|
|
||||||
isRecording.current = true
|
|
||||||
} catch (e) {
|
|
||||||
console.error('failed to start recording!', e, 'camera')
|
|
||||||
}
|
|
||||||
}, [camera, flash, onMediaCaptured, onStoppedRecording])
|
|
||||||
//#endregion
|
|
||||||
|
|
||||||
//#region Tap handler
|
|
||||||
const tapHandler = useRef<TapGestureHandler>()
|
|
||||||
const onHandlerStateChanged = useCallback(
|
|
||||||
async ({ nativeEvent: event }: TapGestureHandlerStateChangeEvent) => {
|
|
||||||
// This is the gesture handler for the circular "shutter" button.
|
|
||||||
// Once the finger touches the button (State.BEGAN), a photo is being taken and "capture mode" is entered. (disabled tab bar)
|
|
||||||
// Also, we set `pressDownDate` to the time of the press down event, and start a 200ms timeout. If the `pressDownDate` hasn't changed
|
|
||||||
// after the 200ms, the user is still holding down the "shutter" button. In that case, we start recording.
|
|
||||||
//
|
|
||||||
// Once the finger releases the button (State.END/FAILED/CANCELLED), we leave "capture mode" (enable tab bar) and check the `pressDownDate`,
|
|
||||||
// if `pressDownDate` was less than 200ms ago, we know that the intention of the user is to take a photo. We check the `takePhotoPromise` if
|
|
||||||
// there already is an ongoing (or already resolved) takePhoto() call (remember that we called takePhoto() when the user pressed down), and
|
|
||||||
// if yes, use that. If no, we just try calling takePhoto() again
|
|
||||||
console.debug(`state: ${Object.keys(State)[event.state]}`)
|
|
||||||
switch (event.state) {
|
|
||||||
case State.BEGAN: {
|
|
||||||
// enter "recording mode"
|
|
||||||
recordingProgress.value = 0
|
|
||||||
isPressingButton.value = true
|
|
||||||
const now = new Date()
|
|
||||||
pressDownDate.current = now
|
|
||||||
setTimeout(() => {
|
|
||||||
if (pressDownDate.current === now) {
|
|
||||||
// user is still pressing down after 200ms, so his intention is to create a video
|
|
||||||
startRecording()
|
|
||||||
}
|
|
||||||
}, START_RECORDING_DELAY)
|
|
||||||
setIsPressingButton(true)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
case State.END:
|
|
||||||
case State.FAILED:
|
|
||||||
case State.CANCELLED: {
|
|
||||||
// exit "recording mode"
|
|
||||||
try {
|
|
||||||
if (pressDownDate.current == null) throw new Error('PressDownDate ref .current was null!')
|
|
||||||
const now = new Date()
|
|
||||||
const diff = now.getTime() - pressDownDate.current.getTime()
|
|
||||||
pressDownDate.current = undefined
|
|
||||||
if (diff < START_RECORDING_DELAY) {
|
|
||||||
// user has released the button within 200ms, so his intention is to take a single picture.
|
|
||||||
await takePhoto()
|
|
||||||
} else {
|
|
||||||
// user has held the button for more than 200ms, so he has been recording this entire time.
|
|
||||||
await stopRecording()
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
setTimeout(() => {
|
|
||||||
isPressingButton.value = false
|
|
||||||
setIsPressingButton(false)
|
|
||||||
}, 500)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
break
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[isPressingButton, recordingProgress, setIsPressingButton, startRecording, stopRecording, takePhoto],
|
|
||||||
)
|
|
||||||
//#endregion
|
|
||||||
//#region Pan handler
|
|
||||||
const panHandler = useRef<PanGestureHandler>()
|
|
||||||
const onPanGestureEvent = useAnimatedGestureHandler<PanGestureHandlerGestureEvent, { offsetY?: number; startY?: number }>({
|
|
||||||
onStart: (event, context) => {
|
|
||||||
context.startY = event.absoluteY
|
|
||||||
const yForFullZoom = context.startY * 0.7
|
|
||||||
const offsetYForFullZoom = context.startY - yForFullZoom
|
|
||||||
|
|
||||||
// extrapolate [0 ... 1] zoom -> [0 ... Y_FOR_FULL_ZOOM] finger position
|
|
||||||
context.offsetY = interpolate(cameraZoom.value, [minZoom, maxZoom], [0, offsetYForFullZoom], Extrapolate.CLAMP)
|
|
||||||
},
|
|
||||||
onActive: (event, context) => {
|
|
||||||
const offset = context.offsetY ?? 0
|
|
||||||
const startY = context.startY ?? SCREEN_HEIGHT
|
|
||||||
const yForFullZoom = startY * 0.7
|
|
||||||
|
|
||||||
cameraZoom.value = interpolate(event.absoluteY - offset, [yForFullZoom, startY], [maxZoom, minZoom], Extrapolate.CLAMP)
|
|
||||||
},
|
|
||||||
})
|
|
||||||
//#endregion
|
|
||||||
|
|
||||||
const shadowStyle = useAnimatedStyle(
|
|
||||||
() => ({
|
|
||||||
transform: [
|
|
||||||
{
|
|
||||||
scale: withSpring(isPressingButton.value ? 1 : 0, {
|
|
||||||
mass: 1,
|
|
||||||
damping: 35,
|
|
||||||
stiffness: 300,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
[isPressingButton],
|
|
||||||
)
|
|
||||||
const buttonStyle = useAnimatedStyle(() => {
|
|
||||||
let scale: number
|
|
||||||
if (enabled) {
|
|
||||||
if (isPressingButton.value) {
|
|
||||||
scale = withRepeat(
|
|
||||||
withSpring(1, {
|
|
||||||
stiffness: 100,
|
|
||||||
damping: 1000,
|
|
||||||
}),
|
|
||||||
-1,
|
|
||||||
true,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
scale = withSpring(0.9, {
|
|
||||||
stiffness: 500,
|
|
||||||
damping: 300,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
scale = withSpring(0.6, {
|
|
||||||
stiffness: 500,
|
|
||||||
damping: 300,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
opacity: withTiming(enabled ? 1 : 0.3, {
|
|
||||||
duration: 100,
|
|
||||||
easing: Easing.linear,
|
|
||||||
}),
|
|
||||||
transform: [
|
|
||||||
{
|
|
||||||
scale: scale,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}, [enabled, isPressingButton])
|
|
||||||
|
|
||||||
return (
|
|
||||||
<TapGestureHandler
|
|
||||||
enabled={enabled}
|
|
||||||
ref={tapHandler}
|
|
||||||
onHandlerStateChange={onHandlerStateChanged}
|
|
||||||
shouldCancelWhenOutside={false}
|
|
||||||
maxDurationMs={99999999} // <-- this prevents the TapGestureHandler from going to State.FAILED when the user moves his finger outside of the child view (to zoom)
|
|
||||||
simultaneousHandlers={panHandler}>
|
|
||||||
<Reanimated.View {...props} style={[buttonStyle, style]}>
|
|
||||||
<PanGestureHandler
|
|
||||||
enabled={enabled}
|
|
||||||
ref={panHandler}
|
|
||||||
failOffsetX={PAN_GESTURE_HANDLER_FAIL_X}
|
|
||||||
activeOffsetY={PAN_GESTURE_HANDLER_ACTIVE_Y}
|
|
||||||
onGestureEvent={onPanGestureEvent}
|
|
||||||
simultaneousHandlers={tapHandler}>
|
|
||||||
<Reanimated.View style={styles.flex}>
|
|
||||||
<Reanimated.View style={[styles.shadow, shadowStyle]} />
|
|
||||||
<View style={styles.button} />
|
|
||||||
</Reanimated.View>
|
|
||||||
</PanGestureHandler>
|
|
||||||
</Reanimated.View>
|
|
||||||
</TapGestureHandler>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export const CaptureButton = React.memo(_CaptureButton)
|
|
||||||
|
|
||||||
const styles = StyleSheet.create({
|
|
||||||
flex: {
|
|
||||||
flex: 1,
|
|
||||||
},
|
|
||||||
shadow: {
|
|
||||||
position: 'absolute',
|
|
||||||
width: CAPTURE_BUTTON_SIZE,
|
|
||||||
height: CAPTURE_BUTTON_SIZE,
|
|
||||||
borderRadius: CAPTURE_BUTTON_SIZE / 2,
|
|
||||||
backgroundColor: '#e34077',
|
|
||||||
},
|
|
||||||
button: {
|
|
||||||
width: CAPTURE_BUTTON_SIZE,
|
|
||||||
height: CAPTURE_BUTTON_SIZE,
|
|
||||||
borderRadius: CAPTURE_BUTTON_SIZE / 2,
|
|
||||||
borderWidth: BORDER_WIDTH,
|
|
||||||
borderColor: 'white',
|
|
||||||
},
|
|
||||||
})
|
|
@@ -1,32 +0,0 @@
import { BlurView, BlurViewProps } from '@react-native-community/blur'
import React from 'react'
import { Platform, StyleSheet } from 'react-native'
import StaticSafeAreaInsets from 'react-native-static-safe-area-insets'

const FALLBACK_COLOR = 'rgba(140, 140, 140, 0.3)'

const StatusBarBlurBackgroundImpl = ({ style, ...props }: BlurViewProps): React.ReactElement | null => {
  if (Platform.OS !== 'ios') return null

  return (
    <BlurView
      style={[styles.statusBarBackground, style]}
      blurAmount={25}
      blurType="light"
      reducedTransparencyFallbackColor={FALLBACK_COLOR}
      {...props}
    />
  )
}

export const StatusBarBlurBackground = React.memo(StatusBarBlurBackgroundImpl)

const styles = StyleSheet.create({
  statusBarBackground: {
    position: 'absolute',
    top: 0,
    left: 0,
    right: 0,
    height: StaticSafeAreaInsets.safeAreaInsetsTop,
  },
})
@@ -11,7 +11,7 @@ import AVFoundation
// MARK: - CameraView + AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate

extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
-  func startRecording(options: NSDictionary, callback jsCallback: @escaping RCTResponseSenderBlock) {
+  func startRecording(options: NSDictionary, filePath: String, callback jsCallback: @escaping RCTResponseSenderBlock) {
    // Type-safety
    let callback = Callback(jsCallback)
@@ -21,6 +21,7 @@ extension CameraView: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    // Start Recording with success and error callbacks
    cameraSession.startRecording(
      options: options,
+     filePath: filePath,
      onVideoRecorded: { video in
        callback.resolve(video.toJSValue())
      },
@@ -62,6 +62,8 @@ public final class CameraView: UIView, CameraSessionDelegate {
  @objc var onStarted: RCTDirectEventBlock?
  @objc var onStopped: RCTDirectEventBlock?
  @objc var onViewReady: RCTDirectEventBlock?
+ @objc var onInitReady: RCTDirectEventBlock?
+ @objc var onVideoChunkReady: RCTDirectEventBlock?
  @objc var onCodeScanned: RCTDirectEventBlock?
  // zoom
  @objc var enableZoomGesture = false {
@@ -336,6 +338,31 @@ public final class CameraView: UIView, CameraSessionDelegate {
    #endif
  }

+ func onVideoChunkReady(chunk: ChunkedRecorder.Chunk) {
+   ReactLogger.log(level: .info, message: "Chunk ready: \(chunk)")
+
+   guard let onVideoChunkReady, let onInitReady else {
+     ReactLogger.log(level: .warning, message: "Either onInitReady or onVideoChunkReady are not valid!")
+     return
+   }
+
+   switch chunk.type {
+   case .initialization:
+     onInitReady([
+       "filepath": chunk.url.path,
+     ])
+   case let .data(index: index, duration: duration):
+     var data: [String: Any] = [
+       "filepath": chunk.url.path,
+       "index": index,
+     ]
+     if let duration {
+       data["duration"] = duration.seconds
+     }
+     onVideoChunkReady(data)
+   }
+ }
+
  func onCodeScanned(codes: [CameraSession.Code], scannerFrame: CameraSession.CodeScannerFrame) {
    guard let onCodeScanned = onCodeScanned else {
      return
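For reference, the dictionaries built in onVideoChunkReady above surface on the JS side as the following event payloads. This is a descriptive TypeScript sketch: the field names are taken directly from the Swift code, but how the onInitReady/onVideoChunkReady props are typed on the <Camera> component is not shown in this diff and is assumed here.

// Shapes of the native events emitted above (sketch; field names come from the
// Swift dictionaries, JS-side exposure of these props is an assumption).
interface OnInitReadyEvent {
  filepath: string // path to the fMP4 initialization segment (init.mp4)
}

interface OnVideoChunkReadyEvent {
  filepath: string // path to the numbered media segment, e.g. "3.mp4"
  index: number // chunk index, starting at 0
  duration?: number // seconds; only present when the segment report has a video track
}

// Example handler wiring (prop name assumed):
const onVideoChunkReady = (event: OnVideoChunkReadyEvent): void => {
  console.log(`chunk #${event.index} ready at ${event.filepath} (${event.duration ?? '?'}s)`)
}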
@@ -55,6 +55,8 @@ RCT_EXPORT_VIEW_PROPERTY(onInitialized, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onStarted, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onStopped, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onViewReady, RCTDirectEventBlock);
+ RCT_EXPORT_VIEW_PROPERTY(onInitReady, RCTDirectEventBlock);
+ RCT_EXPORT_VIEW_PROPERTY(onVideoChunkReady, RCTDirectEventBlock);
// Code Scanner
RCT_EXPORT_VIEW_PROPERTY(codeScannerOptions, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTDirectEventBlock);
@@ -62,7 +64,8 @@ RCT_EXPORT_VIEW_PROPERTY(onCodeScanned, RCTDirectEventBlock);
// Camera View Functions
RCT_EXTERN_METHOD(startRecording
                  : (nonnull NSNumber*)node options
-                 : (NSDictionary*)options onRecordCallback
+                 : (NSDictionary*)options filePath
+                 : (NSString*)filePath onRecordCallback
                  : (RCTResponseSenderBlock)onRecordCallback);
RCT_EXTERN_METHOD(pauseRecording
                  : (nonnull NSNumber*)node resolve
@@ -43,9 +43,9 @@ final class CameraViewManager: RCTViewManager {
  // This means that any errors that occur in this function have to be delegated through
  // the callback, but I'd prefer for them to throw for the original function instead.
  @objc
- final func startRecording(_ node: NSNumber, options: NSDictionary, onRecordCallback: @escaping RCTResponseSenderBlock) {
+ final func startRecording(_ node: NSNumber, options: NSDictionary, filePath: NSString, onRecordCallback: @escaping RCTResponseSenderBlock) {
    let component = getCameraView(withTag: node)
-   component.startRecording(options: options, callback: onRecordCallback)
+   component.startRecording(options: options, filePath: filePath as String, callback: onRecordCallback)
  }

  @objc
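With the native method now taking a filePath, the JS wrapper has to pass a recording directory through when it starts a recording. Below is a minimal TypeScript sketch of what such a call could look like from an app; the `filePath` option name on the JS side, the hook name, and the directory value are all assumptions, since the TS-side changes are not part of the hunks shown here.

// Sketch only (not part of this diff): assumes the TS wrapper forwards a
// `filePath` (recording directory) to the new native
// startRecording(_:options:filePath:onRecordCallback:) shown above.
import React, { useCallback } from 'react'
import { Camera } from 'react-native-vision-camera'

// Assumption: any directory the app can write to; the native code creates it if missing.
const RECORDING_DIRECTORY = '/tmp/chunked-recording'

export function useChunkedRecording(camera: React.RefObject<Camera>): { start: () => void; stop: () => Promise<void> } {
  const start = useCallback(() => {
    const options = {
      // Mirrors the new native `filePath` argument (the directory that will hold
      // init.mp4 and the numbered chunks); the JS option name is an assumption.
      filePath: RECORDING_DIRECTORY,
      onRecordingFinished: (video: { path: string }) => console.log('Recording finished:', video.path),
      onRecordingError: (error: unknown) => console.error('Recording failed:', error),
    }
    // Cast because the published RecordVideoOptions type may not include filePath yet.
    camera.current?.startRecording(options as unknown as Parameters<Camera['startRecording']>[0])
  }, [camera])

  const stop = useCallback(async () => {
    await camera.current?.stopRecording()
  }, [camera])

  return { start, stop }
}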
@@ -176,6 +176,7 @@ enum CaptureError {
  case noRecordingInProgress
  case fileError
  case createTempFileError(message: String? = nil)
+ case createRecordingDirectoryError(message: String? = nil)
  case createRecorderError(message: String? = nil)
  case videoNotEnabled
  case photoNotEnabled
@@ -193,6 +194,8 @@ enum CaptureError {
    return "file-io-error"
  case .createTempFileError:
    return "create-temp-file-error"
+ case .createRecordingDirectoryError:
+   return "create-recording-directory-error"
  case .createRecorderError:
    return "create-recorder-error"
  case .videoNotEnabled:
@@ -218,6 +221,8 @@ enum CaptureError {
    return "An unexpected File IO error occured!"
  case let .createTempFileError(message: message):
    return "Failed to create a temporary file! \(message ?? "(no additional message)")"
+ case let .createRecordingDirectoryError(message: message):
+   return "Failed to create a recording directory! \(message ?? "(no additional message)")"
  case let .createRecorderError(message: message):
    return "Failed to create the AVAssetWriter (Recorder)! \(message ?? "(no additional message)")"
  case .videoNotEnabled:
@@ -15,6 +15,7 @@ extension CameraSession {
   Starts a video + audio recording with a custom Asset Writer.
   */
  func startRecording(options: RecordVideoOptions,
+                     filePath: String,
                      onVideoRecorded: @escaping (_ video: Video) -> Void,
                      onError: @escaping (_ error: CameraError) -> Void) {
    // Run on Camera Queue
@@ -34,6 +35,14 @@ extension CameraSession {

      let enableAudio = self.configuration?.audio != .disabled

+     // Callback for when new chunks are ready
+     let onChunkReady: (ChunkedRecorder.Chunk) -> Void = { chunk in
+       guard let delegate = self.delegate else {
+         return
+       }
+       delegate.onVideoChunkReady(chunk: chunk)
+     }
+
      // Callback for when the recording ends
      let onFinish = { (recordingSession: RecordingSession, status: AVAssetWriter.Status, error: Error?) in
        defer {
@@ -62,7 +71,7 @@ extension CameraSession {
        } else {
          if status == .completed {
            // Recording was successfully saved
-           let video = Video(path: recordingSession.url.absoluteString,
+           let video = Video(path: recordingSession.outputDiretory.absoluteString,
                              duration: recordingSession.duration,
                              size: recordingSession.size ?? CGSize.zero)
            onVideoRecorded(video)
@@ -73,22 +82,22 @@ extension CameraSession {
          }
        }

-       // Create temporary file
-       let errorPointer = ErrorPointer(nilLiteral: ())
-       let fileExtension = options.fileType.descriptor ?? "mov"
-       guard let tempFilePath = RCTTempFilePath(fileExtension, errorPointer) else {
-         let message = errorPointer?.pointee?.description
-         onError(.capture(.createTempFileError(message: message)))
-         return
-       }
+       if !FileManager.default.fileExists(atPath: filePath) {
+         do {
+           try FileManager.default.createDirectory(atPath: filePath, withIntermediateDirectories: true)
+         } catch {
+           onError(.capture(.createRecordingDirectoryError(message: error.localizedDescription)))
+           return
+         }
+       }

-       ReactLogger.log(level: .info, message: "Will record to temporary file: \(tempFilePath)")
-       let tempURL = URL(string: "file://\(tempFilePath)")!
+       ReactLogger.log(level: .info, message: "Will record to temporary file: \(filePath)")

        do {
          // Create RecordingSession for the temp file
-         let recordingSession = try RecordingSession(url: tempURL,
+         let recordingSession = try RecordingSession(outputDiretory: filePath,
                                                      fileType: options.fileType,
+                                                     onChunkReady: onChunkReady,
                                                      completion: onFinish)

          // Init Audio + Activate Audio Session (optional)
@@ -33,6 +33,10 @@ protocol CameraSessionDelegate: AnyObject {
   Called for every frame (if video or frameProcessor is enabled)
   */
  func onFrame(sampleBuffer: CMSampleBuffer)
+ /**
+  Called whenever a new video chunk is available
+  */
+ func onVideoChunkReady(chunk: ChunkedRecorder.Chunk)
  /**
   Called whenever a QR/Barcode has been scanned. Only if the CodeScanner Output is enabled
   */
88  package/ios/Core/ChunkedRecorder.swift  Normal file
@@ -0,0 +1,88 @@
//
// ChunkedRecorder.swift
// VisionCamera
//
// Created by Rafael Bastos on 12/07/2024.
// Copyright © 2024 mrousavy. All rights reserved.
//

import Foundation
import AVFoundation


class ChunkedRecorder: NSObject {

  enum ChunkType {
    case initialization
    case data(index: UInt64, duration: CMTime?)
  }

  struct Chunk {
    let url: URL
    let type: ChunkType
  }

  let outputURL: URL
  let onChunkReady: ((Chunk) -> Void)

  private var chunkIndex: UInt64 = 0

  init(outputURL: URL, onChunkReady: @escaping ((Chunk) -> Void)) throws {
    self.outputURL = outputURL
    self.onChunkReady = onChunkReady
    guard FileManager.default.fileExists(atPath: outputURL.path) else {
      throw CameraError.unknown(message: "output directory does not exist at: \(outputURL.path)", cause: nil)
    }
  }

}

extension ChunkedRecorder: AVAssetWriterDelegate {

  func assetWriter(_ writer: AVAssetWriter,
                   didOutputSegmentData segmentData: Data,
                   segmentType: AVAssetSegmentType,
                   segmentReport: AVAssetSegmentReport?) {

    switch segmentType {
    case .initialization:
      saveInitSegment(segmentData)
    case .separable:
      saveSegment(segmentData, report: segmentReport)
    @unknown default:
      fatalError("Unknown AVAssetSegmentType!")
    }
  }

  private func saveInitSegment(_ data: Data) {
    let url = outputURL.appendingPathComponent("init.mp4")
    save(data: data, url: url)
    onChunkReady(url: url, type: .initialization)
  }

  private func saveSegment(_ data: Data, report: AVAssetSegmentReport?) {
    let name = "\(chunkIndex).mp4"
    let url = outputURL.appendingPathComponent(name)
    save(data: data, url: url)
    let duration = report?
      .trackReports
      .filter { $0.mediaType == .video }
      .first?
      .duration
    onChunkReady(url: url, type: .data(index: chunkIndex, duration: duration))
    chunkIndex += 1
  }

  private func save(data: Data, url: URL) {
    do {
      try data.write(to: url)
    } catch {
      ReactLogger.log(level: .error, message: "Unable to write \(url): \(error.localizedDescription)")
    }
  }

  private func onChunkReady(url: URL, type: ChunkType) {
    onChunkReady(Chunk(url: url, type: type))
  }

}
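The recorder above writes one initialization segment (init.mp4) plus numbered fMP4 media segments (0.mp4, 1.mp4, ...). A finished recording can therefore be reassembled by straight byte concatenation, initialization segment first. A minimal Node/TypeScript sketch follows, assuming the recording has already finished and all chunks are on disk; it is not part of this PR.

// Sketch: reassemble a chunked recording by concatenating init.mp4 with the
// numbered media segments in index order.
import { promises as fs } from 'fs'
import { join } from 'path'

export async function assembleRecording(chunkDir: string, outputFile: string): Promise<void> {
  const names = await fs.readdir(chunkDir)
  const segments = names
    .filter((name) => /^\d+\.mp4$/.test(name))
    .sort((a, b) => parseInt(a, 10) - parseInt(b, 10))

  // init.mp4 carries the movie header; the data chunks only contain moof/mdat boxes.
  const parts = await Promise.all(['init.mp4', ...segments].map((name) => fs.readFile(join(chunkDir, name))))
  await fs.writeFile(outputFile, Buffer.concat(parts))
}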
@@ -29,6 +29,7 @@ class RecordingSession {
  private let assetWriter: AVAssetWriter
  private var audioWriter: AVAssetWriterInput?
  private var videoWriter: AVAssetWriterInput?
+ private let recorder: ChunkedRecorder
  private let completionHandler: (RecordingSession, AVAssetWriter.Status, Error?) -> Void

  private var startTimestamp: CMTime?
@@ -48,8 +49,8 @@ class RecordingSession {
  /**
   Gets the file URL of the recorded video.
   */
- var url: URL {
-   return assetWriter.outputURL
+ var outputDiretory: URL {
+   return recorder.outputURL
  }

  /**
@@ -70,14 +71,32 @@ class RecordingSession {
    return (lastWrittenTimestamp - startTimestamp).seconds
  }

- init(url: URL,
+ init(outputDiretory: String,
       fileType: AVFileType,
+      onChunkReady: @escaping ((ChunkedRecorder.Chunk) -> Void),
       completion: @escaping (RecordingSession, AVAssetWriter.Status, Error?) -> Void) throws {
    completionHandler = completion

    do {
-     assetWriter = try AVAssetWriter(outputURL: url, fileType: fileType)
+     let outputURL = URL(fileURLWithPath: outputDiretory)
+     recorder = try ChunkedRecorder(outputURL: outputURL, onChunkReady: onChunkReady)
+     assetWriter = AVAssetWriter(contentType: UTType(fileType.rawValue)!)
      assetWriter.shouldOptimizeForNetworkUse = false
+     assetWriter.outputFileTypeProfile = .mpeg4AppleHLS
+     assetWriter.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
+
+     /*
+      Apple HLS fMP4 does not have an Edit List Box ('elst') in an initialization segment to remove
+      audio priming duration which advanced audio formats like AAC have, since the sample tables
+      are empty. As a result, if the output PTS of the first non-fully trimmed audio sample buffer is
+      kCMTimeZero, the audio samples’ presentation time in segment files may be pushed forward by the
+      audio priming duration. This may cause audio and video to be out of sync. You should add a time
+      offset to all samples to avoid this situation.
+      */
+     let startTimeOffset = CMTime(value: 10, timescale: 1)
+     assetWriter.initialSegmentStartTime = startTimeOffset
+
+     assetWriter.delegate = recorder
    } catch let error as NSError {
      throw CameraError.capture(.createRecorderError(message: error.description))
    }
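Since the writer is configured with the .mpeg4AppleHLS output profile and a 6-second preferred segment interval, the emitted chunks map directly onto an HLS media playlist. A hedged TypeScript sketch of generating one from the chunk events is below; playlist generation is not part of this PR, per-segment durations come from the onVideoChunkReady payload, and the 6-second target is used as a fallback to match the writer configuration above.

// Sketch: build a simple VOD .m3u8 from the chunk list. Assumes the chunks are
// served from the same directory as the playlist.
interface ChunkInfo {
  index: number
  duration?: number // seconds, taken from the onVideoChunkReady event
}

export function buildPlaylist(chunks: ChunkInfo[], targetDuration = 6): string {
  const lines = [
    '#EXTM3U',
    '#EXT-X-VERSION:7',
    `#EXT-X-TARGETDURATION:${targetDuration}`,
    '#EXT-X-MAP:URI="init.mp4"',
  ]
  for (const chunk of [...chunks].sort((a, b) => a.index - b.index)) {
    lines.push(`#EXTINF:${(chunk.duration ?? targetDuration).toFixed(3)},`)
    lines.push(`${chunk.index}.mp4`)
  }
  lines.push('#EXT-X-ENDLIST')
  return lines.join('\n')
}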
37
package/ios/TestRecorder/AppDelegate.swift
Normal file
37
package/ios/TestRecorder/AppDelegate.swift
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
//
|
||||||
|
// AppDelegate.swift
|
||||||
|
// TestRecorder
|
||||||
|
//
|
||||||
|
// Created by Rafael Bastos on 11/07/2024.
|
||||||
|
// Copyright © 2024 mrousavy. All rights reserved.
|
||||||
|
//
|
||||||
|
|
||||||
|
import UIKit
|
||||||
|
|
||||||
|
@main
|
||||||
|
class AppDelegate: UIResponder, UIApplicationDelegate {
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
|
||||||
|
// Override point for customization after application launch.
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// MARK: UISceneSession Lifecycle
|
||||||
|
|
||||||
|
func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {
|
||||||
|
// Called when a new scene session is being created.
|
||||||
|
// Use this method to select a configuration to create the new scene with.
|
||||||
|
return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)
|
||||||
|
}
|
||||||
|
|
||||||
|
func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {
|
||||||
|
// Called when the user discards a scene session.
|
||||||
|
// If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
|
||||||
|
// Use this method to release any resources that were specific to the discarded scenes, as they will not return.
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"colors" : [
|
||||||
|
{
|
||||||
|
"idiom" : "universal"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"info" : {
|
||||||
|
"author" : "xcode",
|
||||||
|
"version" : 1
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"images" : [
|
||||||
|
{
|
||||||
|
"idiom" : "universal",
|
||||||
|
"platform" : "ios",
|
||||||
|
"size" : "1024x1024"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"info" : {
|
||||||
|
"author" : "xcode",
|
||||||
|
"version" : 1
|
||||||
|
}
|
||||||
|
}
|
6
package/ios/TestRecorder/Assets.xcassets/Contents.json
Normal file
6
package/ios/TestRecorder/Assets.xcassets/Contents.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"info" : {
|
||||||
|
"author" : "xcode",
|
||||||
|
"version" : 1
|
||||||
|
}
|
||||||
|
}
|
25
package/ios/TestRecorder/Base.lproj/LaunchScreen.storyboard
Normal file
25
package/ios/TestRecorder/Base.lproj/LaunchScreen.storyboard
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13122.16" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
|
||||||
|
<dependencies>
|
||||||
|
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13104.12"/>
|
||||||
|
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
|
||||||
|
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||||
|
</dependencies>
|
||||||
|
<scenes>
|
||||||
|
<!--View Controller-->
|
||||||
|
<scene sceneID="EHf-IW-A2E">
|
||||||
|
<objects>
|
||||||
|
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
|
||||||
|
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
|
||||||
|
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
|
||||||
|
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||||
|
<color key="backgroundColor" xcode11CocoaTouchSystemColor="systemBackgroundColor" cocoaTouchSystemColor="whiteColor"/>
|
||||||
|
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
|
||||||
|
</view>
|
||||||
|
</viewController>
|
||||||
|
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
|
||||||
|
</objects>
|
||||||
|
<point key="canvasLocation" x="53" y="375"/>
|
||||||
|
</scene>
|
||||||
|
</scenes>
|
||||||
|
</document>
|
51
package/ios/TestRecorder/Base.lproj/Main.storyboard
Normal file
51
package/ios/TestRecorder/Base.lproj/Main.storyboard
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="32700.99.1234" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
|
||||||
|
<device id="retina6_12" orientation="portrait" appearance="light"/>
|
||||||
|
<dependencies>
|
||||||
|
<deployment identifier="iOS"/>
|
||||||
|
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="22685"/>
|
||||||
|
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
|
||||||
|
<capability name="System colors in document resources" minToolsVersion="11.0"/>
|
||||||
|
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
|
||||||
|
</dependencies>
|
||||||
|
<scenes>
|
||||||
|
<!--View Controller-->
|
||||||
|
<scene sceneID="tne-QT-ifu">
|
||||||
|
<objects>
|
||||||
|
<viewController id="BYZ-38-t0r" customClass="ViewController" customModule="TestRecorder" customModuleProvider="target" sceneMemberID="viewController">
|
||||||
|
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
|
||||||
|
<rect key="frame" x="0.0" y="0.0" width="393" height="852"/>
|
||||||
|
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
|
||||||
|
<subviews>
|
||||||
|
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="VWP-nN-U6K">
|
||||||
|
<rect key="frame" x="157.33333333333334" y="722.66666666666663" width="78.333333333333343" height="34.333333333333371"/>
|
||||||
|
<inset key="imageEdgeInsets" minX="0.0" minY="0.0" maxX="2.2250738585072014e-308" maxY="0.0"/>
|
||||||
|
<state key="normal" title="Record"/>
|
||||||
|
<buttonConfiguration key="configuration" style="filled" title="Record"/>
|
||||||
|
<connections>
|
||||||
|
<action selector="toggleRecord:" destination="BYZ-38-t0r" eventType="touchUpInside" id="63a-uH-hTe"/>
|
||||||
|
</connections>
|
||||||
|
</button>
|
||||||
|
</subviews>
|
||||||
|
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
|
||||||
|
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
|
||||||
|
<constraints>
|
||||||
|
<constraint firstItem="6Tk-OE-BBY" firstAttribute="bottom" secondItem="VWP-nN-U6K" secondAttribute="bottom" constant="61" id="0iW-h7-WDE"/>
|
||||||
|
<constraint firstItem="VWP-nN-U6K" firstAttribute="centerX" secondItem="6Tk-OE-BBY" secondAttribute="centerX" id="yZb-ba-qfO"/>
|
||||||
|
</constraints>
|
||||||
|
</view>
|
||||||
|
<connections>
|
||||||
|
<outlet property="recordButton" destination="VWP-nN-U6K" id="gSk-uh-nDX"/>
|
||||||
|
</connections>
|
||||||
|
</viewController>
|
||||||
|
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
|
||||||
|
</objects>
|
||||||
|
<point key="canvasLocation" x="115" y="-27"/>
|
||||||
|
</scene>
|
||||||
|
</scenes>
|
||||||
|
<resources>
|
||||||
|
<systemColor name="systemBackgroundColor">
|
||||||
|
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
|
||||||
|
</systemColor>
|
||||||
|
</resources>
|
||||||
|
</document>
|
25
package/ios/TestRecorder/Info.plist
Normal file
25
package/ios/TestRecorder/Info.plist
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
|
<plist version="1.0">
|
||||||
|
<dict>
|
||||||
|
<key>UIApplicationSceneManifest</key>
|
||||||
|
<dict>
|
||||||
|
<key>UIApplicationSupportsMultipleScenes</key>
|
||||||
|
<false/>
|
||||||
|
<key>UISceneConfigurations</key>
|
||||||
|
<dict>
|
||||||
|
<key>UIWindowSceneSessionRoleApplication</key>
|
||||||
|
<array>
|
||||||
|
<dict>
|
||||||
|
<key>UISceneConfigurationName</key>
|
||||||
|
<string>Default Configuration</string>
|
||||||
|
<key>UISceneDelegateClassName</key>
|
||||||
|
<string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
|
||||||
|
<key>UISceneStoryboardFile</key>
|
||||||
|
<string>Main</string>
|
||||||
|
</dict>
|
||||||
|
</array>
|
||||||
|
</dict>
|
||||||
|
</dict>
|
||||||
|
</dict>
|
||||||
|
</plist>
|
15
package/ios/TestRecorder/ReactStubs.h
Normal file
15
package/ios/TestRecorder/ReactStubs.h
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
//
|
||||||
|
// ReactStubs.h
|
||||||
|
// TestRecorder
|
||||||
|
//
|
||||||
|
// Created by Rafael Bastos on 12/07/2024.
|
||||||
|
// Copyright © 2024 mrousavy. All rights reserved.
|
||||||
|
//
|
||||||
|
|
||||||
|
#import <UIKit/UIKit.h>
|
||||||
|
|
||||||
|
@interface UIView (React)
|
||||||
|
|
||||||
|
- (void)didSetProps:(NSArray<NSString *> *)changedProps;
|
||||||
|
|
||||||
|
@end
|
17
package/ios/TestRecorder/ReactStubs.m
Normal file
17
package/ios/TestRecorder/ReactStubs.m
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
//
|
||||||
|
// ReactStubs.m
|
||||||
|
// TestRecorder
|
||||||
|
//
|
||||||
|
// Created by Rafael Bastos on 12/07/2024.
|
||||||
|
// Copyright © 2024 mrousavy. All rights reserved.
|
||||||
|
//
|
||||||
|
|
||||||
|
#import "ReactStubs.h"
|
||||||
|
|
||||||
|
@implementation UIView (React)
|
||||||
|
|
||||||
|
- (void)didSetProps:(__unused NSArray<NSString *> *)changedProps
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
@end
|
102
package/ios/TestRecorder/ReactStubs.swift
Normal file
102
package/ios/TestRecorder/ReactStubs.swift
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
//
|
||||||
|
// ReactStubs.swift
|
||||||
|
// TestRecorder
|
||||||
|
//
|
||||||
|
// Created by Rafael Bastos on 11/07/2024.
|
||||||
|
// Copyright © 2024 mrousavy. All rights reserved.
|
||||||
|
//
|
||||||
|
|
||||||
|
import UIKit
|
||||||
|
|
||||||
|
|
||||||
|
enum RCTLogLevel: String {
|
||||||
|
case trace
|
||||||
|
case info
|
||||||
|
case warning
|
||||||
|
case error
|
||||||
|
}
|
||||||
|
|
||||||
|
enum RCTLogSource {
|
||||||
|
case native
|
||||||
|
}
|
||||||
|
|
||||||
|
func RCTDefaultLogFunction(_ level: RCTLogLevel, _ source: RCTLogSource, _ file: String, _ line: NSNumber, _ message: String) {
|
||||||
|
print(level.rawValue, "-", message)
|
||||||
|
}
|
||||||
|
|
||||||
|
typealias RCTDirectEventBlock = (Any?) -> Void
|
||||||
|
typealias RCTPromiseResolveBlock = (Any?) -> Void
|
||||||
|
typealias RCTPromiseRejectBlock = (String, String, NSError?) -> Void
|
||||||
|
typealias RCTResponseSenderBlock = (Any) -> Void
|
||||||
|
|
||||||
|
func NSNull() -> [String: String] {
|
||||||
|
return [:]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
func makeReactError(_ cameraError: CameraError, cause: NSError?) -> [String: Any] {
|
||||||
|
var causeDictionary: [String: Any]?
|
||||||
|
if let cause = cause {
|
||||||
|
causeDictionary = [
|
||||||
|
"cause": "\(cause.domain): \(cause.code) \(cause.description)",
|
||||||
|
"userInfo": cause.userInfo
|
||||||
|
]
|
||||||
|
}
|
||||||
|
return [
|
||||||
|
"error": "\(cameraError.code): \(cameraError.message)",
|
||||||
|
"extra": [
|
||||||
|
"code": cameraError.code,
|
||||||
|
"message": cameraError.message,
|
||||||
|
"cause": causeDictionary ?? NSNull(),
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
func makeReactError(_ cameraError: CameraError) -> [String: Any] {
|
||||||
|
return makeReactError(cameraError, cause: nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class RCTFPSGraph: UIView {
|
||||||
|
convenience init(frame: CGRect, color: UIColor) {
|
||||||
|
self.init(frame: frame)
|
||||||
|
}
|
||||||
|
|
||||||
|
func onTick(_ tick: CFTimeInterval) {
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func RCTTempFilePath(_ ext: String, _ error: ErrorPointer) -> String? {
|
||||||
|
let directory = NSTemporaryDirectory().appending("ReactNative")
|
||||||
|
let fm = FileManager.default
|
||||||
|
if fm.fileExists(atPath: directory) {
|
||||||
|
try! fm.removeItem(atPath: directory)
|
||||||
|
}
|
||||||
|
if !fm.fileExists(atPath: directory) {
|
||||||
|
try! fm.createDirectory(atPath: directory, withIntermediateDirectories: true)
|
||||||
|
}
|
||||||
|
return directory
|
||||||
|
.appending("/").appending(UUID().uuidString)
|
||||||
|
.appending(".").appending(ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class RCTViewManager: NSObject {
|
||||||
|
|
||||||
|
var methodQueue: DispatchQueue! { nil }
|
||||||
|
class func requiresMainQueueSetup() -> Bool { false }
|
||||||
|
func view() -> UIView! { nil }
|
||||||
|
|
||||||
|
struct Bridge {
|
||||||
|
let uiManager = UIManager()
|
||||||
|
}
|
||||||
|
|
||||||
|
struct UIManager {
|
||||||
|
func view(forReactTag: NSNumber) -> UIView! {
|
||||||
|
nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let bridge: Bridge = Bridge()
|
||||||
|
}
|
53
package/ios/TestRecorder/SceneDelegate.swift
Normal file
53
package/ios/TestRecorder/SceneDelegate.swift
Normal file
@@ -0,0 +1,53 @@
//
//  SceneDelegate.swift
//  TestRecorder
//
//  Created by Rafael Bastos on 11/07/2024.
//  Copyright © 2024 mrousavy. All rights reserved.
//

import UIKit

class SceneDelegate: UIResponder, UIWindowSceneDelegate {

  var window: UIWindow?

  func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
    // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
    // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
    // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
    guard let _ = (scene as? UIWindowScene) else { return }
  }

  func sceneDidDisconnect(_ scene: UIScene) {
    // Called as the scene is being released by the system.
    // This occurs shortly after the scene enters the background, or when its session is discarded.
    // Release any resources associated with this scene that can be re-created the next time the scene connects.
    // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
  }

  func sceneDidBecomeActive(_ scene: UIScene) {
    // Called when the scene has moved from an inactive state to an active state.
    // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
  }

  func sceneWillResignActive(_ scene: UIScene) {
    // Called when the scene will move from an active state to an inactive state.
    // This may occur due to temporary interruptions (ex. an incoming phone call).
  }

  func sceneWillEnterForeground(_ scene: UIScene) {
    // Called as the scene transitions from the background to the foreground.
    // Use this method to undo the changes made on entering the background.
  }

  func sceneDidEnterBackground(_ scene: UIScene) {
    // Called as the scene transitions from the foreground to the background.
    // Use this method to save data, release shared resources, and store enough scene-specific state information
    // to restore the scene back to its current state.
  }

}
6
package/ios/TestRecorder/TestRecorder-Bridging-Header.h
Normal file
@@ -0,0 +1,6 @@
//
//  Use this file to import your target's public headers that you would like to expose to Swift.
//


#import "ReactStubs.h"
117
package/ios/TestRecorder/ViewController.swift
Normal file
@@ -0,0 +1,117 @@
//
//  ViewController.swift
//  TestRecorder
//
//  Created by Rafael Bastos on 11/07/2024.
//  Copyright © 2024 mrousavy. All rights reserved.
//

import UIKit
import AVFoundation

class ViewController: UIViewController {

  @IBOutlet weak var recordButton: UIButton!

  let cameraView = CameraView()
  let filePath: String = {
    NSTemporaryDirectory() + "TestRecorder"
  }()

  override func viewDidLoad() {
    super.viewDidLoad()

    try? FileManager.default.removeItem(atPath: filePath)

    cameraView.translatesAutoresizingMaskIntoConstraints = false
    view.insertSubview(cameraView, at: 0)
    NSLayoutConstraint.activate([
      cameraView.topAnchor.constraint(equalTo: view.topAnchor),
      cameraView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
      cameraView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
      cameraView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
    ])

    recordButton.isHidden = true
    cameraView.onInitialized = { _ in
      DispatchQueue.main.async {
        self.recordButton.isHidden = false
      }
    }
    cameraView.onInitReady = { json in
      print("onInitReady:", json ?? "nil")
    }
    cameraView.onVideoChunkReady = { json in
      print("onVideoChunkReady:", json ?? "nil")
    }

    Task { @MainActor in
      await requestAuthorizations()

      cameraView.photo = true
      cameraView.video = true
      cameraView.audio = false
      cameraView.isActive = true
      cameraView.cameraId = getCameraDeviceId() as NSString?
      cameraView.didSetProps([])
    }
  }

  func isAuthorized(for mediaType: AVMediaType) async -> Bool {
    let status = AVCaptureDevice.authorizationStatus(for: mediaType)
    var isAuthorized = status == .authorized
    if status == .notDetermined {
      isAuthorized = await AVCaptureDevice.requestAccess(for: mediaType)
    }
    return isAuthorized
  }

  func requestAuthorizations() async {
    guard await isAuthorized(for: .video) else { return }
    guard await isAuthorized(for: .audio) else { return }
    // Set up the capture session.
  }

  private func getCameraDeviceId() -> String? {
    let deviceTypes: [AVCaptureDevice.DeviceType] = [
      .builtInWideAngleCamera
    ]
    let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: deviceTypes, mediaType: .video, position: .back)

    let device = discoverySession.devices.first

    return device?.uniqueID
  }

  @IBAction
  func toggleRecord(_ button: UIButton) {
    if button.title(for: .normal) == "Stop" {

      cameraView.stopRecording(promise: Promise(
        resolver: { result in
          print("result")
        }, rejecter: { code, message, cause in
          print("error")
        }))

      button.setTitle("Record", for: .normal)
      button.configuration = .filled()

    } else {
      cameraView.startRecording(
        options: [
          "fileType": "mp4",
          "videoCodec": "h265",
        ],
        filePath: filePath) { callback in
          print("callback", callback)
        }

      button.setTitle("Stop", for: .normal)
      button.configuration = .bordered()
    }
  }

}
@@ -10,7 +10,7 @@ import AVFoundation
 import Foundation

 struct RecordVideoOptions {
-  var fileType: AVFileType = .mov
+  var fileType: AVFileType = .mp4
   var flash: Torch = .off
   var codec: AVVideoCodecType?
   /**
@@ -7,6 +7,79 @@
 objects = {

 /* Begin PBXBuildFile section */
+ B31481772C46547B00084A26 /* CameraViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518125E0102000DB86D6 /* CameraViewManager.swift */; };
+ B31481782C46558C00084A26 /* CameraView+TakePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517125E0102000DB86D6 /* CameraView+TakePhoto.swift */; };
+ B31481792C46559700084A26 /* CameraView+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC52AD7F08E00169C0D /* CameraView+Focus.swift */; };
+ B3AF8E862C410FB700CC198C /* ReactStubs.m in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E852C410FB700CC198C /* ReactStubs.m */; };
+ B3AF8E882C41159300CC198C /* ChunkedRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */; };
+ B3AF8E892C41159300CC198C /* ChunkedRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */; };
+ B3EF9F0D2C3FBD8300832EE7 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */; };
+ B3EF9F0F2C3FBD8300832EE7 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */; };
+ B3EF9F112C3FBD8300832EE7 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F102C3FBD8300832EE7 /* ViewController.swift */; };
+ B3EF9F142C3FBD8300832EE7 /* Base in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F132C3FBD8300832EE7 /* Base */; };
+ B3EF9F162C3FBD8400832EE7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */; };
+ B3EF9F192C3FBD8400832EE7 /* Base in Resources */ = {isa = PBXBuildFile; fileRef = B3EF9F182C3FBD8400832EE7 /* Base */; };
+ B3EF9F1E2C3FBDCF00832EE7 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518425E0102000DB86D6 /* CameraView.swift */; };
+ B3EF9F1F2C3FBDDC00832EE7 /* ReactLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516F25E0102000DB86D6 /* ReactLogger.swift */; };
+ B3EF9F212C3FBDFC00832EE7 /* ReactStubs.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */; };
+ B3EF9F222C3FBE8200832EE7 /* CameraConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E62AD698DF00E93869 /* CameraConfiguration.swift */; };
+ B3EF9F232C3FBE8B00832EE7 /* VideoStabilizationMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882332AD969E000317161 /* VideoStabilizationMode.swift */; };
+ B3EF9F242C3FBEBC00832EE7 /* CameraError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518325E0102000DB86D6 /* CameraError.swift */; };
+ B3EF9F252C3FBED900832EE7 /* Orientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DE2AD6FB230087F063 /* Orientation.swift */; };
+ B3EF9F262C3FBEEA00832EE7 /* CameraDeviceFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882312AD966FC00317161 /* CameraDeviceFormat.swift */; };
+ B3EF9F272C3FBEF800832EE7 /* PixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87B11BE2A8E63B700732EBF /* PixelFormat.swift */; };
+ B3EF9F282C3FBF1900832EE7 /* JSUnionValue.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882372AD96B4400317161 /* JSUnionValue.swift */; };
+ B3EF9F292C3FBF2500832EE7 /* Torch.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E02AD7046E0087F063 /* Torch.swift */; };
+ B3EF9F2A2C3FBF3400832EE7 /* CodeScannerOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60AD2ACC9731009D612F /* CodeScannerOptions.swift */; };
+ B3EF9F2B2C3FBF4100832EE7 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8FF60B02ACC981B009D612F /* AVMetadataObject.ObjectType+descriptor.swift */; };
+ B3EF9F2C2C3FBF4A00832EE7 /* EnumParserError.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517325E0102000DB86D6 /* EnumParserError.swift */; };
+ B3EF9F2D2C3FBF9600832EE7 /* CameraSessionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */; };
+ B3EF9F2E2C3FBFA600832EE7 /* CameraSession+CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EC2AD6A5E600E93869 /* CameraSession+CodeScanner.swift */; };
+ B3EF9F2F2C3FBFB200832EE7 /* CameraSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E42AD68D9300E93869 /* CameraSession.swift */; };
+ B3EF9F302C3FBFBB00832EE7 /* RecordingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */; };
+ B3EF9F312C3FBFD500832EE7 /* AVAssetWriter.Status+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BC7263DC28C004C18D7 /* AVAssetWriter.Status+descriptor.swift */; };
+ B3EF9F322C3FBFF100832EE7 /* CameraQueues.swift in Sources */ = {isa = PBXBuildFile; fileRef = B84760DE2608F57D004C3180 /* CameraQueues.swift */; };
+ B3EF9F332C3FC00900832EE7 /* CameraSession+Configuration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC72AD8005400169C0D /* CameraSession+Configuration.swift */; };
+ B3EF9F362C3FC05600832EE7 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; };
+ B3EF9F372C3FC0CA00832EE7 /* CameraView+Zoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887518225E0102000DB86D6 /* CameraView+Zoom.swift */; };
+ B3EF9F382C3FC0D900832EE7 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B83D5EE629377117000AFD2F /* PreviewView.swift */; };
+ B3EF9F3A2C3FC2EB00832EE7 /* AutoFocusSystem.swift in Sources */ = {isa = PBXBuildFile; fileRef = B85882352AD96AFF00317161 /* AutoFocusSystem.swift */; };
+ B3EF9F3C2C3FC30D00832EE7 /* AVCaptureDevice.Position+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517C25E0102000DB86D6 /* AVCaptureDevice.Position+descriptor.swift */; };
+ B3EF9F4A2C3FC31E00832EE7 /* AVFrameRateRange+includes.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516725E0102000DB86D6 /* AVFrameRateRange+includes.swift */; };
+ B3EF9F4B2C3FC31E00832EE7 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8D22CDB2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift */; };
+ B3EF9F4C2C3FC31E00832EE7 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
+ B3EF9F4D2C3FC31E00832EE7 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35F2ABC8E4E009B21C8 /* AVCaptureVideoDataOutput+findPixelFormat.swift */; };
+ B3EF9F4E2C3FC31E00832EE7 /* AVCaptureOutput+mirror.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516825E0102000DB86D6 /* AVCaptureOutput+mirror.swift */; };
+ B3EF9F4F2C3FC31E00832EE7 /* Collection+safe.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516225E0102000DB86D6 /* Collection+safe.swift */; };
+ B3EF9F502C3FC31E00832EE7 /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8207AAE2B0E67460002990F /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift */; };
+ B3EF9F512C3FC31E00832EE7 /* AVCaptureDevice+minFocusDistance.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88977BD2B556DBA0095C92C /* AVCaptureDevice+minFocusDistance.swift */; };
+ B3EF9F522C3FC31E00832EE7 /* AVCaptureDevice+physicalDevices.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516625E0102000DB86D6 /* AVCaptureDevice+physicalDevices.swift */; };
+ B3EF9F532C3FC31E00832EE7 /* AVCaptureDevice+neutralZoom.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516325E0102000DB86D6 /* AVCaptureDevice+neutralZoom.swift */; };
+ B3EF9F542C3FC31E00832EE7 /* AVCaptureDevice.Format+dimensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B81BE1BE26B936FF002696CC /* AVCaptureDevice.Format+dimensions.swift */; };
+ B3EF9F552C3FC31E00832EE7 /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC32AD7EDE800169C0D /* AVCaptureVideoDataOutput+pixelFormat.swift */; };
+ B3EF9F562C3FC31E00832EE7 /* AVCaptureSession+synchronizeBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8207AAC2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift */; };
+ B3EF9F572C3FC31E00832EE7 /* AVCaptureDevice+isMultiCam.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516525E0102000DB86D6 /* AVCaptureDevice+isMultiCam.swift */; };
+ B3EF9F582C3FC31E00832EE7 /* AVCaptureDevice+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B881D35D2ABC775E009B21C8 /* AVCaptureDevice+toDictionary.swift */; };
+ B3EF9F592C3FC31E00832EE7 /* AVCaptureDevice.Format+toDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887516A25E0102000DB86D6 /* AVCaptureDevice.Format+toDictionary.swift */; };
+ B3EF9F5A2C3FC31E00832EE7 /* CMVideoDimensions+toCGSize.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8F127CF2ACF054A00B39EA3 /* CMVideoDimensions+toCGSize.swift */; };
+ B3EF9F5B2C3FC33000832EE7 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517A25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift */; };
+ B3EF9F5C2C3FC33E00832EE7 /* RecordVideoOptions.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AEC92AD8034E00169C0D /* RecordVideoOptions.swift */; };
+ B3EF9F5D2C3FC34600832EE7 /* Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8A1AECB2AD803B200169C0D /* Video.swift */; };
+ B3EF9F5E2C3FC43000832EE7 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517925E0102000DB86D6 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift */; };
+ B3EF9F5F2C3FC43000832EE7 /* AVAuthorizationStatus+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517B25E0102000DB86D6 /* AVAuthorizationStatus+descriptor.swift */; };
+ B3EF9F602C3FC43000832EE7 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517E25E0102000DB86D6 /* AVCaptureDevice.FlashMode+descriptor.swift */; };
+ B3EF9F612C3FC43000832EE7 /* AVFileType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8DB3BCB263DC97E004C18D7 /* AVFileType+descriptor.swift */; };
+ B3EF9F622C3FC43000832EE7 /* AVVideoCodecType+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517525E0102000DB86D6 /* AVVideoCodecType+descriptor.swift */; };
+ B3EF9F632C3FC43000832EE7 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517725E0102000DB86D6 /* AVCaptureDevice.TorchMode+descriptor.swift */; };
+ B3EF9F642C3FC43000832EE7 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517F25E0102000DB86D6 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift */; };
+ B3EF9F652C3FC43C00832EE7 /* CameraSession+Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DA2AD6F0A00087F063 /* CameraSession+Audio.swift */; };
+ B3EF9F662C3FC44B00832EE7 /* CameraSession+Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685E82AD6A5D600E93869 /* CameraSession+Video.swift */; };
+ B3EF9F672C3FC44B00832EE7 /* CameraSession+Photo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88685EA2AD6A5DE00E93869 /* CameraSession+Photo.swift */; };
+ B3EF9F682C3FC44B00832EE7 /* CameraSession+Focus.swift in Sources */ = {isa = PBXBuildFile; fileRef = B88103DC2AD6F62C0087F063 /* CameraSession+Focus.swift */; };
+ B3EF9F692C3FC44B00832EE7 /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */; };
+ B3EF9F6A2C3FC46900832EE7 /* Promise.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887517025E0102000DB86D6 /* Promise.swift */; };
+ B3EF9F6B2C3FD35600832EE7 /* CameraView+RecordVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B887515D25E0102000DB86D6 /* CameraView+RecordVideo.swift */; };
+ B3EF9F6C2C3FD36800832EE7 /* Callback.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BD3BA1266E22D2006C80A2 /* Callback.swift */; };
 B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */; };
 B80C0E00260BDDF7001699AB /* FrameProcessorPluginRegistry.m in Sources */ = {isa = PBXBuildFile; fileRef = B80C0DFF260BDDF7001699AB /* FrameProcessorPluginRegistry.m */; };
 B80E06A0266632F000728644 /* AVAudioSession+updateCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = B80E069F266632F000728644 /* AVAudioSession+updateCategory.swift */; };
@@ -94,6 +167,19 @@

 /* Begin PBXFileReference section */
 134814201AA4EA6300B7C361 /* libVisionCamera.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libVisionCamera.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ B3AF8E832C410FB600CC198C /* TestRecorder-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "TestRecorder-Bridging-Header.h"; sourceTree = "<group>"; };
+ B3AF8E842C410FB700CC198C /* ReactStubs.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ReactStubs.h; sourceTree = "<group>"; };
+ B3AF8E852C410FB700CC198C /* ReactStubs.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ReactStubs.m; sourceTree = "<group>"; };
+ B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChunkedRecorder.swift; sourceTree = "<group>"; };
+ B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TestRecorder.app; sourceTree = BUILT_PRODUCTS_DIR; };
+ B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
+ B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; };
+ B3EF9F102C3FBD8300832EE7 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
+ B3EF9F132C3FBD8300832EE7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
+ B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+ B3EF9F182C3FBD8400832EE7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
+ B3EF9F1A2C3FBD8400832EE7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ReactStubs.swift; sourceTree = "<group>"; };
 B80175EB2ABDEBD000E7DE90 /* ResizeMode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ResizeMode.swift; sourceTree = "<group>"; };
 B80C02EB2A6A954D001975E2 /* FrameProcessorPluginHostObject.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FrameProcessorPluginHostObject.mm; sourceTree = "<group>"; };
 B80C02EC2A6A9552001975E2 /* FrameProcessorPluginHostObject.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FrameProcessorPluginHostObject.h; sourceTree = "<group>"; };
@@ -191,6 +277,13 @@
 );
 runOnlyForDeploymentPostprocessing = 0;
 };
+ B3EF9F072C3FBD8300832EE7 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
 /* End PBXFrameworksBuildPhase section */

 /* Begin PBXGroup section */
@@ -221,10 +314,30 @@
 B887516125E0102000DB86D6 /* Extensions */,
 B887517225E0102000DB86D6 /* Parsers */,
 B887516D25E0102000DB86D6 /* React Utils */,
+ B3EF9F0B2C3FBD8300832EE7 /* TestRecorder */,
 134814211AA4EA7D00B7C361 /* Products */,
+ B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */,
 );
 sourceTree = "<group>";
 };
+ B3EF9F0B2C3FBD8300832EE7 /* TestRecorder */ = {
+ isa = PBXGroup;
+ children = (
+ B3EF9F0C2C3FBD8300832EE7 /* AppDelegate.swift */,
+ B3EF9F0E2C3FBD8300832EE7 /* SceneDelegate.swift */,
+ B3EF9F102C3FBD8300832EE7 /* ViewController.swift */,
+ B3EF9F202C3FBDFC00832EE7 /* ReactStubs.swift */,
+ B3AF8E842C410FB700CC198C /* ReactStubs.h */,
+ B3AF8E852C410FB700CC198C /* ReactStubs.m */,
+ B3AF8E832C410FB600CC198C /* TestRecorder-Bridging-Header.h */,
+ B3EF9F122C3FBD8300832EE7 /* Main.storyboard */,
+ B3EF9F152C3FBD8400832EE7 /* Assets.xcassets */,
+ B3EF9F172C3FBD8400832EE7 /* LaunchScreen.storyboard */,
+ B3EF9F1A2C3FBD8400832EE7 /* Info.plist */,
+ );
+ path = TestRecorder;
+ sourceTree = "<group>";
+ };
 B80175EA2ABDEBBB00E7DE90 /* Types */ = {
 isa = PBXGroup;
 children = (
@@ -257,6 +370,7 @@
 B88103E22AD7065C0087F063 /* CameraSessionDelegate.swift */,
 B83D5EE629377117000AFD2F /* PreviewView.swift */,
 B8DB3BC9263DC4D8004C18D7 /* RecordingSession.swift */,
+ B3AF8E872C41159300CC198C /* ChunkedRecorder.swift */,
 B887515C25E0102000DB86D6 /* PhotoCaptureDelegate.swift */,
 B84760DE2608F57D004C3180 /* CameraQueues.swift */,
 B887518325E0102000DB86D6 /* CameraError.swift */,
@@ -366,18 +480,42 @@
 productReference = 134814201AA4EA6300B7C361 /* libVisionCamera.a */;
 productType = "com.apple.product-type.library.static";
 };
+ B3EF9F092C3FBD8300832EE7 /* TestRecorder */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = B3EF9F1D2C3FBD8400832EE7 /* Build configuration list for PBXNativeTarget "TestRecorder" */;
+ buildPhases = (
+ B3EF9F062C3FBD8300832EE7 /* Sources */,
+ B3EF9F072C3FBD8300832EE7 /* Frameworks */,
+ B3EF9F082C3FBD8300832EE7 /* Resources */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = TestRecorder;
+ productName = TestRecorder;
+ productReference = B3EF9F0A2C3FBD8300832EE7 /* TestRecorder.app */;
+ productType = "com.apple.product-type.application";
+ };
 /* End PBXNativeTarget section */

 /* Begin PBXProject section */
 58B511D31A9E6C8500147676 /* Project object */ = {
 isa = PBXProject;
 attributes = {
+ LastSwiftUpdateCheck = 1540;
 LastUpgradeCheck = 1240;
 ORGANIZATIONNAME = mrousavy;
 TargetAttributes = {
 58B511DA1A9E6C8500147676 = {
 CreatedOnToolsVersion = 6.1.1;
 };
+ B3EF9F092C3FBD8300832EE7 = {
+ CreatedOnToolsVersion = 15.4;
+ DevelopmentTeam = HP3AMBWJGS;
+ LastSwiftMigration = 1540;
+ ProvisioningStyle = Automatic;
+ };
 };
 };
 buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "VisionCamera" */;
@@ -387,6 +525,7 @@
 knownRegions = (
 English,
 en,
+ Base,
 );
 mainGroup = 58B511D21A9E6C8500147676;
 productRefGroup = 58B511D21A9E6C8500147676;
@@ -394,10 +533,24 @@
 projectRoot = "";
 targets = (
 58B511DA1A9E6C8500147676 /* VisionCamera */,
+ B3EF9F092C3FBD8300832EE7 /* TestRecorder */,
 );
 };
 /* End PBXProject section */

+ /* Begin PBXResourcesBuildPhase section */
+ B3EF9F082C3FBD8300832EE7 /* Resources */ = {
+ isa = PBXResourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ B3EF9F162C3FBD8400832EE7 /* Assets.xcassets in Resources */,
+ B3EF9F192C3FBD8400832EE7 /* Base in Resources */,
+ B3EF9F142C3FBD8300832EE7 /* Base in Resources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ /* End PBXResourcesBuildPhase section */
+
 /* Begin PBXShellScriptBuildPhase section */
 B80D6CAB25F770FE006F2CB7 /* Run SwiftFormat */ = {
 isa = PBXShellScriptBuildPhase;
@@ -490,6 +643,7 @@
 B88977BE2B556DBA0095C92C /* AVCaptureDevice+minFocusDistance.swift in Sources */,
 B80175EC2ABDEBD000E7DE90 /* ResizeMode.swift in Sources */,
 B887519F25E0102000DB86D6 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */,
+ B3AF8E882C41159300CC198C /* ChunkedRecorder.swift in Sources */,
 B88685ED2AD6A5E600E93869 /* CameraSession+CodeScanner.swift in Sources */,
 B8207AAD2B0E5DD70002990F /* AVCaptureSession+synchronizeBuffer.swift in Sources */,
 B8D22CDC2642DB4D00234472 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
@@ -516,8 +670,103 @@
 );
 runOnlyForDeploymentPostprocessing = 0;
 };
+ B3EF9F062C3FBD8300832EE7 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ B3EF9F372C3FC0CA00832EE7 /* CameraView+Zoom.swift in Sources */,
+ B3EF9F232C3FBE8B00832EE7 /* VideoStabilizationMode.swift in Sources */,
+ B3EF9F4A2C3FC31E00832EE7 /* AVFrameRateRange+includes.swift in Sources */,
+ B3EF9F6A2C3FC46900832EE7 /* Promise.swift in Sources */,
+ B3EF9F4B2C3FC31E00832EE7 /* AVAssetWriterInputPixelBufferAdaptor+initWithVideoSettings.swift in Sources */,
+ B3EF9F5E2C3FC43000832EE7 /* AVCapturePhotoOutput.QualityPrioritization+descriptor.swift in Sources */,
+ B3AF8E892C41159300CC198C /* ChunkedRecorder.swift in Sources */,
+ B3EF9F5F2C3FC43000832EE7 /* AVAuthorizationStatus+descriptor.swift in Sources */,
+ B3EF9F602C3FC43000832EE7 /* AVCaptureDevice.FlashMode+descriptor.swift in Sources */,
+ B3EF9F612C3FC43000832EE7 /* AVFileType+descriptor.swift in Sources */,
+ B3EF9F622C3FC43000832EE7 /* AVVideoCodecType+descriptor.swift in Sources */,
+ B3EF9F632C3FC43000832EE7 /* AVCaptureDevice.TorchMode+descriptor.swift in Sources */,
+ B3EF9F642C3FC43000832EE7 /* AVCaptureDevice.Format.AutoFocusSystem+descriptor.swift in Sources */,
+ B3EF9F4C2C3FC31E00832EE7 /* AVAudioSession+updateCategory.swift in Sources */,
+ B3EF9F4D2C3FC31E00832EE7 /* AVCaptureVideoDataOutput+findPixelFormat.swift in Sources */,
+ B3EF9F4E2C3FC31E00832EE7 /* AVCaptureOutput+mirror.swift in Sources */,
+ B3EF9F4F2C3FC31E00832EE7 /* Collection+safe.swift in Sources */,
+ B3EF9F502C3FC31E00832EE7 /* AVCaptureVideoDataOutput+recommendedVideoSettings.swift in Sources */,
+ B3EF9F512C3FC31E00832EE7 /* AVCaptureDevice+minFocusDistance.swift in Sources */,
+ B3EF9F5B2C3FC33000832EE7 /* AVCaptureDevice.DeviceType+physicalDeviceDescriptor.swift in Sources */,
+ B31481792C46559700084A26 /* CameraView+Focus.swift in Sources */,
+ B31481772C46547B00084A26 /* CameraViewManager.swift in Sources */,
+ B3EF9F522C3FC31E00832EE7 /* AVCaptureDevice+physicalDevices.swift in Sources */,
+ B3EF9F532C3FC31E00832EE7 /* AVCaptureDevice+neutralZoom.swift in Sources */,
+ B3EF9F542C3FC31E00832EE7 /* AVCaptureDevice.Format+dimensions.swift in Sources */,
+ B3EF9F552C3FC31E00832EE7 /* AVCaptureVideoDataOutput+pixelFormat.swift in Sources */,
+ B3EF9F562C3FC31E00832EE7 /* AVCaptureSession+synchronizeBuffer.swift in Sources */,
+ B3EF9F572C3FC31E00832EE7 /* AVCaptureDevice+isMultiCam.swift in Sources */,
+ B3EF9F582C3FC31E00832EE7 /* AVCaptureDevice+toDictionary.swift in Sources */,
+ B3EF9F592C3FC31E00832EE7 /* AVCaptureDevice.Format+toDictionary.swift in Sources */,
+ B3EF9F5A2C3FC31E00832EE7 /* CMVideoDimensions+toCGSize.swift in Sources */,
+ B3EF9F212C3FBDFC00832EE7 /* ReactStubs.swift in Sources */,
+ B3EF9F5C2C3FC33E00832EE7 /* RecordVideoOptions.swift in Sources */,
+ B3EF9F6B2C3FD35600832EE7 /* CameraView+RecordVideo.swift in Sources */,
+ B3EF9F222C3FBE8200832EE7 /* CameraConfiguration.swift in Sources */,
+ B3EF9F282C3FBF1900832EE7 /* JSUnionValue.swift in Sources */,
+ B3EF9F332C3FC00900832EE7 /* CameraSession+Configuration.swift in Sources */,
+ B3EF9F362C3FC05600832EE7 /* ResizeMode.swift in Sources */,
+ B3EF9F312C3FBFD500832EE7 /* AVAssetWriter.Status+descriptor.swift in Sources */,
+ B3EF9F292C3FBF2500832EE7 /* Torch.swift in Sources */,
+ B31481782C46558C00084A26 /* CameraView+TakePhoto.swift in Sources */,
+ B3EF9F2C2C3FBF4A00832EE7 /* EnumParserError.swift in Sources */,
+ B3EF9F272C3FBEF800832EE7 /* PixelFormat.swift in Sources */,
+ B3EF9F652C3FC43C00832EE7 /* CameraSession+Audio.swift in Sources */,
+ B3EF9F382C3FC0D900832EE7 /* PreviewView.swift in Sources */,
+ B3EF9F3A2C3FC2EB00832EE7 /* AutoFocusSystem.swift in Sources */,
+ B3EF9F112C3FBD8300832EE7 /* ViewController.swift in Sources */,
+ B3EF9F5D2C3FC34600832EE7 /* Video.swift in Sources */,
+ B3EF9F2B2C3FBF4100832EE7 /* AVMetadataObject.ObjectType+descriptor.swift in Sources */,
+ B3AF8E862C410FB700CC198C /* ReactStubs.m in Sources */,
+ B3EF9F0D2C3FBD8300832EE7 /* AppDelegate.swift in Sources */,
+ B3EF9F2D2C3FBF9600832EE7 /* CameraSessionDelegate.swift in Sources */,
+ B3EF9F262C3FBEEA00832EE7 /* CameraDeviceFormat.swift in Sources */,
+ B3EF9F242C3FBEBC00832EE7 /* CameraError.swift in Sources */,
+ B3EF9F2E2C3FBFA600832EE7 /* CameraSession+CodeScanner.swift in Sources */,
+ B3EF9F252C3FBED900832EE7 /* Orientation.swift in Sources */,
+ B3EF9F662C3FC44B00832EE7 /* CameraSession+Video.swift in Sources */,
+ B3EF9F672C3FC44B00832EE7 /* CameraSession+Photo.swift in Sources */,
+ B3EF9F682C3FC44B00832EE7 /* CameraSession+Focus.swift in Sources */,
+ B3EF9F6C2C3FD36800832EE7 /* Callback.swift in Sources */,
+ B3EF9F692C3FC44B00832EE7 /* PhotoCaptureDelegate.swift in Sources */,
+ B3EF9F302C3FBFBB00832EE7 /* RecordingSession.swift in Sources */,
+ B3EF9F322C3FBFF100832EE7 /* CameraQueues.swift in Sources */,
+ B3EF9F2F2C3FBFB200832EE7 /* CameraSession.swift in Sources */,
+ B3EF9F2A2C3FBF3400832EE7 /* CodeScannerOptions.swift in Sources */,
+ B3EF9F0F2C3FBD8300832EE7 /* SceneDelegate.swift in Sources */,
+ B3EF9F1E2C3FBDCF00832EE7 /* CameraView.swift in Sources */,
+ B3EF9F3C2C3FC30D00832EE7 /* AVCaptureDevice.Position+descriptor.swift in Sources */,
+ B3EF9F1F2C3FBDDC00832EE7 /* ReactLogger.swift in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
 /* End PBXSourcesBuildPhase section */

+ /* Begin PBXVariantGroup section */
+ B3EF9F122C3FBD8300832EE7 /* Main.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ B3EF9F132C3FBD8300832EE7 /* Base */,
+ );
+ name = Main.storyboard;
+ sourceTree = "<group>";
+ };
+ B3EF9F172C3FBD8400832EE7 /* LaunchScreen.storyboard */ = {
+ isa = PBXVariantGroup;
+ children = (
+ B3EF9F182C3FBD8400832EE7 /* Base */,
+ );
+ name = LaunchScreen.storyboard;
+ sourceTree = "<group>";
+ };
+ /* End PBXVariantGroup section */
+
 /* Begin XCBuildConfiguration section */
 58B511ED1A9E6C8500147676 /* Debug */ = {
 isa = XCBuildConfiguration;
@@ -660,6 +909,94 @@
 };
 name = Release;
 };
+ B3EF9F1B2C3FBD8400832EE7 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = HP3AMBWJGS;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = TestRecorder/Info.plist;
+ INFOPLIST_KEY_NSCameraUsageDescription = "Record form camera";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "Record from microphone";
+ INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
+ INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
+ INFOPLIST_KEY_UIMainStoryboardFile = Main;
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = camera.TestRecorder;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_OBJC_BRIDGING_HEADER = "TestRecorder/TestRecorder-Bridging-Header.h";
+ SWIFT_OPTIMIZATION_LEVEL = "-Onone";
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Debug;
+ };
+ B3EF9F1C2C3FBD8400832EE7 /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+ ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
+ ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
+ CLANG_ANALYZER_NONNULL = YES;
+ CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_WEAK = YES;
+ CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
+ CODE_SIGN_STYLE = Automatic;
+ COPY_PHASE_STRIP = NO;
+ CURRENT_PROJECT_VERSION = 1;
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ DEVELOPMENT_TEAM = HP3AMBWJGS;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
+ GCC_C_LANGUAGE_STANDARD = gnu17;
+ GENERATE_INFOPLIST_FILE = YES;
+ INFOPLIST_FILE = TestRecorder/Info.plist;
+ INFOPLIST_KEY_NSCameraUsageDescription = "Record form camera";
+ INFOPLIST_KEY_NSMicrophoneUsageDescription = "Record from microphone";
+ INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
+ INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
+ INFOPLIST_KEY_UIMainStoryboardFile = Main;
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+ IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
+ MARKETING_VERSION = 1.0;
+ MTL_FAST_MATH = YES;
+ PRODUCT_BUNDLE_IDENTIFIER = camera.TestRecorder;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SWIFT_EMIT_LOC_STRINGS = YES;
+ SWIFT_OBJC_BRIDGING_HEADER = "TestRecorder/TestRecorder-Bridging-Header.h";
+ SWIFT_VERSION = 5.0;
+ TARGETED_DEVICE_FAMILY = "1,2";
+ };
+ name = Release;
+ };
 /* End XCBuildConfiguration section */

 /* Begin XCConfigurationList section */
@@ -681,6 +1018,15 @@
 defaultConfigurationIsVisible = 0;
 defaultConfigurationName = Release;
 };
+ B3EF9F1D2C3FBD8400832EE7 /* Build configuration list for PBXNativeTarget "TestRecorder" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ B3EF9F1B2C3FBD8400832EE7 /* Debug */,
+ B3EF9F1C2C3FBD8400832EE7 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
 /* End XCConfigurationList section */
 };
 rootObject = 58B511D31A9E6C8500147676 /* Project object */;
@@ -26,6 +26,13 @@ interface OnErrorEvent {
   message: string
   cause?: ErrorWithCause
 }
+interface OnInitReadyEvent {
+  filepath: string
+}
+interface OnVideoChunkReadyEvent {
+  filepath: string
+  index: number
+}
 type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor' | 'codeScanner'> & {
   cameraId: string
   enableFrameProcessor: boolean
@@ -35,6 +42,8 @@ type NativeCameraViewProps = Omit<CameraProps, 'device' | 'onInitialized' | 'onError' | 'frameProcessor' | 'codeScanner'> & {
   onCodeScanned?: (event: NativeSyntheticEvent<OnCodeScannedEvent>) => void
   onStarted?: (event: NativeSyntheticEvent<void>) => void
   onStopped?: (event: NativeSyntheticEvent<void>) => void
+  onInitReady?: (event: NativeSyntheticEvent<OnInitReadyEvent>) => void
+  onVideoChunkReady?: (event: NativeSyntheticEvent<OnVideoChunkReadyEvent>) => void
   onViewReady: () => void
 }
 type NativeRecordVideoOptions = Omit<RecordVideoOptions, 'onRecordingError' | 'onRecordingFinished' | 'videoBitRate'> & {
@@ -168,7 +177,7 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
    * }, 5000)
    * ```
    */
-  public startRecording(options: RecordVideoOptions): void {
+  public startRecording(options: RecordVideoOptions, filePath: string): void {
     const { onRecordingError, onRecordingFinished, videoBitRate, ...passThruOptions } = options
     if (typeof onRecordingError !== 'function' || typeof onRecordingFinished !== 'function')
       throw new CameraRuntimeError('parameter/invalid-parameter', 'The onRecordingError or onRecordingFinished functions were not set!')
@@ -202,7 +211,7 @@ export class Camera extends React.PureComponent<CameraProps, CameraState> {
     }
     try {
       // TODO: Use TurboModules to make this awaitable.
-      CameraModule.startRecording(this.handle, nativeOptions, onRecordCallback)
+      CameraModule.startRecording(this.handle, nativeOptions, filePath, onRecordCallback)
     } catch (e) {
       throw tryParseNativeCameraError(e)
     }
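For orientation only (not part of the diff): after these changes the JS API takes the chunk output directory as a second argument to startRecording, and the native view reports each finished chunk through the new onVideoChunkReady event carrying { filepath, index }. The sketch below is a rough, hedged consumer-side example; the ref setup and option values are illustrative, the directory path is a placeholder, and whether the chunk event is re-exposed on the public <Camera> props is not shown in this diff.

// Minimal sketch, assuming a mounted <Camera ref={camera} video isActive ... /> from this package.
import { useRef } from 'react'
import { Camera, type RecordVideoOptions } from 'react-native-vision-camera'

function useChunkedRecording() {
  const camera = useRef<Camera>(null)

  const start = () => {
    const options: RecordVideoOptions = {
      fileType: 'mp4',
      videoCodec: 'h265',
      onRecordingFinished: (video) => console.log('recording finished', video),
      onRecordingError: (error) => console.warn('recording error', error),
    }
    // New in this change: the second argument is the directory that receives the chunk files;
    // '/some/writable/directory' is a placeholder for e.g. an app temporary directory.
    camera.current?.startRecording(options, '/some/writable/directory')
  }

  const stop = () => camera.current?.stopRecording()

  return { camera, start, stop }
}

// Each chunk is announced natively via onVideoChunkReady with { filepath, index },
// matching the OnVideoChunkReadyEvent interface added above.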