react-native-vision-camera/package/example/src/CameraPage.tsx

import * as React from 'react'
import { useRef, useState, useCallback, useMemo } from 'react'
import { StyleSheet, Text, View } from 'react-native'
import { PinchGestureHandler, PinchGestureHandlerGestureEvent, TapGestureHandler } from 'react-native-gesture-handler'
import { CameraRuntimeError, PhotoFile, useCameraDevice, useCameraFormat, useFrameProcessor, VideoFile } from 'react-native-vision-camera'
import { Camera } from 'react-native-vision-camera'
import { CONTENT_SPACING, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING, SCREEN_HEIGHT, SCREEN_WIDTH } from './Constants'
import Reanimated, { Extrapolate, interpolate, useAnimatedGestureHandler, useAnimatedProps, useSharedValue } from 'react-native-reanimated'
import { useEffect } from 'react'
import { useIsForeground } from './hooks/useIsForeground'
import { StatusBarBlurBackground } from './views/StatusBarBlurBackground'
import { CaptureButton } from './views/CaptureButton'
import { PressableOpacity } from 'react-native-pressable-opacity'
import MaterialIcon from 'react-native-vector-icons/MaterialCommunityIcons'
import IonIcon from 'react-native-vector-icons/Ionicons'
import type { Routes } from './Routes'
import type { NativeStackScreenProps } from '@react-navigation/native-stack'
import { useIsFocused } from '@react-navigation/core'
import { examplePlugin } from './frame-processors/ExamplePlugin'
const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera)
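// Whitelisting `zoom` lets Reanimated write the zoom factor straight to the native Camera view
// through `animatedProps`, without a React re-render on every zoom update.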
Reanimated.addWhitelistedNativeProps({
zoom: true,
})
const SCALE_FULL_ZOOM = 3
const BUTTON_SIZE = 40
type Props = NativeStackScreenProps<Routes, 'CameraPage'>
export function CameraPage({ navigation }: Props): React.ReactElement {
const camera = useRef<Camera>(null)
const [isCameraInitialized, setIsCameraInitialized] = useState(false)
const [hasMicrophonePermission, setHasMicrophonePermission] = useState(false)
const zoom = useSharedValue(0)
const isPressingButton = useSharedValue(false)
// check if camera page is active
const isFocussed = useIsFocused()
const isForeground = useIsForeground()
const isActive = isFocussed && isForeground
const [cameraPosition, setCameraPosition] = useState<'front' | 'back'>('back')
const [enableHdr, setEnableHdr] = useState(false)
const [flash, setFlash] = useState<'off' | 'on'>('off')
const [enableNightMode, setEnableNightMode] = useState(false)
  // camera device and format selection
const device = useCameraDevice(cameraPosition, {
physicalDevices: ['ultra-wide-angle-camera', 'wide-angle-camera', 'telephoto-camera'],
})
const [targetFps, setTargetFps] = useState(60)
const screenAspectRatio = SCREEN_HEIGHT / SCREEN_WIDTH
const format = useCameraFormat(device, [
{ fps: targetFps },
{ videoAspectRatio: screenAspectRatio },
{ videoResolution: 'max' },
{ photoAspectRatio: screenAspectRatio },
{ photoResolution: 'max' },
])
const fps = Math.min(format?.maxFps ?? 1, targetFps)
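  // The filters above are listed in priority order (fps first); useCameraFormat picks the best match,
  // and the requested fps is then clamped to what the chosen format can actually deliver.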
const supportsFlash = device?.hasFlash ?? false
const supportsHdr = format?.supportsPhotoHDR
const supports60Fps = useMemo(() => device?.formats.some((f) => f.maxFps >= 60), [device?.formats])
const canToggleNightMode = device?.supportsLowLightBoost ?? false
//#region Animated Zoom
  // The animated `zoom` prop simply clamps the shared zoom value to the device's supported range,
  // so it can never drop below minZoom or exceed maxZoom (which is itself capped at MAX_ZOOM_FACTOR).
const minZoom = device?.minZoom ?? 1
const maxZoom = Math.min(device?.maxZoom ?? 1, MAX_ZOOM_FACTOR)
const cameraAnimatedProps = useAnimatedProps(() => {
const z = Math.max(Math.min(zoom.value, maxZoom), minZoom)
return {
zoom: z,
}
}, [maxZoom, minZoom, zoom])
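  // Illustrative example (assumed values): with minZoom = 1 and maxZoom = 16, a shared zoom.value
  // of 0.5 is clamped up to 1, 8 passes through unchanged, and 128 is clamped down to 16 before
  // being applied as the native `zoom` prop.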
//#endregion
//#region Callbacks
const setIsPressingButton = useCallback(
(_isPressingButton: boolean) => {
isPressingButton.value = _isPressingButton
},
[isPressingButton],
)
// Camera callbacks
const onError = useCallback((error: CameraRuntimeError) => {
console.error(error)
}, [])
const onInitialized = useCallback(() => {
console.log('Camera initialized!')
setIsCameraInitialized(true)
}, [])
const onMediaCaptured = useCallback(
(media: PhotoFile | VideoFile, type: 'photo' | 'video') => {
console.log(`Media captured! ${JSON.stringify(media)}`)
navigation.navigate('MediaPage', {
path: media.path,
type: type,
})
},
[navigation],
)
const onFlipCameraPressed = useCallback(() => {
setCameraPosition((p) => (p === 'back' ? 'front' : 'back'))
}, [])
const onFlashPressed = useCallback(() => {
setFlash((f) => (f === 'off' ? 'on' : 'off'))
}, [])
//#endregion
//#region Tap Gesture
const onDoubleTap = useCallback(() => {
onFlipCameraPressed()
}, [onFlipCameraPressed])
//#endregion
//#region Effects
const neutralZoom = device?.neutralZoom ?? 1
useEffect(() => {
    // Runs whenever neutralZoom changes (reset the zoom when the device changes)
zoom.value = neutralZoom
}, [neutralZoom, zoom])
useEffect(() => {
Camera.getMicrophonePermissionStatus().then((status) => setHasMicrophonePermission(status === 'granted'))
}, [])
//#endregion
//#region Pinch to Zoom Gesture
  // The gesture handler maps the linear pinch gesture to an exponential zoom curve, since a camera's zoom
  // does not appear linear to the user (e.g. zooming from 0.1 -> 0.2 does not look like the same step as 0.8 -> 0.9).
const onPinchGesture = useAnimatedGestureHandler<PinchGestureHandlerGestureEvent, { startZoom?: number }>({
onStart: (_, context) => {
context.startZoom = zoom.value
},
onActive: (event, context) => {
// we're trying to map the scale gesture to a linear zoom here
const startZoom = context.startZoom ?? 0
const scale = interpolate(event.scale, [1 - 1 / SCALE_FULL_ZOOM, 1, SCALE_FULL_ZOOM], [-1, 0, 1], Extrapolate.CLAMP)
zoom.value = interpolate(scale, [-1, 0, 1], [minZoom, startZoom, maxZoom], Extrapolate.CLAMP)
},
})
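  // Illustrative example (assumed values): with minZoom = 1, maxZoom = 16 and a startZoom of 2,
  // a pinch scale of 1 keeps the zoom at 2, pinching out to SCALE_FULL_ZOOM (3) or beyond maps to
  // maxZoom (16), and pinching in to a scale of 2/3 or below maps to minZoom (1).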
//#endregion
if (device != null && format != null) {
console.log(
`Re-rendering camera page with ${isActive ? 'active' : 'inactive'} camera. ` +
`Device: "${device.name}" (${format.photoWidth}x${format.photoHeight} photo / ${format.videoWidth}x${format.videoHeight} video @ ${fps}fps)`,
)
} else {
console.log('re-rendering camera page without active camera')
}
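  // Note: this worklet runs on the frame processor thread for every camera frame, so it should stay
  // lightweight; heavier work can be offloaded with runAsync.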
const frameProcessor = useFrameProcessor((frame) => {
'worklet'
console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`)
examplePlugin(frame)
}, [])
return (
<View style={styles.container}>
{device != null && (
<PinchGestureHandler onGestureEvent={onPinchGesture} enabled={isActive}>
<Reanimated.View style={StyleSheet.absoluteFill}>
<TapGestureHandler onEnded={onDoubleTap} numberOfTaps={2}>
<ReanimatedCamera
ref={camera}
style={StyleSheet.absoluteFill}
device={device}
format={format}
fps={fps}
hdr={enableHdr}
lowLightBoost={device.supportsLowLightBoost && enableNightMode}
isActive={isActive}
onInitialized={onInitialized}
onError={onError}
enableZoomGesture={false}
animatedProps={cameraAnimatedProps}
enableFpsGraph={true}
orientation="portrait"
photo={true}
video={true}
audio={hasMicrophonePermission}
frameProcessor={frameProcessor}
/>
</TapGestureHandler>
</Reanimated.View>
</PinchGestureHandler>
)}
<CaptureButton
style={styles.captureButton}
camera={camera}
onMediaCaptured={onMediaCaptured}
cameraZoom={zoom}
minZoom={minZoom}
maxZoom={maxZoom}
flash={supportsFlash ? flash : 'off'}
enabled={isCameraInitialized && isActive}
setIsPressingButton={setIsPressingButton}
/>
<StatusBarBlurBackground />
<View style={styles.rightButtonRow}>
<PressableOpacity style={styles.button} onPress={onFlipCameraPressed} disabledOpacity={0.4}>
<IonIcon name="camera-reverse" color="white" size={24} />
</PressableOpacity>
{supportsFlash && (
<PressableOpacity style={styles.button} onPress={onFlashPressed} disabledOpacity={0.4}>
<IonIcon name={flash === 'on' ? 'flash' : 'flash-off'} color="white" size={24} />
</PressableOpacity>
)}
{supports60Fps && (
<PressableOpacity style={styles.button} onPress={() => setTargetFps((t) => (t === 30 ? 60 : 30))}>
<Text style={styles.text}>{`${targetFps}\nFPS`}</Text>
</PressableOpacity>
)}
{supportsHdr && (
<PressableOpacity style={styles.button} onPress={() => setEnableHdr((h) => !h)}>
<MaterialIcon name={enableHdr ? 'hdr' : 'hdr-off'} color="white" size={24} />
</PressableOpacity>
)}
{canToggleNightMode && (
<PressableOpacity style={styles.button} onPress={() => setEnableNightMode(!enableNightMode)} disabledOpacity={0.4}>
<IonIcon name={enableNightMode ? 'moon' : 'moon-outline'} color="white" size={24} />
</PressableOpacity>
)}
</View>
</View>
)
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'black',
},
captureButton: {
position: 'absolute',
alignSelf: 'center',
bottom: SAFE_AREA_PADDING.paddingBottom,
},
button: {
marginBottom: CONTENT_SPACING,
width: BUTTON_SIZE,
height: BUTTON_SIZE,
borderRadius: BUTTON_SIZE / 2,
backgroundColor: 'rgba(140, 140, 140, 0.3)',
justifyContent: 'center',
alignItems: 'center',
},
rightButtonRow: {
position: 'absolute',
right: SAFE_AREA_PADDING.paddingRight,
top: SAFE_AREA_PADDING.paddingTop,
},
text: {
color: 'white',
fontSize: 11,
fontWeight: 'bold',
textAlign: 'center',
},
})