react-native-vision-camera/package.json

{
"name": "react-native-vision-camera",
"version": "3.0.0-rc.1",
"description": "The Camera library that sees the vision.",
"main": "lib/commonjs/index",
"module": "lib/module/index",
"types": "lib/typescript/index.d.ts",
"react-native": "src/index",
"source": "src/index",
"files": [
"src",
"lib/commonjs",
"lib/module",
"lib/typescript",
"android/build.gradle",
"android/gradle.properties",
"android/CMakeLists.txt",
"android/src",
"ios/**/*.h",
"ios/**/*.m",
"ios/**/*.mm",
"ios/**/*.cpp",
"ios/**/*.swift",
"ios/VisionCamera.xcodeproj/project.pbxproj",
"app.plugin.js",
"VisionCamera.podspec",
"README.md"
  ],
  "scripts": {
    "typescript": "tsc --noEmit",
    "lint": "eslint \"**/*.{js,ts,tsx}\"",
"lint-ci": "yarn lint -f ./node_modules/@firmnav/eslint-github-actions-formatter/dist/formatter.js",
"build": "bob build",
"release": "release-it",
"pods": "cd example && yarn pods",
"bootstrap": "yarn && cd example && yarn && yarn setup && yarn pods",
"check-android": "scripts/ktlint.sh && scripts/cpplint.sh",
"check-ios": "scripts/swiftformat.sh && scripts/swiftlint.sh",
"check-js": "yarn lint --fix && yarn typescript",
"check-all": "scripts/check-all.sh",
"clean-ios": "scripts/clean-ios.sh",
"clean-android": "scripts/clean-android.sh",
"clean-js": "scripts/clean-js.sh",
"docs": "cd docs && yarn build"
  },
  "keywords": [
    "react-native",
    "ios",
"android",
"camera",
"vision",
"native",
"module",
"react",
"ai",
"ar",
"qr",
"qr-code",
"scanner"
  ],
  "repository": "https://github.com/mrousavy/react-native-vision-camera",
"author": "Marc Rousavy <marcrousavy@hotmail.com> (https://github.com/mrousavy)",
"license": "MIT",
"bugs": {
"url": "https://github.com/mrousavy/react-native-vision-camera/issues"
  },
  "homepage": "https://mrousavy.github.io/react-native-vision-camera/",
"publishConfig": {
"registry": "https://registry.npmjs.org/"
},
"devDependencies": {
"@expo/config-plugins": "^5.0.4",
"@jamesacarr/eslint-formatter-github-actions": "^0.1.0",
"@react-native-community/eslint-config": "^3.2.0",
"@react-native-community/eslint-plugin": "^1.3.0",
"@release-it/conventional-changelog": "^5.1.1",
"@types/react": "^18.0.27",
"@types/react-native": "^0.71.2",
"eslint": "^8.33.0",
"pod-install": "^0.1.38",
"prettier": "^2.8.4",
"react": "^18.2.0",
"react-native": "^0.71.3",
"react-native-builder-bob": "^0.20.3",
"react-native-worklets": "https://github.com/chrfalch/react-native-worklets#15d52dd",
"release-it": "^15.6.0",
"typescript": "^4.9.5"
  },
  "peerDependencies": {
    "react": "*",
    "react-native": "*",
    "react-native-worklets": "*"
  },
  "prettier": {
    "bracketSpacing": true,
    "bracketSameLine": true,
    "singleQuote": true,
    "trailingComma": "all",
    "semi": true,
    "tabWidth": 2,
    "useTabs": false,
    "printWidth": 140
  },
  "babel": {
"presets": [
"module:metro-react-native-babel-preset"
]
  },
  "release-it": {
    "git": {
      "commitMessage": "chore: release ${version}",
      "tagName": "v${version}"
    },
    "npm": {
      "publish": true
    },
    "github": {
      "release": true
    },
    "plugins": {
      "@release-it/conventional-changelog": {
"preset": {
"name": "conventionalcommits",
"types": [
{
"type": "feat",
"section": "✨ Features"
},
{
"type": "fix",
"section": "🐛 Bug Fixes"
},
{
"type": "perf",
"section": "💨 Performance Improvements"
},
{
"type": "chore(deps)",
"section": "🛠️ Dependency Upgrades"
},
            {
              "type": "docs",
              "section": "📚 Documentation"
            }
          ]
        }
      }
    }
  },
  "react-native-builder-bob": {
    "source": "src",
    "output": "lib",
    "targets": [
      "commonjs",
      "module",
      [
        "typescript",
        {
          "project": "tsconfig.json"
        }
      ]
    ]
  }
}