1 Commits

Author SHA1 Message Date
0899a9a264 Elpaca migration wip 2024-06-25 03:01:36 -06:00
389 changed files with 5089 additions and 27889 deletions

View File

@@ -1,11 +0,0 @@
{
"permissions": {
"allow": [
"Bash(rg:*)",
"Bash(wmctrl:*)",
"Bash(grep:*)",
"Bash(hyprctl:*)"
],
"deny": []
}
}

View File

@@ -1,95 +0,0 @@
name: Build and Push Cachix (NixOS)
on:
push:
branches: [master]
paths:
- "nixos/**"
- "org-agenda-api/**"
- ".github/workflows/cachix.yml"
pull_request:
branches: [master]
paths:
- "nixos/**"
- "org-agenda-api/**"
- ".github/workflows/cachix.yml"
workflow_dispatch: {}
jobs:
nixos-strixi-minaj:
runs-on: ubuntu-latest
permissions:
contents: read
env:
# Avoid flaky/stalled CI due to unreachable substituters referenced in flake config
# (e.g. LAN caches). We keep this list explicit for CI reliability.
NIX_CONFIG: |
experimental-features = nix-command flakes
connect-timeout = 5
substituters = https://cache.nixos.org https://colonelpanic8-dotfiles.cachix.org https://org-agenda-api.cachix.org https://taffybar.cachix.org https://codex-cli.cachix.org https://claude-code.cachix.org
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= colonelpanic8-dotfiles.cachix.org-1:O6GF3nptpeMFapX29okzO92eSWXR36zqW6ZF2C8P0eQ= org-agenda-api.cachix.org-1:liKFemKkOLV/rJt2txDNcpDjRsqLuBneBjkSw/UVXKA= taffybar.cachix.org-1:beZotJ1nVEsAnJxa3lWn0zwzZM7oeXmGh4ADRpHeeIo= codex-cli.cachix.org-1:1Br3H1hHoRYG22n//cGKJOk3cQXgYobUel6O8DgSing= claude-code.cachix.org-1:YeXf2aNu7UTX8Vwrze0za1WEDS+4DuI2kVeWEE4fsRk=
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Free disk space
run: |
set -euxo pipefail
df -h
sudo rm -rf /usr/share/dotnet || true
sudo rm -rf /usr/local/lib/android || true
sudo rm -rf /opt/ghc || true
sudo rm -rf /usr/local/share/boost || true
sudo apt-get clean || true
df -h
- name: Install Nix
uses: DeterminateSystems/nix-installer-action@v16
- name: Use GitHub Actions Cache for /nix/store
uses: DeterminateSystems/magic-nix-cache-action@v7
- name: Require Cachix config (push only)
if: github.event_name == 'push'
env:
CACHIX_CACHE_NAME: ${{ vars.CACHIX_CACHE_NAME }}
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
run: |
set -euo pipefail
if [ -z "${CACHIX_CACHE_NAME:-}" ]; then
echo "Missing repo variable CACHIX_CACHE_NAME (Settings -> Secrets and variables -> Actions -> Variables)." >&2
exit 1
fi
if [ -z "${CACHIX_AUTH_TOKEN:-}" ]; then
echo "Missing repo secret CACHIX_AUTH_TOKEN (Settings -> Secrets and variables -> Actions -> Secrets)." >&2
exit 1
fi
- name: Setup Cachix (push)
if: github.event_name == 'push'
uses: cachix/cachix-action@v15
with:
name: ${{ vars.CACHIX_CACHE_NAME }}
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
skipPush: false
- name: Setup Cachix (PR, no push)
if: github.event_name == 'pull_request' && vars.CACHIX_CACHE_NAME != ''
uses: cachix/cachix-action@v15
with:
name: ${{ vars.CACHIX_CACHE_NAME }}
skipPush: true
- name: Build NixOS system (strixi-minaj)
run: |
set -euxo pipefail
nix build \
--no-link \
--print-build-logs \
./nixos#nixosConfigurations.strixi-minaj.config.system.build.toplevel \
--override-input railbird-secrets ./nixos/ci/railbird-secrets-stub

View File

@@ -1,54 +0,0 @@
name: Deploy to GitHub Pages
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build-and-deploy:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Emacs
uses: purcell/setup-emacs@master
with:
version: 29.1
- name: Setup Cask
uses: conao3/setup-cask@master
with:
version: snapshot
- name: Install dependencies
working-directory: gen-gh-pages
run: cask install
- name: Generate HTML
working-directory: gen-gh-pages
run: |
cask exec emacs --script generate-html.el
mv ../dotfiles/emacs.d/README.html ./index.html
- name: Deploy to GitHub Pages
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./gen-gh-pages
publish_branch: gh-pages
user_name: 'github-actions[bot]'
user_email: 'github-actions[bot]@users.noreply.github.com'
commit_message: 'Deploy to GitHub Pages: ${{ github.sha }}'
keep_files: false

26
.gitignore vendored
View File

@@ -21,33 +21,7 @@
gotools gotools
/dotfiles/config/xmonad/result /dotfiles/config/xmonad/result
/dotfiles/config/taffybar/result /dotfiles/config/taffybar/result
/nix-darwin/result
/nixos/result
/dotfiles/emacs.d/*.sqlite /dotfiles/emacs.d/*.sqlite
/dotfiles/config/gtk-3.0/colors.css /dotfiles/config/gtk-3.0/colors.css
/dotfiles/config/gtk-3.0/settings.ini
/dotfiles/emacs.d/.cache/ /dotfiles/emacs.d/.cache/
/dotfiles/emacs.d/projectile.cache
/dotfiles/emacs.d/projectile-bookmarks.eld
/dotfiles/config/fontconfig/conf.d/10-hm-fonts.conf /dotfiles/config/fontconfig/conf.d/10-hm-fonts.conf
/dotfiles/config/fontconfig/conf.d/52-hm-default-fonts.conf
/dotfiles/config/taffybar/_scratch/
/dotfiles/config/taffybar/taffybar-*/
/dotfiles/config/taffybar/status-notifier-item/
/dotfiles/config/taffybar/.direnv/
/dotfiles/config/taffybar/dist-newstyle/
/dotfiles/config/taffybar/sni-priorities.dat
/dotfiles/config/xmonad/dist-newstyle/
/dotfiles/config/hypr/hyprscratch.conf
/.worktrees/
/result
# Secrets and machine-local state (managed via agenix/pass instead of git)
/dotfiles/config/asciinema/config
/dotfiles/config/remmina/remmina.pref
/dotfiles/config/screencloud/ScreenCloud.conf
# Local tool state
/.playwright-cli/
/nixos/action-cache-dir/
/dotfiles/config/taffybar/dbus-menu/

8
.travis.yml Normal file
View File

@@ -0,0 +1,8 @@
language: generic
script: bash ./gen-gh-pages/deploy.sh
env:
global:
- ENCRYPTION_LABEL: "73e6c870aa87"
- COMMIT_AUTHOR_EMAIL: "IvanMalison@gmail.com"
- COMMIT_AUTHOR_NAME: "Ivan Malison"

View File

@@ -1,37 +0,0 @@
# Cachix for this repo
This repo's NixOS flake lives under `nixos/`.
The workflow in `.github/workflows/cachix.yml` can build the `strixi-minaj`
system closure on GitHub Actions and push the results to a Cachix cache.
## One-time setup
1. Create a Cachix cache (on cachix.org).
2. Create a Cachix auth token with write access to that cache.
3. In the GitHub repo settings:
- Add a repo variable `CACHIX_CACHE_NAME` (the cache name).
- Add a repo secret `CACHIX_AUTH_TOKEN` (the write token).
After that, pushes to `master` will populate the cache.
## Using the cache locally
Option A: ad-hoc (non-declarative)
```sh
cachix use <your-cache-name>
```
Option B: declarative via flake `nixConfig` (recommended for NixOS)
1. Get the cache public key from the Cachix UI:
- Open `https://app.cachix.org/cache/<your-cache-name>#pull`
- Copy the `Public Key` value shown there.
2. Add it to `nixos/flake.nix` under `nixConfig.extra-substituters` and
`nixConfig.extra-trusted-public-keys`.
Note: `nixos/nix.nix` sets `nix.settings.accept-flake-config = true`, so the
flake `nixConfig` is honored during rebuilds.

View File

@@ -1,218 +0,0 @@
# Hyprland Lua Migration Checklist
This checklist tracks the migration described in `docs/tiling-wm-experience.md`.
Guiding rule for shelling out:
- Prefer Lua for compositor/window/workspace state changes.
- Avoid `hyprctl` for window manipulation unless there is no usable Lua API.
- `hyprctl` remains acceptable for non-window-control escape hatches such as
`hyprctl reload`.
- External utilities remain acceptable where they are the real tool being
launched, for example rofi, cliphist, grim/slurp/swappy, playerctl, hyprlock,
and systemd commands.
## 0. Version And Build Base
- [x] Update/confirm Hyprland Lua input at latest usable upstream target.
- [x] Keep stable Hyprland path intact until Lua path is proven.
- [x] Keep hy3 out of the Lua branch.
- [x] Keep hyprNStack following the Lua Hyprland input.
- [x] Rebuild hyprNStack against the Lua Hyprland branch.
- [x] Add a forked hyprexpo input for the Lua Hyprland branch.
- [x] Keep a cheap Lua check: parse config, execute against stub, reject
`hyprctl` in the Lua config's window/workspace manipulation path.
- [x] Add a real Hyprland Lua verifier check for the config parser path.
Current upstream note: latest Hyprland release observed during this migration is
`v0.54.3`; the Lua config input tracks PR 13817 and was already at the current
PR head `c35a8a5` dated 2026-04-26. The non-Lua fallback remains pinned to the older
hy3/hyprexpo-compatible stack; the Lua branch uses forked hyprexpo branch
`colonelpanic8/hyprland-plugins:hyprexpo-lua-hyprland`.
## 1. Core Layout
- [x] Primary layout is equal-width columns.
- [x] No scrolling layout.
- [x] No hy3 in Lua path.
- [x] Dynamic redistribution on open/close via Lua-managed nStack count.
- [x] Monocle/tabbed-style layout available.
- [x] Direct jump to columns layout.
- [x] Direct jump to monocle layout.
- [x] Directional focus cycles in monocle.
- [x] Visual indication of hidden monocle windows, currently notification.
- [x] Make layout state per workspace instead of one global current layout.
- [x] Preserve one-window smart gaps in the live config path.
- [x] Use a persistent monocle indicator instead of a transient notification.
Smart-gaps note: nStack uses `no_gaps_when_only = true`; Hyprland workspace
rules are still applied at runtime for broader parity, but skipped during
`--verify-config` because the current Lua PR segfaults when rule bindings run in
verifier mode.
## 2. Workspace Behavior
- [x] `Super+1..9` focuses bounded workspaces.
- [x] `Super+Shift+1..9` sends window without following.
- [x] `Super+Ctrl+1..9` sends and follows.
- [x] Previous workspace per monitor uses Lua-tracked history.
- [x] Implement next empty workspace focus in Lua.
- [x] Implement move focused window to next empty workspace without following.
- [x] Implement move focused window to next empty workspace and follow.
- [x] Implement bounded workspace cycling `1..9` in Lua, replacing
`workspace-scroll.sh`.
- [x] Implement workspace swap or decide whether native dispatcher is enough.
- [x] Track current monitor workspace history explicitly, with native
`previous_per_monitor` as fallback.
## 3. Directional Navigation
- [x] `Super+w/a/s/d` focuses windows.
- [x] `Super+Shift+w/a/s/d` swaps windows.
- [x] `Hyper+w/a/s/d` focuses monitors.
- [x] `Hyper+Shift+w/a/s/d` moves windows to monitors.
- [x] `Super+z` next monitor.
- [x] `Super+Shift+z` move to next monitor.
- [x] Replace any old cursor-follow/move scripts fully.
- [x] Add required `Super+Ctrl+w/a/s/d` move-to-monitor behavior preserving
useful focus.
- [x] Add "move to empty workspace on monitor in direction" without requiring
`Hyper+Ctrl`.
- [x] Route directional focus in monocle through deterministic Lua cycling.
- [ ] Live-verify directional focus in monocle behaves predictably.
## 4. Script Elimination Priority
- [x] Core layout switching no longer uses scripts.
- [x] Core column count logic no longer uses scripts or `hyprctl`.
- [x] Replace `find-empty-workspace.sh`.
- [x] Replace `workspace-goto-empty.sh`.
- [x] Replace `workspace-move-to-empty.sh`.
- [x] Replace `workspace-scroll.sh`.
- [x] Replace `cycle-layout.sh`.
- [x] Replace `movewindow-follow-cursor.sh`.
- [x] Replace `gather-class.sh`.
- [x] Replace `focus-next-class.sh`.
- [x] Replace `raise-or-run.sh`.
- [x] Replace minimize scripts if Lua can maintain hidden workspace state.
- [x] Replace `swap-workspaces.sh`.
- [x] Decide whether rofi-backed pickers remain scripts or become
Lua-generated command pipes. Rofi itself remains external.
## 5. Overview And Window Discovery
- [x] Restore visual hyprexpo for `Super+Tab` overview.
- [x] Restore visual hyprexpo `bring` mode for `Super+Shift+Tab`.
- [x] Keep first-pass Lua numbered window picker on secondary bindings.
- [x] Implement first-pass Lua-native go-to-window picker.
- [x] Implement first-pass Lua-native bring-window picker.
- [x] Implement first-pass Lua-native replace-window picker.
- [ ] Picker entries include icons.
- [x] Picker entries include title/workspace.
- [x] Hide scratchpad/minimized/internal windows from normal pickers.
- [x] Decide whether picker data generation can be Lua-native with rofi as only
external process.
Picker decision: current Lua API can query and manipulate windows directly, but
does not expose a synchronous way to run rofi and consume its selected output.
The first pass therefore uses Lua-native numbered submaps and notifications.
A final rofi/icon picker would need either a small IPC bridge or an upstream Lua
process-output/callback primitive.
Hyprexpo decision: hyprexpo is kept as the visual overview. The forked Lua
branch exposes `hl.plugin.hyprexpo.expo(...)`, so the Lua config can invoke
`toggle` and `bring` directly without shelling out to `hyprctl`.
## 6. Scratchpads
- [x] Preserve named scratchpads: element, gmail, htop, messages, slack,
spotify, transmission, volume.
- [x] Preserve dropdown terminal scratchpad.
- [x] Scratchpads near-fullscreen and centered.
- [x] Scratchpads hidden from normal listings/status bar.
- [x] Toggling scratchpad exits fullscreen/monocle state first.
- [x] Decide hyprscratch daemon is not needed in the Lua branch.
- [x] Replace `hyprscratch toggle` with Lua-managed scratchpad toggles.
- [x] Disable hyprscratch service on the Lua branch.
- [x] Handle delayed class/title assignment with window class/title event adoption.
- [x] Handle already-running app.
- [x] Handle minimized app.
- [x] Handle app on another workspace.
## 7. Minimization
- [x] Implement minimize active window.
- [x] Implement restore last minimized window.
- [x] Exclude minimized windows from layout.
- [x] Exclude minimized windows from normal go/bring lists.
- [x] Implement minimized picker.
- [x] Implement restore all minimized.
- [x] Implement minimize other windows of current workspace class.
- [x] Implement restore windows of focused class.
- [x] Decide hidden workspace naming/state model for minimized windows.
- [x] Hydrate minimized-window state from the hidden workspace on restore/picker
paths.
## 8. Class-Aware Workflows
- [x] Gather all windows of focused class onto current workspace.
- [x] Focus next window of different/same class as desired parity.
- [x] Browser raise-or-spawn.
- [x] Window info command exposes class/title/workspace/address/pid.
- [ ] Window menus expose real window icons.
- [x] Prefer Lua window queries over `hyprctl clients`.
## 9. Status Bar Contract
- [ ] Confirm taffybar can still list normal workspaces.
- [ ] Confirm special scratchpad/minimize workspaces are filtered.
- [ ] Confirm active workspace per monitor remains visible.
- [ ] Confirm class/title/active/minimized/urgent metadata is available.
- [x] Expose layout name/state if practical.
- [ ] Confirm workspace/window positioning remains enough for icon strips.
Layout state note: Lua writes `$XDG_RUNTIME_DIR/hyprland-layout-state` with the
active workspace, active layout, and per-workspace layout map. Taffybar still
needs a live readback check.
## 10. Session And Utilities
- [x] Terminal binding preserved.
- [x] Launcher/run menu preserved.
- [x] Media keys preserved.
- [x] Clipboard history binding preserved.
- [x] Screenshot binding preserved.
- [x] Lock binding preserved.
- [x] Session startup target integration preserved.
- [x] `hyprctl reload` may remain available as a non-window-manipulation escape
hatch.
- [x] Resolve `Hyper+w` conflict: monitor focus must win; wallpaper picker
needs another key.
- [x] Keep rofi utility commands as external commands unless there is a
meaningful Lua replacement.
- [x] Decide which shell utilities are acceptable because they are not Hyprland
control scripts.
## 11. Validation
- [x] Lua syntax check.
- [x] Lua stub execution check.
- [x] `hyprctl` rejection in Lua config for window/workspace manipulation.
- [x] Real `Hyprland --verify-config` check.
- [x] hyprNStack flake build check.
- [x] hyprexpo Lua-branch flake build check.
- [x] `ryzen-shine` system dry-run.
- [x] `just switch` activates successfully and deploys branch-owned
`~/.config/hypr/hyprland.lua`.
- [x] Re-run checks after Hyprland/Lua input confirmation.
- [ ] Try live compositor smoke test again after version bump.
- [x] Document `--verify-config` caveats for Lua rule/plugin-specific config.
- [x] Eventually run `just switch` only when the branch is coherent enough for a
live test.
Live-smoke note: this Hyprland binary exposes `--verify-config` but no
`--headless` CLI flag. `just switch` now installs the Lua branch binary and
deploys `hyprland.lua`, but the currently running compositor remains the old
0.53 process until the Hyprland session is restarted. A true compositor smoke
test still needs a session restart or a nested Wayland session that avoids
startup side effects.

View File

@@ -1,152 +0,0 @@
# Org-Agenda-API Consolidation Design
## Overview
Consolidate org-agenda-api container builds and fly.io deployment into the dotfiles repository. This eliminates the separate `colonelpanic-org-agenda-api` repo and provides:
- Container outputs available to NixOS machines directly
- Fly.io deployment from the same repo
- Fewer repos to maintain
- Cachix integration for faster builds
## Directory Structure
```
/home/imalison/dotfiles/
├── nixos/
│ ├── flake.nix # Main flake, adds container output
│ ├── org-agenda-api.nix # Existing tangling module (stays here)
│ └── ...
├── org-agenda-api/
│ ├── container.nix # Container build logic (mkContainer, etc.)
│ ├── configs/
│ │ ├── colonelpanic/
│ │ │ ├── custom-config.el
│ │ │ └── overrides.el (optional)
│ │ └── kat/
│ │ └── custom-config.el
│ ├── fly/
│ │ ├── fly.toml
│ │ ├── deploy.sh
│ │ └── config-{instance}.env
│ └── secrets/
│ ├── secrets.nix # agenix declarations
│ └── *.age # encrypted secrets
└── dotfiles/emacs.d/
└── org-config.org # Source of truth for org config
```
## Flake Integration
The main dotfiles flake at `/home/imalison/dotfiles/nixos/flake.nix` exposes container outputs:
```nix
outputs = inputs @ { self, nixpkgs, flake-utils, ... }:
{
nixosConfigurations = { ... }; # existing
} // flake-utils.lib.eachDefaultSystem (system:
let
pkgs = import nixpkgs { inherit system; };
containerLib = import ../org-agenda-api/container.nix {
inherit pkgs system;
tangledConfig = (import ./org-agenda-api.nix {
inherit pkgs system;
inputs = inputs;
}).org-agenda-custom-config;
};
in {
packages = {
container-colonelpanic = containerLib.mkInstanceContainer "colonelpanic";
container-kat = containerLib.mkInstanceContainer "kat";
};
}
);
```
Build with: `nix build .#container-colonelpanic`
## Custom Elisp & Tangling
Single source of truth: `org-config.org` tangles to elisp files loaded by containers.
**What stays in custom-config.el (container-specific glue):**
- Path overrides (`/data/org` instead of `~/org`)
- Stubs for unavailable packages (`org-bullets-mode` no-op)
- Customize-to-setq format conversion
- Template conversion for org-agenda-api format
- Instance-specific settings
**Audit:** During implementation, verify no actual org logic is duplicated in custom-config.el.
## Cachix Integration
### Phase 1: Use upstream cache as substituter
Add to dotfiles flake's `nixConfig`:
```nix
nixConfig = {
extra-substituters = [
"https://org-agenda-api.cachix.org"
];
extra-trusted-public-keys = [
"org-agenda-api.cachix.org-1:PUBLIC_KEY_HERE"
];
};
```
Benefits:
- `container-base` (~500MB+ dependencies) fetched from cache
- Rebuilds only process the small custom config layer
### Phase 2 (future): Push custom builds
Set up GitHub Action or local push for colonelpanic-specific container builds.
## Fly.io Deployment
**What moves:**
- `fly.toml` → `dotfiles/org-agenda-api/fly/fly.toml`
- `deploy.sh` → `dotfiles/org-agenda-api/fly/deploy.sh`
- `configs/*/config.env` → `dotfiles/org-agenda-api/fly/config-{instance}.env`
- Agenix secrets → `dotfiles/org-agenda-api/secrets/`
**Deploy script changes:**
- Build path: `nix build "../nixos#container-${INSTANCE}"`
- Secrets path adjusts to new location
- Otherwise same logic
## Implementation Phases
### Phase 1: Pull latest & verify current state
- Pull latest changes in org-agenda-api and colonelpanic-org-agenda-api
- Build container, verify it works
- Fix any issues before restructuring
### Phase 2: Create dotfiles structure
- Create `/home/imalison/dotfiles/org-agenda-api/` directory
- Move container.nix logic (adapted from current colonelpanic-org-agenda-api flake)
- Move instance configs (colonelpanic/, kat/)
- Move fly.io deployment files
- Move agenix secrets
### Phase 3: Integrate with dotfiles flake
- Update `/home/imalison/dotfiles/nixos/flake.nix` to expose container outputs
- Add cachix substituter configuration
- Test build from dotfiles: `nix build .#container-colonelpanic`
### Phase 4: Verify deployment
- Test deploy.sh from new location
- Verify fly.io deployment works
- Run the container locally on a NixOS machine
### Phase 5: Audit & cleanup
- Review custom-config.el for any duplicated org logic
- Archive colonelpanic-org-agenda-api repo
- Update any references/documentation
## Repos Affected
- **dotfiles** - Receives container build + fly.io deployment
- **colonelpanic-org-agenda-api** - Becomes obsolete after migration
- **org-agenda-api** (upstream) - No changes, used as flake input

View File

@@ -1,368 +0,0 @@
# Tiling WM Experience Spec
This document describes the tiling window manager experience I am targeting.
## Priority Levels
- Required: daily-driver behavior.
- Important: expected for parity, but a rough first version is acceptable.
- Nice: useful polish or compatibility.
## Modifier Terminology
- `Super` names the physical modifier key often labeled Windows, Command, GUI,
or OS depending on the keyboard.
- `Hyper` means a higher-order logical modifier layer used for monitor,
workspace, utility, and cross-context operations.
- Prefer implementing `Hyper` as its own virtual modifier or equivalent logical
mask when the environment supports that.
- If a dedicated virtual `Hyper` mask is not practical, `Ctrl+Alt+Super` is the
fallback chord.
- The fallback `Hyper` chord intentionally does not include `Shift`; portable
`Hyper` bindings only use the plain `Hyper` layer and the `Hyper+Shift`
layer.
- Do not require `Hyper+Ctrl`, `Hyper+Alt`, or `Hyper+Super` bindings. Those
modifiers may already be part of the fallback `Hyper` chord.
- Binding descriptions should use `Super` and `Hyper` rather than
hardware-vendor names.
## Workspaces and Monitors
Required behavior:
- Workspaces are a shared global set, not independent per-monitor namespaces.
- Focusing workspace `N` shows workspace `N` on the currently focused monitor.
- Moving a window to workspace `N` does not require caring which monitor
currently owns that workspace.
- Sending the focused window to workspace `N` without following it is a
first-class operation.
- Moving the focused window to workspace `N` and following it is a first-class
operation.
- Sending the focused window to the next empty workspace without following it is
a first-class operation.
- Moving the focused window to the next empty workspace and following it is a
first-class operation.
- Normal workspaces are bounded to `1..9`.
- Workspace history is tracked per monitor.
- Last-workspace toggle uses the current monitor's workspace history.
- Workspace cycling works on the current monitor within the bounded workspace
set.
Important behavior:
- Swapping the current workspace contents with another workspace is available.
- Moving a window to an empty workspace on another monitor is available.
- Moving the focused window to another monitor without following keeps keyboard
focus on the original monitor.
- Moving the focused window to another monitor and following it moves keyboard
focus to the destination monitor.
- Hidden/special workspaces exist for scratchpad state.
- Hidden/special workspaces exist for minimized state.
- Hidden/special workspaces are excluded from ordinary workspace cycling.
- Hidden/special workspaces are excluded from the status bar's normal workspace
list.
## Directional Navigation
Required behavior:
- Directional window focus is available.
- Directional window swapping or movement is available.
- Directional move-to-monitor is available while preserving useful focus.
- Directional monitor focus is available.
- Directional window movement between monitors is available.
- Moving the focused window to an empty workspace on the monitor in a direction
remains required behavior, but it should not require an extra `Hyper`
modifier beyond `Shift`.
- `Super+w/a/s/d` focuses windows directionally.
- `Super+Shift+w/a/s/d` swaps or moves the focused window directionally.
- `Super+Ctrl+w/a/s/d` moves the focused window to the monitor in that
direction while preserving useful focus.
- `Super+Ctrl+Shift+w/a/s/d` moves the focused window to an empty workspace on
the monitor in that direction.
- `Hyper+w/a/s/d` focuses monitors directionally.
- `Hyper+Shift+w/a/s/d` swaps or moves windows between monitors directionally.
- Directional focus in tabbed/fullscreen mode should cycle predictably through
windows even though their screen geometry overlaps.
Important behavior:
- Keyboard resize remains available, but it should not displace the directional
move-to-monitor binding.
## Layouts
Required behavior:
- Tiling is dynamic.
- Primary layout is equal-width vertical columns.
- Scrolling layouts are not acceptable.
- All ordinary splits are vertical.
- Adding windows dynamically redistributes all tiled windows evenly.
- Removing windows dynamically redistributes all tiled windows evenly.
- Ordinary use should not require manually managing a split tree.
- Tabbed/fullscreen-style monocle layout is available.
- Directional window navigation bindings continue to switch windows in
tabbed/fullscreen mode.
- The important layouts are columns and tabbed/fullscreen.
- Dialogs float.
- Dialogs are centered.
- There is a command to jump directly to the columns layout and one to jump
directly to the tabbed/fullscreen layout.
- Layout state is per workspace when the compositor supports it.
Important behavior:
- One-window workspaces should have no visible gaps or use smart gaps.
Nice behavior:
- Gaps can be toggled.
- Fullscreen can be toggled.
- Smart borders can be toggled.
- Layout-related modifiers remain available for experiments.
- Inactive windows are slightly dimmed when supported.
## Overview and Discovery
Required behavior:
- There is an expose-style way to inspect open windows or workspaces before
jumping.
- There is a rofi-style window picker.
- Window picker entries show icons.
- Window picker entries show titles.
- Window picker entries show workspace labels.
- Go-to-window focuses the selected window wherever it currently lives.
- Bring-window moves a selected non-visible window to the current workspace and
focuses it.
- Replace-window swaps the focused window with a selected window where feasible.
Important behavior:
- Overview supports both "go" and "bring" workflows.
- Window switchers hide scratchpad windows unless the user is explicitly using a
scratchpad picker.
- Window switchers hide minimized windows unless the user is explicitly using a
minimized picker.
- Window switchers hide internal windows.
- Go/bring actions unminimize selected windows when needed.
## Scratchpads
Required behavior:
- A named scratchpad exists for element.
- A named scratchpad exists for gmail.
- A named scratchpad exists for htop.
- A named scratchpad exists for messages.
- A named scratchpad exists for slack.
- A named scratchpad exists for spotify.
- A named scratchpad exists for transmission.
- A named scratchpad exists for volume.
- Scratchpads appear near-fullscreen and centered by default.
- Toggling a scratchpad deactivates fullscreen/tabbed state first.
- Scratchpads are hidden from normal workspace and window listings.
Important behavior:
- A dropdown terminal scratchpad exists.
- Scratchpad matching handles delayed class/title assignment.
- Scratchpad behavior is robust when the app is already running.
- Scratchpad behavior is robust when the app is minimized.
- Scratchpad behavior is robust when the app is on another workspace.
## Minimization
Required behavior:
- Focused window can be minimized.
- Last minimized window can be restored to the current workspace and focused.
- Minimized windows are excluded from normal layout.
- Minimized windows are excluded from ordinary go/bring lists.
Important behavior:
- A minimized picker mode exists.
- Restore-all-minimized exists.
- Other classes in the current workspace can be minimized.
- Windows of the focused class can be restored.
- All minimized windows can be restored.
## Class-Aware Workflows
Important behavior:
- Gather all windows of the focused class onto the current workspace.
- Raise-or-spawn exists for the browser.
- Window menus show class.
- Window menus show title.
- Window menus show workspace.
- Window menus show icon.
## Status Bar Contract
Required behavior:
- The status bar can list normal workspaces.
- The status bar can identify the active workspace per monitor.
- The status bar can list windows per workspace.
- The status bar can expose class hints for each listed window.
- The status bar can expose title for each listed window.
- The status bar can expose active state for each listed window.
- The status bar can expose minimized state when available.
- The status bar can expose urgency when available.
- The status bar can expose approximate window position when available.
- Scratchpad workspaces are marked as special or filtered out.
- Minimized workspaces are marked as special or filtered out.
- Internal workspaces are marked as special or filtered out.
Important behavior:
- Workspace labels are stable.
- Workspace icons are stable.
- Window positioning information is available enough for workspace icon strips
and future expose-like views.
- Layout information is available enough for workspace icon strips and future
expose-like views.
- Layout name is exposed if practical.
- Layout state is exposed if practical.
## Session and Utility Behavior
Important behavior:
- Terminal is `ghostty --gtk-single-instance=false`.
- Launcher is `rofi -show drun -show-icons`.
- Run menu is `rofi -show run`.
- Browser raise/spawn behavior exists.
- Border width is effectively zero.
- The status bar can be toggled per monitor.
- Session startup integrates with the normal graphical-session target.
- Session startup integrates with any required session-specific user target.
Nice behavior:
- Wallpaper behavior remains consistent.
- Wallpaper selection uses `Hyper+comma`; `Hyper+w/a/s/d` are reserved for
directional monitor focus.
- Idle behavior remains consistent.
- Lock behavior remains consistent.
- Clipboard history behavior remains consistent.
- Screenshot behavior remains consistent.
- Monitor DDC/input switching remains consistent.
- Rofi utility bindings remain consistent.
- Media keys remain consistent.
## Binding Appendix
Required behavior:
- `Hyper` bindings should remain available from a single physical key where
practical, even if that key emits the fallback chord internally.
- Extra modifiers on `Hyper` are limited to `Shift` for portable bindings.
Important behavior:
- `Hyper` utility bindings must not displace required directional monitor
bindings on `Hyper+w/a/s/d`.
### Core Bindings
Required behavior:
- `Super+p` opens the application launcher.
- `Super+Shift+p` opens the run menu.
- `Super+Shift+Return` opens a terminal.
- `Super+Tab` opens the overview.
- `Super+Shift+Tab` opens the overview in bring-window mode when supported.
- `Super+g` opens the go-to-window picker.
- `Super+b` opens the bring-window picker.
- `Super+Shift+b` opens the replace-window picker.
- `Super+\` toggles to the previous workspace on the current monitor.
- `Super+Shift+e` moves the focused window to the next empty workspace and
follows it. This is the target replacement for the older `Super+Shift+h`
binding.
- `Hyper+e` focuses the next empty workspace.
- `Hyper+5` swaps the current workspace with a selected workspace.
- `Hyper+g` gathers windows of the focused class onto the current workspace.
### Directional Navigation Bindings
Required behavior:
- `Super+w/a/s/d` focuses windows directionally.
- `Super+Shift+w/a/s/d` swaps or moves the focused window directionally.
- `Super+Ctrl+w/a/s/d` moves the focused window to the monitor in that
direction while preserving useful focus.
- `Hyper+w/a/s/d` focuses monitors directionally.
- `Hyper+Shift+w/a/s/d` swaps or moves windows between monitors directionally.
- Moving the focused window to an empty workspace on the monitor in a direction
remains required behavior, but it should not require a `Hyper+Ctrl` binding.
- `Super+z` focuses the next monitor.
- `Super+Shift+z` moves the focused window to the next monitor.
### Numbered Workspace Bindings
Required behavior:
- `Super+1..9` focuses workspace `1..9` on the current monitor.
- `Super+Shift+1..9` sends the focused window to workspace `1..9` without
following it.
- `Super+Ctrl+1..9` sends the focused window to workspace `1..9` and follows
it.
### Scratchpad Bindings
Required behavior:
- `Super+Alt+e` toggles the element scratchpad.
- `Super+Alt+g` toggles the gmail scratchpad.
- `Super+Alt+h` toggles the htop scratchpad.
- `Super+Alt+m` toggles the messages scratchpad.
- `Super+Alt+k` toggles the slack scratchpad.
- `Super+Alt+s` toggles the spotify scratchpad.
- `Super+Alt+t` toggles the transmission scratchpad.
- `Super+Alt+v` toggles the volume scratchpad.
Important behavior:
- `Super+Alt+grave` toggles the dropdown terminal scratchpad.
- `Super+Alt+c` raises or starts the browser.
- `Super+Alt+Return` enters the minimized-window picker or restores minimized
windows, depending on environment support.
- `Super+Alt` is reserved for app-specific raise/spawn, scratchpad, and
scratchpad-adjacent bindings.
### Utility Bindings
Required behavior:
- `Hyper+v` opens clipboard history with a rofi-backed clipboard command
such as `greenclip print` or `cliphist`.
- `Hyper+p` opens the password picker with `rofi-pass`.
- `Hyper+h` opens the screenshot tool with the compositor/session-appropriate
screenshot command.
- `Hyper+c` opens a shell command prompt with `shell_command.sh`.
- `Hyper+x` opens the command picker with `rofi_command.sh`.
- `Hyper+k` opens the process killer with `rofi_kill_process.sh`.
- `Hyper+Shift+k` opens the kill-all/process-tree killer with
`rofi_kill_all.sh`.
- `Hyper+r` opens the systemd/service menu with `rofi-systemd`.
- `Hyper+slash` toggles the status bar with the status-bar-appropriate command.
- `Hyper+backslash` toggles the monitor input with `mpg341cx_input toggle`.
- `Hyper+i` opens the audio input selector with `rofi_select_input.hs`.
- `Hyper+o` opens the audio output selector with `rofi_paswitch`.
- `Hyper+y` opens the agentic skill picker with `rofi_agentic_skill`.
- `Hyper+Shift+l` locks the session with the compositor/session-appropriate
locker.
Important behavior:
- Wallpaper selection is available under `Hyper` via `rofi_wallpaper.sh`, but
its exact key must avoid the required `Hyper+w/a/s/d` directional monitor
bindings.
- Expose-style overview remains available as a utility binding using the
compositor-appropriate implementation.
- Session-destructive operations use shifted or otherwise harder-to-hit
variants.

View File

@@ -1,124 +0,0 @@
# Agentic Session Preferences
## Multiplexer session titling
- If the `TMUX` or `ZELLIJ` environment variable is set, treat this chat as the controller for the current tmux or zellij session.
- Use `set_multiplexer_title '<project> - <task>'` to update the title. The command detects tmux vs. zellij internally, prefers tmux when both are present, and no-ops outside a multiplexer.
- Maintain a session/window/pane title that updates when the task focus changes substantially.
- Prefer automatic titling: infer a concise <task> from the current user request and context without asking.
- Title format: "<project> - <task>".
- <project> is the basename of the current project directory.
- Prefer git repo root basename if available; otherwise use basename of the current working directory.
- <task> is a short, user-friendly description of what we are doing.
- Ask for a short descriptive <task> only when the task is ambiguous or you are not confident in an inferred title.
- When the task changes substantially, update the <task> automatically if clear; otherwise ask for an updated <task>.
- When a title is provided or updated, immediately run `set_multiplexer_title '<project> - <task>'`; do not call raw tmux or zellij rename commands unless debugging the helper itself.
- For Claude Code sessions, a UserPromptSubmit hook will also update titles automatically based on the latest prompt.
## Pane usage
- Do not create extra panes or windows unless the user asks.
## Git worktrees
- Default to creating git worktrees under a project-local `.worktrees/` directory at the repository root.
- For a repository at `<repo_root>`, use worktree paths like `<repo_root>/.worktrees/<task-or-branch>`.
- Create `.worktrees/` if needed before running `git worktree add`.
- Only use a non-`.worktrees/` location when the user explicitly asks for a different path.
## NixOS workflow
- This system is managed with a Nix flake at `~/dotfiles/nixos`.
- Use `just switch` from that directory for rebuilds instead of plain `nixos-rebuild`.
- Host configs live under `machines/`; choose the appropriate host when needed.
## Ad-hoc utilities via Nix
- If you want to use a CLI utility you know about but it is not currently available on PATH, prefer using `nix run` / `nix shell` to get it temporarily rather than installing it globally.
- Use `nix run` for a single command:
nix run nixpkgs#ripgrep -- rg -n "pattern" .
- Use `nix shell` when you need multiple tools available for a short sequence of commands:
nix shell nixpkgs#{jq,ripgrep} --command bash -lc 'rg -n "pattern" . | head'
- If you are not sure what the package is called in nixpkgs, use:
nix search nixpkgs <name-or-keyword>
## Personal Information
- Full Legal Name: Ivan Anthony Malison
- Email: IvanMalison@gmail.com
- Country of Citizenship: United States of America
- Birthday: August 2, 1990 (1990-08-02)
- Address: 100 Broderick St APT 401, San Francisco, CA 94117, United States
- Employer: Railbird Inc.
- GitHub: colonelpanic8
- Phone: 301-244-8534
- Primary Credit Card: Chase-Reserve
## Repository Overview
This is an org-mode repository containing personal task management, calendars, habits, and project tracking files. It serves as the central hub for Ivan's personal organization.
## Available Tools
### Chrome DevTools MCP
A browser automation MCP is available for interacting with web pages. Use it to:
- Navigate to websites and fill out forms
- Take screenshots and snapshots of pages
- Click elements, type text, and interact with web UIs
- Read page content and extract information
- Automate multi-step web workflows (booking, purchasing, form submission, etc.)
### Google Workspace CLI (`gws`)
The local `gws` CLI is available for Google Workspace operations. Use it to:
- Search, read, and send Gmail messages
- Manage Gmail labels and filters
- Download attachments and inspect message payloads
- Access Drive, Calendar, Docs, Sheets, and other Google Workspace APIs
## Credentials via `pass`
Many credentials and personal details are stored in `pass` (the standard unix password manager). There are hundreds of entries covering a wide range of things, so always search before asking the user for information. Use `pass find <keyword>` to search and `pass show <entry>` to retrieve values.
Examples of what's stored:
- Personal documents - driver's license, passport number, etc.
- Credit/debit cards - card numbers, expiration, CVV for various cards
- Banking - account numbers, online banking logins
- Travel & loyalty - airline accounts, hotel programs, CLEAR, etc.
- Website logins - credentials for hundreds of services
- API keys & tokens - GitHub, various services
- The store is regularly updated with new entries. Always do a dynamic lookup with `pass find` rather than assuming what's there.
- Provide credentials to tools/config at runtime via environment variables or inline `pass` usage instead of committing them.
- Never hardcode credentials or store them in plain text files.
## Guidelines
- When filling out forms or making purchases, pull personal info from this file and credentials from `pass` rather than asking the user to provide them.
- For web tasks, prefer using the Chrome DevTools MCP to automate interactions directly.
- For email tasks, prefer using `gws gmail` over navigating to Gmail in the browser.
- If a task requires a credential not found in `pass`, ask the user rather than guessing.
- This repo's org files (gtd.org, calendar.org, habits.org, projects.org) contain task and scheduling data. The org-agenda-api skill/service can also be used to query agenda data programmatically.
## Project links (local symlink index)
- Paths in this section are relative to this file's directory (`dotfiles/agents/`).
- Keep a local symlink index under `./project-links/` for projects that are frequently referenced.
- Treat these links as machine-local discovery state maintained by agents (do not commit machine-specific targets).
- Reuse existing symlinks first. If a link is missing or stale, search for the repo, then update the link with:
ln -sfn "<absolute-path-to-repo>" "./project-links/<link-name>"
- If a project cannot be found quickly, do a targeted search (starting from likely roots) and only then widen the search.
## Project constellation guides
- Keep per-constellation context in `./project-guides/` and keep this file minimal.
- When a request involves one of these projects:
- Open the guide first.
- If a mentioned repo/package name matches a guide's related-project list, open that guide even if the user did not name the constellation explicitly.
- Ensure required links exist under `./project-links/`.
- If links are missing, run a targeted search from likely roots, then create/update the symlink.
- Guide index:
- `./project-guides/mova-org-agenda-api.md`
- `./project-guides/taffybar.md`
- `./project-guides/railbird.md`
- `./project-guides/org-emacs-packages.md`

View File

@@ -1,61 +0,0 @@
#!/usr/bin/env bash
# UserPromptSubmit hook: derive a "<project> - <task>" session title from the
# JSON hook payload on stdin and apply it with set_multiplexer_title.
set -euo pipefail
# Read the entire JSON payload from stdin.
input=$(cat)
# Extract "cwd" and "prompt" from the payload with Python, NUL-delimited so
# mapfile can split them safely even if either value contains newlines.
mapfile -d '' -t parsed < <(PAYLOAD="$input" python3 - <<'PY'
import json, os, sys
try:
    data = json.loads(os.environ.get("PAYLOAD", ""))
except Exception:
    data = {}
cwd = data.get("cwd") or os.getcwd()
prompt = (data.get("prompt") or "").strip()
sys.stdout.write(cwd)
sys.stdout.write("\0")
sys.stdout.write(prompt)
sys.stdout.write("\0")
PY
)
cwd="${parsed[0]:-}"
prompt="${parsed[1]:-}"
# Fall back to the hook's own working directory when the payload had no cwd.
if [[ -z "${cwd}" ]]; then
  cwd="$PWD"
fi
# <project> is the git repo root basename when available, else the cwd basename.
project_root=$(git -C "$cwd" rev-parse --show-toplevel 2>/dev/null || true)
if [[ -n "$project_root" ]]; then
  project=$(basename "$project_root")
else
  project=$(basename "$cwd")
fi
# Collapse the prompt's first line into single-spaced, trimmed text.
prompt_first_line=$(printf '%s' "$prompt" | head -n 1 | tr '\n' ' ' | sed -e 's/[[:space:]]\+/ /g' -e 's/^ *//; s/ *$//')
lower=$(printf '%s' "$prompt_first_line" | tr '[:upper:]' '[:lower:]')
# Skip retitling for empty or trivial acknowledgement prompts.
case "$lower" in
  ""|"ok"|"okay"|"thanks"|"thx"|"cool"|"yep"|"yes"|"no"|"sure"|"done"|"k")
    exit 0
    ;;
esac
task="$prompt_first_line"
if [[ -z "$task" ]]; then
  task="work"
fi
# Trim to a reasonable length for multiplexer UI labels.
if [[ ${#task} -gt 60 ]]; then
  task="${task:0:57}..."
fi
title="$project - $task"
# Prefer the helper on PATH; fall back to the repo-relative copy next to this hook.
if command -v set_multiplexer_title >/dev/null 2>&1; then
  set_multiplexer_title "$title"
else
  hook_dir=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)
  "$hook_dir/../../lib/functions/set_multiplexer_title" "$title"
fi

View File

@@ -1,29 +0,0 @@
# Mova / org-agenda-api constellation
## Scope
- Use this guide for requests involving the mova constellation, including `org-agenda-api`.
- Primary anchor is the mova root repo; start there and branch out.
## Related packages/projects (trigger list)
- If any of these names are mentioned, open this guide for context.
- `mova-dev`: coordination repo for the mova ecosystem and cross-repo workflows.
- `mova`: React Native app (iOS/Android/Web).
- `org-agenda-api`: Emacs Lisp HTTP API and deployment container.
- `org-window-habit`: habit-tracking logic used by org workflows.
- `org-wild-notifier`: org notification logic and scheduling behavior.
- `dotfiles` (within mova-dev context): infra/config and deployment glue for org-agenda-api.
## Symlink targets
- `./project-links/mova-dev` -> mova constellation root.
## Discovery hints
- Check likely roots first, especially `~/Projects`.
- Common local path is `~/Projects/mova-dev`, but do not assume it exists.
- If the symlink is missing or stale, search by directory name first, then by repo names.
## Read-first docs
- `./project-links/mova-dev/README.md`
- `./project-links/mova-dev/org-agenda-api/README.md` (if present)
## Notes
- Prefer treating mova root docs as canonical project context.

View File

@@ -1,25 +0,0 @@
# Org / Emacs package constellation
## Scope
- Use this guide for org-related package repos, including `org-window-habit`.
- This is especially relevant when repos are managed through local Emacs package trees.
## Related packages/projects (trigger list)
- If any of these names are mentioned, open this guide for context.
- `org-window-habit`: org habit-tracking package/repo.
- `org-wild-notifier`: org notification package/repo.
- `org-agenda-api`: Emacs Lisp HTTP API project that loads org package deps.
- `elpaca`: Emacs package manager tree where local checkouts may live.
- `elpa`: traditional Emacs package install tree (fallback search area).
## Symlink targets
- `./project-links/org-window-habit` -> org-window-habit repo/root.
## Discovery hints
- Start with Emacs roots, especially `~/.emacs.d`.
- Prefer checking package manager trees (including `elpaca`) before broader searches.
- Common pattern is nested repos under `~/.emacs.d` package directories.
## Read-first docs
- `./project-links/org-window-habit/README.md`
- `./project-links/org-window-habit/README.org` (if present)

View File

@@ -1,28 +0,0 @@
# Railbird constellation
## Scope
- Use this guide for requests involving railbird backend/main repo and railbird mobile app work.
## Related packages/projects (trigger list)
- If any of these names are mentioned, open this guide for context.
- `railbird`: primary backend/main railbird repository.
- `railbird-mobile`: primary mobile app repository.
- `railbird2`: alternate/new-generation backend repo.
- `railbird-mobile2`: alternate/new-generation mobile repo.
- `railbird-docs`: documentation repository.
- `railbird-landing-page`: marketing/landing site repository.
- `railbird-alert-tuning`: alert/tuning and operational experimentation repo.
- `railbird-agents-architecture`: architecture notes/prototypes for agent workflows.
## Symlink targets
- `./project-links/railbird` -> primary railbird repo.
- `./project-links/railbird-mobile` -> railbird mobile app repo.
## Discovery hints
- Start from `~/Projects`.
- Common backend location is `~/Projects/railbird`.
- Mobile repo often also lives under `~/Projects`, but name/path may vary by machine.
## Read-first docs
- `./project-links/railbird/README.md`
- `./project-links/railbird-mobile/README.md` (if present)

View File

@@ -1,30 +0,0 @@
# Taffybar constellation
## Scope
- Use this guide for requests involving taffybar itself or local taffybar configuration.
## Related packages/projects (trigger list)
- If any of these names are mentioned, open this guide for context.
- `taffybar`: top-level desktop bar library/app.
- `imalison-taffybar`: personal taffybar configuration package/repo.
- `gtk-sni-tray`: StatusNotifier tray integration for taffybar.
- `gtk-strut`: X11/WM strut handling used by taffybar ecosystem.
- `status-notifier-item`: StatusNotifier protocol/types library.
- `dbus-menu`: DBus menu protocol support used by tray integrations.
- `dbus-hslogger`: DBus logging helper used in ecosystem packages.
## Symlink targets
- `./project-links/taffybar-main` -> main taffybar repo.
- `./project-links/taffybar-config` -> local taffybar config root.
## Discovery hints
- Start with `~/.config/taffybar`.
- Common layout is:
- config root at `~/.config/taffybar`
- main repo at `~/.config/taffybar/taffybar`
- Other taffybar-related repos may exist elsewhere; find them from docs in the main repo.
## Read-first docs
- `./project-links/taffybar-main/README.md`
- `./project-links/taffybar-config/README.md` (if present)
- `./project-links/taffybar-config/AGENTS.md` (if present)

View File

@@ -1,2 +0,0 @@
*
!.gitignore

View File

@@ -1,2 +0,0 @@
.system/
codex-primary-runtime/

View File

@@ -1,254 +0,0 @@
---
name: disk-space-cleanup
description: Investigate and safely reclaim disk space on this machine, especially on NixOS systems with heavy Nix, Rust/Haskell, Docker, and Podman usage. Use when disk is low, builds fail with no-space errors, /nix/store appears unexpectedly large, or the user asks for easy cleanup wins without deleting important data.
---
# Disk Space Cleanup
Reclaim disk space with a safety-first workflow: investigate first, run obvious low-risk cleanup wins, then do targeted analysis for larger opportunities.
Bundled helpers:
- `scripts/rust_target_dirs.py`: inventory and guarded deletion for explicit Rust `target/` directories
- `references/rust-target-roots.txt`: machine-specific roots for Rust artifact scans
- `references/ignore-paths.md`: machine-specific excludes for `du`/`ncdu`
## Execution Default
- Start with non-destructive investigation and quick sizing.
- Prioritize easy wins first (`nix-collect-garbage`, container prune, Cargo artifacts).
- Propose destructive actions with expected impact before running them.
- Run destructive actions only after confirmation, unless the user explicitly requests immediate execution of obvious wins.
- Capture new reusable findings by updating this skill before finishing.
## Workflow
1. Establish current pressure and biggest filesystems
2. Run easy cleanup wins
3. Inventory Rust build artifacts and clean the right kind of target
4. Investigate remaining heavy directories with `ncdu`/`du`
5. Investigate `/nix/store` roots when large toolchains still persist
6. Summarize reclaimed space and next candidate actions
7. Record new machine-specific ignore paths, Rust roots, or cleanup patterns in this skill
## Step 1: Baseline
Run a quick baseline before deleting anything:
```bash
df -h /
df -h /home
df -h /nix
```
Optionally add a quick home-level size snapshot:
```bash
du -xh --max-depth=1 "$HOME" 2>/dev/null | sort -h
```
## Step 2: Easy Wins
Use these first when the user wants fast, low-effort reclaiming:
```bash
sudo -n nix-collect-garbage -d
sudo -n docker system prune -a
sudo -n podman system prune -a
```
Notes:
- Add `--volumes` only when the user approves deleting unused volumes.
- Re-check free space after each command to show impact.
- Prefer `sudo -n` first so cleanup runs fail fast instead of hanging on password prompts.
- If root is still tight after these, run app cache cleaners before proposing raw `rm -rf`:
```bash
uv cache clean
pip cache purge
yarn cache clean
npm cache clean --force
```
## Step 3: Rust Build Artifact Cleanup
Do not start with a blind `find ~ -name target` or with hard-coded roots that may miss worktrees. Inventory explicit `target/` directories first using the bundled helper and the machine-specific root list in `references/rust-target-roots.txt`.
Inventory the biggest candidates:
```bash
python /home/imalison/dotfiles/dotfiles/agents/skills/disk-space-cleanup/scripts/rust_target_dirs.py list --min-size 500M --limit 30
```
Focus on stale targets only:
```bash
python /home/imalison/dotfiles/dotfiles/agents/skills/disk-space-cleanup/scripts/rust_target_dirs.py list --min-size 1G --older-than 14 --output tsv
```
Use `cargo-sweep` when the repo is still active and you want age/toolchain-aware cleanup inside a workspace:
```bash
nix run nixpkgs#cargo-sweep -- sweep -d -r -t 30 <workspace-root>
nix run nixpkgs#cargo-sweep -- sweep -r -t 30 <workspace-root>
nix run nixpkgs#cargo-sweep -- sweep -d -r -i <workspace-root>
nix run nixpkgs#cargo-sweep -- sweep -r -i <workspace-root>
```
Use direct `target/` deletion when inventory shows a discrete stale directory, especially for inactive repos or project-local worktrees. The helper only deletes explicit paths named `target` that are beneath configured roots and a Cargo project:
```bash
python /home/imalison/dotfiles/dotfiles/agents/skills/disk-space-cleanup/scripts/rust_target_dirs.py delete /abs/path/to/target
python /home/imalison/dotfiles/dotfiles/agents/skills/disk-space-cleanup/scripts/rust_target_dirs.py delete /abs/path/to/target --yes
```
Recommended sequence:
1. Run `rust_target_dirs.py list` to see the largest `target/` directories across `~/Projects`, `~/org`, `~/dotfiles`, and other configured roots.
2. For active repos, prefer `cargo-sweep` from the workspace root.
3. For inactive repos, abandoned branches, and `.worktrees/*/target`, prefer guarded direct deletion of the explicit `target/` directory.
4. Re-run the list command after each deletion round to show reclaimed space.
Machine-specific note:
- Project-local `.worktrees/*/target` directories are common cleanup wins on this machine and are easy to miss with the old hard-coded workflow.
## Step 4: Investigation with `ncdu` and `du`
Avoid mounted or remote filesystems when profiling space. Load ignore patterns from `references/ignore-paths.md`.
Use one-filesystem scans to avoid crossing mounts:
```bash
ncdu -x "$HOME"
sudo ncdu -x /
```
When excluding known noisy mountpoints:
```bash
ncdu -x --exclude "$HOME/keybase" "$HOME"
sudo ncdu -x --exclude /keybase --exclude /var/lib/railbird /
```
If `ncdu` is missing, use:
```bash
nix run nixpkgs#ncdu -- -x "$HOME"
```
For reusable, mount-safe snapshots on this machine, prefer the local wrapper:
```bash
safe_ncdu /
sudo -n env HOME=/home/imalison safe_ncdu /
safe_ncdu /nix/store
safe_ncdu top ~/.cache/ncdu/latest-root.json.zst 30 /home/imalison
safe_ncdu open ~/.cache/ncdu/latest-root.json.zst
```
`safe_ncdu` writes compressed ncdu exports under `~/.cache/ncdu`, records the exclude list beside the export, excludes mounted descendants of the scan root, and supports follow-up `top` queries without rescanning.
For quick, non-blocking triage on very large trees, prefer bounded probes:
```bash
timeout 30s du -xh --max-depth=1 "$HOME/.cache" 2>/dev/null | sort -h
timeout 30s du -xh --max-depth=1 "$HOME/.local/share" 2>/dev/null | sort -h
```
Machine-specific heavy hitters seen in practice:
- `~/.cache/uv` can exceed 20G and is reclaimable with `uv cache clean`.
- `~/.cache/pypoetry` can exceed 7G across artifacts, repository cache, and virtualenvs; inspect first, then use Poetry cache commands or targeted virtualenv removal.
- `~/.cache/google-chrome` can exceed 8G across multiple Chrome profiles; close Chrome before clearing profile cache directories.
- `~/.cache/spotify` can exceed 10G; treat as optional app-cache cleanup.
- `~/.gradle` can exceed 8G, mostly under `caches/`; prefer Gradle-aware cleanup and expect dependency redownloads.
- `~/.local/share/picom/debug.log` can grow past 15G when verbose picom debugging is enabled or crashes leave a stale log behind; if `picom` is not running, deleting or truncating the log is a high-yield low-risk win.
- `~/.local/share/Trash` can exceed several GB; empty only with user approval.
- `/var/lib/private/gitea-runner` can exceed 50G and is not visible to an unprivileged `ncdu /` scan; use `sudo -n env HOME=/home/imalison safe_ncdu /` when `/var` looks undercounted.
- Validated cleanup pattern: stop `gitea-runner-nix.service`, remove cache/work directories under `/var/lib/private/gitea-runner` (`.cache`, `.gradle`, `action-cache-dir`, `workspace`, stale nested `gitea-runner`, and nested `nix/.cache`/`nix/.local`), recreate `action-cache-dir`, `workspace`, and `.cache` owned by `gitea-runner:gitea-runner`, then restart the service.
- Preserve registration/config-like files such as `/var/lib/private/gitea-runner/nix/.runner`, `/var/lib/private/gitea-runner/nix/.labels`, `/var/lib/private/gitea-runner/.docker/config.json`, and SSH/Kube material.
- `~/Projects/*/target` directories can dominate home usage. Recent example candidates included stale `target/` directories under `scrobble-scrubber`, `http-client-vcr`, `http-client`, `subtr-actor`, `http-types`, `subtr-actor-py`, `sdk`, and `async-h1`.
## Step 5: `/nix/store` Deep Dive
When `/nix/store` is still large after GC, inspect root causes instead of deleting random paths.
Useful commands:
```bash
nix path-info -Sh /nix/store/* 2>/dev/null | sort -h | tail -n 50
nix-store --gc --print-roots
```
Avoid `du -sh /nix/store` as a first diagnostic; it can be very slow on large stores.
For repeated GHC/Rust toolchain copies:
```bash
nix path-info -Sh /nix/store/* 2>/dev/null | rg '(ghc|rustc|rust-std|cargo)'
nix-store --gc --print-roots | rg '(ghc|rust)'
```
Resolve why a path is retained:
```bash
/home/imalison/dotfiles/dotfiles/lib/functions/find_store_path_gc_roots /nix/store/<store-path>
nix why-depends <consumer-store-path> <dependency-store-path>
```
Common retention pattern on this machine:
- Many `.direnv/flake-profile-*` symlinks under `~/Projects` and worktrees keep `nix-shell-env`/`ghc-shell-*` roots alive.
- Old taffybar constellation repos under `~/Projects` can pin large Haskell closures through `.direnv` and `result` symlinks. Deleting `gtk-sni-tray`, `status-notifier-item`, `dbus-menu`, `dbus-hslogger`, and `gtk-strut` and then rerunning `nix-collect-garbage -d` reclaimed about 11G of store data in one validated run.
- `find_store_path_gc_roots` is especially useful for proving GHC retention: many large `ghc-9.10.3-with-packages` paths are unique per project, while the base `ghc-9.10.3` and docs paths are shared.
- NixOS system generations and a repo-root `nixos/result` symlink can pin multiple Android Studio and Android SDK versions. Check `/nix/var/nix/profiles/system-*-link`, `/run/current-system`, `/run/booted-system`, and `~/dotfiles/nixos/result` before assuming Android paths are pinned by project shells.
- `~/Projects/railbird-mobile/.direnv/flake-profile-*` can pin large Android SDK system images. Removing stale direnv profiles there is a more targeted first step than deleting Android store paths directly.
- For a repeatable `/nix/store` `ncdu` snapshot without driving the TUI, export and inspect it:
```bash
ncdu -0 -x -c -o /tmp/nix-store.ncdu.json.zst /nix/store
zstdcat /tmp/nix-store.ncdu.json.zst | jq 'def sumd: if type=="array" then ((.[0].dsize // 0) + ([.[1:][] | sumd] | add // 0)) elif type=="object" then (.dsize // 0) else 0 end; .[3] | sumd'
```
- `nix-store --gc --print-dead` plus the Nix SQLite database is a fast way to estimate immediate GC wins before deleting anything:
```bash
nix-store --gc --print-dead > /tmp/nix-dead-paths.txt
printf '%s\n' '.mode list' '.separator |' 'create temp table dead(path text);' \
'.import /tmp/nix-dead-paths.txt dead' \
'select count(*), sum(narSize) from ValidPaths join dead using(path);' \
| nix shell nixpkgs#sqlite --command sqlite3 /nix/var/nix/db/db.sqlite
```
- Quantify before acting:
```bash
find ~/Projects -type l -path '*/.direnv/flake-profile-*' | wc -l
find ~/Projects -type d -name .direnv | wc -l
nix-store --gc --print-roots | rg '/\.direnv/flake-profile-' | awk -F' -> ' '{print $1 "|" $2}' \
| while IFS='|' read -r root target; do \
nix-store -qR "$target" | rg '^/nix/store/.+-ghc-[0-9]'; \
done | sort | uniq -c | sort -nr | head
```
- If counts are high and the projects are inactive, propose targeted `.direnv` cleanup for user confirmation.
## Safety Rules
- Do not delete user files directly unless explicitly requested.
- Prefer cleanup tools that understand ownership/metadata (`nix`, `docker`, `podman`, `cargo-sweep`) over `rm -rf`.
- For Rust build artifacts, deleting an explicit directory literally named `target` is acceptable when it is discovered by the bundled helper; Cargo will rebuild it.
- Present a concise “proposed actions” list before high-impact deletes.
- If uncertain whether data is needed, stop at investigation and ask.
## Learning Loop (Required)
Treat this skill as a living playbook.
After each disk cleanup task:
1. Add newly discovered mountpoints or directories to ignore in `references/ignore-paths.md`.
2. Add newly discovered Rust repo roots in `references/rust-target-roots.txt`.
3. Add validated command patterns or caveats discovered during the run to this `SKILL.md`.
4. Keep instructions practical and machine-specific; remove stale guidance.

View File

@@ -1,3 +0,0 @@
interface:
display_name: "Disk Space Cleanup"
short_description: "Find safe disk-space wins on NixOS hosts"

View File

@@ -1,31 +0,0 @@
# Ignore Paths for Disk Investigation
Use this file to track mountpoints or directories that should be excluded from `ncdu`/`du` scans because they are remote, special-purpose, or noisy.
## Known Ignores
- `$HOME/keybase`
- `$HOME/.cache/keybase`
- `$HOME/.local/share/keybase`
- `$HOME/.config/keybase`
- `/keybase`
- `/var/lib/railbird`
- `/run/user/*/doc` (FUSE portal mount; machine-specific example observed: `/run/user/1004/doc`)
## Discovery Commands
List mounted filesystems and spot special mounts:
```bash
findmnt -rn -o TARGET,FSTYPE,SOURCE
```
Target likely remote/special mounts:
```bash
findmnt -rn -o TARGET,FSTYPE,SOURCE | rg '(keybase|fuse|rclone|s3|railbird)'
```
## Maintenance Rule
When a disk cleanup run encounters a mount or path that should be ignored in future runs, add it here immediately with a short note.

View File

@@ -1,6 +0,0 @@
# One absolute path per line. Comments are allowed.
# Keep this list machine-specific and update it when Rust repos move.
/home/imalison/Projects
/home/imalison/org
/home/imalison/dotfiles

View File

@@ -1,271 +0,0 @@
#!/usr/bin/env python3
import argparse
import json
import os
import shutil
import subprocess
import sys
import time
from pathlib import Path
# Directory containing this script; used to locate bundled reference data.
SCRIPT_DIR = Path(__file__).resolve().parent
# Default machine-specific list of roots to scan for Rust target directories.
DEFAULT_ROOTS_FILE = SCRIPT_DIR.parent / "references" / "rust-target-roots.txt"
def parse_size(value: str) -> int:
    """Parse a human-readable size string into a byte count.

    Accepts an optional unit suffix (B/K/KB/M/MB/G/GB/T/TB, case-insensitive)
    with 1024-based multipliers; a bare number is returned as-is.

    Raises ValueError if the numeric portion cannot be parsed as a float.
    """
    text = value.strip().upper()
    units = {
        "B": 1,
        "K": 1024,
        "KB": 1024,
        "M": 1024**2,
        "MB": 1024**2,
        "G": 1024**3,
        "GB": 1024**3,
        "T": 1024**4,
        "TB": 1024**4,
    }
    # Try longer suffixes first: the old dict-order scan matched "B" before
    # "KB"/"MB"/"GB"/"TB", leaving a trailing unit letter in the numeric part
    # and crashing on float("100K") for inputs like "100KB".
    for suffix in sorted(units, key=len, reverse=True):
        if text.endswith(suffix):
            number = text[: -len(suffix)].strip()
            return int(float(number) * units[suffix])
    return int(float(text))
def human_size(num_bytes: int) -> str:
    """Render a byte count as a short human-readable string, e.g. '1.5G'."""
    amount = float(num_bytes)
    units = ["B", "K", "M", "G", "T"]
    index = 0
    # Scale down by 1024 until the value fits, capping at the largest unit.
    while amount >= 1024 and index < len(units) - 1:
        amount /= 1024
        index += 1
    if units[index] == "B":
        return f"{int(amount)}B"
    return f"{amount:.1f}{units[index]}"
def is_relative_to(path: Path, root: Path) -> bool:
    """Return True when path lies at or beneath root (purely lexical check)."""
    try:
        path.relative_to(root)
    except ValueError:
        return False
    return True
def load_roots(roots_file: Path, cli_roots: list[str]) -> list[Path]:
    """Collect scan roots from CLI args and the roots file.

    CLI roots come first, then file entries (with `#` comments stripped).
    Only roots that exist on disk are kept, deduplicated in order.
    """
    candidates: list[Path] = []
    for raw in cli_roots:
        candidates.append(Path(raw).expanduser().resolve())
    if roots_file.exists():
        for line in roots_file.read_text().splitlines():
            text = line.split("#", 1)[0].strip()
            if text:
                candidates.append(Path(text).expanduser().resolve())
    ordered: list[Path] = []
    seen: set[Path] = set()
    for candidate in candidates:
        if candidate.exists() and candidate not in seen:
            ordered.append(candidate)
            seen.add(candidate)
    return ordered
def du_size_bytes(path: Path) -> int:
    """Return the size of path in bytes as reported by `du -sb`.

    Raises subprocess.CalledProcessError when `du` exits non-zero (for
    example on unreadable directories), because check=True is set.
    """
    result = subprocess.run(
        ["du", "-sb", str(path)],
        check=True,
        capture_output=True,
        text=True,
    )
    # `du` prints "<bytes>\t<path>"; take the leading number.
    return int(result.stdout.split()[0])
def nearest_cargo_root(path: Path, stop_roots: list[Path]) -> str:
    """Find the closest ancestor of path containing a Cargo.toml.

    Walks upward from path's parent; the search stops at any configured
    scan root or the filesystem root. Returns '' when nothing matches.
    """
    boundaries = set(stop_roots)
    ancestor = path.parent
    while ancestor != ancestor.parent:
        # A Cargo.toml at a boundary root still counts: check it first.
        if (ancestor / "Cargo.toml").exists():
            return str(ancestor)
        if ancestor in boundaries:
            return ""
        ancestor = ancestor.parent
    return ""
def discover_targets(roots: list[Path]) -> list[dict]:
    """Walk each root and record every directory literally named 'target'.

    Returns one dict per target directory (path, size, age in days, nearest
    Cargo workspace), sorted largest-first by size. Sizing shells out to
    `du -sb` via du_size_bytes, so this can be slow on big trees.
    """
    results: dict[Path, dict] = {}
    now = time.time()
    for root in roots:
        for current, dirnames, _filenames in os.walk(root, topdown=True):
            if "target" in dirnames:
                target_dir = (Path(current) / "target").resolve()
                # Prune so os.walk never descends into the target tree itself.
                dirnames.remove("target")
                # Skip duplicates (e.g. via symlinked roots) and non-directories.
                if target_dir in results or not target_dir.is_dir():
                    continue
                stat_result = target_dir.stat()
                size_bytes = du_size_bytes(target_dir)
                # Age is whole days since the directory's mtime.
                age_days = int((now - stat_result.st_mtime) // 86400)
                results[target_dir] = {
                    "path": str(target_dir),
                    "size_bytes": size_bytes,
                    "size_human": human_size(size_bytes),
                    "age_days": age_days,
                    "workspace": nearest_cargo_root(target_dir, roots),
                }
    return sorted(results.values(), key=lambda item: item["size_bytes"], reverse=True)
def print_table(rows: list[dict]) -> None:
    """Print rows as an aligned SIZE/AGE/PATH table, or a notice when empty."""
    if not rows:
        print("No matching Rust target directories found.")
        return
    size_width = max(len(row["size_human"]) for row in rows)
    age_width = max(len(str(row["age_days"])) for row in rows)
    print(" ".join(["SIZE".ljust(size_width), "AGE".rjust(age_width), "PATH"]))
    for row in rows:
        size_cell = row["size_human"].ljust(size_width)
        age_cell = str(row["age_days"]).rjust(age_width) + "d"
        print(f"{size_cell} {age_cell} {row['path']}")
def filter_rows(rows: list[dict], min_size: int, older_than: int | None, limit: int | None) -> list[dict]:
filtered = [row for row in rows if row["size_bytes"] >= min_size]
if older_than is not None:
filtered = [row for row in filtered if row["age_days"] >= older_than]
if limit is not None:
filtered = filtered[:limit]
return filtered
def cmd_list(args: argparse.Namespace) -> int:
    """Handle the `list` subcommand: scan roots and print matching targets.

    Returns 1 (with a stderr message) when no scan roots exist, else 0.
    """
    roots = load_roots(Path(args.roots_file).expanduser(), args.root)
    if not roots:
        print("No scan roots available.", file=sys.stderr)
        return 1
    rows = filter_rows(
        discover_targets(roots),
        parse_size(args.min_size),
        args.older_than,
        args.limit,
    )
    if args.output == "json":
        print(json.dumps(rows, indent=2))
    elif args.output == "tsv":
        for row in rows:
            fields = [
                str(row["size_bytes"]),
                str(row["age_days"]),
                row["path"],
                row["workspace"],
            ]
            print("\t".join(fields))
    elif args.output == "paths":
        for row in rows:
            print(row["path"])
    else:
        print_table(rows)
    return 0
def validate_delete_path(path_text: str, roots: list[Path]) -> Path:
    """Validate that path_text names a safe, explicit Rust target directory.

    Raises ValueError when the path is a symlink, is not named 'target',
    is not a directory, lies outside the configured scan roots, or is not
    beneath a Cargo project. resolve(strict=True) raises FileNotFoundError
    for nonexistent paths, as before.
    """
    raw = Path(path_text).expanduser()
    # Check symlink status on the un-resolved path: resolve() follows
    # symlinks, so the old post-resolve is_symlink() check could never fire.
    if raw.is_symlink():
        raise ValueError(f"{raw} is a symlink")
    target = raw.resolve(strict=True)
    if target.name != "target":
        raise ValueError(f"{target} is not a target directory")
    if not target.is_dir():
        raise ValueError(f"{target} is not a directory")
    if not any(is_relative_to(target, root) for root in roots):
        raise ValueError(f"{target} is outside configured scan roots")
    if nearest_cargo_root(target, roots) == "":
        raise ValueError(f"{target} is not beneath a Cargo project")
    return target
def cmd_delete(args: argparse.Namespace) -> int:
    """Handle the `delete` subcommand.

    Validates every requested path first, prints a size summary, and only
    deletes when --yes was supplied; otherwise it is a dry run. Returns 1
    on missing roots or a validation failure, else 0.
    """
    roots = load_roots(Path(args.roots_file).expanduser(), args.root)
    if not roots:
        print("No scan roots available.", file=sys.stderr)
        return 1
    validated: list[Path] = []
    for candidate in args.path:
        try:
            validated.append(validate_delete_path(candidate, roots))
        except ValueError as exc:
            print(str(exc), file=sys.stderr)
            return 1
    combined = sum(du_size_bytes(item) for item in validated)
    print(f"Matched {len(validated)} target directories totaling {human_size(combined)}:")
    for item in validated:
        print(str(item))
    if not args.yes:
        print("Dry run only. Re-run with --yes to delete these target directories.")
        return 0
    for item in validated:
        shutil.rmtree(item)
    print(f"Deleted {len(validated)} target directories.")
    return 0
def build_parser() -> argparse.ArgumentParser:
    """Construct the CLI parser with shared root options and list/delete subcommands."""
    parser = argparse.ArgumentParser(
        description="Inventory and delete Rust target directories under configured roots."
    )
    # Options shared by every subcommand.
    parser.add_argument(
        "--roots-file",
        default=str(DEFAULT_ROOTS_FILE),
        help="Path to the newline-delimited root list.",
    )
    parser.add_argument(
        "--root",
        action="append",
        default=[],
        help="Additional root to scan. May be provided multiple times.",
    )
    subparsers = parser.add_subparsers(dest="command", required=True)

    list_parser = subparsers.add_parser("list", help="List target directories.")
    list_parser.set_defaults(func=cmd_list)
    list_parser.add_argument(
        "--min-size",
        default="0",
        help="Minimum size threshold, for example 500M or 2G.",
    )
    list_parser.add_argument(
        "--older-than",
        type=int,
        help="Only include targets at least this many days old.",
    )
    list_parser.add_argument("--limit", type=int, help="Maximum number of rows to print.")
    list_parser.add_argument(
        "--output",
        choices=["table", "tsv", "json", "paths"],
        default="table",
        help="Output format.",
    )

    delete_parser = subparsers.add_parser("delete", help="Delete explicit target directories.")
    delete_parser.set_defaults(func=cmd_delete)
    delete_parser.add_argument("path", nargs="+", help="One or more target directories to delete.")
    delete_parser.add_argument("--yes", action="store_true", help="Actually delete the paths.")

    return parser
def main() -> int:
    """Entry point: parse CLI arguments and dispatch to the chosen subcommand."""
    args = build_parser().parse_args()
    return args.func(args)
# SystemExit propagates the subcommand's integer result as the process exit code.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -1,111 +0,0 @@
---
name: email-unsubscribe-check
description: Use when user wants to find promotional or unwanted recurring emails to unsubscribe from, or when doing periodic inbox hygiene to identify senders worth unsubscribing from
---
# Email Unsubscribe Check
Scan recent inbox emails to surface promotional, newsletter, and digest senders the user likely wants to unsubscribe from. Actually unsubscribe via browser automation.
## Workflow
```dot
digraph unsubscribe_check {
"Search recent inbox emails" -> "Group by sender domain";
"Group by sender domain" -> "Classify each sender";
"Classify each sender" -> "Obvious unsubscribe?";
"Obvious unsubscribe?" -> "Present to user for confirmation" [label="yes"];
"Obvious unsubscribe?" -> "Borderline?" [label="no"];
"Borderline?" -> "Ask user" [label="yes"];
"Borderline?" -> "Skip" [label="no, personal"];
"Present to user for confirmation" -> "User confirms?";
"User confirms?" -> "Actually unsubscribe" [label="yes"];
"User confirms?" -> "Skip" [label="no"];
"Actually unsubscribe" -> "Mark matching emails read + archive";
"Mark matching emails read + archive" -> "Create Gmail filter";
"Create Gmail filter" -> "Retroactively clean old emails";
}
```
## Execution Default
- Start the workflow immediately when this skill is invoked.
- Do not ask a kickoff question like "should I start now?".
- Default scan window is `newer_than:7d` unless the user already specified a different range.
- Only ask a follow-up question before starting if required information is missing and execution would otherwise be blocked.
- Default user preference: they generally do not want subscription-style email in their inbox.
- For obvious marketing/newsletter/digest mail with a working unsubscribe path, unsubscribe by default without asking for confirmation first.
- Still ask first for borderline cases such as creator subscriptions, professional communities, event platforms, or anything that appears transactional/security-sensitive.
## How to Scan
1. Search recent emails: `newer_than:7d` (or wider if user requests)
2. Identify senders that look promotional/automated/digest
3. Present findings grouped by confidence:
- **Clearly unsubscribeable**: marketing, promos, digests user never engages with
- **Ask user**: newsletters, community content, event platforms (might be wanted)
When the user's standing preference is to keep subscriptions out of the inbox, treat the **Clearly unsubscribeable** bucket as auto-actionable.
## Unsubscribe Execution
For each confirmed sender, do ALL of these:
### 1. Actually unsubscribe via browser (most important step)
Two approaches depending on the sender:
**For emails with unsubscribe links:**
- Read the email via `gws gmail` to find the unsubscribe URL (usually at bottom of email body)
- Navigate to the URL with Chrome DevTools MCP
- Take a snapshot, find the confirmation button/checkbox
- Click through to complete the unsubscribe
- Verify the confirmation page
**For services with email settings pages (Nextdoor, LinkedIn, etc.):**
- Navigate to the service's notification/email settings page
- Log in using credentials from `pass` if needed
- Find and disable all email notification toggles
- Check ALL categories (digests, alerts, promotions, etc.)
### 2. Create Gmail filter as backup
Even after unsubscribing, create a filter to catch stragglers:
```
gws gmail users settings filters create \
--params '{"userId":"me"}' \
--json '{"criteria":{"from":"domain.com"},"action":{"removeLabelIds":["INBOX"]}}'
```
### 3. Mark old emails as read and archive them (minimum hygiene)
After unsubscribing, clean up existing email from the sender.
- At minimum: mark them as read.
- Preferred/default: also archive them (remove `INBOX` label).
Example:
```
gws gmail users messages list --params '{"userId":"me","q":"from:domain.com","maxResults":50}'
gws gmail users messages batchModify \
--params '{"userId":"me"}' \
--json '{"ids":["..."],"removeLabelIds":["UNREAD","INBOX"]}'
```
## Signals That an Email is Unsubscribeable
- "no-reply@" or "newsletter@" sender addresses
- Marketing subject lines: sales, promotions, "don't miss", digests
- Bulk senders: Nextdoor, Yelp, LinkedIn digest, social media notifications
- Community digests the user doesn't engage with
- Financial marketing (not transactional alerts)
- "Your weekly/daily/monthly" summaries
- Messages with explicit unsubscribe/manage-preferences links whose primary purpose is promotional or newsletter delivery
## Signals to NOT Auto-Unsubscribe (Ask First)
- Patreon/creator content
- Event platforms (Luma, Eventbrite, Meetup)
- Professional communities
- Services the user actively uses (even if noisy)
- Transactional emails from wanted services

View File

@@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,25 +0,0 @@
---
name: gh-address-comments
description: Help address review/issue comments on the open GitHub PR for the current branch using gh CLI; verify gh auth first and prompt the user to authenticate if not logged in.
metadata:
short-description: Address comments in a GitHub PR review
---
# PR Comment Handler
Guide to find the open PR for the current branch and address its comments with gh CLI. Run all `gh` commands with elevated network access.
Prereq: ensure `gh` is authenticated (for example, run `gh auth login` once), then run `gh auth status` with escalated permissions (include workflow/repo scopes) so `gh` commands succeed. If sandboxing blocks `gh auth status`, rerun it with `sandbox_permissions=require_escalated`.
## 1) Inspect comments needing attention
- Run `scripts/fetch_comments.py`, which will print out all the comments and review threads on the PR
## 2) Ask the user for clarification
- Number all the review threads and comments and provide a short summary of what would be required to apply a fix for it
- Ask the user which numbered comments should be addressed
## 3) If user chooses comments
- Apply fixes for the selected comments
Notes:
- If gh hits auth/rate issues mid-run, prompt the user to re-authenticate with `gh auth login`, then retry.

View File

@@ -1,6 +0,0 @@
interface:
display_name: "GitHub Address Comments"
short_description: "Address comments in a GitHub PR review"
icon_small: "./assets/github-small.svg"
icon_large: "./assets/github.png"
default_prompt: "Address all actionable GitHub PR review comments in this branch and summarize the updates."

View File

@@ -1,3 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" viewBox="0 0 16 16">
<path fill="currentColor" d="M8 1.3a6.665 6.665 0 0 1 5.413 10.56 6.677 6.677 0 0 1-3.288 2.432c-.333.067-.458-.142-.458-.316 0-.226.008-.942.008-1.834 0-.625-.208-1.025-.45-1.233 1.483-.167 3.042-.734 3.042-3.292a2.58 2.58 0 0 0-.684-1.792c.067-.166.3-.85-.066-1.766 0 0-.559-.184-1.834.683a6.186 6.186 0 0 0-1.666-.225c-.567 0-1.134.075-1.667.225-1.275-.858-1.833-.683-1.833-.683-.367.916-.134 1.6-.067 1.766a2.594 2.594 0 0 0-.683 1.792c0 2.55 1.55 3.125 3.033 3.292-.192.166-.367.458-.425.891-.383.175-1.342.459-1.942-.55-.125-.2-.5-.691-1.025-.683-.558.008-.225.317.009.442.283.158.608.75.683.941.133.376.567 1.092 2.242.784 0 .558.008 1.083.008 1.242 0 .174-.125.374-.458.316a6.662 6.662 0 0 1-4.559-6.325A6.665 6.665 0 0 1 8 1.3Z"/>
</svg>

Before

Width:  |  Height:  |  Size: 853 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.8 KiB

View File

@@ -1,237 +0,0 @@
#!/usr/bin/env python3
"""
Fetch all PR conversation comments + reviews + review threads (inline threads)
for the PR associated with the current git branch, by shelling out to:
gh api graphql
Requires:
- `gh auth login` already set up
- current branch has an associated (open) PR
Usage:
python fetch_comments.py > pr_comments.json
"""
from __future__ import annotations
import json
import subprocess
import sys
from typing import Any
QUERY = """\
query(
$owner: String!,
$repo: String!,
$number: Int!,
$commentsCursor: String,
$reviewsCursor: String,
$threadsCursor: String
) {
repository(owner: $owner, name: $repo) {
pullRequest(number: $number) {
number
url
title
state
# Top-level "Conversation" comments (issue comments on the PR)
comments(first: 100, after: $commentsCursor) {
pageInfo { hasNextPage endCursor }
nodes {
id
body
createdAt
updatedAt
author { login }
}
}
# Review submissions (Approve / Request changes / Comment), with body if present
reviews(first: 100, after: $reviewsCursor) {
pageInfo { hasNextPage endCursor }
nodes {
id
state
body
submittedAt
author { login }
}
}
# Inline review threads (grouped), includes resolved state
reviewThreads(first: 100, after: $threadsCursor) {
pageInfo { hasNextPage endCursor }
nodes {
id
isResolved
isOutdated
path
line
diffSide
startLine
startDiffSide
originalLine
originalStartLine
resolvedBy { login }
comments(first: 100) {
nodes {
id
body
createdAt
updatedAt
author { login }
}
}
}
}
}
}
}
"""
def _run(cmd: list[str], stdin: str | None = None) -> str:
p = subprocess.run(cmd, input=stdin, capture_output=True, text=True)
if p.returncode != 0:
raise RuntimeError(f"Command failed: {' '.join(cmd)}\n{p.stderr}")
return p.stdout
def _run_json(cmd: list[str], stdin: str | None = None) -> dict[str, Any]:
    """Run *cmd* via ``_run`` and decode its stdout as JSON.

    Raises:
        RuntimeError: If decoding fails; the message includes the raw output.
    """
    raw = _run(cmd, stdin=stdin)
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        raise RuntimeError(f"Failed to parse JSON from command output: {e}\nRaw:\n{raw}") from e
def _ensure_gh_authenticated() -> None:
    """Fail fast when the GitHub CLI is not logged in.

    Runs ``gh auth status``; on failure, prints a hint to stderr and raises a
    fresh RuntimeError (context suppressed) with the same guidance.
    """
    try:
        _run(["gh", "auth", "status"])
    except RuntimeError:
        print("run `gh auth login` to authenticate the GitHub CLI", file=sys.stderr)
        raise RuntimeError(
            "gh auth status failed; run `gh auth login` to authenticate the GitHub CLI"
        ) from None
def gh_pr_view_json(fields: str) -> dict[str, Any]:
    """Fetch PR metadata via ``gh pr view`` for a comma-separated *fields* list.

    Example *fields*: ``"number,headRepositoryOwner,headRepository"``.
    """
    cmd = ["gh", "pr", "view", "--json", fields]
    return _run_json(cmd)
def get_current_pr_ref() -> tuple[str, str, int]:
    """Return ``(owner, repo, number)`` for the PR gh associates with this branch.

    Reads the *head* repository owner/name from ``gh pr view`` so cross-repo
    (fork) pull requests resolve correctly too.
    """
    pr = gh_pr_view_json("number,headRepositoryOwner,headRepository")
    return (
        pr["headRepositoryOwner"]["login"],
        pr["headRepository"]["name"],
        int(pr["number"]),
    )
def gh_api_graphql(
    owner: str,
    repo: str,
    number: int,
    comments_cursor: str | None = None,
    reviews_cursor: str | None = None,
    threads_cursor: str | None = None,
) -> dict[str, Any]:
    """Execute one page of the PR query through ``gh api graphql``.

    The query text is piped on stdin (``query=@-``) to avoid shell
    newline/quoting issues, and variables are supplied with ``-F``.
    Cursor variables are only added when truthy, so omitted cursors
    stay null server-side (start of pagination).
    """
    cmd = ["gh", "api", "graphql", "-F", "query=@-"]
    cmd += ["-F", f"owner={owner}", "-F", f"repo={repo}", "-F", f"number={number}"]
    optional_cursors = [
        ("commentsCursor", comments_cursor),
        ("reviewsCursor", reviews_cursor),
        ("threadsCursor", threads_cursor),
    ]
    for var_name, cursor in optional_cursors:
        if cursor:
            cmd += ["-F", f"{var_name}={cursor}"]
    return _run_json(cmd, stdin=QUERY)
def fetch_all(owner: str, repo: str, number: int) -> dict[str, Any]:
    """Collect every conversation comment, review, and review thread on a PR.

    Pages through the three GraphQL connections in lockstep: each iteration
    requests the next page of any connection that still has one, and the loop
    ends once all three cursors are exhausted.

    Returns a dict with keys ``pull_request`` (metadata captured from the
    first page), ``conversation_comments``, ``reviews``, and ``review_threads``.

    Raises:
        RuntimeError: If the GraphQL response contains errors.
    """
    conversation_comments: list[dict[str, Any]] = []
    reviews: list[dict[str, Any]] = []
    review_threads: list[dict[str, Any]] = []
    # One independent cursor per connection; None means "start at the
    # beginning" and, later, "no more pages for this connection".
    comments_cursor: str | None = None
    reviews_cursor: str | None = None
    threads_cursor: str | None = None
    pr_meta: dict[str, Any] | None = None
    while True:
        payload = gh_api_graphql(
            owner=owner,
            repo=repo,
            number=number,
            comments_cursor=comments_cursor,
            reviews_cursor=reviews_cursor,
            threads_cursor=threads_cursor,
        )
        if "errors" in payload and payload["errors"]:
            raise RuntimeError(f"GitHub GraphQL errors:\n{json.dumps(payload['errors'], indent=2)}")
        pr = payload["data"]["repository"]["pullRequest"]
        if pr_meta is None:
            # Capture PR metadata once, from the first page only.
            pr_meta = {
                "number": pr["number"],
                "url": pr["url"],
                "title": pr["title"],
                "state": pr["state"],
                "owner": owner,
                "repo": repo,
            }
        c = pr["comments"]
        r = pr["reviews"]
        t = pr["reviewThreads"]
        # `or []` guards against a null `nodes` field in the response.
        conversation_comments.extend(c.get("nodes") or [])
        reviews.extend(r.get("nodes") or [])
        review_threads.extend(t.get("nodes") or [])
        # Advance each cursor only while its connection reports another page;
        # an exhausted connection's cursor resets to None.
        comments_cursor = c["pageInfo"]["endCursor"] if c["pageInfo"]["hasNextPage"] else None
        reviews_cursor = r["pageInfo"]["endCursor"] if r["pageInfo"]["hasNextPage"] else None
        threads_cursor = t["pageInfo"]["endCursor"] if t["pageInfo"]["hasNextPage"] else None
        if not (comments_cursor or reviews_cursor or threads_cursor):
            break
    assert pr_meta is not None
    return {
        "pull_request": pr_meta,
        "conversation_comments": conversation_comments,
        "reviews": reviews,
        "review_threads": review_threads,
    }
def main() -> None:
    """Verify gh auth, locate the current branch's PR, and dump its comments as JSON."""
    _ensure_gh_authenticated()
    owner, repo, number = get_current_pr_ref()
    print(json.dumps(fetch_all(owner, repo, number), indent=2))
# Script entry point; failures propagate as uncaught exceptions (nonzero exit).
if __name__ == "__main__":
    main()

View File

@@ -1,65 +0,0 @@
---
name: hackage-release
description: Use when user asks to release, publish, or bump version of a Haskell package to Hackage
---
# Hackage Release
Bump version, build, validate, tag, push, and publish a Haskell package to Hackage.
## Workflow
1. **Bump version** in `package.yaml` (if using hpack) or `.cabal` file
2. **Update ChangeLog.md** with release notes
3. **Regenerate cabal** (if using hpack): `hpack`
4. **Build**: `cabal build`
5. **Check**: `cabal check` (must report zero warnings)
6. **Create sdist**: `cabal sdist`
7. **Commit & tag**: commit all changed files, `git tag vX.Y.Z.W`
8. **Push**: `git push && git push --tags`
9. **Get Hackage credentials**: `pass show hackage.haskell.org.gpg`
- Format: first line is password, `user:` line has username
10. **Publish package**: `cabal upload --publish <sdist-tarball> --username=<user> --password='<pass>'`
11. **Build & publish docs**: `cabal haddock --haddock-for-hackage` then `cabal upload --documentation --publish <docs-tarball> --username=<user> --password='<pass>'`
## Version Bumping (PVP)
Haskell uses the [Package Versioning Policy](https://pvp.haskell.org/) with format `A.B.C.D`:
| Component | When to Bump |
|-----------|-------------|
| A.B (major) | Breaking API changes |
| C (minor) | Backwards-compatible new features |
| D (patch) | Bug fixes, non-API changes |
## Nix-Based Projects
If the project uses a Nix flake, wrap cabal commands with `nix develop`:
```bash
nix develop --command cabal build
nix develop --command cabal check
nix develop --command hpack package.yaml
```
Prefer `nix develop` (flake) over `nix-shell` (legacy) to avoid ABI mismatches.
## PVP Dependency Bounds
Hackage warns about:
- **Missing upper bounds**: Every dependency should have an upper bound (e.g., `text >= 1.2 && < 2.2`)
- **Trailing zeros in upper bounds**: Use `< 2` not `< 2.0.0`; use `< 0.4` not `< 0.4.0.0`
Run `cabal check` to verify zero warnings before releasing.
## Checklist
- [ ] Version bumped in package.yaml / .cabal
- [ ] ChangeLog.md updated
- [ ] Cabal file regenerated (if hpack)
- [ ] `cabal build` succeeds
- [ ] `cabal check` reports no errors or warnings
- [ ] Changes committed and tagged
- [ ] Pushed to remote with tags
- [ ] Package published to Hackage
- [ ] Docs published to Hackage

View File

@@ -1,32 +0,0 @@
---
name: journaling
description: Use when user wants to journal, reflect, write a journal entry, or process thoughts. Also use when user mentions wanting to talk through what's on their mind.
---
# Journaling
## Overview
Guide the user through a freeform journaling conversation, then synthesize their thoughts into an organized `.org` file.
## How It Works
**1. Open the conversation.** Ask what's on their mind, how things have been going, or what they want to talk through. Keep it open-ended.
**2. Follow up naturally.** Listen for what seems important - dig into those threads. Don't rush through a checklist. One question at a time.
**3. Synthesize into a journal entry.** When the conversation winds down (or the user says they're done), write an organized `~/org/journal/YYYY-MM-DD.org` file with:
- A timestamp on the first line: `[YYYY-MM-DD Day HH:MM]`
- Org headings that emerge naturally from the conversation topics
- The user's thoughts in their own voice, but organized and cleaned up
- No rigid template - structure follows content
**4. Offer to review.** Show them the entry before writing, let them tweak it.
## Guidelines
- This is their space. Don't coach or advise unless asked.
- Reflect back what you hear - help them see their own patterns.
- If they seem stuck, gently prompt: recent events, feelings, goals, relationships, work.
- Keep the tone warm but not saccharine.
- Entries go in `~/org/journal/` as `YYYY-MM-DD.org`.

View File

@@ -1,124 +0,0 @@
---
name: logical-commits
description: Use when the user asks to split current git changes into logical commits, clean up commit history, create atomic commits, or stage by hunk. Review the whole worktree, group related changes, and produce ordered commits where each commit is a valid state (builds/tests pass with the project validation command).
---
# Logical Commits
Turn a mixed worktree into a clean sequence of atomic commits.
## Workflow
1. Inspect the full change set before staging anything.
2. Define commit boundaries by behavior or concern, not by file count.
3. Order commits so dependencies land first (types/api/schema/helpers before consumers).
4. Stage only the exact hunks for one commit.
5. Validate that staged commit state is healthy before committing.
6. Commit with a precise message.
7. Repeat until all intended changes are committed.
## 1) Inspect First
Run:
```bash
git status --short
git diff --stat
git diff
```
If there are staged changes already, inspect both views:
```bash
git diff --staged
git diff
```
## 2) Choose Validation Command Early
Select the fastest command that proves the repo is valid for this project. Prefer project-standard commands (for example: `just test`, `npm test`, `cargo test`, `go test ./...`, `nix flake check`, targeted build commands).
If no clear command exists:
1. Infer the best available command from repo scripts/config.
2. Tell the user what command you chose and why.
3. Do not claim full validation if coverage is partial.
## 3) Plan the Commit Stack
Before committing, write a short plan:
1. Commit title
2. Files and hunks included
3. Why this is a coherent unit
4. Validation command to run
If changes are intertwined, split by hunk (`git add -p`). If hunk splitting is not enough, use `git add -e` or perform a temporary refactor so each commit remains coherent and valid.
## 4) Stage Exactly One Commit
Preferred staging flow:
```bash
git add -p <file>
git diff --staged
```
Useful corrections:
```bash
git restore --staged -p <file> # unstage specific hunks
git reset -p <file> # alternate unstage flow
```
Never stage unrelated edits just to make the commit pass.
## 5) Validate Before Commit
Run the chosen validation command with the current staged/working tree state.
If validation fails:
1. Fix only what belongs in this logical commit, or
2. Unstage/re-split and revise the commit boundary.
Commit only after validation passes.
## 6) Commit and Verify
Commit:
```bash
git commit -m "<type>: <logical change>"
```
Then confirm:
```bash
git show --stat --oneline -1
```
Ensure remaining unstaged changes still make sense for later commits.
## 7) Final Checks
After finishing the stack:
```bash
git log --oneline --decorate -n <count>
git status
```
Report:
1. The commit sequence created
2. Validation command(s) run per commit
3. Any residual risks (for example, partial validation only)
## Guardrails
1. Keep commits atomic and reviewable.
2. Prefer hunk staging over broad file staging when a file contains multiple concerns.
3. Preserve user changes; do not discard unrelated work.
4. Avoid destructive commands unless the user explicitly requests them.
5. If a clean logical split is impossible without deeper refactor, explain the blocker and ask for direction.

View File

@@ -1,77 +0,0 @@
---
name: nixpkgs-review
description: Review or prepare nixpkgs package changes and PRs using a checklist distilled from review feedback on Ivan Malison's own NixOS/nixpkgs pull requests. Use when working in nixpkgs on package inits, updates, packaging fixes, or before opening or reviewing a nixpkgs PR.
---
# Nixpkgs Review
Use this skill when the task is specifically about reviewing or tightening a change in `NixOS/nixpkgs`.
The goal is not generic style review. The goal is to catch the kinds of issues that repeatedly came up in real nixpkgs feedback on Ivan's PRs: derivation structure, builder choice, metadata, PR hygiene, and JS packaging details.
## Workflow
1. Read the scope first.
Open the changed `package.nix` files, related metadata, and the PR title/body if there is one.
2. Run the historical checklist below.
Bias toward concrete review findings and actionable edits, not abstract style commentary.
3. Validate the package path.
Use the narrowest reasonable validation for the task: targeted build, package eval, or `nixpkgs-review` when appropriate.
4. If you are writing a review:
Lead with findings ordered by severity, include file references, and tie each point to a nixpkgs expectation.
5. If you are preparing a PR:
Fix the checklist items before opening it, then confirm title/body/commit hygiene.
## Historical Checklist
### Derivation structure
- Prefer `finalAttrs` over `rec` for derivations and nested derivations when self-references matter.
- Prefer `tag = "v${...}"` over `rev` when fetching a tagged upstream release.
- Check whether `strictDeps = true;` should be enabled.
- Use the narrowest builder/stdenv that matches the package. If no compiler is needed, consider `stdenvNoCC`.
- Put source modifications in `postPatch` or another appropriate hook, not inside `buildPhase`.
- Prefer `makeBinaryWrapper` over `makeWrapper` when a compiled wrapper is sufficient.
- Keep wrappers aligned with `meta.mainProgram` so overrides remain clean.
- Avoid `with lib;` in package expressions; prefer explicit `lib.*` references.
### Metadata and platform expectations
- For new packages, ensure maintainers are present and include the submitter when appropriate.
- Check whether platform restrictions are justified. Do not mark packages Linux-only or broken without evidence.
- If a package is only workable through patch accumulation and has no maintainer, call that out directly.
### JS, Bun, Electron, and wrapper-heavy packages
- Separate runtime deps from build-only deps. Large closures attract review attention.
- Remove redundant env vars and duplicated configuration if build hooks already cover them.
- Check bundled tool/runtime version alignment, especially browser/runtime pairs.
- Install completions, desktop files, or icons when upstream clearly ships them and the package already exposes the feature.
- Be careful with wrappers that hardcode env vars users may want to override.
### PR hygiene
- PR title should match nixpkgs naming and the package version.
- Keep the PR template intact unless there is a strong reason not to.
- Avoid unrelated commits in the PR branch.
- Watch for duplicate or overlapping PRs before investing in deeper review.
- If asked, squash fixup history before merge.
## Review Output
When producing a review, prefer this shape:
- Finding: what is wrong or risky.
- Why it matters in nixpkgs terms.
- Concrete fix, ideally with the exact attr/hook/builder to use.
If there are no findings, say so explicitly and mention remaining validation gaps.
## References
- Read [references/review-patterns.md](references/review-patterns.md) for the curated list of recurring review themes and concrete PR examples.
- Run `scripts/mine_pr_feedback.py --repo NixOS/nixpkgs --author colonelpanic8 --limit 20 --format markdown` to refresh the source material from newer PRs.

View File

@@ -1,4 +0,0 @@
interface:
display_name: "Nixpkgs Review"
short_description: "Review nixpkgs changes with historical guidance"
default_prompt: "Use $nixpkgs-review to review this nixpkgs package change before I open the PR."

View File

@@ -1,105 +0,0 @@
# Nixpkgs Review Patterns
This reference is a curated summary of recurring feedback from Ivan Malison's `NixOS/nixpkgs` PRs. Use it to ground reviews in patterns that have already come up from nixpkgs reviewers.
## Most Repeated Themes
### 1. Prefer `finalAttrs` over `rec`
This came up repeatedly on both package init and update PRs.
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) `playwright-cli`: reviewer asked for `buildNpmPackage (finalAttrs: { ... })` instead of `rec`.
- [PR #490033](https://github.com/NixOS/nixpkgs/pull/490033) `rumno`: same feedback for `rustPlatform.buildRustPackage`.
Practical rule:
- If the derivation self-references `version`, `src`, `pname`, `meta.mainProgram`, or nested outputs, default to `finalAttrs`.
### 2. Prefer `tag` when upstream release is a tag
This also repeated across multiple PRs.
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) `playwright-cli`
- [PR #490033](https://github.com/NixOS/nixpkgs/pull/490033) `rumno`
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`
Practical rule:
- If upstream publishes a named release tag, prefer `tag = "v${finalAttrs.version}";` or the exact tag format instead of a raw `rev`.
### 3. Use the right hook and builder
Reviewers often push on hook placement and builder/stdenv choice.
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`: feedback to move work from `buildPhase` into `postPatch`.
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`: feedback to consider `stdenvNoCC`.
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) `playwright-cli`: prefer `makeBinaryWrapper` for a simple wrapper.
Practical rule:
- Check whether each mutation belongs in `postPatch`, `preConfigure`, `buildPhase`, or `installPhase`.
- Check whether the package genuinely needs a compiler toolchain.
- For simple env/arg wrappers, prefer `makeBinaryWrapper`.
### 4. Enable `strictDeps` unless there is a reason not to
This was called out explicitly on [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465).
Practical rule:
- For new derivations, ask whether `strictDeps = true;` should be present.
- If not, be ready to justify why the builder or package layout makes it unnecessary.
### 5. Keep metadata explicit and override-friendly
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) `playwright-cli`: reviewer asked to avoid `with lib;`.
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`: reviewer suggested deriving wrapper executable name from `finalAttrs.meta.mainProgram`.
Practical rule:
- Prefer `lib.licenses.mit` over `with lib;`.
- Keep `meta.mainProgram` authoritative and have wrappers follow it when practical.
### 6. Maintainers matter for new packages
- [PR #496806](https://github.com/NixOS/nixpkgs/pull/496806) `gws`: reviewer would not merge until the submitter appeared in maintainers.
Practical rule:
- For package inits, check maintainers early rather than waiting for review feedback.
### 7. PR title and template hygiene are review targets
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`: asked to fix the PR title to match the version.
- [PR #490033](https://github.com/NixOS/nixpkgs/pull/490033) `rumno`: reviewer asked what happened to the PR template.
Practical rule:
- Before opening or updating a PR, verify the title, template, and branch scope.
### 8. Duplicate or overlapping PRs get noticed quickly
- [PR #490227](https://github.com/NixOS/nixpkgs/pull/490227) was replaced by [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230).
- [PR #490053](https://github.com/NixOS/nixpkgs/pull/490053) overlapped with [PR #490033](https://github.com/NixOS/nixpkgs/pull/490033).
- [PR #488606](https://github.com/NixOS/nixpkgs/pull/488606), [PR #488602](https://github.com/NixOS/nixpkgs/pull/488602), and [PR #488603](https://github.com/NixOS/nixpkgs/pull/488603) were closed after reviewers pointed to existing work.
Practical rule:
- Search for existing PRs on the package before spending time polishing a review.
- If a branch contains unrelated commits, fix that before asking for review.
### 9. JS/Bun/Electron packages draw runtime-layout scrutiny
This came up heavily on `t3code` and `playwright-cli`.
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) `t3code`: reviewers proposed trimming the runtime closure, removing unnecessary env vars, and adding shell completions and desktop integration.
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) `playwright-cli`: reviewers called out mismatched bundled `playwright-core` and browser binaries, and wrapper behavior that prevented user overrides.
Practical rule:
- For JS-heavy packages, inspect closure size, runtime vs build-only deps, wrapper env vars, and version alignment between bundled libraries and external binaries.
### 10. Cross-platform evidence helps
- [PR #490230](https://github.com/NixOS/nixpkgs/pull/490230) received an approval explicitly noting Darwin success.
- [PR #497465](https://github.com/NixOS/nixpkgs/pull/497465) got feedback questioning platform restrictions and build behavior.
Practical rule:
- If the package plausibly supports Darwin, avoid premature Linux-only restrictions and mention what was or was not tested.
## How To Use This Reference
- Use these patterns as a focused checklist before submitting or reviewing nixpkgs changes.
- Do not blindly apply every point. Check whether the builder, language ecosystem, and upstream release model actually match.
- When in doubt, prefer concrete evidence from the current package diff over generic convention.

View File

@@ -1,2 +0,0 @@
__pycache__/
*.pyc

View File

@@ -1,208 +0,0 @@
#!/usr/bin/env python3
"""
Mine external feedback from recent GitHub PRs.
Examples:
python scripts/mine_pr_feedback.py --repo NixOS/nixpkgs --author colonelpanic8
python scripts/mine_pr_feedback.py --repo NixOS/nixpkgs --author colonelpanic8 --limit 30 --format json
"""
from __future__ import annotations
import argparse
import json
import subprocess
import sys
from collections import Counter
from concurrent.futures import ThreadPoolExecutor, as_completed
def run(cmd: list[str]) -> str:
    """Execute a command and return its stdout.

    Raises RuntimeError on a non-zero exit, carrying the process's
    stderr (or a generic "command failed" message when stderr is empty).
    """
    completed = subprocess.run(cmd, capture_output=True, text=True)
    if completed.returncode == 0:
        return completed.stdout
    message = completed.stderr.strip()
    if not message:
        message = f"command failed: {' '.join(cmd)}"
    raise RuntimeError(message)
def gh_json(args: list[str]) -> object:
    """Run a `gh` CLI subcommand and decode its stdout as JSON."""
    output = run(["gh", *args])
    return json.loads(output)
def fetch_prs(repo: str, author: str, limit: int) -> list[dict]:
    """Collect the author's most recently updated PRs in `repo`.

    Searches open and closed PRs separately, deduplicates by PR number,
    and returns at most `limit` entries sorted newest-updated first.
    """
    found: dict[int, dict] = {}
    for state in ("open", "closed"):
        query = [
            "search",
            "prs",
            "--repo",
            repo,
            "--author",
            author,
            "--limit",
            # Over-fetch per state so deduplication can still fill `limit`.
            str(max(limit, 30)),
            "--state",
            state,
            "--json",
            "number,title,state,closedAt,updatedAt,url",
        ]
        for pr in gh_json(query):
            found[pr["number"]] = pr
    ordered = sorted(
        found.values(),
        key=lambda pr: (pr["updatedAt"], pr["number"]),
        reverse=True,
    )
    return ordered[:limit]
def fetch_feedback(repo: str, author: str, pr: dict) -> dict:
    """Return a copy of `pr` with all non-author feedback attached.

    Gathers issue comments, review comments, and review summaries,
    keeping only entries from users other than `author` with a
    non-empty body.

    NOTE(review): `gh api --paginate` emits one JSON document per page;
    multi-page responses may not decode as a single array here — confirm
    against gh's behavior for these endpoints.
    """
    owner, name = repo.split("/", 1)
    number = pr["number"]

    def api(path: str) -> list[dict]:
        return gh_json(["api", f"repos/{owner}/{name}/{path}", "--paginate"])

    feedback: list[dict] = []

    for item in api(f"issues/{number}/comments"):
        user = item["user"]["login"]
        text = (item.get("body") or "").strip()
        if user == author or not text:
            continue
        feedback.append({"kind": "issue", "user": user, "body": text})

    for item in api(f"pulls/{number}/comments"):
        user = item["user"]["login"]
        text = (item.get("body") or "").strip()
        if user == author or not text:
            continue
        feedback.append(
            {
                "kind": "review_comment",
                "user": user,
                "body": text,
                "path": item.get("path"),
                "line": item.get("line"),
            }
        )

    for item in api(f"pulls/{number}/reviews"):
        user = item["user"]["login"]
        text = (item.get("body") or "").strip()
        if user == author or not text:
            continue
        feedback.append(
            {
                "kind": "review",
                "user": user,
                "body": text,
                "state": item.get("state"),
            }
        )

    return {**pr, "comments": feedback}
def is_bot(login: str) -> bool:
    """Heuristic: GitHub App logins (`*[bot]`) and known automation accounts."""
    known_automation = {"github-actions", "app/dependabot"}
    return login in known_automation or login.endswith("[bot]")
def render_markdown(results: list[dict], include_bots: bool) -> str:
    """Render scanned PRs and their external feedback as a markdown report.

    Bot comments are dropped unless `include_bots` is set. PRs with no
    remaining comments are omitted from the per-PR sections, though they
    still count toward the "PRs scanned" total.
    """
    commenter_counts: Counter = Counter()
    prs_with_feedback: list[dict] = []
    for pr in results:
        visible = [
            comment
            for comment in pr["comments"]
            if include_bots or not is_bot(comment["user"])
        ]
        if visible:
            prs_with_feedback.append({**pr, "comments": visible})
        # Updating with an empty iterable is a no-op, so this is safe here.
        commenter_counts.update(comment["user"] for comment in visible)

    out: list[str] = [
        "# PR Feedback Summary",
        "",
        f"- PRs scanned: {len(results)}",
        f"- PRs with external feedback: {len(prs_with_feedback)}",
        "",
        "## Top commenters",
        "",
    ]
    out.extend(
        f"- `{user}`: {count}"
        for user, count in commenter_counts.most_common(10)
    )

    for pr in prs_with_feedback:
        out.append("")
        out.append(f"## PR #{pr['number']}: {pr['title']}")
        out.append("")
        out.append(f"- URL: {pr['url']}")
        out.append(f"- State: {pr['state']}")
        out.append("")
        for comment in pr["comments"]:
            flat = comment["body"].replace("\r", " ").replace("\n", " ").strip()
            if len(flat) > 280:
                flat = flat[:280] + "..."
            out.append(f"- `{comment['user']}` `{comment['kind']}`: {flat}")

    return "\n".join(out) + "\n"
def main() -> int:
    """CLI entry point: gather PR feedback and print it as markdown or JSON."""
    parser = argparse.ArgumentParser(
        description="Collect review feedback from recent GitHub PRs."
    )
    parser.add_argument("--repo", required=True, help="GitHub repo in owner/name form")
    parser.add_argument("--author", required=True, help="PR author to inspect")
    parser.add_argument(
        "--limit", type=int, default=20, help="How many recent PRs to inspect"
    )
    parser.add_argument(
        "--format",
        choices=("markdown", "json"),
        default="markdown",
        help="Output format",
    )
    parser.add_argument(
        "--include-bots",
        action="store_true",
        help="Keep bot comments in the output",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=6,
        help="Maximum concurrent GitHub API workers",
    )
    args = parser.parse_args()

    # Fail fast with a readable message when gh is not authenticated.
    try:
        run(["gh", "auth", "status"])
    except RuntimeError as err:
        print(err, file=sys.stderr)
        return 1

    prs = fetch_prs(args.repo, args.author, args.limit)
    results: list[dict] = []
    with ThreadPoolExecutor(max_workers=args.workers) as pool:
        pending = [
            pool.submit(fetch_feedback, args.repo, args.author, pr) for pr in prs
        ]
        for done in as_completed(pending):
            results.append(done.result())
    # as_completed yields in completion order; restore newest-first ordering.
    results.sort(key=lambda pr: (pr["updatedAt"], pr["number"]), reverse=True)

    if args.format == "json":
        if not args.include_bots:
            for pr in results:
                pr["comments"] = [
                    comment
                    for comment in pr["comments"]
                    if not is_bot(comment["user"])
                ]
        json.dump(results, sys.stdout, indent=2)
        sys.stdout.write("\n")
    else:
        sys.stdout.write(render_markdown(results, args.include_bots))
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -1,51 +0,0 @@
---
name: org-agenda-api-production
description: Use when investigating production org-agenda-api state, testing endpoints, or debugging production issues
---
# org-agenda-api Production Access
## Overview
Access the production org-agenda-api instance at https://colonelpanic-org-agenda.fly.dev/ for debugging, testing, or verification.
## Credentials
Get the password from `pass`:
```bash
pass show org-agenda-api/imalison
```
Username is currently `imalison`.
## Quick Access with just
This repo includes a `justfile` under `~/dotfiles/org-agenda-api` with pre-configured commands:
```bash
cd ~/dotfiles/org-agenda-api
just health
just get-all-todos
just get-todays-agenda
just agenda
just agenda-files
just todo-states
just create-todo "Test todo"
```
## Manual curl
Prefer using the `just` recipes above so we don't bake auth syntax into docs.
## Key Endpoints
| Endpoint | Method | Description |
|----------|--------|-------------|
| /health | GET | Health check |
| /version | GET | API version |
| /get-all-todos | GET | All TODO items |
| /agenda | GET | Agenda (span=day\|week) |
| /capture | POST | Create entry |
| /update | POST | Update heading |
| /complete | POST | Complete item |
| /delete | POST | Delete heading |

View File

@@ -1,312 +0,0 @@
---
name: org-agenda-api
description: Use when interacting with the org-agenda-api HTTP server to read/write org-mode agenda data
---
# Org Agenda API Reference
HTTP API for org-mode agenda data. Use this skill when you need to query or modify org agenda entries programmatically.
## Authentication
Get credentials from pass:
```bash
pass show colonelpanic-org-agenda.fly.dev
```
Returns: password on first line, then `user:` and `url:` fields.
**Note:** The `url` field in pass may be outdated. Use the base URL below.
## Base URL
`https://colonelpanic-org-agenda.fly.dev`
All requests use Basic Auth with the credentials from pass.
## Read Endpoints
### GET /agenda
Get agenda entries for a day or week.
Query params:
- `span`: `day` (default) or `week`
- `date`: `YYYY-MM-DD` (default: today)
- `include_overdue`: `true` to include overdue items from previous days
- `include_completed`: `true` to include items completed on the queried date
- `refresh`: `true` to git pull repos first
Response includes `span`, `date`, `entries` array, and optionally `gitRefresh` results.
### GET /get-all-todos
Get all TODO items from agenda files.
Query params:
- `refresh`: `true` to git pull first
Response includes `defaults` (with `notifyBefore`), `todos` array, and optionally `gitRefresh`.
### GET /metadata
Get all app metadata in a single request. Returns:
- `templates`: capture templates
- `filterOptions`: tags, categories, priorities, todoStates
- `todoStates`: active and done states
- `customViews`: available custom agenda views
- `errors`: any errors encountered fetching above
### GET /todo-states
Get configured TODO states. Returns:
- `active`: array of not-done states (TODO, NEXT, etc.)
- `done`: array of done states (DONE, CANCELLED, etc.)
### GET /filter-options
Get available filter options. Returns:
- `todoStates`: all states
- `priorities`: available priorities (A, B, C)
- `tags`: all tags from agenda files
- `categories`: all categories
### GET /custom-views
List available custom agenda views. Returns array of `{key, name}` objects.
### GET /custom-view
Run a custom agenda view.
Query params:
- `key` (required): custom agenda command key
- `refresh`: `true` to git pull first
### GET /agenda-files
Get list of org-agenda-files with existence and readability status.
### GET /capture-templates (alias: /templates)
List available capture templates with their prompts.
### GET /health
Health check. Returns `status`, `uptime`, `requests`, and `captureStatus` if unhealthy.
### GET /version
Version info. Returns `version` and `gitCommit`.
### GET /debug-config
Current org configuration for debugging.
## Write Endpoints
### POST /capture
Create a new entry using a capture template.
**Important:** Use `capture-g` (GTD Todo) for most tasks - it properly records creation time and logbook history. Only use `default` when you specifically don't want GTD tracking.
Body:
```json
{
"template": "capture-g",
"values": {
"Title": "Task title",
"scheduled": "2026-01-20",
"deadline": "2026-01-25",
"priority": "A",
"tags": ["work", "urgent"],
"todo": "TODO"
}
}
```
### POST /complete
Mark a TODO as complete.
Body (use any combination to identify the item):
```json
{
"id": "org-id-if-available",
"file": "/path/to/file.org",
"pos": 12345,
"title": "Task title",
"state": "DONE"
}
```
Lookup order: id -> file+pos+title -> file+title -> title only
### POST /update
Update a TODO's scheduled date, deadline, priority, tags, or properties.
Body:
```json
{
"id": "org-id",
"file": "/path/to/file.org",
"pos": 12345,
"title": "Task title",
"scheduled": "2026-01-20T10:00:00",
"deadline": "2026-01-25",
"priority": "B",
"tags": ["updated", "tags"],
"properties": {
"CUSTOM_PROP": "value"
}
}
```
Set value to `null` or empty string to clear. Response includes new `pos` for cache updates.
### POST /delete
Delete an org item permanently.
Body:
```json
{
"id": "org-id",
"file": "/path/to/file.org",
"position": 12345,
"include_children": true
}
```
Requires `include_children: true` if item has children, otherwise returns error.
### POST /restart
Restart the Emacs server (exits gracefully, supervisord restarts).
## Category Strategy Endpoints
These require org-category-capture to be configured.
### GET /category-types
List registered category strategy types. Returns array with:
- `name`: strategy type name
- `hasCategories`: boolean
- `captureTemplate`: template string
- `prompts`: array of prompt definitions
### GET /categories
Get categories for a strategy type.
Query params:
- `type` (required): strategy type name (e.g., "projects")
- `existing_only`: `true` to only return categories with capture locations
Returns `type`, `categories` array, `todoFiles` array.
### GET /category-tasks
Get tasks for a specific category.
Query params:
- `type` (required): strategy type name
- `category` (required): category name
### POST /category-capture
Capture a new entry to a category.
Body:
```json
{
"type": "projects",
"category": "my-project",
"title": "Task title",
"todo": "TODO",
"scheduled": "2026-01-20",
"deadline": "2026-01-25",
"priority": "A",
"tags": ["work"],
"properties": {"EFFORT": "1h"}
}
```
## Response Format
Agenda/todo entries include:
- `todo`: TODO state (TODO, NEXT, DONE, etc.)
- `title`: Heading text
- `scheduled`: ISO date or datetime
- `deadline`: ISO date or datetime
- `priority`: A, B, or C (only if explicitly set)
- `tags`: Array of tags
- `file`: Source file path
- `pos`: Position in file (may change after edits)
- `id`: Org ID if set (stable identifier)
- `olpath`: Outline path array
- `level`: Heading level
- `category`: Category of the item
- `properties`: All properties from the property drawer
- `completedAt`: ISO timestamp when completed (if applicable)
- `agendaLine`: Raw agenda display text (agenda endpoint only)
- `notifyBefore`: Array of minutes for notifications
- `isWindowHabit`: Boolean for window habits
- `habitSummary`: Summary object for habits (if applicable)
## Common Workflows
**View today's agenda:**
```bash
curl -s -u "$USER:$PASS" "$URL/agenda?span=day" | jq '.entries[] | {todo, title, scheduled}'
```
**View this week:**
```bash
curl -s -u "$USER:$PASS" "$URL/agenda?span=week" | jq .
```
**View completed tasks for a specific date:**
```bash
curl -s -u "$USER:$PASS" "$URL/agenda?date=2026-01-17&include_completed=true" | jq '.entries[] | select(.completedAt != null) | {title, completedAt}'
```
**Get all metadata at once:**
```bash
curl -s -u "$USER:$PASS" "$URL/metadata" | jq .
```
**Create a task:**
```bash
curl -s -u "$USER:$PASS" -X POST "$URL/capture" \
-H "Content-Type: application/json" \
-d '{"template":"capture-g","values":{"Title":"New task","scheduled":"2026-01-20"}}'
```
**Complete a task by title:**
```bash
curl -s -u "$USER:$PASS" -X POST "$URL/complete" \
-H "Content-Type: application/json" \
-d '{"title":"Task title"}'
```
**Update a task's schedule:**
```bash
curl -s -u "$USER:$PASS" -X POST "$URL/update" \
-H "Content-Type: application/json" \
-d '{"title":"Task title","scheduled":"2026-01-21T14:00:00"}'
```
**Clear a deadline:**
```bash
curl -s -u "$USER:$PASS" -X POST "$URL/update" \
-H "Content-Type: application/json" \
-d '{"title":"Task title","deadline":null}'
```
**Delete a task:**
```bash
curl -s -u "$USER:$PASS" -X POST "$URL/delete" \
-H "Content-Type: application/json" \
-d '{"title":"Task to delete","file":"/path/to/file.org","position":12345}'
```
## Error Handling
All endpoints return JSON. Errors include:
```json
{
"status": "error",
"message": "Error description"
}
```
Success responses include:
```json
{
"status": "created" | "completed" | "updated",
...additional fields
}
```

View File

@@ -1,122 +0,0 @@
---
name: password-reset
description: Use when the user wants to reset or rotate a website or service password end-to-end, including finding the right `pass` entry, generating a new password with `xkcdpassgen`, retrieving reset emails through `gws gmail` or a local mail CLI, completing the reset in the browser with Chrome DevTools MCP, and updating the password store safely without losing entry metadata.
---
# Password Reset
## Overview
Handle password resets end-to-end. Prefer `gws gmail` for reset-email retrieval, Chrome DevTools MCP for website interaction, and the local `xkcdpassgen` helper for password generation.
## Tool Priorities
- Prefer `gws gmail` over opening Gmail in the browser.
- If `gws` is unavailable, use an installed Gmail CLI or IMAP-based mail tool if one exists locally. Inspect the environment first instead of guessing command names.
- Prefer Chrome DevTools MCP for all browser interaction.
- Use `pass find` and `pass show` before asking the user for credentials or account details.
## Password Generation
The local password generator is `xkcdpassgen`, defined in `dotfiles/lib/functions/xkcdpassgen` and available in shell as an autoloaded function.
```bash
xkcdpassgen <pass-entry-name>
```
Behavior:
- Generates `xkcdpass -n 3 | tr -d ' '` as the base password.
- Appends one uppercase letter, one digit, and one symbol by default.
- Supports:
- `-U` to omit uppercase
- `-N` to omit number
- `-S` to omit symbol
Do not substitute a different password generator unless the user explicitly asks.
## Safe `pass` Update Pattern
`xkcdpassgen` writes directly to the `pass` entry it is given. Do not run it against the canonical entry before the reset succeeds, because:
- it would overwrite the current password immediately
- it would replace any extra metadata lines in a multiline `pass` entry
Use this pattern instead:
```bash
entry="service/example"
tmp_entry="${entry}-password-reset-tmp"
existing_contents="$(pass show "$entry" 2>/dev/null || true)"
metadata="$(printf '%s\n' "$existing_contents" | tail -n +2)"
xkcdpassgen "$tmp_entry"
new_password="$(pass show "$tmp_entry" | head -1)"
# ... use $new_password in the reset flow ...
if [ -n "$metadata" ]; then
printf '%s\n%s\n' "$new_password" "$metadata" | pass insert -m -f "$entry"
else
printf '%s\n' "$new_password" | pass insert -m -f "$entry"
fi
pass rm -f "$tmp_entry"
```
If the site rejects the password because of policy constraints, keep the canonical entry unchanged, delete or reuse the temp entry, and generate another candidate with different flags only if needed.
## Reset Workflow
1. Identify the account and canonical `pass` entry.
2. Run `pass find <service>` and inspect likely matches with `pass show`.
3. Capture existing metadata before generating a new password.
4. Generate the candidate password into a temporary `pass` entry with `xkcdpassgen`.
5. Start the reset flow in Chrome DevTools MCP:
- navigate to the login or account page
- use the site's "forgot password" flow, or
- sign in and navigate to security settings if the user asked for a rotation rather than a reset
6. Use `gws gmail` to retrieve the reset email when needed:
- search recent mail by sender domain, subject, or reset-related keywords
- open the message and extract the reset link
- navigate to that link in Chrome DevTools MCP
7. Fill the new password from the temporary `pass` entry and complete the form.
8. Verify success:
- confirmation page, or
- successful login with the new password
9. Promote the temp password into the canonical `pass` entry while preserving metadata, then remove the temp entry.
## Email Guidance
Prefer `gws gmail` for reset-email handling. Typical pattern:
- list recent messages with `gws gmail users messages list --params '{"userId":"me","q":"from:service.example newer_than:7d"}'`
- bias toward reset keywords such as `reset`, `password`, `security`, `verify`, or `signin`
- read shortlisted messages with `gws gmail users messages get --params '{"userId":"me","id":"MESSAGE_ID","format":"full"}'` rather than browsing Gmail manually
If `gws` is unavailable, use an installed Gmail CLI or local mail helper only as a fallback. Keep that discovery lightweight and local to the current environment.
## Browser Guidance
Use Chrome DevTools MCP to complete the reset flow directly:
- navigate to the reset or security page
- take snapshots to identify the relevant inputs and buttons
- click, fill, and submit through the site UI
- verify the success state before updating the canonical `pass` entry
Prefer MCP interaction over describing steps for the user to perform manually.
## Credentials And Account Data
- Search `pass` before asking the user for usernames, recovery emails, or OTP-related entries.
- Preserve existing metadata lines in multiline `pass` entries whenever possible.
- Never print the new password in the final response unless the user explicitly asks for it.
## Failure Handling
- If account discovery is ambiguous, ask a short clarifying question only after checking `pass`.
- If the reset email does not arrive, search spam or alternate senders before giving up.
- If login or reset requires another secret that is not in `pass`, then ask the user.
- If the reset flow fails after temp-password generation, leave the canonical entry untouched.

View File

@@ -1,4 +0,0 @@
interface:
display_name: "Password Reset"
short_description: "Reset passwords and update pass safely"
default_prompt: "Use $password-reset to reset this account password, complete the browser flow, and update pass safely."

View File

@@ -1,402 +0,0 @@
---
name: planning-coaching
description: Use when helping with daily planning, task prioritization, reviewing agenda, or when user seems stuck on what to do next
---
# Planning Coaching
Help Ivan with planning through question-driven coaching, honest feedback, and data-informed accountability.
## Persistent Files
**IMPORTANT:** Always read these at the start of planning sessions.
### Context File: `/home/imalison/org/planning/context.org`
Persistent context about Ivan's life, goals, struggles, and current focus. Claude maintains this file - update it when:
- Goals or priorities shift
- New patterns emerge
- Life circumstances change
- We learn something about what helps/doesn't help
Read this first. It's the "state of Ivan" that persists across sessions.
### Daily Journals: `/home/imalison/org/planning/dailies/YYYY-MM-DD.org`
One file per day we do planning. Contains:
- That day's plan (short list, focus areas)
- Stats table from the previous day review (inline)
- Notes from the session
- End-of-day reflection (if we do one)
Create a new file for each planning session day. Reference past dailies to see patterns.
### Stats File: `/home/imalison/org/planning/stats.org`
Running tables for trend analysis:
- **Daily Log**: One row per planning day with all metrics
- **Weekly Summary**: Aggregated weekly totals with notes
### Raw Logs: `/home/imalison/org/planning/logs.jsonl`
Detailed machine-readable log (one JSON object per line, per day). Captures full task data so we can calculate new metrics retroactively.
Each line contains:
```json
{
"date": "2026-01-20",
"planned": [{"title": "...", "friction": 3, "effort": 2, "id": "...", "file": "...", ...}],
"completed": [{"title": "...", "friction": 3, "effort": 2, "completedAt": "...", ...}],
"rescheduled": [{"title": "...", "from": "2026-01-20", "to": "2026-01-21", ...}],
"context": {"energy": "medium", "available_time": "full day", "notes": "..."}
}
```
When recording stats:
1. Append full JSON object to logs.jsonl
2. Add summary row to stats.org Daily Log table
3. Include inline stats table in that day's journal
4. Update Weekly Summary when a week ends
## Core Principles
1. **Question-driven**: Ask questions to help think through priorities rather than dictating
2. **Direct and honest**: Call out avoidance patterns directly - this is wanted
3. **Data-informed**: Use org-agenda-api to look at patterns, velocity, scheduling history
4. **Balance pressure**: Push on procrastination but don't overwhelm on decision-heavy tasks
5. **Lightweight and flexible**: Always offer option to skip parts if not feeling it
6. **No guilt**: If we fall off the wagon, make it easy and encouraging to get back on
## Planning Session Flow
```dot
digraph planning_session {
rankdir=TB;
"Read context.org" [shape=box];
"Yesterday review (skippable)" [shape=box];
"Capture new items" [shape=box];
"Check current state" [shape=box];
"Inbox processing (skippable)" [shape=box];
"Pick focus areas" [shape=box];
"Create short list" [shape=box];
"Meta check (optional)" [shape=box];
"Write daily journal" [shape=box];
"Read context.org" -> "Yesterday review (skippable)";
"Yesterday review (skippable)" -> "Capture new items";
"Capture new items" -> "Check current state";
"Check current state" -> "Inbox processing (skippable)";
"Inbox processing (skippable)" -> "Pick focus areas";
"Pick focus areas" -> "Create short list";
"Create short list" -> "Meta check (optional)";
"Meta check (optional)" -> "Write daily journal";
}
```
Every step marked "skippable" - offer it, but accept "let's skip that today" without question.
### 0. Read Context (Always)
Read `/home/imalison/org/planning/context.org` first. This grounds the session in what's currently going on.
### 1. Yesterday Review (Skippable)
Quick look back at the previous day. Keep it lightweight - a minute or two, not an interrogation.
**Subjective check-in:**
- "How do you feel about yesterday?" (open-ended, not demanding)
- "Anything you want to talk about - productivity or otherwise?"
**Objective stats (if wanted):**
- Completion rate: X of Y planned tasks done
- Friction conquered: total/average friction of completed tasks
- Rescheduled: N tasks bumped to today
- Effort accuracy: any tasks that took way more/less than estimated?
**Keep it encouraging:**
- Celebrate wins, especially high-friction completions
- If it was a rough day, acknowledge it without judgment
- "Yesterday was yesterday. What do we want today to look like?"
**If we haven't done this in a while:**
- "Hey, we haven't done a planning session in [X days]. No big deal - want to ease back in?"
- Don't guilt trip. Just pick up where we are.
### 2. Capture New Items
Before diving into today's state, ask: "Anything new come up that needs to be captured?"
- New tasks, ideas, commitments that surfaced since last session
- Things remembered overnight or during the day
- Add these to org before continuing
**Which capture command to use:**
- `just inbox "Task title"` - Default for new todos. Quick capture without setting properties. Items go to inbox for later triage (setting effort, friction, priority, category).
- `just capture "Task title"` - Only when we're setting effort, friction, priority, or category upfront during the planning session.
This prevents things from falling through the cracks and clears mental load before planning.
### 3. Check Current State
Ask about:
- Energy level right now (low/medium/high)
- Time available and structure of the day
- Any hard deadlines or commitments
- Mental state (scattered? focused? anxious?)
### 4. Inbox Processing (Skippable)
Process items captured to inbox since last session. These are quick captures (`just inbox`) that need triage.
**For each inbox item, decide:**
1. Is this actually actionable? (If not: delete, or convert to reference/someday)
2. Assign FRICTION and EFFORT estimates
3. Set priority if obvious
4. Schedule if it has a natural date, otherwise leave unscheduled for later prioritization
5. **IMPORTANT: Transition state from INBOX to NEXT** using `just set-state "Task title" "NEXT"`
**Process for property assignment:**
1. Both of us estimate FRICTION and EFFORT
2. Use Ivan's values unless we differ by 2+ points
3. If discrepancy >= 2, discuss: "I estimated this as [X] because [reason] - what makes you see it as [Y]?"
**Why this matters:** Items sitting in inbox create mental overhead. Regular processing keeps the system trustworthy.
### 5. Pick Focus Areas
Based on energy and context, choose what *types* of work to tackle:
- High friction tasks (if energy supports it)
- Quick wins (if need momentum)
- Deep work (if have focus time)
- Admin/shallow work (if low energy)
### 6. Create Short List
Curate 3-5 tasks that match the day's reality. Not a full dump - a focused list.
### 7. Meta Check (Optional)
Occasionally (weekly-ish, or when it feels right), ask:
- "Is this planning process working for you?"
- "Anything we should change about how we do this?"
- "Are the FRICTION/EFFORT scales making sense?"
This is how we iterate on the system itself.
## Task Properties
Store in org properties drawer via `just update` with a `properties` field in the JSON body.
### FRICTION (0-5)
Psychological resistance / avoidance tendency / decision paralysis factor.
| Value | Meaning |
|-------|---------|
| 0 | No friction - could start right now |
| 1 | Minimal - minor reluctance |
| 2 | Some - need to push a bit |
| 3 | Moderate - will procrastinate without intention |
| 4 | High - significant avoidance |
| 5 | Maximum - dread/paralysis |
### EFFORT (Fibonacci: 1, 2, 3, 5, 8)
Time/energy investment. Store as number, discuss as t-shirt size.
| Number | T-shirt | Meaning |
|--------|---------|---------|
| 1 | XS | Trivial, <30min |
| 2 | S | Small, ~1-2h |
| 3 | M | Medium, half-day |
| 5 | L | Large, full day |
| 8 | XL | Multi-day effort |
### Setting Properties
```bash
just update '{"title": "Task name", "properties": {"FRICTION": "3", "EFFORT": "5"}}'
```
## Priority Framework
When helping decide what to work on, weigh these factors:
1. **Energy/context match**: Does current energy support this task's friction level?
2. **Deadlines**: What's due soon or has external pressure?
3. **Impact**: What moves the needle most?
High-friction + high-impact tasks need the right conditions. Don't push these when energy is low.
## Handling Avoidance
**Be direct.** Ivan wants honest feedback.
When noticing avoidance patterns:
- "You've rescheduled X three times now. What's making this hard?"
- "This has been on your list for two weeks. Let's talk about what's blocking it."
- "I notice you keep picking small tasks over [big important thing]. What would make that more approachable?"
**Use data:**
- Look at scheduling history via `just agenda-day YYYY-MM-DD`
- Track how long tasks have been scheduled
- Notice patterns in what gets done vs. avoided
## Coaching Stance
**Do:**
- Ask "what's making this hard?" not "why haven't you done this?"
- Offer to break down high-friction tasks into smaller steps
- Notice and celebrate progress, especially on hard things
- Be honest about patterns you see
**Don't:**
- Overwhelm with too many decisions at once
- Push high-friction tasks when energy is clearly low
- Judge - observe and inquire instead
- Let things slide without comment (directness is wanted)
## Red Flags to Watch For
- Same task rescheduled 3+ times
- Consistently avoiding a category of work
- Taking on new commitments while existing ones slip
- Only doing low-friction tasks day after day
- Overcommitting (too many items scheduled for one day)
When you see these: name it directly and explore what's going on.
## Mid-Day Check-ins
These can happen impromptu - not every day, just when useful.
**When to offer:**
- If morning plan isn't working out
- Energy shifted significantly
- Got stuck or derailed
- Finished the short list early
**Keep it brief:**
- "How's it going with [today's focus]?"
- "Want to adjust the plan for the afternoon?"
- "Anything blocking you right now?"
## Metrics We Track
For the daily review, pull these from the API:
| Metric | How to calculate | Why it matters |
|--------|------------------|----------------|
| Completion rate | completed / planned for day | Overall follow-through |
| Friction conquered | sum of FRICTION on completed tasks | Are we tackling hard things? |
| Rescheduling count | tasks that moved from yesterday to today | Chronic rescheduling = avoidance |
| Effort accuracy | compare EFFORT estimate vs actual | Calibrate future estimates |
**Don't obsess over numbers.** They're conversation starters, not report cards.
## Queries for Planning
Use the `just` commands in `/home/imalison/org/justfile` for all API interactions.
**Tasks needing property assignment:**
```bash
just todos # Get all todos, filter for missing FRICTION or EFFORT in properties
```
**Today's agenda (including overdue):**
```bash
just agenda-overdue # Use this for planning - shows today + all overdue items
just agenda # Only today's scheduled items (misses overdue tasks)
```
**Note:** Always use `agenda-overdue` during planning sessions to see the full picture of what needs attention.
**Agenda for specific date:**
```bash
just agenda-day 2026-01-20
```
**Completed items for a specific date:**
```bash
just completed 2026-01-22 # Get items completed on a specific date
just completed-today # Get items completed today
```
**This week's agenda:**
```bash
just agenda-week
```
**Overdue/rescheduled items:**
```bash
just agenda-overdue
```
**Capture new items:**
```bash
just inbox "New task title" # Quick capture to inbox (default)
just capture "Task title" "2026-01-22" # With scheduling
```
**Update task properties:**
```bash
just update '{"title": "Task name", "properties": {"FRICTION": "3", "EFFORT": "5"}}'
```
**Reschedule a task:**
```bash
just reschedule "Task title" "2026-01-25"
```
**Complete a task:**
```bash
just complete "Task title"
```
**Change task state (e.g., INBOX -> NEXT):**
```bash
just set-state "Task title" "NEXT"
```
## Daily Journal Template
Create `/home/imalison/org/planning/dailies/YYYY-MM-DD.org` for each session:
```org
#+TITLE: Planning - YYYY-MM-DD
#+DATE: [YYYY-MM-DD Day]
* Yesterday Review
** Stats
| Metric | Value |
|-------------+-------|
| Planned | N |
| Completed | N |
| Rate | N% |
| Friction | N |
| Rescheduled | N |
** Reflection
[How Ivan felt about yesterday, anything discussed]
* Today's Context
- Energy: [low/medium/high]
- Available time: [description]
- Mental state: [notes]
* Focus Areas
- [What types of work we're tackling today]
* Today's Short List
Use org ID links to reference tasks - don't duplicate task definitions here.
- [[id:uuid-here][Task 1 title]]
- [[id:uuid-here][Task 2 title]]
- [[id:uuid-here][Task 3 title]]
* Notes
[Anything else from the session]
* End of Day (optional)
[If we do an evening check-in]
```
**Also add row to** `/home/imalison/org/planning/stats.org` Daily Log table.
## Updating Context File
Update `/home/imalison/org/planning/context.org` when:
- Ivan mentions a new goal or project
- We notice a recurring pattern
- Something significant changes in life/work
- We discover what helps or doesn't help
- The meta check reveals process adjustments
Don't ask permission to update it - just do it and mention what changed.

View File

@@ -1,47 +0,0 @@
---
name: playwright-cli
description: Automate browser interactions from the shell using Playwright via the `playwright-cli` command (open/goto/snapshot/click/type/screenshot, tabs/storage/network). Use when you need deterministic browser automation for web testing, form filling, screenshots/PDFs, or data extraction.
---
# Browser Automation With playwright-cli
This system provides `playwright-cli` via Nix (see `nixos/flake.nix` for the nixpkgs PR patch and `nixos/code.nix` for installation), so it's available on `PATH` without any `npm -g` installs.
## Quick Start
```bash
# First run (downloads browser bits used by Playwright)
playwright-cli install-browser
# Open a new browser session (optionally with a URL)
playwright-cli open
playwright-cli open https://example.com/
# Navigate, inspect, and interact
playwright-cli goto https://playwright.dev
playwright-cli snapshot
playwright-cli click e15
playwright-cli type "search query"
playwright-cli press Enter
# Save artifacts
playwright-cli screenshot --filename=page.png
playwright-cli pdf --filename=page.pdf
# Close the browser
playwright-cli close
```
## Practical Workflow
1. `playwright-cli open` (or `open <url>`)
2. `playwright-cli snapshot`
3. Use element refs (`e1`, `e2`, ...) from the snapshot with `click`, `fill`, `hover`, `check`, etc.
4. Take `screenshot`/`pdf` as needed
5. `playwright-cli close`
## Tips
- Use `playwright-cli state-save auth.json` / `state-load auth.json` to persist login state across runs.
- Use named sessions with `-s=mysession` when you need multiple concurrent browsers.
- Set `PLAYWRIGHT_CLI_PACKAGE` to pin the npm package (default is `@playwright/cli@latest`).

View File

@@ -1,5 +0,0 @@
interface:
display_name: "Playwright CLI"
short_description: "Automate browser interactions"
default_prompt: "Use playwright-cli to automate browser actions (open/goto/snapshot/click/type/screenshot) and save useful artifacts (screenshots, PDFs, auth state)."

View File

@@ -1,54 +0,0 @@
---
name: release
description: Use when user asks to release, publish, bump version, or prepare a new version for deployment
---
# Release
Validate, format, bump version, and tag for release.
## Workflow
1. **Validate** - Run project's validation command
2. **Fix formatting** - Auto-fix prettier/formatting issues if any
3. **Bump version** - Ask user for bump type, update package.json
4. **Commit & tag** - Commit version bump, create git tag
5. **Optionally push** - Ask if user wants to push
## Commands
```bash
# 1. Validate
yarn validate # or: npm run validate
# 2. Fix formatting if needed
yarn prettier:fix # or: npm run prettier:fix
# 3. Bump version (edit package.json)
# patch: 1.2.3 → 1.2.4
# minor: 1.2.3 → 1.3.0
# major: 1.2.3 → 2.0.0
# 4. Commit and tag
git add package.json
git commit -m "chore: bump version to X.Y.Z"
git tag vX.Y.Z
# 5. Push (if requested)
git push && git push --tags
```
## Quick Reference
| Bump Type | When to Use |
|-----------|-------------|
| patch | Bug fixes, small changes |
| minor | New features, backwards compatible |
| major | Breaking changes |
## Before Release Checklist
- [ ] All tests pass
- [ ] No lint errors
- [ ] Formatting is clean
- [ ] Changes are committed

View File

@@ -1,86 +0,0 @@
---
name: taffybar-ecosystem-release
description: Use when releasing, version-bumping, or propagating changes across taffybar GitHub org packages (taffybar, gtk-sni-tray, gtk-strut, status-notifier-item, dbus-menu, dbus-hslogger)
---
# Taffybar Ecosystem Release
Release and propagate changes across the taffybar Haskell package ecosystem.
See also: `taffybar-nixos-flake-chain` for how these packages are consumed by the NixOS configuration and what flake.lock updates may be needed after a release.
## Package Dependency Graph
```
taffybar
├── gtk-sni-tray
│ ├── dbus-menu
│ ├── gtk-strut
│ └── status-notifier-item
├── dbus-menu
├── gtk-strut
├── status-notifier-item
└── dbus-hslogger
```
**Leaf packages** (no ecosystem deps): `gtk-strut`, `status-notifier-item`, `dbus-hslogger`, `dbus-menu`
**Mid-level**: `gtk-sni-tray` (depends on dbus-menu, gtk-strut, status-notifier-item)
**Top-level**: `taffybar` (depends on all above)
## Repositories & Local Checkouts
| Package | GitHub | Local Checkout |
|---------|--------|---------------|
| taffybar | taffybar/taffybar | `~/.config/taffybar/taffybar/` |
| gtk-sni-tray | taffybar/gtk-sni-tray | `~/Projects/gtk-sni-tray/` |
| gtk-strut | taffybar/gtk-strut | `~/Projects/gtk-strut/` |
| status-notifier-item | taffybar/status-notifier-item | `~/Projects/status-notifier-item/` |
| dbus-menu | taffybar/dbus-menu | `~/Projects/dbus-menu/` |
| dbus-hslogger | IvanMalison/dbus-hslogger | `~/Projects/dbus-hslogger/` |
## Releasing a Package
Always release leaf packages before their dependents. Changes propagate **upward** through the graph.
### 1. Release the Changed Package
Use the `hackage-release` skill for the full Hackage publish workflow. In the local checkout:
1. Bump version in `.cabal` file (PVP: A.B.C.D)
2. Update ChangeLog.md
3. `cabal build && cabal check`
4. `cabal sdist`
5. Commit, tag `vX.Y.Z.W`, push with tags
6. Publish to Hackage
7. Publish docs
**Manual doc upload required for GTK-dependent packages:** Hackage cannot build documentation for packages that depend on GTK/GI libraries (the build servers lack the system dependencies). This affects `taffybar`, `gtk-sni-tray`, `gtk-strut`, and `dbus-menu`. For these packages you must build haddocks locally and upload them yourself — see the `hackage-release` skill for the `cabal haddock --haddock-for-hackage` and `cabal upload --documentation` commands. Only `status-notifier-item` and `dbus-hslogger` (pure DBus/Haskell deps) can have their docs built by Hackage automatically.
### 2. Update Dependents' Version Bounds
For each package higher in the graph that depends on what you just released, update the dependency bound in its `.cabal` file. For example, if you bumped `gtk-strut` to 0.1.5.0:
- In `gtk-sni-tray.cabal`: update `gtk-strut >= 0.1.5 && < 0.2`
- In `taffybar.cabal`: update `gtk-strut >= 0.1.5 && < 0.2`
Then release those packages too if needed (repeat from step 1).
### 3. Update Flake Inputs
Each package's `flake.nix` references its ecosystem dependencies as inputs (typically `flake = false` pointing at GitHub). After pushing changes, update the flake.lock in any repo that directly references the changed package:
```bash
cd ~/Projects/gtk-sni-tray # if it depends on what changed
nix flake update gtk-strut
```
```bash
cd ~/.config/taffybar/taffybar # taffybar references all ecosystem pkgs
nix flake update gtk-strut
```
### Full Ecosystem Release Order
1. `gtk-strut`, `status-notifier-item`, `dbus-hslogger`, `dbus-menu` (leaves — parallel OK)
2. `gtk-sni-tray` (update bounds for any leaf changes first)
3. `taffybar` (update bounds for all changes)

View File

@@ -1,61 +0,0 @@
---
name: taffybar-nixos-flake-chain
description: Use when doing NixOS rebuilds involving taffybar, or when flake.lock updates are needed after changing taffybar ecosystem packages. Also use when debugging stale taffybar versions after `just switch`.
---
# Taffybar NixOS Flake Chain
How the taffybar ecosystem packages are consumed by the NixOS configuration through a chain of nested flakes, and what flake.lock updates may be needed when something changes.
See also: `taffybar-ecosystem-release` for the package dependency graph, release workflow, and Hackage publishing.
## The Three-Layer Flake Chain
The NixOS system build pulls in taffybar through three nested flake.nix files:
```
nixos/flake.nix (top — `just switch` reads this)
│ ├── taffybar path:.../taffybar/taffybar
│ ├── imalison-taffybar path:../dotfiles/config/taffybar
│ └── gtk-sni-tray, gtk-strut, etc. (GitHub inputs)
dotfiles/config/taffybar/flake.nix (middle — imalison-taffybar config)
│ ├── taffybar path:.../taffybar/taffybar
│ └── gtk-sni-tray, gtk-strut, etc. (GitHub inputs)
dotfiles/config/taffybar/taffybar/flake.nix (bottom — taffybar library)
│ └── gtk-sni-tray, gtk-strut, etc. (flake = false GitHub inputs)
```
All three flakes declare their own top-level inputs for the ecosystem packages and use `follows` to keep versions consistent within each layer.
## Why Bottom-Up Updates Matter
`path:` inputs snapshot the target flake **including its flake.lock** at lock time. If you only run `nix flake update` at the top (nixos) layer, the middle and bottom layers keep whatever was previously locked in their own flake.lock files.
So when propagating a change to a system rebuild, you generally need to update flake.lock files from the bottom up — the bottom layer first so the middle layer picks up fresh locks when it re-resolves, then the middle so the top picks up fresh locks.
```bash
# Bottom (if an ecosystem dep changed):
cd ~/.config/taffybar/taffybar && nix flake update <pkg>
# Middle:
cd ~/.config/taffybar && nix flake update <pkg> taffybar
# Top:
cd ~/dotfiles/nixos && nix flake update <pkg> imalison-taffybar taffybar
```
Not every change requires touching all three layers. Think about which flake.lock files actually contain stale references:
- Changed **taffybar itself** — it's the bottom layer, so start at the middle (`nix flake update taffybar`) then the top.
- Changed a **leaf ecosystem package** (e.g. gtk-strut) — start at the bottom since taffybar's flake.lock references it, then cascade up.
- The nixos flake also has **direct GitHub inputs** for ecosystem packages with `follows` overrides. Updating those at the top level may be sufficient if nothing changed in the middle/bottom flake.lock files themselves.
## Rebuilding
```bash
cd ~/dotfiles/nixos && just switch
```
If taffybar seems stale after a rebuild, check whether the flake.lock at each layer actually points at the expected revision — a missed cascade step is the usual cause.

View File

@@ -31,6 +31,7 @@
"iterm2", "iterm2",
"java", "java",
"jumpcut", "jumpcut",
"karabiner",
"libreoffice", "libreoffice",
"macpass", "macpass",
"mirrordisplays", "mirrordisplays",
@@ -169,7 +170,6 @@
"tig", "tig",
"tmate", "tmate",
"tmux", "tmux",
"zellij",
"unoconv", "unoconv",
"vim", "vim",
"w3m", "w3m",

View File

@@ -1,6 +0,0 @@
*
!.gitignore
!CLAUDE.md
!settings.json
!settings.local.json
!settings.local.json.example

View File

@@ -1 +0,0 @@
../agents/AGENTS.md

View File

@@ -1,20 +0,0 @@
{
"hooks": {
"UserPromptSubmit": [
{
"hooks": [
{
"type": "command",
"command": "~/.agents/hooks/tmux-title.sh"
}
]
}
]
},
"enabledPlugins": {
"superpowers@superpowers-marketplace": true,
"agent-browser@agent-browser": true
},
"effortLevel": "high",
"skipDangerousModePermissionPrompt": true
}

View File

@@ -1,39 +0,0 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(cat:*)"
],
"deny": []
},
"mcp": {
"servers": {
"gitea-mcp": {
"command": "bash",
"args": [
"-lc",
"set -euo pipefail; export GITEA_BASE_URL='https://dev.railbird.ai'; export GITEA_ACCESS_TOKEN=\"$(pass show claude-mcp/gitea-access-token | head -1)\"; exec docker run -i --rm -e GITEA_ACCESS_TOKEN -e GITEA_BASE_URL docker.gitea.com/gitea-mcp-server"
]
},
"chrome-devtools": {
"command": "npx",
"args": [
"chrome-devtools-mcp@latest",
"--auto-connect"
]
},
"imap-email": {
"command": "bash",
"args": [
"-lc",
"set -euo pipefail; export IMAP_USER='IvanMalison@gmail.com'; export IMAP_HOST='imap.gmail.com'; export IMAP_PASSWORD=\"$(pass show claude-mcp/gmail-imap-app-password | head -1)\"; exec npx -y imap-email-mcp"
]
}
}
},
"enabledMcpjsonServers": [
"chrome-devtools",
"imap-email"
],
"enableAllProjectMcpServers": true
}

View File

@@ -1,43 +0,0 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(cat:*)"
],
"deny": []
},
"mcp": {
"servers": {
"gitea-mcp": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"-e",
"GITEA_ACCESS_TOKEN",
"-e",
"GITEA_BASE_URL=https://dev.railbird.ai",
"docker.gitea.com/gitea-mcp-server"
]
},
"chrome-devtools": {
"command": "npx",
"args": [
"chrome-devtools-mcp@latest",
"--auto-connect"
]
},
"imap-email": {
"command": "npx",
"args": ["-y", "imap-email-mcp"],
"env": {}
}
}
},
"enabledMcpjsonServers": [
"chrome-devtools",
"imap-email"
],
"enableAllProjectMcpServers": true
}

View File

@@ -1,5 +0,0 @@
*
!.gitignore
!AGENTS.md
!config.toml
!skills

View File

@@ -1 +0,0 @@
../agents/AGENTS.md

View File

@@ -1,201 +0,0 @@
model = "gpt-5.5"
model_reasoning_effort = "high"
personality = "pragmatic"
notify = ["/Users/kat/dotfiles/dotfiles/codex/plugins/cache/openai-bundled/computer-use/1.0.755/Codex Computer Use.app/Contents/SharedSupport/SkyComputerUseClient.app/Contents/MacOS/SkyComputerUseClient", "turn-ended"]
[projects."/home/imalison/Projects/nixpkgs"]
trust_level = "trusted"
[projects."/home/imalison/dotfiles"]
trust_level = "trusted"
[projects."/home/imalison/Projects/railbird"]
trust_level = "trusted"
[projects."/home/imalison/Projects/subtr-actor"]
trust_level = "trusted"
[projects."/home/imalison/Projects/google-messages-api"]
trust_level = "trusted"
[projects."/home/imalison"]
trust_level = "trusted"
[projects."/home/imalison/Projects/scrobble-scrubber"]
trust_level = "trusted"
[projects."/home/imalison/temp"]
trust_level = "trusted"
[projects."/home/imalison/Projects/org-agenda-api"]
trust_level = "untrusted"
[projects."/home/imalison/org"]
trust_level = "trusted"
[projects."/home/imalison/dotfiles/.git/modules/dotfiles/config/taffybar"]
trust_level = "trusted"
[projects."/home/imalison/Projects/notifications-tray-icon"]
trust_level = "trusted"
[projects."/home/imalison/Projects/hyprland"]
trust_level = "trusted"
[projects."/home/imalison/Projects/git-sync-rs"]
trust_level = "trusted"
[projects."/home/imalison/Projects/keepbook"]
trust_level = "trusted"
[projects."/home/imalison/Projects/boxcars"]
trust_level = "trusted"
[projects."/home/imalison/Projects/rumno"]
trust_level = "trusted"
[projects."/home/imalison/Projects/git-blame-rank"]
trust_level = "trusted"
[projects."/home/imalison/Projects/hatchet"]
trust_level = "trusted"
[projects."/home/imalison/dotfiles/dotfiles/emacs.d/elpaca/sources/org-project-capture"]
trust_level = "trusted"
[projects."/home/imalison/dotfiles/dotfiles/config/taffybar/taffybar/packages"]
trust_level = "trusted"
[projects."/home/imalison/Projects/scrobble-tools"]
trust_level = "trusted"
[projects."/home/imalison/.password-store"]
trust_level = "trusted"
[projects."/home/imalison/Projects/subtr-actor-mechanics"]
trust_level = "trusted"
[projects."/home/imalison/Projects/lastfm-edit"]
trust_level = "trusted"
[projects."/home/imalison/Projects/mova"]
trust_level = "trusted"
[projects."/home/imalison/dotfiles/dotfiles/config/taffybar/taffybar"]
trust_level = "trusted"
[projects."/home/imalison/Projects"]
trust_level = "trusted"
[projects."/home/imalison/Projects/rofi-systemd"]
trust_level = "trusted"
[projects."/home/imalison/Projects/map-quiz"]
trust_level = "trusted"
[projects."/run/media/imalison/NETDEBUGUSB"]
trust_level = "trusted"
[projects."/home/imalison/Projects/coqui-tts-streamer"]
trust_level = "trusted"
[projects."/home/imalison/Downloads"]
trust_level = "trusted"
[projects."/home/imalison/keysmith_generated"]
trust_level = "trusted"
[projects."/run/media/imalison/NIXOS_SD"]
trust_level = "trusted"
[projects."/Users/kat/dotfiles"]
trust_level = "trusted"
[projects."/Users/kat"]
trust_level = "trusted"
[projects."/Users/kat/org"]
trust_level = "trusted"
[projects."/Users/kat/Documents/Codex/2026-04-25/do-you-see-the-sandisk-external"]
trust_level = "trusted"
[projects."/Volumes/Extreme SSD/Projects/keepbook"]
trust_level = "trusted"
[projects."/Users/kat/Documents/Codex/2026-04-25/it-seems-like-maybe-we-dont"]
trust_level = "trusted"
[projects."/Users/kat/Documents/Codex/2026-04-25/what-is-the-state-of-tiling"]
trust_level = "trusted"
[projects."/home/imalison/Pictures/ai/2026/celeb"]
trust_level = "trusted"
[projects."/home/imalison/.local/share/keepbook"]
trust_level = "trusted"
[notice]
hide_gpt5_1_migration_prompt = true
"hide_gpt-5.1-codex-max_migration_prompt" = true
[notice.model_migrations]
"gpt-5.2" = "gpt-5.2-codex"
[mcp_servers.chrome-devtools]
command = "npx"
args = ["-y", "chrome-devtools-mcp@latest", "--auto-connect"]
[mcp_servers.observability]
command = "npx"
args = ["-y", "@google-cloud/observability-mcp"]
[mcp_servers.openaiDeveloperDocs]
url = "https://developers.openai.com/mcp"
[features]
unified_exec = true
apps = true
steer = true
[marketplaces.openai-bundled]
last_updated = "2026-04-21T17:43:57Z"
source_type = "local"
source = "/Users/kat/.codex/.tmp/bundled-marketplaces/openai-bundled"
[marketplaces.openai-primary-runtime]
last_updated = "2026-04-25T23:49:36Z"
source_type = "local"
source = "/Users/kat/.cache/codex-runtimes/codex-primary-runtime/plugins/openai-primary-runtime"
[plugins."google-calendar@openai-curated"]
enabled = true
[plugins."gmail@openai-curated"]
enabled = true
[plugins."google-drive@openai-curated"]
enabled = true
[plugins."github@openai-curated"]
enabled = true
[plugins."computer-use@openai-bundled"]
enabled = true
[plugins."documents@openai-primary-runtime"]
enabled = true
[plugins."spreadsheets@openai-primary-runtime"]
enabled = true
[plugins."presentations@openai-primary-runtime"]
enabled = true
[plugins."browser-use@openai-bundled"]
enabled = true
[tui.model_availability_nux]
"gpt-5.5" = 4

View File

@@ -1 +0,0 @@
../agents/skills

View File

@@ -1,8 +1,7 @@
[general] import = ["/home/imalison/.config/alacritty/themes/themes/dracula.toml"]
import = ["~/.config/alacritty/themes/themes/dracula.toml"]
[font] [font]
size = 12 size = 8
[scrolling] [scrolling]
history = 10000 history = 10000

View File

@@ -0,0 +1,2 @@
[api]
token = 417ba97c-b532-4e4b-86df-a240314ae840

View File

@@ -1,18 +0,0 @@
output HDMI-0
off
output DP-1
off
output DP-2
off
output DP-3
off
output DP-4
off
output DP-5
off
output DP-0
crtc 0
mode 3440x1440
pos 0x0
rate 240.00
x-prop-non_desktop 0

View File

@@ -1 +0,0 @@
DP-0 00ffffffffffff003669d04d0000000033210104b55022783bac05b04d3db7250f5054bfcf00714f81c0814081809500b300d1c00101e77c70a0d0a0295030203a0020513100001a023a801871382d40582c450020513100001e000000fd0c30f0919196010a202020202020000000fc004d50473334314358204f4c45440257020339f14901030204901211133f2309070783010000e2002a741a0000030330f000a066024f03f0000000000000e305e201e6060701664b00565e00a0a0a029503020350020513100001a6fc200a0a0a055503020350020513100001a00000000000000000000000000000000000000000000000000000000000000000000fc7012790300030150a2e300086f0d9f002f801f009f05b20031000900520101086f0d9f002f801f009f05540002000900b76901086f0d9f002f801f009f057600020009006f0502086f0d8f002f801f009f0563001d00090000000000000000000000000000000000000000000000000000000000000000000000000000001590

View File

@@ -1,39 +0,0 @@
output DP-1
off
output HDMI-1
off
output DP-2
off
output HDMI-2
off
output DP-1-0
off
output DP-1-1
off
output DP-1-2
off
output DP-1-3
off
output DP-1-4
off
output DP-1-5
off
output DP-1-6
off
output eDP-1
crtc 0
mode 2560x1600
pos 0x0
primary
rate 240.00
x-prop-broadcast_rgb Automatic
x-prop-colorspace Default
x-prop-max_bpc 12
x-prop-non_desktop 0
x-prop-scaling_mode Full aspect
output HDMI-1-0
crtc 4
mode 3440x1440
pos 2560x0
rate 99.98
x-prop-non_desktop 0

View File

@@ -1,2 +0,0 @@
HDMI-1-0 00ffffffffffff0010ace3a1535a333016210103805123782a25a1b14d3db7250e505421080001010101010101010101010101010101e77c70a0d0a029503020350029623100001a000000ff00237442737a474441594542634e000000fd0018781e963c010a202020202020000000fc0044656c6c204157333432334457015f020337f148101f04130312013f230907018301000068030c002000383c006ad85dc401788000000278e305c000e2006ae60605018d4b004ed470a0d0a046503020350029623100001a9d6770a0d0a022503020350029623100001a565e00a0a0a029503020350029623100001a6fc200a0a0a055503020350029623100001a3c
eDP-1 00ffffffffffff0009e5580c0000000001210104b527187803bbc5ae503fb7250c515500000001010101010101010101010101010101c07200a0a040c8603020360084f21000001a000000fd0c30f0b1b176010a202020202020000000fe00424f452043510a202020202020000000fc004e4531383051444d2d4e4d310a029602030f00e3058080e606050195731000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fa702079020021001d280f7409000a400680dd2a511824b249120e023554b060ec64662a1378220014ffed1185ff099f002f001f003f06c700020005002b000c27003cef00002700303b0000810015741a0000030b30f0006095107310f0000000008d00000000000000000000000000000000000000000000000000000000bc90

View File

@@ -1,39 +0,0 @@
output DP-1
off
output HDMI-1
off
output DP-2
off
output HDMI-2
off
output DP-1-0
off
output DP-1-1
off
output DP-1-2
off
output DP-1-3
off
output DP-1-4
off
output DP-1-5
off
output DP-1-6
off
output eDP-1
crtc 0
mode 2560x1600
pos 0x0
primary
rate 240.00
x-prop-broadcast_rgb Automatic
x-prop-colorspace Default
x-prop-max_bpc 12
x-prop-non_desktop 0
x-prop-scaling_mode Full aspect
output HDMI-1-0
crtc 4
mode 3440x1440
pos 2560x0
rate 99.98
x-prop-non_desktop 0

View File

@@ -1,2 +0,0 @@
HDMI-1-0 00ffffffffffff0010ace3a1535a333016210103805123782a25a1b14d3db7250e505421080001010101010101010101010101010101e77c70a0d0a029503020350029623100001a000000ff00237442737a474441594542634e000000fd0018781e963c010a202020202020000000fc0044656c6c204157333432334457015f020337f148101f04130312013f230907018301000068030c003000383c006ad85dc401788000000278e305c000e2006ae60605018d4b004ed470a0d0a046503020350029623100001a9d6770a0d0a022503020350029623100001a565e00a0a0a029503020350029623100001a6fc200a0a0a055503020350029623100001a2c
eDP-1 00ffffffffffff0009e5580c0000000001210104b527187803bbc5ae503fb7250c515500000001010101010101010101010101010101c07200a0a040c8603020360084f21000001a000000fd0c30f0b1b176010a202020202020000000fe00424f452043510a202020202020000000fc004e4531383051444d2d4e4d310a029602030f00e3058080e606050195731000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fa702079020021001d280f7409000a400680dd2a511824b249120e023554b060ec64662a1378220014ffed1185ff099f002f001f003f06c700020005002b000c27003cef00002700303b0000810015741a0000030b30f0006095107310f0000000008d00000000000000000000000000000000000000000000000000000000bc90

View File

@@ -0,0 +1,27 @@
[rc]
use_copy=true
use_primary=true
synchronize=false
automatic_paste=false
show_indexes=true
save_uris=true
use_rmb_menu=false
save_history=true
history_limit=1000
history_timeout_seconds=30
history_timeout=false
items_menu=50
statics_show=true
statics_items=10
hyperlinks_only=false
confirm_clear=false
single_line=false
reverse_history=false
item_length=50
ellipsize=2
history_key=
actions_key=
menu_key=
search_key=
offline_key=
offline_mode=false

View File

@@ -0,0 +1,90 @@
/* Emacs-style key bindings for single-line text entries (GtkEntry).
 * Applied below via `-gtk-key-bindings` on the `entry` selector. */
@binding-set gtk-emacs-text-entry
{
bind "<ctrl>b" { "move-cursor" (logical-positions, -1, 0) };
bind "<shift><ctrl>b" { "move-cursor" (logical-positions, -1, 1) };
bind "<ctrl>f" { "move-cursor" (logical-positions, 1, 0) };
bind "<shift><ctrl>f" { "move-cursor" (logical-positions, 1, 1) };
bind "<alt>b" { "move-cursor" (words, -1, 0) };
bind "<shift><alt>b" { "move-cursor" (words, -1, 1) };
bind "<alt>f" { "move-cursor" (words, 1, 0) };
bind "<shift><alt>f" { "move-cursor" (words, 1, 1) };
bind "<ctrl>a" { "move-cursor" (paragraph-ends, -1, 0) };
bind "<shift><ctrl>a" { "move-cursor" (paragraph-ends, -1, 1) };
bind "<ctrl>e" { "move-cursor" (paragraph-ends, 1, 0) };
bind "<shift><ctrl>e" { "move-cursor" (paragraph-ends, 1, 1) };
/* NOTE(review): <ctrl>w is bound again near the end of this block
 * (delete-from-cursor word-ends, -1); presumably the later binding
 * takes precedence over this cut-clipboard one — confirm which is
 * intended before relying on either. */
bind "<ctrl>w" { "cut-clipboard" () };
bind "<ctrl>y" { "paste-clipboard" () };
bind "<ctrl>d" { "delete-from-cursor" (chars, 1) };
bind "<alt>d" { "delete-from-cursor" (word-ends, 1) };
bind "<alt>BackSpace" { "delete-from-cursor" (word-ends, -1) };
bind "<ctrl>k" { "delete-from-cursor" (paragraph-ends, 1) };
/* M-x just-one-space equivalent: collapse whitespace to a single space. */
bind "<alt>space" { "delete-from-cursor" (whitespace, 1)
"insert-at-cursor" (" ") };
bind "<alt>KP_Space" { "delete-from-cursor" (whitespace, 1)
"insert-at-cursor" (" ") };
/*
* Some non-Emacs keybindings people are attached to
*/
bind "<ctrl>u" { "move-cursor" (paragraph-ends, -1, 0)
"delete-from-cursor" (paragraph-ends, 1) };
bind "<ctrl>h" { "delete-from-cursor" (chars, -1) };
/* Second <ctrl>w binding — see NOTE(review) above. */
bind "<ctrl>w" { "delete-from-cursor" (word-ends, -1) };
}
/*
* Bindings for GtkTextView
*/
@binding-set gtk-emacs-text-view
{
bind "<ctrl>p" { "move-cursor" (display-lines, -1, 0) };
bind "<shift><ctrl>p" { "move-cursor" (display-lines, -1, 1) };
bind "<ctrl>n" { "move-cursor" (display-lines, 1, 0) };
bind "<shift><ctrl>n" { "move-cursor" (display-lines, 1, 1) };
bind "<ctrl>space" { "set-anchor" () };
bind "<ctrl>KP_Space" { "set-anchor" () };
}
/*
* Bindings for GtkTreeView
*/
@binding-set gtk-emacs-tree-view
{
bind "<ctrl>s" { "start-interactive-search" () };
bind "<ctrl>f" { "move-cursor" (logical-positions, 1) };
bind "<ctrl>b" { "move-cursor" (logical-positions, -1) };
}
/*
* Bindings for menus
*/
@binding-set gtk-emacs-menu
{
bind "<ctrl>n" { "move-current" (next) };
bind "<ctrl>p" { "move-current" (prev) };
bind "<ctrl>f" { "move-current" (child) };
bind "<ctrl>b" { "move-current" (parent) };
}
entry {
-gtk-key-bindings: gtk-emacs-text-entry;
}
textview {
-gtk-key-bindings: gtk-emacs-text-entry, gtk-emacs-text-view;
}
treeview {
-gtk-key-bindings: gtk-emacs-tree-view;
}
GtkMenuShell {
-gtk-key-bindings: gtk-emacs-menu;
}
@import 'colors.css';

View File

@@ -1,11 +0,0 @@
general {
lock_cmd = pidof hyprlock || hyprlock
before_sleep_cmd = loginctl lock-session
after_sleep_cmd = hyprctl dispatch dpms on
}
listener {
timeout = 900
on-timeout = hypr-screensaver stop && hyprctl dispatch dpms off
on-resume = hyprctl dispatch dpms on
}

View File

@@ -1,562 +0,0 @@
# Hyprland Configuration
# XMonad-like dynamic tiling using hy3 plugin
# Based on XMonad configuration from xmonad.hs
# =============================================================================
# PLUGINS (Hyprland pinned to 0.53.0 to match hy3)
# =============================================================================
# Load the plugin before parsing keybinds/layouts that depend on it
plugin = /run/current-system/sw/lib/libhy3.so
plugin = /run/current-system/sw/lib/libhyprexpo.so
# =============================================================================
# MONITORS
# =============================================================================
monitor=,preferred,auto,1
# =============================================================================
# PROGRAMS
# =============================================================================
$terminal = ghostty --gtk-single-instance=false
$fileManager = dolphin
$menu = rofi -show drun -show-icons
$runMenu = rofi -show run
# =============================================================================
# ENVIRONMENT VARIABLES
# =============================================================================
env = XCURSOR_SIZE,24
env = QT_QPA_PLATFORMTHEME,qt5ct
# Used by ~/.config/hypr/scripts/* to keep workspace IDs bounded.
env = HYPR_MAX_WORKSPACE,9
# =============================================================================
# INPUT CONFIGURATION
# =============================================================================
input {
kb_layout = us
kb_variant =
kb_model =
kb_options =
kb_rules =
follow_mouse = 1
touchpad {
natural_scroll = no
}
sensitivity = 0
}
# Cursor warping behavior
cursor {
persistent_warps = true
}
# =============================================================================
# GENERAL SETTINGS
# =============================================================================
general {
gaps_in = 5
gaps_out = 10
border_size = 0
col.active_border = rgba(edb443ee) rgba(33ccffee) 45deg
col.inactive_border = rgba(595959aa)
# Use hy3 layout for XMonad-like dynamic tiling
layout = hy3
allow_tearing = false
}
# =============================================================================
# DECORATION
# =============================================================================
decoration {
rounding = 5
blur {
enabled = true
size = 3
passes = 1
}
# Fade inactive windows (like XMonad's fadeInactive)
active_opacity = 1.0
inactive_opacity = 0.9
}
# =============================================================================
# ANIMATIONS
# =============================================================================
animations {
enabled = yes
# Hyprland supports bezier curves, not true spring physics.
# Use a mild overshoot plus GNOME-like window animation style.
bezier = overshoot, 0.05, 0.9, 0.1, 1.1
bezier = smoothOut, 0.36, 1, 0.3, 1
bezier = smoothInOut, 0.42, 0, 0.58, 1
bezier = linear, 0, 0, 1, 1
# SPEED is in deciseconds (e.g. 6 == 600ms).
animation = windows, 1, 6, overshoot, gnomed
animation = windowsIn, 1, 6, overshoot, gnomed
animation = windowsOut, 1, 5, smoothInOut, gnomed
animation = windowsMove, 1, 6, smoothOut
animation = border, 0
animation = borderangle, 0
animation = fade, 1, 5, smoothOut
animation = workspaces, 1, 6, smoothOut, slidefade 15%
animation = specialWorkspace, 1, 6, smoothOut, slidevert
}
# =============================================================================
# MASTER LAYOUT CONFIGURATION
# =============================================================================
master {
new_status = slave
mfact = 0.5
orientation = left
}
# Dwindle layout (alternative - binary tree like i3)
dwindle {
pseudotile = yes
preserve_split = yes
}
# =============================================================================
# WORKSPACE RULES (SMART GAPS)
# =============================================================================
# Replace no_gaps_when_only (removed in newer Hyprland)
# Remove gaps when there's only one visible tiled window (ignore special workspaces)
workspace = w[tv1]s[false], gapsout:0, gapsin:0
workspace = f[1]s[false], gapsout:0, gapsin:0
# Group/tabbed window configuration (built-in alternative to hy3 tabs)
group {
col.border_active = rgba(edb443ff)
col.border_inactive = rgba(091f2eff)
groupbar {
enabled = true
font_size = 12
height = 22
col.active = rgba(edb443ff)
col.inactive = rgba(091f2eff)
text_color = rgba(091f2eff)
}
}
# =============================================================================
# HY3/HYPREXPO PLUGIN CONFIG
# =============================================================================
plugin {
hy3 {
# Disable autotile to get XMonad-like manual control
autotile {
enable = false
}
# Tab configuration
tabs {
height = 22
padding = 6
render_text = true
text_font = "Sans"
text_height = 10
text_padding = 3
col.active = rgba(edb443ff)
col.inactive = rgba(091f2eff)
col.urgent = rgba(ff0000ff)
col.text.active = rgba(091f2eff)
col.text.inactive = rgba(ffffffff)
col.text.urgent = rgba(ffffffff)
}
}
hyprexpo {
# Always include workspace 1 in the overview grid
workspace_method = first 1
# Only show workspaces with windows
skip_empty = true
# Show numeric workspace labels in the expo grid
show_workspace_numbers = true
# 3 columns -> 3x3 grid when 9 workspaces are visible
columns = 3
}
}
# =============================================================================
# MISC
# =============================================================================
misc {
force_default_wallpaper = 0
disable_hyprland_logo = true
}
# =============================================================================
# BINDS OPTIONS
# =============================================================================
binds {
# Keep workspace history so "previous" can toggle back reliably.
allow_workspace_cycles = true
workspace_back_and_forth = true
}
# =============================================================================
# WINDOW RULES
# =============================================================================
# Float dialogs
windowrule = match:class ^()$, match:title ^()$, float on
windowrule = match:title ^(Picture-in-Picture)$, float on
windowrule = match:title ^(Open File)$, float on
windowrule = match:title ^(Save File)$, float on
windowrule = match:title ^(Confirm)$, float on
# Rumno OSD/notifications: treat as an overlay, not a "real" managed window.
# (Matches both class and title because rumno may set either depending on backend.)
windowrule = match:class ^(.*[Rr]umno.*)$, float on
windowrule = match:class ^(.*[Rr]umno.*)$, pin on
windowrule = match:class ^(.*[Rr]umno.*)$, center on
windowrule = match:class ^(.*[Rr]umno.*)$, decorate off
windowrule = match:class ^(.*[Rr]umno.*)$, no_shadow on
windowrule = match:title ^(.*[Rr]umno.*)$, float on
windowrule = match:title ^(.*[Rr]umno.*)$, pin on
windowrule = match:title ^(.*[Rr]umno.*)$, center on
windowrule = match:title ^(.*[Rr]umno.*)$, decorate off
windowrule = match:title ^(.*[Rr]umno.*)$, no_shadow on
# Scratchpad sizing handled by hyprscratch exec rules (see hyprland.nix)
# Using hyprscratch rules instead of windowrule to avoid affecting child windows (e.g. Slack meets)
# =============================================================================
# KEY BINDINGS
# =============================================================================
# Modifier keys
$mainMod = SUPER
$modAlt = SUPER ALT
$hyper = SUPER CTRL ALT
# -----------------------------------------------------------------------------
# Program Launching
# -----------------------------------------------------------------------------
bind = $mainMod, P, exec, $menu
bind = $mainMod SHIFT, P, exec, $runMenu
bind = $mainMod SHIFT, Return, exec, $terminal
# -----------------------------------------------------------------------------
# Overview (Hyprexpo)
# -----------------------------------------------------------------------------
bind = $mainMod, TAB, hyprexpo:expo, toggle
bind = $mainMod SHIFT, TAB, hyprexpo:expo, bring
bind = $mainMod, Q, killactive,
bind = $mainMod SHIFT, C, killactive,
bind = $mainMod SHIFT, Q, exit,
# Emacs-everywhere (like XMonad's emacs-everywhere)
bind = $mainMod, E, exec, emacsclient --eval '(emacs-everywhere)'
bind = $mainMod, V, exec, wl-paste | xdotool type --file -
# Chrome/Browser (raise or spawn like XMonad's bindBringAndRaise)
bind = $modAlt, C, exec, ~/.config/hypr/scripts/raise-or-run.sh google-chrome google-chrome-stable
# -----------------------------------------------------------------------------
# SCRATCHPADS (managed by hyprscratch daemon with auto-dismiss)
# -----------------------------------------------------------------------------
bind = $modAlt, E, exec, hyprscratch toggle element
bind = $modAlt, G, exec, hyprscratch toggle gmail
bind = $modAlt, H, exec, hyprscratch toggle htop
bind = $modAlt, M, exec, hyprscratch toggle messages
bind = $modAlt, K, exec, hyprscratch toggle slack
bind = $modAlt, S, exec, hyprscratch toggle spotify
bind = $modAlt, T, exec, hyprscratch toggle transmission
bind = $modAlt, V, exec, hyprscratch toggle volume
bind = $modAlt, grave, exec, hyprscratch toggle dropdown
# Hidden workspace (like XMonad's NSP)
bind = $mainMod, X, movetoworkspace, special:NSP
bind = $mainMod SHIFT, X, togglespecialworkspace, NSP
# -----------------------------------------------------------------------------
# DIRECTIONAL NAVIGATION (WASD - like XMonad Navigation2D)
# Using hy3 dispatchers for proper tree-based navigation
# -----------------------------------------------------------------------------
# Focus movement (Mod + WASD) - hy3:movefocus navigates the tree
bind = $mainMod, W, hy3:movefocus, u
bind = $mainMod, S, hy3:movefocus, d
bind = $mainMod, A, hy3:movefocus, l
bind = $mainMod, D, hy3:movefocus, r
# Move windows (Mod + Shift + WASD) - hy3:movewindow with once=true for swapping
bind = $mainMod SHIFT, W, exec, ~/.config/hypr/scripts/movewindow-follow-cursor.sh u once
bind = $mainMod SHIFT, S, exec, ~/.config/hypr/scripts/movewindow-follow-cursor.sh d once
bind = $mainMod SHIFT, A, exec, ~/.config/hypr/scripts/movewindow-follow-cursor.sh l once
bind = $mainMod SHIFT, D, exec, ~/.config/hypr/scripts/movewindow-follow-cursor.sh r once
# Resize windows (Mod + Ctrl + WASD)
binde = $mainMod CTRL, W, resizeactive, 0 -50
binde = $mainMod CTRL, S, resizeactive, 0 50
binde = $mainMod CTRL, A, resizeactive, -50 0
binde = $mainMod CTRL, D, resizeactive, 50 0
# Screen/Monitor focus (Hyper + WASD)
bind = $hyper, W, focusmonitor, u
bind = $hyper, S, focusmonitor, d
bind = $hyper, A, focusmonitor, l
bind = $hyper, D, focusmonitor, r
# Move window to monitor and follow (Hyper + Shift + WASD)
bind = $hyper SHIFT, W, movewindow, mon:u
bind = $hyper SHIFT, S, movewindow, mon:d
bind = $hyper SHIFT, A, movewindow, mon:l
bind = $hyper SHIFT, D, movewindow, mon:r
# Shift to empty workspace on screen direction (Super + Ctrl + Shift + WASD)
# Like XMonad's shiftToEmptyOnScreen
bind = $mainMod CTRL SHIFT, W, exec, ~/.config/hypr/scripts/shift-to-empty-on-screen.sh u
bind = $mainMod CTRL SHIFT, S, exec, ~/.config/hypr/scripts/shift-to-empty-on-screen.sh d
bind = $mainMod CTRL SHIFT, A, exec, ~/.config/hypr/scripts/shift-to-empty-on-screen.sh l
bind = $mainMod CTRL SHIFT, D, exec, ~/.config/hypr/scripts/shift-to-empty-on-screen.sh r
# -----------------------------------------------------------------------------
# LAYOUT CONTROL (XMonad-like with hy3)
# -----------------------------------------------------------------------------
# Create groups with different orientations (like XMonad layouts)
# hy3:makegroup creates a split/tab group from focused window
bind = $mainMod, Space, hy3:changegroup, toggletab
bind = $mainMod SHIFT, Space, hy3:changegroup, opposite
# Create specific group types
bind = $mainMod, H, hy3:makegroup, h
bind = $mainMod SHIFT, V, hy3:makegroup, v
# Mod+Ctrl+Space mirrors Mod+Space (tabs instead of fullscreen)
bind = $mainMod CTRL, Space, hy3:changegroup, toggletab
# Change group type (cycle h -> v -> tab)
bind = $mainMod, slash, hy3:changegroup, h
bind = $mainMod SHIFT, slash, hy3:changegroup, v
# Tab navigation (like XMonad's focus next/prev in tabbed)
bind = $mainMod, bracketright, hy3:focustab, r, wrap
bind = $mainMod, bracketleft, hy3:focustab, l, wrap
# Move window within tab group (hy3 has no movetab dispatcher)
bind = $mainMod SHIFT, bracketright, hy3:movewindow, r, visible
bind = $mainMod SHIFT, bracketleft, hy3:movewindow, l, visible
# Expand focus to parent group (like XMonad's focus parent)
bind = $mainMod, grave, hy3:expand, expand
bind = $mainMod SHIFT, grave, hy3:expand, base
# Fullscreen (like XMonad's NBFULL toggle)
bind = $mainMod, F, fullscreen, 0
bind = $mainMod SHIFT, F, fullscreen, 1
# Toggle floating
bind = $mainMod, T, togglefloating,
# Resize split ratio (hy3 uses resizeactive for splits)
binde = $mainMod, comma, resizeactive, -50 0
binde = $mainMod, period, resizeactive, 50 0
# Equalize window sizes on workspace (hy3)
bind = $mainMod SHIFT, equal, hy3:equalize, workspace
# Kill group - removes the focused window from its group
bind = $mainMod, N, hy3:killactive
# hy3:setswallow - set a window to swallow newly spawned windows
bind = $mainMod CTRL, M, hy3:setswallow, toggle
# Minimize/unminimize (via special workspace)
bind = $mainMod, M, exec, ~/.config/hypr/scripts/minimize-active.sh minimized
bind = $mainMod SHIFT, M, exec, ~/.config/hypr/scripts/unminimize-last.sh minimized
# Minimized "picker" mode:
# Open the minimized special workspace, focus a window, press Enter to restore it.
bind = $modAlt, Return, exec, ~/.config/hypr/scripts/minimized-mode.sh minimized
submap = minimized
bind = , Return, exec, ~/.config/hypr/scripts/unminimize-last.sh minimized; hyprctl dispatch submap reset
bind = , Escape, exec, ~/.config/hypr/scripts/minimized-cancel.sh minimized
bind = $modAlt, Return, exec, ~/.config/hypr/scripts/minimized-cancel.sh minimized
# Optional: basic focus navigation inside the picker.
bind = , H, movefocus, l
bind = , J, movefocus, d
bind = , K, movefocus, u
bind = , L, movefocus, r
bind = , left, movefocus, l
bind = , down, movefocus, d
bind = , up, movefocus, u
bind = , right, movefocus, r
submap = reset
# -----------------------------------------------------------------------------
# WORKSPACE CONTROL
# -----------------------------------------------------------------------------
# Switch workspaces (1-9 only) on the currently focused monitor.
bind = $mainMod, 1, focusworkspaceoncurrentmonitor, 1
bind = $mainMod, 2, focusworkspaceoncurrentmonitor, 2
bind = $mainMod, 3, focusworkspaceoncurrentmonitor, 3
bind = $mainMod, 4, focusworkspaceoncurrentmonitor, 4
bind = $mainMod, 5, focusworkspaceoncurrentmonitor, 5
bind = $mainMod, 6, focusworkspaceoncurrentmonitor, 6
bind = $mainMod, 7, focusworkspaceoncurrentmonitor, 7
bind = $mainMod, 8, focusworkspaceoncurrentmonitor, 8
bind = $mainMod, 9, focusworkspaceoncurrentmonitor, 9
# Move window to workspace
bind = $mainMod SHIFT, 1, movetoworkspace, 1
bind = $mainMod SHIFT, 2, movetoworkspace, 2
bind = $mainMod SHIFT, 3, movetoworkspace, 3
bind = $mainMod SHIFT, 4, movetoworkspace, 4
bind = $mainMod SHIFT, 5, movetoworkspace, 5
bind = $mainMod SHIFT, 6, movetoworkspace, 6
bind = $mainMod SHIFT, 7, movetoworkspace, 7
bind = $mainMod SHIFT, 8, movetoworkspace, 8
bind = $mainMod SHIFT, 9, movetoworkspace, 9
# Move and follow to workspace (like XMonad's shiftThenView)
bind = $mainMod CTRL, 1, movetoworkspacesilent, 1
bind = $mainMod CTRL, 1, focusworkspaceoncurrentmonitor, 1
bind = $mainMod CTRL, 2, movetoworkspacesilent, 2
bind = $mainMod CTRL, 2, focusworkspaceoncurrentmonitor, 2
bind = $mainMod CTRL, 3, movetoworkspacesilent, 3
bind = $mainMod CTRL, 3, focusworkspaceoncurrentmonitor, 3
bind = $mainMod CTRL, 4, movetoworkspacesilent, 4
bind = $mainMod CTRL, 4, focusworkspaceoncurrentmonitor, 4
bind = $mainMod CTRL, 5, movetoworkspacesilent, 5
bind = $mainMod CTRL, 5, focusworkspaceoncurrentmonitor, 5
bind = $mainMod CTRL, 6, movetoworkspacesilent, 6
bind = $mainMod CTRL, 6, focusworkspaceoncurrentmonitor, 6
bind = $mainMod CTRL, 7, movetoworkspacesilent, 7
bind = $mainMod CTRL, 7, focusworkspaceoncurrentmonitor, 7
bind = $mainMod CTRL, 8, movetoworkspacesilent, 8
bind = $mainMod CTRL, 8, focusworkspaceoncurrentmonitor, 8
bind = $mainMod CTRL, 9, movetoworkspacesilent, 9
bind = $mainMod CTRL, 9, focusworkspaceoncurrentmonitor, 9
# Toggle to the previous workspace on the current monitor using Hyprland's
# built-in per-monitor workspace history.
bind = $mainMod, backslash, workspace, previous_per_monitor
# Swap current workspace with another (like XMonad's swapWithCurrent)
bind = $hyper, 5, exec, ~/.config/hypr/scripts/swap-workspaces.sh
# Go to next empty workspace (like XMonad's moveTo Next emptyWS)
bind = $hyper, E, exec, ~/.config/hypr/scripts/workspace-goto-empty.sh
# Move to next screen (like XMonad's shiftToNextScreenX)
bind = $mainMod, Z, focusmonitor, +1
bind = $mainMod SHIFT, Z, movewindow, mon:+1
# Shift to empty workspace and view (like XMonad's shiftToEmptyAndView)
bind = $mainMod SHIFT, H, exec, ~/.config/hypr/scripts/workspace-move-to-empty.sh
# -----------------------------------------------------------------------------
# WINDOW MANAGEMENT
# -----------------------------------------------------------------------------
# Go to window (rofi window switcher with icons)
bind = $mainMod, G, exec, ~/.config/hypr/scripts/go-to-window.sh
# Bring window (move to current workspace)
bind = $mainMod, B, exec, ~/.config/hypr/scripts/bring-window.sh
# Replace window (swap focused with selected - like XMonad's myReplaceWindow)
bind = $mainMod SHIFT, B, exec, ~/.config/hypr/scripts/replace-window.sh
# Gather windows of same class (like XMonad's gatherThisClass)
bind = $hyper, G, exec, ~/.config/hypr/scripts/gather-class.sh
# Focus next window of different class (like XMonad's focusNextClass)
bind = $mainMod, apostrophe, exec, ~/.config/hypr/scripts/focus-next-class.sh
# -----------------------------------------------------------------------------
# MEDIA KEYS
# -----------------------------------------------------------------------------
# Volume control (matching XMonad: Mod+I=up, Mod+K=down, Mod+U=mute)
binde = , XF86AudioRaiseVolume, exec, set_volume --unmute --change-volume +5
binde = , XF86AudioLowerVolume, exec, set_volume --unmute --change-volume -5
bind = , XF86AudioMute, exec, set_volume --toggle-mute
binde = $mainMod, I, exec, set_volume --unmute --change-volume +5
binde = $mainMod, K, exec, set_volume --unmute --change-volume -5
bind = $mainMod, U, exec, set_volume --toggle-mute
# Media player controls (matching XMonad: Mod+;=play, Mod+L=next, Mod+J=prev)
bind = $mainMod, semicolon, exec, playerctl play-pause
bind = , XF86AudioPlay, exec, playerctl play-pause
bind = , XF86AudioPause, exec, playerctl play-pause
bind = $mainMod, L, exec, playerctl next
bind = , XF86AudioNext, exec, playerctl next
bind = $mainMod, J, exec, playerctl previous
bind = , XF86AudioPrev, exec, playerctl previous
# Mute current window (like XMonad's toggle_mute_current_window)
bind = $hyper SHIFT, Q, exec, toggle_mute_current_window.sh
bind = $hyper CTRL, Q, exec, toggle_mute_current_window.sh only
# Brightness control
binde = , XF86MonBrightnessUp, exec, brightness.sh up
binde = , XF86MonBrightnessDown, exec, brightness.sh down
# -----------------------------------------------------------------------------
# UTILITY BINDINGS
# -----------------------------------------------------------------------------
bind = $hyper, V, exec, cliphist list | rofi -dmenu -p "Clipboard" | cliphist decode | wl-copy
bind = $hyper, P, exec, rofi-pass
bind = $hyper, H, exec, grim -g "$(slurp)" - | swappy -f -
bind = $hyper, C, exec, shell_command.sh
bind = $hyper, X, exec, rofi_command.sh
bind = $hyper SHIFT, L, exec, hyprlock
bind = $hyper, K, exec, rofi_kill_process.sh
bind = $hyper SHIFT, K, exec, rofi_kill_all.sh
bind = $hyper, R, exec, rofi-systemd
bind = $hyper, slash, exec, toggle_taffybar
bind = $hyper, 9, exec, start_synergy.sh
bind = $hyper, I, exec, rofi_select_input.hs
bind = $hyper, backslash, exec, /home/imalison/dotfiles/dotfiles/lib/functions/mpg341cx_input toggle
bind = $hyper, O, exec, rofi_paswitch
bind = $hyper, comma, exec, rofi_wallpaper.sh
bind = $hyper, Y, exec, rofi_agentic_skill
# Reload config
bind = $mainMod, R, exec, hyprctl reload
# -----------------------------------------------------------------------------
# MOUSE BINDINGS
# -----------------------------------------------------------------------------
bindm = $mainMod, mouse:272, movewindow
bindm = $mainMod, mouse:273, resizewindow
# Scroll through workspaces
bind = $mainMod, mouse_down, exec, ~/.config/hypr/scripts/workspace-scroll.sh +1
bind = $mainMod, mouse_up, exec, ~/.config/hypr/scripts/workspace-scroll.sh -1
# =============================================================================
# AUTOSTART
# =============================================================================
# Wire Hyprland into Home Manager's standard user-session targets.
# `graphical-session.target` pulls in most tray/SNI applets (which in turn pull in `tray.target`).
# Keep the systemd user manager in sync with the current Hyprland session before
# starting any session-bound units. Separate `exec-once` commands race.
exec-once = sh -lc 'export IMALISON_SESSION_TYPE=wayland; dbus-update-activation-environment --systemd WAYLAND_DISPLAY DISPLAY XAUTHORITY HYPRLAND_INSTANCE_SIGNATURE XDG_CURRENT_DESKTOP XDG_SESSION_TYPE IMALISON_SESSION_TYPE; systemctl --user start graphical-session.target hyprland-session.target'
# Force a fresh daemon after compositor restarts so hyprscratch doesn't keep a stale socket.
exec-once = systemctl --user restart hyprscratch.service
exec-once = hypridle
# Clipboard history daemon
exec-once = wl-paste --type text --watch cliphist store
exec-once = wl-paste --type image --watch cliphist store

File diff suppressed because it is too large Load Diff

View File

@@ -1,39 +0,0 @@
background {
monitor =
path = screenshot
blur_passes = 3
blur_size = 8
noise = 0.0117
contrast = 0.8916
brightness = 0.8172
vibrancy = 0.1696
}
input-field {
monitor =
size = 280, 56
outline_thickness = 3
dots_size = 0.2
dots_spacing = 0.2
outer_color = rgb(edb443)
inner_color = rgb(1e1e2e)
font_color = rgb(cdd6f4)
fade_on_empty = false
rounding = 12
placeholder_text = <i>Password...</i>
hide_input = false
position = 0, -80
halign = center
valign = center
}
label {
monitor =
text = cmd[update:1000] echo "$(date +'%a %b %-d %I:%M %p')"
color = rgb(cdd6f4)
font_size = 40
font_family = Noto Sans
position = 0, 80
halign = center
valign = center
}

View File

@@ -1,40 +0,0 @@
#!/usr/bin/env bash
# Bring window to current workspace (like XMonad's bringWindow)
# Uses rofi with icons to select a window, then moves it here.
set -euo pipefail
# Resolve the directory this script lives in so the icon helper can be
# sourced regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Provides icon_for_class (presumably maps a window class to an icon
# name/path — confirm against window-icon-map.sh).
source "$SCRIPT_DIR/window-icon-map.sh"
CURRENT_WS=$(hyprctl activeworkspace -j | jq -r '.id')
# Get windows on OTHER workspaces as TSV
# (.workspace.id >= 0 also filters out special workspaces, which have
# negative ids; tabs in titles are flattened so TSV parsing stays safe)
WINDOW_DATA=$(hyprctl clients -j | jq -r --argjson cws "$CURRENT_WS" '
.[] | select(.workspace.id >= 0 and .workspace.id != $cws)
| [.address, .class, (.title | gsub("\t"; " ")), (.workspace.id | tostring)]
| @tsv')
if [ -z "$WINDOW_DATA" ]; then
notify-send "Bring Window" "No windows on other workspaces"
exit 0
fi
# addresses[] is filled in the same order as the rows written to TMPFILE,
# so the row index rofi returns (-format i) maps directly back into it.
addresses=()
TMPFILE=$(mktemp)
trap 'rm -f "$TMPFILE"' EXIT
# Fed via herestring (not a pipe), so the loop runs in the current shell
# and addresses+= mutations persist after the loop.
while IFS=$'\t' read -r address class title ws_id; do
icon=$(icon_for_class "$class")
addresses+=("$address")
# "\0icon\x1f<name>" is rofi's per-row option syntax for attaching an icon.
printf '%-24s %s WS:%s\0icon\x1f%s\n' \
"$class" "$title" "$ws_id" "$icon"
done <<< "$WINDOW_DATA" > "$TMPFILE"
# -format i makes rofi print the selected row index; abort quietly on Esc.
INDEX=$(rofi -dmenu -i -show-icons -p "Bring window" -format i < "$TMPFILE") || exit 0
if [ -n "$INDEX" ] && [ -n "${addresses[$INDEX]:-}" ]; then
ADDRESS="${addresses[$INDEX]}"
# Pull the window to the current workspace, then focus it.
hyprctl dispatch movetoworkspace "$CURRENT_WS,address:$ADDRESS"
hyprctl dispatch focuswindow "address:$ADDRESS"
fi

View File

@@ -1,15 +0,0 @@
#!/usr/bin/env bash
# Toggle the Hyprland tiling layout between "master" and "dwindle",
# mirroring XMonad's NextLayout cycling. Reads the current layout via
# hyprctl, switches to the other one, and reports the change via a
# desktop notification.
set -euo pipefail

active_layout="$(hyprctl getoption general:layout -j | jq -r '.str')"

case "$active_layout" in
master)
# Currently master -> switch to the binary-tree dwindle layout.
hyprctl keyword general:layout dwindle
notify-send "Layout" "Switched to Dwindle (binary tree)"
;;
*)
# Anything else (including dwindle) -> back to master.
hyprctl keyword general:layout master
notify-send "Layout" "Switched to Master (XMonad-like)"
;;
esac

View File

@@ -1,72 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Print an "empty" workspace id within 1..$HYPR_MAX_WORKSPACE (default 9).
#
# Preference order (lowest id wins within each tier):
#   1. Workspace exists on the target monitor and has 0 windows
#   2. Workspace id does not exist at all (will be created on dispatch)
#   3. Workspace exists (elsewhere) and has 0 windows
#
# Usage:
#   find-empty-workspace.sh [monitor] [exclude_id]
max_ws="${HYPR_MAX_WORKSPACE:-9}"
monitor="${1:-}"
exclude_id="${2:-}"
# Default the target monitor to whichever one hosts the active workspace.
if [[ -z "${monitor}" ]]; then
monitor="$(hyprctl activeworkspace -j | jq -r '.monitor' 2>/dev/null || true)"
fi
if [[ -z "${monitor}" || "${monitor}" == "null" ]]; then
exit 1
fi
# Snapshot workspace state once; `hyprctl workspaces` only lists workspaces
# that currently exist, so absent ids fall into tier 2.
workspaces_json="$(hyprctl workspaces -j 2>/dev/null || echo '[]')"
unused_candidate=""
elsewhere_empty_candidate=""
for i in $(seq 1 "${max_ws}"); do
if [[ -n "${exclude_id}" && "${i}" == "${exclude_id}" ]]; then
continue
fi
# Number of existing workspaces with this id (0 or 1).
exists="$(jq -r --argjson id "${i}" '[.[] | select(.id == $id)] | length' <<<"${workspaces_json}")"
if [[ "${exists}" == "0" ]]; then
# Tier 2 candidate: keep the lowest nonexistent id.
if [[ -z "${unused_candidate}" ]]; then
unused_candidate="${i}"
fi
continue
fi
# Window count on this workspace, defaulting to 0 if the field is missing.
windows="$(jq -r --argjson id "${i}" '([.[] | select(.id == $id) | .windows] | .[0]) // 0' <<<"${workspaces_json}")"
if [[ "${windows}" != "0" ]]; then
continue
fi
ws_monitor="$(jq -r --argjson id "${i}" '([.[] | select(.id == $id) | .monitor] | .[0]) // ""' <<<"${workspaces_json}")"
# Tier 1: empty AND on the target monitor — best match, return immediately.
if [[ "${ws_monitor}" == "${monitor}" ]]; then
printf '%s\n' "${i}"
exit 0
fi
# Tier 3 candidate: empty but parked on another monitor.
if [[ -z "${elsewhere_empty_candidate}" ]]; then
elsewhere_empty_candidate="${i}"
fi
done
if [[ -n "${unused_candidate}" ]]; then
printf '%s\n' "${unused_candidate}"
exit 0
fi
if [[ -n "${elsewhere_empty_candidate}" ]]; then
printf '%s\n' "${elsewhere_empty_candidate}"
exit 0
fi
# No candidate in any tier.
exit 1

View File

@@ -1,48 +0,0 @@
#!/usr/bin/env bash
# Focus the first window of the "next" window class, cycling through the
# unique classes present on regular workspaces (like XMonad's focusNextClass).
set -euo pipefail

# Class of the currently focused window; jq prints "null" when none.
FOCUSED_CLASS=$(hyprctl activewindow -j | jq -r '.class')

if [ "$FOCUSED_CLASS" = "null" ] || [ -z "$FOCUSED_CLASS" ]; then
  # No focused window, just focus any window
  hyprctl dispatch cyclenext
  exit 0
fi

# Unique classes of windows on regular (id >= 0) workspaces; jq's `unique`
# sorts them, giving a stable cycling order.
ALL_CLASSES=$(hyprctl clients -j | jq -r '[.[] | select(.workspace.id >= 0) | .class] | unique | .[]')

# Read the sorted classes into an array, one per line.
CLASSES_ARRAY=()
while IFS= read -r class; do
  CLASSES_ARRAY+=("$class")
done <<< "$ALL_CLASSES"

# Locate the focused class within the sorted list.
CURRENT_INDEX=-1
for i in "${!CLASSES_ARRAY[@]}"; do
  if [ "${CLASSES_ARRAY[$i]}" = "$FOCUSED_CLASS" ]; then
    CURRENT_INDEX=$i
    break
  fi
done

if [ $CURRENT_INDEX -eq -1 ] || [ ${#CLASSES_ARRAY[@]} -le 1 ]; then
  # Only one class or class not found
  exit 0
fi

# Get next class (wrapping around)
NEXT_INDEX=$(( (CURRENT_INDEX + 1) % ${#CLASSES_ARRAY[@]} ))
NEXT_CLASS="${CLASSES_ARRAY[$NEXT_INDEX]}"

# First window of the next class. The class is passed with --arg instead of
# being interpolated into the jq program: a class containing quotes or
# backslashes would otherwise break (or inject into) the filter.
NEXT_WINDOW=$(hyprctl clients -j | jq -r --arg cls "$NEXT_CLASS" \
  '.[] | select(.class == $cls and .workspace.id >= 0) | .address' | head -1)

if [ -n "$NEXT_WINDOW" ]; then
  hyprctl dispatch focuswindow "address:$NEXT_WINDOW"
fi

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env bash
# Gather all windows of the same class as the focused window onto the current
# workspace (like XMonad's gatherThisClass).
set -euo pipefail

# Focused window class ("null" when nothing focused) and active workspace id.
FOCUSED_CLASS=$(hyprctl activewindow -j | jq -r '.class')
CURRENT_WS=$(hyprctl activeworkspace -j | jq -r '.id')

if [ "$FOCUSED_CLASS" = "null" ] || [ -z "$FOCUSED_CLASS" ]; then
  notify-send "Gather Class" "No focused window"
  exit 0
fi

# Addresses of same-class windows on other regular (id >= 0) workspaces.
# The class is passed via --arg (and the workspace id via --argjson) rather
# than interpolated into the jq program, so a class containing quotes or
# backslashes cannot break or inject into the filter.
WINDOWS=$(hyprctl clients -j | jq -r --arg cls "$FOCUSED_CLASS" --argjson cws "$CURRENT_WS" \
  '.[] | select(.class == $cls and .workspace.id != $cws and .workspace.id >= 0) | .address')

if [ -z "$WINDOWS" ]; then
  notify-send "Gather Class" "No other windows of class '$FOCUSED_CLASS'"
  exit 0
fi

# Move each window to current workspace
COUNT=0
for ADDR in $WINDOWS; do
  hyprctl dispatch movetoworkspace "$CURRENT_WS,address:$ADDR"
  COUNT=$((COUNT + 1))
done
notify-send "Gather Class" "Gathered $COUNT windows of class '$FOCUSED_CLASS'"

View File

@@ -1,33 +0,0 @@
#!/usr/bin/env bash
# Go to a window selected via rofi (with icons from desktop entries).
# Replaces "rofi -show window" which doesn't work well on Wayland.
set -euo pipefail
# Resolve our own directory so the icon-map helper can be sourced regardless
# of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Provides icon_for_class().
source "$SCRIPT_DIR/window-icon-map.sh"
# Get all windows on regular workspaces as TSV
# Tabs are stripped from titles so each record stays exactly 4 TSV fields;
# negative workspace ids (special workspaces) are excluded.
WINDOW_DATA=$(hyprctl clients -j | jq -r '
.[] | select(.workspace.id >= 0)
| [.address, .class, (.title | gsub("\t"; " ")), (.workspace.id | tostring)]
| @tsv')
[ -n "$WINDOW_DATA" ] || exit 0
addresses=()
TMPFILE=$(mktemp)
trap 'rm -f "$TMPFILE"' EXIT
# One rofi row per window; addresses[] is a parallel array so the selected
# row index maps back to a window. The trailing "\0icon\x1f<name>" is rofi's
# dmenu per-row icon protocol.
while IFS=$'\t' read -r address class title ws_id; do
icon=$(icon_for_class "$class")
addresses+=("$address")
printf '%-24s %s WS:%s\0icon\x1f%s\n' \
"$class" "$title" "$ws_id" "$icon"
done <<< "$WINDOW_DATA" > "$TMPFILE"
# -format i makes rofi print the selected row INDEX; abort quietly on Escape.
INDEX=$(rofi -dmenu -i -show-icons -p "Go to window" -format i < "$TMPFILE") || exit 0
if [ -n "$INDEX" ] && [ -n "${addresses[$INDEX]:-}" ]; then
hyprctl dispatch focuswindow "address:${addresses[$INDEX]}"
fi

View File

@@ -1,49 +0,0 @@
#!/usr/bin/env bash
# Minimize the active window by moving it to a special workspace without
# toggling that special workspace open.
#
# Usage: minimize-active.sh <name>
# Example: minimize-active.sh minimized
set -euo pipefail
NAME="${1:-minimized}"
# Accept either "minimized" or "special:minimized" by stripping the prefix.
NAME="${NAME#special:}"
# Fail soft when the required tools are unavailable (e.g. outside Hyprland).
if ! command -v hyprctl >/dev/null 2>&1; then
exit 0
fi
if ! command -v jq >/dev/null 2>&1; then
# We could parse plain output, but jq should exist in this setup; if it
# doesn't, fail soft.
exit 0
fi
ACTIVE_JSON="$(hyprctl -j activewindow 2>/dev/null || true)"
ADDR="$(printf '%s' "$ACTIVE_JSON" | jq -r '.address // empty')"
# No address means no focused window — nothing to minimize.
if [ -z "$ADDR" ] || [ "$ADDR" = "null" ]; then
exit 0
fi
# If the minimized special workspace is currently visible, closing it after the
# move keeps the window hidden (what "minimize" usually means).
# NOTE(review): activewindow's .monitor is assumed to be the monitor ID used
# by `hyprctl monitors` — confirm against the Hyprland IPC docs.
MONITOR_ID="$(printf '%s' "$ACTIVE_JSON" | jq -r '.monitor // empty')"
# SPECIAL_OPEN is non-empty iff this monitor currently shows "special:$NAME".
SPECIAL_OPEN="$(
hyprctl -j monitors 2>/dev/null \
| jq -r --arg n "special:$NAME" --argjson mid "${MONITOR_ID:-0}" '
.[]
| select(.id == $mid)
| (.specialWorkspace.name // "")
| select(. == $n)
' \
| head -n 1 \
|| true
)"
# "silent" variant: move the window without following focus to it.
hyprctl dispatch movetoworkspacesilent "special:${NAME},address:${ADDR}" >/dev/null 2>&1 || true
if [ -n "$SPECIAL_OPEN" ]; then
hyprctl dispatch togglespecialworkspace "$NAME" >/dev/null 2>&1 || true
fi
exit 0

View File

@@ -1,39 +0,0 @@
#!/usr/bin/env bash
# Exit minimized picker mode:
#   - Hide the minimized special workspace on the active monitor (if visible)
#   - Reset the submap
#
# Usage: minimized-cancel.sh <name>
set -euo pipefail
NAME="${1:-minimized}"
# Accept either "minimized" or "special:minimized".
NAME="${NAME#special:}"
SPECIAL_WS="special:${NAME}"
# Fail soft when the required tools are unavailable.
if ! command -v hyprctl >/dev/null 2>&1; then
exit 0
fi
if ! command -v jq >/dev/null 2>&1; then
exit 0
fi
MONITOR_ID="$(hyprctl -j activeworkspace 2>/dev/null | jq -r '.monitorID // empty' || true)"
if [ -z "$MONITOR_ID" ] || [ "$MONITOR_ID" = "null" ]; then
MONITOR_ID=0
fi
# Name of the special workspace currently shown on this monitor ("" if none).
OPEN="$(
hyprctl -j monitors 2>/dev/null \
| jq -r --argjson mid "$MONITOR_ID" '.[] | select(.id == $mid) | (.specialWorkspace.name // "")' \
| head -n 1 \
|| true
)"
# Only toggle when OUR special workspace is the one showing, so we never
# accidentally open it or close an unrelated special workspace.
if [ "$OPEN" = "$SPECIAL_WS" ]; then
hyprctl dispatch togglespecialworkspace "$NAME" >/dev/null 2>&1 || true
fi
# Leave the "minimized" submap regardless.
hyprctl dispatch submap reset >/dev/null 2>&1 || true
exit 0

View File

@@ -1,40 +0,0 @@
#!/usr/bin/env bash
# Enter a "picker" mode for minimized windows:
#   - Ensure the minimized special workspace is visible on the active monitor
#   - Switch Hyprland into a submap so Enter restores and Escape cancels
#
# Usage: minimized-mode.sh <name>
set -euo pipefail
NAME="${1:-minimized}"
# Accept either "minimized" or "special:minimized".
NAME="${NAME#special:}"
SPECIAL_WS="special:${NAME}"
# Fail soft when the required tools are unavailable.
if ! command -v hyprctl >/dev/null 2>&1; then
exit 0
fi
if ! command -v jq >/dev/null 2>&1; then
exit 0
fi
MONITOR_ID="$(hyprctl -j activeworkspace 2>/dev/null | jq -r '.monitorID // empty' || true)"
if [ -z "$MONITOR_ID" ] || [ "$MONITOR_ID" = "null" ]; then
MONITOR_ID=0
fi
# Name of the special workspace currently shown on this monitor ("" if none).
OPEN="$(
hyprctl -j monitors 2>/dev/null \
| jq -r --argjson mid "$MONITOR_ID" '.[] | select(.id == $mid) | (.specialWorkspace.name // "")' \
| head -n 1 \
|| true
)"
# Ensure it's visible (but don't toggle it off if already open).
if [ "$OPEN" != "$SPECIAL_WS" ]; then
hyprctl dispatch togglespecialworkspace "$NAME" >/dev/null 2>&1 || true
fi
# The "minimized" submap (defined in the Hyprland config) binds the
# restore/cancel keys while active.
hyprctl dispatch submap minimized >/dev/null 2>&1 || true
exit 0

View File

@@ -1,83 +0,0 @@
#!/usr/bin/env bash
# Move the active window in a direction and warp the cursor to keep its
# relative position inside the moved window.
set -euo pipefail
# NOTE(review): presumably ensures hyprctl/jq resolve when invoked from a
# minimal environment on NixOS — confirm this PATH is still needed.
export PATH="/run/current-system/sw/bin:${PATH}"
if [[ $# -lt 1 ]]; then
echo "usage: $0 <dir> [mode]" >&2
exit 1
fi
dir="$1"
mode="${2:-}"
if ! command -v hyprctl >/dev/null; then
exit 0
fi
# Dispatch the actual move via the hy3 plugin; best-effort (errors ignored).
move_window() {
if [[ -n "$mode" ]]; then
hyprctl dispatch hy3:movewindow "$dir, $mode" >/dev/null 2>&1 || true
else
hyprctl dispatch hy3:movewindow "$dir" >/dev/null 2>&1 || true
fi
}
win_json="$(hyprctl -j activewindow 2>/dev/null || true)"
cur_json="$(hyprctl -j cursorpos 2>/dev/null || true)"
# Without window/cursor state we can't warp — just move and bail.
if [[ -z "$win_json" || "$win_json" == "null" || -z "$cur_json" || "$cur_json" == "null" ]]; then
move_window
exit 0
fi
win_x="$(jq -er '.at[0]' <<<"$win_json" 2>/dev/null || true)"
win_y="$(jq -er '.at[1]' <<<"$win_json" 2>/dev/null || true)"
win_w="$(jq -er '.size[0]' <<<"$win_json" 2>/dev/null || true)"
win_h="$(jq -er '.size[1]' <<<"$win_json" 2>/dev/null || true)"
cur_x="$(jq -er '.x' <<<"$cur_json" 2>/dev/null || true)"
cur_y="$(jq -er '.y' <<<"$cur_json" 2>/dev/null || true)"
# Guard against missing/non-integer jq output before doing arithmetic.
if [[ ! "$win_x" =~ ^-?[0-9]+$ || ! "$win_y" =~ ^-?[0-9]+$ || ! "$win_w" =~ ^-?[0-9]+$ || ! "$win_h" =~ ^-?[0-9]+$ || ! "$cur_x" =~ ^-?[0-9]+$ || ! "$cur_y" =~ ^-?[0-9]+$ ]]; then
move_window
exit 0
fi
# Cursor position relative to the window's top-left corner, captured BEFORE
# the move so we can restore the same in-window offset afterwards.
rel_x=$((cur_x - win_x))
rel_y=$((cur_y - win_y))
move_window
# Re-query: the window's geometry may have changed (moved and/or resized).
win_json="$(hyprctl -j activewindow 2>/dev/null || true)"
if [[ -z "$win_json" || "$win_json" == "null" ]]; then
exit 0
fi
win_x="$(jq -er '.at[0]' <<<"$win_json" 2>/dev/null || true)"
win_y="$(jq -er '.at[1]' <<<"$win_json" 2>/dev/null || true)"
win_w="$(jq -er '.size[0]' <<<"$win_json" 2>/dev/null || true)"
win_h="$(jq -er '.size[1]' <<<"$win_json" 2>/dev/null || true)"
if [[ ! "$win_x" =~ ^-?[0-9]+$ || ! "$win_y" =~ ^-?[0-9]+$ || ! "$win_w" =~ ^-?[0-9]+$ || ! "$win_h" =~ ^-?[0-9]+$ ]]; then
exit 0
fi
# Clamp the saved offset to the window's NEW size so the warp target always
# lands inside (or on the edge of) the moved window.
if ((rel_x < 0)); then
rel_x=0
elif ((rel_x > win_w)); then
rel_x=$win_w
fi
if ((rel_y < 0)); then
rel_y=0
elif ((rel_y > win_h)); then
rel_y=$win_h
fi
new_x=$((win_x + rel_x))
new_y=$((win_y + rel_y))
hyprctl dispatch movecursor "$new_x" "$new_y" >/dev/null 2>&1 || true

View File

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
# Raise an existing window matching a class pattern, or run a command to
# spawn one (like XMonad's raiseNextMaybe).
# Usage: raise-or-run.sh <class-pattern> <command>
set -euo pipefail

CLASS_PATTERN="$1"
COMMAND="$2"

# Address of the first window whose class matches the pattern
# (case-insensitive regex). The pattern is passed via --arg rather than
# interpolated into the jq program, so quotes/backslashes in $1 cannot
# break or inject into the filter.
MATCHING=$(hyprctl clients -j | jq -r --arg re "$CLASS_PATTERN" \
  '.[] | select(.class | test($re; "i")) | .address' | head -1)

if [ -n "$MATCHING" ]; then
  # Window exists, focus it
  hyprctl dispatch focuswindow "address:$MATCHING"
else
  # No matching window, run the command. Intentionally unquoted so a command
  # string with arguments ("foo --bar") word-splits into argv.
  exec $COMMAND
fi

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
# Replace focused window with selected window (like XMonad's myReplaceWindow)
# Swaps the positions of focused window and selected window
set -euo pipefail
# Resolve our own directory so the icon-map helper can be sourced regardless
# of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Provides icon_for_class().
source "$SCRIPT_DIR/window-icon-map.sh"
FOCUSED=$(hyprctl activewindow -j | jq -r '.address')
if [ "$FOCUSED" = "null" ] || [ -z "$FOCUSED" ]; then
notify-send "Replace Window" "No focused window"
exit 0
fi
# Get all windows except focused as TSV
# Tabs are stripped from titles so each record stays exactly 4 TSV fields.
WINDOW_DATA=$(hyprctl clients -j | jq -r --arg focused "$FOCUSED" '
.[] | select(.workspace.id >= 0 and .address != $focused)
| [.address, .class, (.title | gsub("\t"; " ")), (.workspace.id | tostring)]
| @tsv')
if [ -z "$WINDOW_DATA" ]; then
notify-send "Replace Window" "No other windows available"
exit 0
fi
addresses=()
TMPFILE=$(mktemp)
trap 'rm -f "$TMPFILE"' EXIT
# One rofi row per window; addresses[] is a parallel array so the selected
# row index maps back to a window address.
while IFS=$'\t' read -r address class title ws_id; do
icon=$(icon_for_class "$class")
addresses+=("$address")
printf '%-24s %s WS:%s\0icon\x1f%s\n' \
"$class" "$title" "$ws_id" "$icon"
done <<< "$WINDOW_DATA" > "$TMPFILE"
# -format i makes rofi print the selected row INDEX; abort quietly on Escape.
INDEX=$(rofi -dmenu -i -show-icons -p "Replace with" -format i < "$TMPFILE") || exit 0
if [ -n "$INDEX" ] && [ -n "${addresses[$INDEX]:-}" ]; then
# NOTE(review): hy3:movewindow is given an address argument here; verify
# against the hy3 plugin docs that this performs the intended swap (the
# upstream dispatcher is documented as taking a direction).
hyprctl dispatch hy3:movewindow "address:${addresses[$INDEX]}"
fi

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
# Shift window to empty workspace on screen in given direction
# Like XMonad's shiftToEmptyOnScreen
# Usage: shift-to-empty-on-screen.sh <direction: u|d|l|r>
set -euo pipefail
DIRECTION="$1"
max_ws="${HYPR_MAX_WORKSPACE:-9}"
# Track the current monitor so we can return
ORIG_MONITOR=$(hyprctl activeworkspace -j | jq -r '.monitor')
# Move focus to the screen in that direction
hyprctl dispatch focusmonitor "$DIRECTION"
# Get the monitor we're now on (target monitor)
MONITOR=$(hyprctl activeworkspace -j | jq -r '.monitor')
# If there is no monitor in that direction, bail
# (focusmonitor was then a no-op, so nothing needs undoing).
if [ "$MONITOR" = "$ORIG_MONITOR" ]; then
exit 0
fi
# Find an empty workspace within 1..$HYPR_MAX_WORKSPACE.
EMPTY_WS="$(~/.config/hypr/scripts/find-empty-workspace.sh "${MONITOR}" 2>/dev/null || true)"
if [[ -z "${EMPTY_WS}" ]]; then
# No empty workspace available within the cap; restore focus and bail.
hyprctl dispatch focusmonitor "$ORIG_MONITOR"
exit 0
fi
# Defensive range check on the helper's output before dispatching.
if (( EMPTY_WS < 1 || EMPTY_WS > max_ws )); then
hyprctl dispatch focusmonitor "$ORIG_MONITOR"
exit 0
fi
# Ensure the workspace exists on the target monitor
# (switching to it while that monitor is focused creates/claims it there).
hyprctl dispatch workspace "$EMPTY_WS"
# Go back to original monitor and move the window (without following)
hyprctl dispatch focusmonitor "$ORIG_MONITOR"
hyprctl dispatch movetoworkspacesilent "$EMPTY_WS"

View File

@@ -1,52 +0,0 @@
#!/usr/bin/env bash
# Swap the contents of the current workspace with another workspace.
# Intended to mirror XMonad's swapWithCurrent behavior.
# Usage: swap-workspace.sh [target_ws]  (prompts via rofi when omitted)
set -euo pipefail
max_ws="${HYPR_MAX_WORKSPACE:-9}"
CURRENT_WS="$(hyprctl activeworkspace -j | jq -r '.id')"
if [[ -z "${CURRENT_WS}" || "${CURRENT_WS}" == "null" ]]; then
exit 0
fi
TARGET_WS="${1:-}"
# No explicit target: offer 1..max_ws plus any other existing workspace ids,
# de-duplicated and numerically sorted, in a rofi picker.
if [[ -z "${TARGET_WS}" ]]; then
WS_LIST="$({
seq 1 "${max_ws}"
hyprctl workspaces -j | jq -r '.[].id' 2>/dev/null || true
} | awk 'NF {print $1}' | awk '!seen[$0]++' | sort -n)"
TARGET_WS="$(printf "%s\n" "${WS_LIST}" | rofi -dmenu -p "Swap with workspace")"
fi
if [[ -z "${TARGET_WS}" || "${TARGET_WS}" == "null" ]]; then
exit 0
fi
# Swapping a workspace with itself is a no-op.
if [[ "${TARGET_WS}" == "${CURRENT_WS}" ]]; then
exit 0
fi
# Validate free-form rofi input before using it in a dispatch.
if ! [[ "${TARGET_WS}" =~ ^-?[0-9]+$ ]]; then
notify-send "Swap Workspace" "Invalid workspace: ${TARGET_WS}"
exit 1
fi
if (( TARGET_WS < 1 || TARGET_WS > max_ws )); then
notify-send "Swap Workspace" "Workspace out of range (1-${max_ws}): ${TARGET_WS}"
exit 1
fi
# Snapshot BOTH window lists before moving anything, so windows moved to the
# target workspace in the first loop aren't moved back by the second.
WINDOWS_CURRENT="$(hyprctl clients -j | jq -r --arg ws "${CURRENT_WS}" '.[] | select((.workspace.id|tostring) == $ws) | .address')"
WINDOWS_TARGET="$(hyprctl clients -j | jq -r --arg ws "${TARGET_WS}" '.[] | select((.workspace.id|tostring) == $ws) | .address')"
for ADDR in ${WINDOWS_CURRENT}; do
hyprctl dispatch movetoworkspace "${TARGET_WS},address:${ADDR}"
done
for ADDR in ${WINDOWS_TARGET}; do
hyprctl dispatch movetoworkspace "${CURRENT_WS},address:${ADDR}"
done

View File

@@ -1,51 +0,0 @@
#!/usr/bin/env bash
# Toggle a named Hyprland scratchpad, spawning it if needed.
# Usage: toggle-scratchpad.sh <name> <class_regex|-> <title_regex|-> <command...>
# A literal "-" for either regex means "no filter on that field"; at least
# one of the two must be a real pattern.
set -euo pipefail
if [ "$#" -lt 4 ]; then
echo "usage: $0 <name> <class_regex|-> <title_regex|-> <command...>" >&2
exit 1
fi
NAME="$1"
shift
CLASS_REGEX="$1"
shift
TITLE_REGEX="$1"
shift
# Remaining args are the spawn command and its arguments.
COMMAND=("$@")
if [ "$CLASS_REGEX" = "-" ]; then
CLASS_REGEX=""
fi
if [ "$TITLE_REGEX" = "-" ]; then
TITLE_REGEX=""
fi
if [ -z "$CLASS_REGEX" ] && [ -z "$TITLE_REGEX" ]; then
echo "toggle-scratchpad: provide a class or title regex" >&2
exit 1
fi
# Addresses of all clients matching the class/title regexes
# (case-insensitive; an empty regex matches everything for that field).
MATCHING=$(hyprctl clients -j | jq -r --arg cre "$CLASS_REGEX" --arg tre "$TITLE_REGEX" '
.[]
| select(
(($cre == "") or (.class | test($cre; "i")))
and
(($tre == "") or (.title | test($tre; "i")))
)
| .address
')
if [ -z "$MATCHING" ]; then
# Nothing exists yet: spawn it in the background.
# NOTE(review): assumes a window rule (or the toggle below) places the new
# window on the special workspace — confirm in the Hyprland config.
"${COMMAND[@]}" &
else
# Park every matching window on the named special workspace without
# shifting focus, then toggle that workspace's visibility.
while IFS= read -r ADDR; do
[ -n "$ADDR" ] || continue
hyprctl dispatch movetoworkspacesilent "special:$NAME,address:$ADDR"
done <<< "$MATCHING"
fi
hyprctl dispatch togglespecialworkspace "$NAME"

View File

@@ -1,86 +0,0 @@
#!/usr/bin/env bash
# Restore a minimized window by moving it out of a special workspace.
#
# Usage: unminimize-last.sh <name>
# Example: unminimize-last.sh minimized
set -euo pipefail
NAME="${1:-minimized}"
# Accept either "minimized" or "special:minimized".
NAME="${NAME#special:}"
SPECIAL_WS="special:${NAME}"
# Fail soft when the required tools are unavailable.
if ! command -v hyprctl >/dev/null 2>&1; then
exit 0
fi
if ! command -v jq >/dev/null 2>&1; then
exit 0
fi
ACTIVE_JSON="$(hyprctl -j activewindow 2>/dev/null || true)"
ACTIVE_ADDR="$(printf '%s' "$ACTIVE_JSON" | jq -r '.address // empty')"
ACTIVE_WS="$(printf '%s' "$ACTIVE_JSON" | jq -r '.workspace.name // empty')"
# NOTE(review): activewindow's .monitor is assumed to be the monitor ID used
# by `hyprctl monitors` — confirm against the Hyprland IPC docs.
MONITOR_ID="$(printf '%s' "$ACTIVE_JSON" | jq -r '.monitor // empty')"
# Destination is the normal active workspace for the active monitor.
DEST_WS="$(
hyprctl -j monitors 2>/dev/null \
| jq -r --argjson mid "${MONITOR_ID:-0}" '.[] | select(.id == $mid) | .activeWorkspace.name' \
| head -n 1 \
|| true
)"
# Fallback: no per-monitor answer — use the globally active workspace.
if [ -z "$DEST_WS" ] || [ "$DEST_WS" = "null" ]; then
DEST_WS="$(hyprctl -j activeworkspace 2>/dev/null | jq -r '.name // empty' || true)"
fi
if [ -z "$DEST_WS" ] || [ "$DEST_WS" = "null" ]; then
exit 0
fi
# If we're focused on a minimized window already, restore that one.
ADDR=""
if [ "$ACTIVE_WS" = "$SPECIAL_WS" ] && [ -n "$ACTIVE_ADDR" ] && [ "$ACTIVE_ADDR" != "null" ]; then
ADDR="$ACTIVE_ADDR"
else
# Otherwise, restore the "most recent" minimized window we can find.
# focusHistoryID tends to have 0 as most recent; pick the smallest value.
# (Clients missing the field sort last via the 999999999 default.)
ADDR="$(
hyprctl -j clients 2>/dev/null \
| jq -r --arg sw "$SPECIAL_WS" '
[ .[]
| select(.workspace.name == $sw)
| { addr: .address, fh: (.focusHistoryID // 999999999) }
]
| sort_by(.fh)
| (.[0].addr // empty)
' \
| head -n 1 \
|| true
)"
fi
# Nothing minimized — nothing to restore.
if [ -z "$ADDR" ] || [ "$ADDR" = "null" ]; then
exit 0
fi
hyprctl dispatch movetoworkspacesilent "${DEST_WS},address:${ADDR}" >/dev/null 2>&1 || true
hyprctl dispatch focuswindow "address:${ADDR}" >/dev/null 2>&1 || true
# If the minimized special workspace is currently visible, close it so we don't
# leave things in a special state after a restore.
SPECIAL_OPEN="$(
hyprctl -j monitors 2>/dev/null \
| jq -r --arg n "$SPECIAL_WS" --argjson mid "${MONITOR_ID:-0}" '
.[]
| select(.id == $mid)
| (.specialWorkspace.name // "")
| select(. == $n)
' \
| head -n 1 \
|| true
)"
if [ -n "$SPECIAL_OPEN" ]; then
hyprctl dispatch togglespecialworkspace "$NAME" >/dev/null 2>&1 || true
fi
exit 0

View File

@@ -1,66 +0,0 @@
#!/usr/bin/env bash
# Source this file to get icon_for_class function.
# Builds a mapping from window class → freedesktop icon name
# by scanning .desktop files for StartupWMClass and Icon fields.
#
# Usage:
#   source "$(dirname "$0")/window-icon-map.sh"
#   icon=$(icon_for_class "google-chrome")
# Keys are lower-cased window classes; values are icon names.
declare -A _WINDOW_ICON_MAP
# Populate _WINDOW_ICON_MAP from all .desktop files on XDG_DATA_DIRS plus the
# user's local applications directory.
_build_window_icon_map() {
local IFS=':'
local -a search_dirs=()
local dir
for dir in ${XDG_DATA_DIRS:-/run/current-system/sw/share:/usr/share:/usr/local/share}; do
[ -d "$dir/applications" ] && search_dirs+=("$dir/applications")
done
[ -d "$HOME/.local/share/applications" ] && search_dirs+=("$HOME/.local/share/applications")
[ ${#search_dirs[@]} -eq 0 ] && return
# Expand globs per-directory so the pattern works correctly
local -a desktop_files=()
for dir in "${search_dirs[@]}"; do
desktop_files+=("$dir"/*.desktop)
done
[ ${#desktop_files[@]} -eq 0 ] && return
# Single grep pass across all desktop files
# (grep -H prefixes each match with "filename:", which the read splits on).
local -A file_icons file_wmclass
local filepath line
while IFS=: read -r filepath line; do
case "$line" in
Icon=*)
# Keep only the FIRST Icon=/StartupWMClass= seen per file.
[ -z "${file_icons[$filepath]:-}" ] && file_icons["$filepath"]="${line#Icon=}"
;;
StartupWMClass=*)
[ -z "${file_wmclass[$filepath]:-}" ] && file_wmclass["$filepath"]="${line#StartupWMClass=}"
;;
esac
done < <(grep -H '^Icon=\|^StartupWMClass=' "${desktop_files[@]}" 2>/dev/null)
# Build class → icon map
# Each file maps both its StartupWMClass (if any) and its basename (sans
# .desktop) to the icon, since window classes commonly match either.
local icon wm_class bn name
for filepath in "${!file_icons[@]}"; do
icon="${file_icons[$filepath]}"
[ -n "$icon" ] || continue
wm_class="${file_wmclass[$filepath]:-}"
if [ -n "$wm_class" ]; then
_WINDOW_ICON_MAP["${wm_class,,}"]="$icon"
fi
bn="${filepath##*/}"
name="${bn%.desktop}"
_WINDOW_ICON_MAP["${name,,}"]="$icon"
done
}
_build_window_icon_map
# icon_for_class CLASS — print the icon name for CLASS (case-insensitive);
# falls back to the lower-cased class itself when no mapping exists.
icon_for_class() {
local class_lower="${1,,}"
echo "${_WINDOW_ICON_MAP[$class_lower]:-$class_lower}"
}

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
# Jump to an empty workspace on the current monitor; silently do nothing
# when the helper finds no candidate.
set -euo pipefail

# Active workspace id and its monitor (best-effort; empty on failure).
active_ws="$(hyprctl activeworkspace -j | jq -r '.id' 2>/dev/null || true)"
active_mon="$(hyprctl activeworkspace -j | jq -r '.monitor' 2>/dev/null || true)"

# Ask the shared helper for an empty workspace, excluding the current one.
target="$(~/.config/hypr/scripts/find-empty-workspace.sh "${active_mon}" "${active_ws}" 2>/dev/null || true)"

[[ -n "${target}" ]] || exit 0

hyprctl dispatch workspace "${target}" >/dev/null 2>&1 || true

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
# Move the active window to an empty workspace on the current monitor;
# silently do nothing when the helper finds no candidate.
set -euo pipefail

# Active workspace id and its monitor (best-effort; empty on failure).
active_ws="$(hyprctl activeworkspace -j | jq -r '.id' 2>/dev/null || true)"
active_mon="$(hyprctl activeworkspace -j | jq -r '.monitor' 2>/dev/null || true)"

# Ask the shared helper for an empty workspace, excluding the current one.
target="$(~/.config/hypr/scripts/find-empty-workspace.sh "${active_mon}" "${active_ws}" 2>/dev/null || true)"

[[ -n "${target}" ]] || exit 0

hyprctl dispatch movetoworkspace "${target}" >/dev/null 2>&1 || true

View File

@@ -1,42 +0,0 @@
#!/usr/bin/env bash
# Cycle the active workspace forward or backward within 1..$HYPR_MAX_WORKSPACE
# (default 9), wrapping around at both ends.
# Usage: <script> +1|-1|next|prev
set -euo pipefail

max_ws="${HYPR_MAX_WORKSPACE:-9}"

direction="${1:-}"
case "${direction}" in
  +1|-1) ;;
  next) direction="+1" ;;
  prev) direction="-1" ;;
  *)
    # Unrecognized argument.
    exit 2
    ;;
esac

cur="$(hyprctl activeworkspace -j | jq -r '.id' 2>/dev/null || true)"
# Bail quietly on anything non-numeric (e.g. query failure).
if ! [[ "${cur}" =~ ^[0-9]+$ ]]; then
  exit 0
fi

# Clamp into the cycling range before stepping.
if (( cur < 1 )); then
  cur=1
elif (( cur > max_ws )); then
  cur="${max_ws}"
fi

if [[ "${direction}" == "+1" ]]; then
  # Step forward, wrapping max -> 1.
  target=$(( cur >= max_ws ? 1 : cur + 1 ))
else
  # Step backward, wrapping 1 -> max.
  target=$(( cur <= 1 ? max_ws : cur - 1 ))
fi

hyprctl dispatch workspace "${target}" >/dev/null 2>&1 || true

View File

@@ -0,0 +1,106 @@
{
"global": {
"check_for_updates_on_startup": true,
"show_in_menu_bar": true,
"show_profile_name_in_menu_bar": false
},
"profiles": [
{
"complex_modifications": {
"parameters": {
"basic.to_if_alone_timeout_milliseconds": 1000
},
"rules": [
{
"manipulators": [
{
"description": "Change right command to command+control+option+shift.",
"from": {
"key_code": "right_command",
"modifiers": {
"optional": [
"any"
]
}
},
"to": [
{
"key_code": "left_shift",
"modifiers": [
"left_command",
"left_control",
"left_option"
]
}
],
"to_if_alone": [
{
"key_code": "escape",
"modifiers": {
"optional": [
"any"
]
}
}
],
"type": "basic"
}
]
}
]
},
"devices": [
{
"disable_built_in_keyboard_if_exists": false,
"fn_function_keys": {},
"identifiers": {
"is_keyboard": true,
"is_pointing_device": false,
"product_id": 610,
"vendor_id": 1452
},
"ignore": false,
"simple_modifications": {}
},
{
"disable_built_in_keyboard_if_exists": false,
"fn_function_keys": {},
"identifiers": {
"is_keyboard": true,
"is_pointing_device": false,
"product_id": 597,
"vendor_id": 1452
},
"ignore": false,
"simple_modifications": {}
}
],
"fn_function_keys": {
"f1": "vk_consumer_brightness_down",
"f10": "mute",
"f11": "volume_down",
"f12": "volume_up",
"f2": "vk_consumer_brightness_up",
"f3": "vk_mission_control",
"f4": "vk_launchpad",
"f5": "vk_consumer_illumination_down",
"f6": "vk_consumer_illumination_up",
"f7": "vk_consumer_previous",
"f8": "vk_consumer_play",
"f9": "vk_consumer_next"
},
"name": "Default profile",
"one_to_many_mappings": {},
"selected": true,
"simple_modifications": {
"caps_lock": "left_control"
},
"standalone_keys": {},
"virtual_hid_keyboard": {
"caps_lock_delay_milliseconds": 0,
"keyboard_type": "ansi",
"standalone_keys_delay_milliseconds": 200
}
}
]
}

View File

@@ -0,0 +1,21 @@
[Added Associations]
video/x-matroska=vlc.desktop;
audio/flac=vlc.desktop;
image/jpeg=feh.desktop;
video/x-msvideo=vlc.desktop;
text/vnd.trolltech.linguist=vlc.desktop;
audio/mpeg=vlc.desktop;
application/pdf=okularApplication_pdf.desktop;
image/png=okularApplication_kimgio.desktop;
video/mp4=vlc.desktop;org.gnome.Totem.desktop;
x-scheme-handler/magnet=userapp-transmission-gtk-24GQLZ.desktop;
x-scheme-handler/element=element-desktop.desktop;
[Default Applications]
text/html=google-chrome.desktop
x-scheme-handler/http=google-chrome.desktop
x-scheme-handler/https=google-chrome.desktop
x-scheme-handler/about=google-chrome.desktop
x-scheme-handler/unknown=google-chrome.desktop
x-scheme-handler/magnet=userapp-transmission-gtk-24GQLZ.desktop
x-scheme-handler/element=element-desktop.desktop

View File

@@ -0,0 +1,128 @@
[remmina_pref]
secret=SEkwV+ilNl+x9eTDKU6tLKFTKdJv2OK2ROlV3Z4K0uY=
uid=Linux+4.7.4-1-ARCH+x86_64+en_US+52817413
bdate=736234
save_view_mode=true
save_when_connect=true
survey=false
invisible_toolbar=false
floating_toolbar_placement=0
toolbar_placement=3
always_show_tab=true
hide_connection_toolbar=false
default_action=0
scale_quality=3
ssh_loglevel=1
screenshot_path=/home/imalison/Pictures
ssh_parseconfig=true
hide_toolbar=false
hide_statusbar=false
small_toolbutton=false
view_file_mode=0
resolutions=640x480,800x600,1024x768,1152x864,1280x960,1400x1050
keystrokes=Send hello world§hello world\\n
main_width=668
main_height=1321
main_maximize=false
main_sort_column_id=1
main_sort_order=0
expanded_group=
toolbar_pin_down=false
sshtunnel_port=4732
applet_new_ontop=false
applet_hide_count=false
applet_enable_avahi=false
disable_tray_icon=false
dark_tray_icon=false
recent_maximum=10
default_mode=0
tab_mode=0
show_buttons_icons=0
show_menu_icons=0
auto_scroll_step=10
hostkey=65508
shortcutkey_fullscreen=102
shortcutkey_autofit=49
shortcutkey_nexttab=65363
shortcutkey_prevtab=65361
shortcutkey_scale=115
shortcutkey_grab=65508
shortcutkey_screenshot=65481
shortcutkey_minimize=65478
shortcutkey_disconnect=65473
shortcutkey_toolbar=116
vte_font=
vte_allow_bold_text=true
vte_lines=512
vte_system_colors=false
vte_foreground_color=rgb(192,192,192)
vte_background_color=rgb(0,0,0)
rdp_use_client_keymap=0
rdp_quality_0=6F
rdp_quality_1=7
rdp_quality_2=1
rdp_quality_9=80
datadir_path=
remmina_file_name=%G_%P_%N_%h
screenshot_name=remmina_%p_%h_%Y%m%d-%H%M%S
deny_screenshot_clipboard=true
confirm_close=true
use_primary_password=false
unlock_timeout=300
unlock_password=
lock_connect=false
lock_edit=false
lock_view_passwords=false
enc_mode=1
audit=false
trust_all=false
prevent_snap_welcome_message=false
last_quickconnect_protocol=
fullscreen_on_auto=true
always_show_notes=false
hide_searchbar=false
ssh_tcp_keepidle=20
ssh_tcp_keepintvl=10
ssh_tcp_keepcnt=3
ssh_tcp_usrtimeout=60000
dark_theme=false
fullscreen_toolbar_visibility=0
shortcutkey_multimon=65365
shortcutkey_viewonly=109
vte_shortcutkey_copy=99
vte_shortcutkey_paste=118
vte_shortcutkey_select_all=97
vte_shortcutkey_increase_font=65365
vte_shortcutkey_decrease_font=65366
vte_shortcutkey_search_text=103
grab_color=#00ff00
grab_color_switch=false
[ssh_colors]
background=#d5ccba
cursor=#45373c
cursor_foreground=#d5ccba
highlight=#45373c
highlight_foreground=#d5ccba
colorBD=#45373c
foreground=#45373c
color0=#20111b
color1=#be100e
color2=#858162
color3=#eaa549
color4=#426a79
color5=#97522c
color6=#989a9c
color7=#968c83
color8=#5e5252
color9=#be100e
color10=#858162
color11=#eaa549
color12=#426a79
color13=#97522c
color14=#989a9c
color15=#d5ccba
[remmina]
name=
ignore-tls-errors=1

View File

@@ -1,53 +0,0 @@
[remmina_pref]
secret=
uid=
bdate=
save_view_mode=true
save_when_connect=true
survey=false
invisible_toolbar=false
floating_toolbar_placement=0
toolbar_placement=3
always_show_tab=true
hide_connection_toolbar=false
default_action=0
scale_quality=3
ssh_loglevel=1
screenshot_path=
ssh_parseconfig=true
hide_toolbar=false
hide_statusbar=false
small_toolbutton=false
view_file_mode=0
resolutions=640x480,800x600,1024x768,1152x864,1280x960,1400x1050
main_width=0
main_height=0
main_maximize=false
main_sort_column_id=1
main_sort_order=0
expanded_group=
toolbar_pin_down=false
sshtunnel_port=4732
applet_new_ontop=false
applet_hide_count=false
applet_enable_avahi=false
disable_tray_icon=false
dark_tray_icon=false
recent_maximum=10
default_mode=0
tab_mode=0
show_buttons_icons=0
show_menu_icons=0
auto_scroll_step=10
confirm_close=true
use_primary_password=false
unlock_timeout=300
unlock_password=
lock_connect=false
lock_edit=false
lock_view_passwords=false
enc_mode=1
audit=false
trust_all=false
prevent_snap_welcome_message=false
last_quickconnect_protocol=

View File

@@ -8,10 +8,10 @@
*/ */
configuration { configuration {
font: "Fira Code 10"; font: "Noto Sans 10";
show-icons: true; show-icons: true;
display-drun: ""; display-drun: "";
drun-display-format: "{name} {generic}"; drun-display-format: "{name}";
disable-history: false; disable-history: false;
sidebar-mode: false; sidebar-mode: false;
} }
@@ -25,8 +25,7 @@ window {
border: 0px; border: 0px;
border-color: @ac; border-color: @ac;
border-radius: 12px; border-radius: 12px;
width: 88%; width: 30%;
height: 78%;
location: center; location: center;
x-offset: 0; x-offset: 0;
y-offset: 0; y-offset: 0;
@@ -37,6 +36,7 @@ prompt {
padding: 0.30% 1% 0% -0.5%; padding: 0.30% 1% 0% -0.5%;
background-color: @al; background-color: @al;
text-color: @fg; text-color: @fg;
font: "Fira Code 12";
} }
entry { entry {
@@ -65,8 +65,8 @@ inputbar {
listview { listview {
background-color: @al; background-color: @al;
padding: 10px; padding: 10px;
columns: 1; columns: 2;
lines: 18; lines: 7;
spacing: 1%; spacing: 1%;
cycle: false; cycle: false;
dynamic: true; dynamic: true;

View File

@@ -5,8 +5,7 @@ configuration {
terminal: "alacritty"; terminal: "alacritty";
sidebar-mode: false; sidebar-mode: false;
fullscreen: true; fullscreen: true;
/* Let rofi auto-detect DPI under Wayland/Xwayland to avoid blurry scaling. */ dpi: 100;
dpi: 0;
} }
@theme "colorful/style_7.rasi" @theme "colorful/style_7.rasi"

View File

@@ -4,9 +4,9 @@ first-run=false
[account] [account]
email=IvanMalison@gmail.com email=IvanMalison@gmail.com
logged-in=false logged-in=true
token= token=
token-secret= token-secret=d3f4ef69df4242e9b10ca034b28f6831
[hotkeys] [hotkeys]
captureFullScreen=Shift+Alt+1 captureFullScreen=Shift+Alt+1
@@ -31,10 +31,10 @@ proxy-req-auth=false
check-updates-automatically=true check-updates-automatically=true
[uploaders] [uploaders]
imgur\\access-token= imgur\access-token=bcea050c708998597610f22a20c2121afa4cfad9
imgur\\anonymous=false imgur\anonymous=false
imgur\\copy-direct-link=false imgur\copy-direct-link=false
imgur\\copy-link=true imgur\copy-link=true
imgur\\name-format=Screenshot at %H:%M:%S imgur\name-format=Screenshot at %H:%M:%S
imgur\\refresh-token= imgur\refresh-token=9890577aa0486ecb5ddc5a04cf9ceb0228e91b26
imgur\\username= imgur\username=

Some files were not shown because too many files have changed in this diff Show More