Compare commits

...

43 commits

Author SHA1 Message Date
raf
01939c2136
Merge pull request #89 from NotAShelf/dependabot/nix/crane-28462d6
build(deps): bump crane from `7cf72d9` to `28462d6`
2026-04-24 20:34:24 +03:00
dependabot[bot]
0ebf62fa5d
build(deps): bump crane from 7cf72d9 to 28462d6
Bumps [crane](https://github.com/ipetkov/crane) from `7cf72d9` to `28462d6`.
- [Release notes](https://github.com/ipetkov/crane/releases)
- [Commits](7cf72d9786...28462d6d55)

---
updated-dependencies:
- dependency-name: crane
  dependency-version: 28462d6d55c33206ffa5a56c7907ca3125ed788f
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-24 14:57:41 +00:00
raf
4d3c99368f
Merge pull request #87 from NotAShelf/dependabot/cargo/libc-0.2.185
build(deps): bump libc from 0.2.184 to 0.2.185
2026-04-21 18:00:30 +03:00
dependabot[bot]
7498d688c9
build(deps): bump libc from 0.2.184 to 0.2.185
Bumps [libc](https://github.com/rust-lang/libc) from 0.2.184 to 0.2.185.
- [Release notes](https://github.com/rust-lang/libc/releases)
- [Changelog](https://github.com/rust-lang/libc/blob/0.2.185/CHANGELOG.md)
- [Commits](https://github.com/rust-lang/libc/compare/0.2.184...0.2.185)

---
updated-dependencies:
- dependency-name: libc
  dependency-version: 0.2.185
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-21 14:57:54 +00:00
raf
3c61cc19f6
Merge pull request #86 from NotAShelf/dependabot/github_actions/softprops/action-gh-release-3
build(deps): bump softprops/action-gh-release from 2 to 3
2026-04-12 23:07:28 +03:00
dependabot[bot]
cd692ba002
build(deps): bump softprops/action-gh-release from 2 to 3
Bumps [softprops/action-gh-release](https://github.com/softprops/action-gh-release) from 2 to 3.
- [Release notes](https://github.com/softprops/action-gh-release/releases)
- [Changelog](https://github.com/softprops/action-gh-release/blob/master/CHANGELOG.md)
- [Commits](https://github.com/softprops/action-gh-release/compare/v2...v3)

---
updated-dependencies:
- dependency-name: softprops/action-gh-release
  dependency-version: '3'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-12 20:00:34 +00:00
ac7fbe293b
build: bump dependencies
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If7985aa26f98a6aac1a994118df886046a6a6964
2026-04-12 22:59:45 +03:00
84cf1b46ad
stash: add a note about Clap's multicall handling
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4aec7f38ab24a6cd6310630f2169690c6a6a6964
2026-04-12 22:59:45 +03:00
81683ded03
nix: bump inputs
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4ae530fc33a1d4033600801193a2566d6a6a6964
2026-04-12 22:59:44 +03:00
20504a6e8b
ci: update flake inputs with dependabot; add cooldown to Rust deps
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iac735278f32f323106314eb9d94159f06a6a6964
2026-04-12 22:59:43 +03:00
raf
f139bda7b2
Merge pull request #82 from fxzzi/dfldsjfslkjf
nix: don't source old build script
2026-04-03 22:13:52 +03:00
Fazzi
32cf1936b6 nix: don't source old build script 2026-04-03 20:08:31 +01:00
b0ee7f59a3
commands: deprecate plain wipe command in favor of db wipe
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I62dbcc00b6b79f160318f9704fab001b6a6a6964
2026-04-03 14:46:08 +03:00
75ca501e29
chore: bump dependencies
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ibecde757e509c21ad612fc9b8e0fb5876a6a6964
2026-04-03 14:12:02 +03:00
5cb6c84f08
docs: document clipboard persistence opt-in behaviour
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ie0830d547ba0e4fcbd620290b3d314b16a6a6964
2026-04-03 14:12:01 +03:00
da9bf5ea3e
treewide: make logging format more consistent; make clipboard persistence opt-in
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9092f93c29fcbe99c90483875f4acd0c6a6a6964
2026-04-03 14:12:00 +03:00
9702e67599
build: get rid of the overzealous build script; leave symlinking to packagers
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I39c590f0a703ab71d3cb5a8df9b095a46a6a6964
2026-04-03 14:11:59 +03:00
77ac70f0d3 db/nonblocking: add test-only imports for the Fnv1aHasher
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I66effd259c6654bd4efac2f4e6bc4e176a6a6964
2026-04-01 16:25:21 +03:00
d643376cd7 stash: deduplicate Fnv1aHasher; add derive for u64 wrapper
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ic2886815721f6eefc66a8ddacd44fb286a6a6964
2026-04-01 16:23:58 +03:00
raf
a2a609f07d
Merge pull request #80 from NotAShelf/notashelf/push-yvkonkrnonvs
various: implement clipboard persistence
2026-04-01 08:46:30 +03:00
d9bee33aba
stash: consolidate confirmation prompts; install color_eyre hook
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7fb4ba67098f897849fc9b317c7fde646a6a6964
2026-03-31 15:25:09 +03:00
030be21ea5
clipboard: persist clipboard contents after source application closes
When the source application closes, the forked child continues serving
clipboard data so it remains available for paste operations.

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I14fbcf8cbc47c40bfa1da7f8b09245936a6a6964
2026-03-31 11:50:47 +03:00
fe86356399
wayland: use arc-swap over Mutex for FOCUSED_APP for better concurrency
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id6b40d5c533c35dda5bce7b852b836f26a6a6964
2026-03-31 11:50:46 +03:00
raf
0c57f9b4bd
Merge pull request #76 from NotAShelf/dependabot/github_actions/cachix/cachix-action-17
build(deps): bump cachix/cachix-action from 16 to 17
2026-03-31 09:33:42 +03:00
aabf40ac6e
build: bump dependencies
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7a974572e4e36c9013e5c1c808677eaf6a6a6964
2026-03-31 09:28:59 +03:00
dependabot[bot]
909bb53afa
build(deps): bump cachix/cachix-action from 16 to 17
Bumps [cachix/cachix-action](https://github.com/cachix/cachix-action) from 16 to 17.
- [Release notes](https://github.com/cachix/cachix-action/releases)
- [Commits](https://github.com/cachix/cachix-action/compare/v16...v17)

---
updated-dependencies:
- dependency-name: cachix/cachix-action
  dependency-version: '17'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-03-19 14:55:19 +00:00
raf
208359dc0c
Merge pull request #72 from NotAShelf/dependabot/cargo/libc-0.2.183
build(deps): bump libc from 0.2.182 to 0.2.183
2026-03-09 19:55:07 +03:00
dependabot[bot]
3faadd709f
build(deps): bump libc from 0.2.182 to 0.2.183
Bumps [libc](https://github.com/rust-lang/libc) from 0.2.182 to 0.2.183.
- [Release notes](https://github.com/rust-lang/libc/releases)
- [Changelog](https://github.com/rust-lang/libc/blob/0.2.183/CHANGELOG.md)
- [Commits](https://github.com/rust-lang/libc/compare/0.2.182...0.2.183)

---
updated-dependencies:
- dependency-name: libc
  dependency-version: 0.2.183
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-03-09 16:36:04 +00:00
raf
8754921106
Merge pull request #70 from NotAShelf/dependabot/cargo/ctrlc-3.5.2
build(deps): bump ctrlc from 3.5.1 to 3.5.2
2026-03-06 16:55:56 +03:00
raf
be6cde092a
Merge pull request #71 from NotAShelf/notashelf/push-nnnqqrzkpywp
stash/db: general cleanup; async db ops for `watch` & deterministic hashing
2026-03-06 16:55:43 +03:00
b1f43bdf7f
db: replace `CHECKED` atomic flag with pattern-keyed regex cache
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9d5fa5212c5418ce6bca02d05149e1356a6a6964
2026-03-05 16:07:49 +03:00
373affabee
db: improve content hashing; cache only positive scan result
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8035bf1dcd598a992762b9c714253406a6a6964
2026-03-05 15:07:32 +03:00
0865a1f139
commands/list: debounce for rapid copy operations
Tracks the entry ID currently being copied in `TuiState` to prevent
concurrent `copy_entry()` calls on the same entity. Otherwise we hit a
race condition. Fun!


Track the entry ID currently being copied in TuiState to prevent
concurrent copy_entry() calls on the same entry. Fixes database
race conditions when users trigger copy commands in rapid succession.



Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If8e8fe56bf6dc35960e47decf59636116a6a6964
2026-03-05 15:07:31 +03:00
cf5b1e8205
db: tests for determinism & async ops
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I2591e607a945c0aaa28a75247fc638436a6a6964
2026-03-05 15:07:30 +03:00
95bf1766ce
stash: async db operations; make hashes deterministic
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iccc9980fa13a752e0e6c9fb630c28ba96a6a6964
2026-03-05 15:07:24 +03:00
7184c8b682
db: consolidate duplicated SQL queries
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I8b6889d1e420865d0a8d3b8da916d8086a6a6964
2026-03-05 12:56:56 +03:00
ffdc13e8f5
commands/list: allow printing in reversed order with --reverse
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I305cfdc68d877dc5d5083a76dccc62db6a6a6964
2026-03-05 09:34:37 +03:00
dependabot[bot]
5e0599dc71
build(deps): bump ctrlc from 3.5.1 to 3.5.2
Bumps [ctrlc](https://github.com/Detegr/rust-ctrlc) from 3.5.1 to 3.5.2.
- [Release notes](https://github.com/Detegr/rust-ctrlc/releases)
- [Commits](https://github.com/Detegr/rust-ctrlc/compare/3.5.1...3.5.2)

---
updated-dependencies:
- dependency-name: ctrlc
  dependency-version: 3.5.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-03-02 16:28:36 +00:00
181edcefb1
db: add MIME sniffing for binary clipboard previews
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I70416269dd40496758b6e5431e77a9456a6a6964
2026-02-27 12:15:10 +03:00
ebf46de99d
docs: add installation instructions for crates.io
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ib9a3fc7ee21324707d046d52a24b50596a6a6964
2026-02-27 10:34:38 +03:00
ba2e29d5b7
docs: fix HTML formatting; mention Cliphist's features
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I92716daef01c00bbe8e75426c3662fbb6a6a6964
2026-02-27 10:09:04 +03:00
3a14860ae1
various: validate lower and upper boundaries before storing; add CLI flags
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I6484f9579a8799d952b15adcb47c8eec6a6a6964
2026-02-27 07:59:28 +03:00
02ba05dc95
db: add new error variants for entries below minimum and above maximum sizes
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Icba2920cfef0ffb0ce6435ab6d7809166a6a6964
2026-02-27 07:58:46 +03:00
25 changed files with 2856 additions and 962 deletions

View file

@ -1,13 +1,23 @@
version: 2 version: 2
updates: updates:
# Update Cargo deps
- package-ecosystem: cargo
directory: "/"
schedule:
interval: "weekly"
# Update used workflows # Update used workflows
- package-ecosystem: github-actions - package-ecosystem: github-actions
directory: "/" directory: "/"
schedule: schedule:
interval: daily interval: daily
# Update Cargo deps
- package-ecosystem: cargo
directory: "/"
cooldown:
default-days: 7
schedule:
interval: "weekly"
# Update Nixpkgs & Crane
- package-ecosystem: nix
directory: "/"
cooldown:
default-days: 7
schedule:
interval: daily

View file

@ -20,7 +20,7 @@ jobs:
with: with:
nix_path: nixpkgs=channel:nixos-unstable nix_path: nixpkgs=channel:nixos-unstable
- uses: cachix/cachix-action@v16 - uses: cachix/cachix-action@v17
with: with:
name: nyx name: nyx
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'

View file

@ -40,7 +40,7 @@ jobs:
steps: steps:
- name: Create Release - name: Create Release
id: create_release id: create_release
uses: softprops/action-gh-release@v2 uses: softprops/action-gh-release@v3
with: with:
draft: false draft: false
prerelease: false prerelease: false
@ -98,7 +98,7 @@ jobs:
cp target/${{ matrix.target }}/release/stash ${{ matrix.name }} cp target/${{ matrix.target }}/release/stash ${{ matrix.name }}
- name: Upload Release Asset - name: Upload Release Asset
uses: softprops/action-gh-release@v2 uses: softprops/action-gh-release@v3
with: with:
files: ${{ matrix.name }} files: ${{ matrix.name }}
@ -120,7 +120,7 @@ jobs:
sha256sum stash-* > SHA256SUMS sha256sum stash-* > SHA256SUMS
- name: Upload Checksums - name: Upload Checksums
uses: softprops/action-gh-release@v2 uses: softprops/action-gh-release@v3
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
files: SHA256SUMS files: SHA256SUMS

999
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -14,40 +14,44 @@ name = "stash" # actual binary name for Nix, Cargo, etc.
path = "src/main.rs" path = "src/main.rs"
[dependencies] [dependencies]
arc-swap = { version = "1.9.1", optional = true }
base64 = "0.22.1" base64 = "0.22.1"
clap = { version = "4.5.60", features = [ "derive", "env" ] } blocking = "1.6.2"
clap = { version = "4.6.0", features = [ "derive", "env" ] }
clap-verbosity-flag = "3.0.4" clap-verbosity-flag = "3.0.4"
color-eyre = "0.6.5" color-eyre = "0.6.5"
crossterm = "0.29.0" crossterm = "0.29.0"
ctrlc = "3.5.1" ctrlc = "3.5.2"
dirs = "6.0.0" dirs = "6.0.0"
env_logger = "0.11.8" env_logger = "0.11.10"
humantime = "2.3.0" humantime = "2.3.0"
imagesize = "0.14.0" imagesize = "0.14.0"
inquire = { version = "0.9.4", default-features = false, features = [ "crossterm" ] } inquire = { version = "0.9.4", default-features = false, features = [ "crossterm" ] }
libc = "0.2.182" libc = "0.2.185"
log = "0.4.29" log = "0.4.29"
notify-rust = { version = "4.12.0", optional = true } mime-sniffer = "0.1.3"
notify-rust = { version = "4.14.0", optional = true }
ratatui = "0.30.0" ratatui = "0.30.0"
regex = "1.12.3" regex = "1.12.3"
rusqlite = { version = "0.38.0", features = [ "bundled" ] } rusqlite = { version = "0.39.0", features = [ "bundled" ] }
serde = { version = "1.0.228", features = [ "derive" ] } serde = { version = "1.0.228", features = [ "derive" ] }
serde_json = "1.0.149" serde_json = "1.0.149"
smol = "2.0.2" smol = "2.0.2"
thiserror = "2.0.18" thiserror = "2.0.18"
unicode-segmentation = "1.12.0" unicode-segmentation = "1.13.2"
unicode-width = "0.2.2" unicode-width = "0.2.2"
wayland-client = { version = "0.31.12", features = [ "log" ], optional = true } wayland-client = { version = "0.31.14", features = [ "log" ], optional = true }
wayland-protocols-wlr = { version = "0.3.10", default-features = false, optional = true } wayland-protocols-wlr = { version = "0.3.12", default-features = false, optional = true }
wl-clipboard-rs = "0.9.3" wl-clipboard-rs = "0.9.3"
[dev-dependencies] [dev-dependencies]
tempfile = "3.26.0" futures = "0.3.32"
tempfile = "3.27.0"
[features] [features]
default = [ "notifications", "use-toplevel" ] default = [ "notifications", "use-toplevel" ]
notifications = [ "dep:notify-rust" ] notifications = [ "dep:notify-rust" ]
use-toplevel = [ "dep:wayland-client", "dep:wayland-protocols-wlr" ] use-toplevel = [ "dep:arc-swap", "dep:wayland-client", "dep:wayland-protocols-wlr" ]
[profile.release] [profile.release]
lto = true lto = true

View file

@ -28,7 +28,7 @@
<div align="center"> <div align="center">
<br/> <br/>
<a href="#features">Features</a><br/> <a href="#features">Features</a><br/>
<a href="#installation">Installation</a> | <a href="#usage">Usage</a> | <a href="#usage">Motivation</a> <a href="#installation">Installation</a> | <a href="#usage">Usage</a> | <a href="#usage">Motivation</a></br>
<a href="#tips--tricks">Tips and Tricks</a> <a href="#tips--tricks">Tips and Tricks</a>
<br/> <br/>
</div> </div>
@ -46,21 +46,34 @@ with many features such as but not necessarily limited to:
- Image preview (shows dimensions and format) - Image preview (shows dimensions and format)
- Text previews with customizable width - Text previews with customizable width
- De-duplication, whitespace prevention and entry limit control - De-duplication, whitespace prevention and entry limit control
- Automatic clipboard monitoring with `stash watch` - Automatic clipboard monitoring with
[`stash watch`](#watch-clipboard-for-changes-and-store-automatically)
- Configurable auto-expiry of old entries in watch mode as a safety buffer - Configurable auto-expiry of old entries in watch mode as a safety buffer
- Drop-in replacement for `wl-clipboard` tools (`wl-copy` and `wl-paste`) - Drop-in replacement for `wl-clipboard` tools (`wl-copy` and `wl-paste`)
- Sensitive clipboard filtering via regex (see below) - Sensitive clipboard filtering via regex (see below)
- Sensitive clipboard filtering by application (see below) - Sensitive clipboard filtering by application (see below)
See [usage section](#usage) for more details. on top of the existing features of Cliphist, which are as follows:
- Write clipboard changes to a history file.
- Recall history with dmenu, rofi, wofi (or whatever other picker you like).
- Both text and images are supported.
- Clipboard is preserved byte-for-byte.
- Leading/trailing whitespace, no whitespace, or newlines are preserved.
- Won't break fancy editor selections like Vim wordwise, linewise, or block
mode.
Most of Stash's usage is documented in the [usage section](#usage) for more
details. Refer to the [Tips & Tricks section](#tips--tricks) for more "advanced"
features, or conveniences provided by Stash.
## Installation ## Installation
### With Nix ### With Nix
Nix is the recommended way of downloading Stash. You can install it using Nix Nix is the recommended way of downloading (and developing!) Stash. You can
flakes using `nix profile add` if on non-nixos or add Stash as a flake input if install it using Nix flakes using `nix profile add` if on non-nixos or add Stash
you are on NixOS. as a flake input if you are on NixOS.
```nix ```nix
{ {
@ -91,7 +104,8 @@ If you want to give Stash a try before you switch to it, you may also run it one
time with `nix run`. time with `nix run`.
```sh ```sh
nix run github:NotAShelf/stash -- watch # start the watch daemon # Run directly from the git repository; will be garbage collected
$ nix run github:NotAShelf/stash -- watch # start the watch daemon
``` ```
### Without Nix ### Without Nix
@ -110,16 +124,23 @@ releases are made when a version gets tagged, and are available under
- Build and install from source with Cargo: - Build and install from source with Cargo:
```bash ```bash
cargo install --git https://github.com/notashelf/stash cargo install stash --locked
``` ```
Additionally, you may get Stash from source via `cargo install` using
`cargo install --git https://github.com/notashelf/stash --locked` or you may
check out to the repository, and use Cargo to build it. You'll need Rust 1.91.0
or above. Most distributions should package this version already. You may, of
course, prefer to package the built releases if you'd like.
## Usage ## Usage
> [!NOTE] > [!IMPORTANT]
> It is not a priority to provide 1:1 backwards compatibility with Cliphist. > It is not a priority to provide 1:1 backwards compatibility with Cliphist.
> While the interface is _almost_ identical, Stash chooses to build upon > While the interface is generally similar, Stash chooses to build upon
> Cliphist's design and extend existing design choices. See > Cliphist's design and extend existing design choices. See
> [Migrating from Cliphist](#migrating-from-cliphist) for more details. > [Migrating from Cliphist](#migrating-from-cliphist) for more details. Refer to
> help text if confused.
The command interface of Stash is _only slightly_ different from Cliphist. In The command interface of Stash is _only slightly_ different from Cliphist. In
most cases, you may simply replace `cliphist` with `stash` and your commands, most cases, you may simply replace `cliphist` with `stash` and your commands,
@ -275,7 +296,7 @@ entry has expired from history.
> This behavior only applies when the watch daemon is actively running. Manual > This behavior only applies when the watch daemon is actively running. Manual
> expiration or deletion of entries will not clear the clipboard. > expiration or deletion of entries will not clear the clipboard.
### MIME Type Preference for Watch #### MIME Type Preference for Watch
`stash watch` supports a `--mime-type` (short `-t`) option that lets you `stash watch` supports a `--mime-type` (short `-t`) option that lets you
prioritise which MIME type the daemon should request from the clipboard when prioritise which MIME type the daemon should request from the clipboard when
@ -299,6 +320,25 @@ ask the compositor for image data first. Most users will be fine using the
default value (`any`) but in the case your browser (or other applications!) default value (`any`) but in the case your browser (or other applications!)
regularly misrepresent data, you might wish to prioritize a different type. regularly misrepresent data, you might wish to prioritize a different type.
#### Clipboard Persistence
By default, when you copy something and close the source application, Wayland
clears the clipboard. Stash can optionally keep the clipboard contents available
after the source closes using the `--persist` flag.
```bash
stash watch --persist
```
When enabled, Stash will fork a background process to serve the clipboard
contents, keeping them available even after the original application exits.
> [!NOTE]
> This feature is **opt-in** and disabled by default, as it may not be desirable
> for all users and can leave clipboard data in memory longer than expected. You
> must start the `stash watch` daemon with `--persist` for clipboard
> persistence.
### Options ### Options
Some commands take additional flags to modify Stash's behavior. See each Some commands take additional flags to modify Stash's behavior. See each
@ -554,7 +594,8 @@ your database:
reclaim space and defragment the database. This is safe to run periodically. reclaim space and defragment the database. This is safe to run periodically.
It is recommended to run `stash db vacuum` occasionally (e.g., monthly) to keep It is recommended to run `stash db vacuum` occasionally (e.g., monthly) to keep
the database compact, especially after deleting many entries. the database compact, especially after deleting many entries. You can, of
course, wipe the database entirely if it has grown too large.
## Attributions ## Attributions

View file

@ -1,65 +0,0 @@
use std::{env, fs, path::Path};
/// List of multicall symlinks to create (name, target)
const MULTICALL_LINKS: &[&str] =
    &["stash-copy", "stash-paste", "wl-copy", "wl-paste"];

/// Wayland-specific symlinks that can be disabled separately
const WAYLAND_LINKS: &[&str] = &["wl-copy", "wl-paste"];

/// Build-script entry point: creates multicall symlinks next to the built
/// `stash` binary so the single executable can be invoked under several
/// names (stash-copy, stash-paste, wl-copy, wl-paste).
///
/// Opt-out environment variables:
/// - `STASH_NO_SYMLINKS`: skip every symlink
/// - `STASH_NO_WL_SYMLINKS`: skip only the Wayland pair (wl-copy, wl-paste)
fn main() {
    // OUT_DIR is something like .../target/debug/build/<pkg>/out
    // We want .../target/debug or .../target/release
    let out_dir = env::var("OUT_DIR").expect("OUT_DIR not set");
    let bin_dir = Path::new(&out_dir)
        .ancestors()
        .nth(3)
        .expect("Failed to find binary dir");

    // Path to the main stash binary
    let stash_bin = bin_dir.join("stash");

    // Check for environment variables to disable symlinking
    let disable_all_symlinks = env::var("STASH_NO_SYMLINKS").is_ok();
    let disable_wayland_symlinks = env::var("STASH_NO_WL_SYMLINKS").is_ok();

    // Create symlinks for each multicall binary
    for link in MULTICALL_LINKS {
        if disable_all_symlinks {
            println!("cargo:warning=Skipping symlink {link} (all symlinks disabled)");
            continue;
        }
        if disable_wayland_symlinks && WAYLAND_LINKS.contains(link) {
            println!(
                "cargo:warning=Skipping symlink {link} (wayland symlinks disabled)"
            );
            continue;
        }
        let link_path = bin_dir.join(link);

        // Remove existing symlink or file if present; the error is ignored
        // because "nothing to remove" is the common case.
        let _ = fs::remove_file(&link_path);

        // Symlinks are Unix-only; on other targets this loop body is a no-op.
        #[cfg(unix)]
        {
            use std::os::unix::fs::symlink;
            match symlink(&stash_bin, &link_path) {
                Ok(()) => {
                    println!(
                        "cargo:warning=Created symlink: {} -> {}",
                        link_path.display(),
                        stash_bin.display()
                    );
                },
                Err(e) => {
                    // Non-fatal: report through cargo's warning channel and
                    // keep going with the remaining links.
                    println!(
                        "cargo:warning=Failed to create symlink {} -> {}: {}",
                        link_path.display(),
                        stash_bin.display(),
                        e
                    );
                },
            }
        }
    }
}

12
flake.lock generated
View file

@ -2,11 +2,11 @@
"nodes": { "nodes": {
"crane": { "crane": {
"locked": { "locked": {
"lastModified": 1766194365, "lastModified": 1776635034,
"narHash": "sha256-4AFsUZ0kl6MXSm4BaQgItD0VGlEKR3iq7gIaL7TjBvc=", "narHash": "sha256-OEOJrT3ZfwbChzODfIH4GzlNTtOFuZFWPtW7jIeR8xU=",
"owner": "ipetkov", "owner": "ipetkov",
"repo": "crane", "repo": "crane",
"rev": "7d8ec2c71771937ab99790b45e6d9b93d15d9379", "rev": "dc7496d8ea6e526b1254b55d09b966e94673750f",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -17,11 +17,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1766309749, "lastModified": 1775710090,
"narHash": "sha256-3xY8CZ4rSnQ0NqGhMKAy5vgC+2IVK0NoVEzDoOh4DA4=", "narHash": "sha256-ar3rofg+awPB8QXDaFJhJ2jJhu+KqN/PRCXeyuXR76E=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "a6531044f6d0bef691ea18d4d4ce44d0daa6e816", "rev": "4c1018dae018162ec878d42fec712642d214fdfa",
"type": "github" "type": "github"
}, },
"original": { "original": {

View file

@ -4,6 +4,7 @@
stdenv, stdenv,
mold, mold,
versionCheckHook, versionCheckHook,
useMold ? stdenv.isLinux,
createSymlinks ? true, createSymlinks ? true,
}: let }: let
pname = "stash"; pname = "stash";
@ -18,7 +19,6 @@
(fs.fileFilter (file: builtins.any file.hasExt ["rs"]) (s + /src)) (fs.fileFilter (file: builtins.any file.hasExt ["rs"]) (s + /src))
(s + /Cargo.lock) (s + /Cargo.lock)
(s + /Cargo.toml) (s + /Cargo.toml)
(s + /build.rs)
]; ];
}; };
@ -55,7 +55,7 @@ in
done done
''; '';
env = lib.optionalAttrs (stdenv.isLinux && !stdenv.hostPlatform.isAarch) { env = lib.optionalAttrs useMold {
CARGO_LINKER = "clang"; CARGO_LINKER = "clang";
CARGO_RUSTFLAGS = "-Clink-arg=-fuse-ld=${mold}/bin/mold"; CARGO_RUSTFLAGS = "-Clink-arg=-fuse-ld=${mold}/bin/mold";
}; };

3
src/clipboard/mod.rs Normal file
View file

@ -0,0 +1,3 @@
pub mod persist;
pub use persist::{ClipboardData, get_serving_pid, persist_clipboard};

262
src/clipboard/persist.rs Normal file
View file

@ -0,0 +1,262 @@
use std::{
process::exit,
sync::atomic::{AtomicI32, Ordering},
};
use wl_clipboard_rs::copy::{
ClipboardType,
MimeType as CopyMimeType,
Options,
PreparedCopy,
ServeRequests,
Source,
};
/// Upper bound on paste requests a forked server answers before exiting.
/// This (hopefully) prevents runaway processes while still providing
/// persistence for ordinary usage.
const MAX_SERVE_REQUESTS: usize = 1000;

/// PID of the current clipboard persistence child process, or 0 when no
/// child is running. Used to detect when clipboard content originates from
/// our own serve process.
static SERVING_PID: AtomicI32 = AtomicI32::new(0);

/// Return the PID of the active persistence child, if one has been forked.
/// The watch loop consults this to avoid spawning duplicate servers.
pub fn get_serving_pid() -> Option<i32> {
    match SERVING_PID.load(Ordering::SeqCst) {
        0 => None,
        pid => Some(pid),
    }
}
/// Result type for persistence operations.
pub type PersistenceResult<T> = Result<T, PersistenceError>;

/// Errors that can occur during clipboard persistence.
#[derive(Debug, thiserror::Error)]
pub enum PersistenceError {
    /// The wl-clipboard-rs `prepare_copy` call failed; carries the
    /// stringified source error.
    #[error("Failed to prepare copy: {0}")]
    PrepareFailed(String),
    /// `libc::fork()` failed; carries a description of the failure.
    #[error("Failed to fork: {0}")]
    ForkFailed(String),
    /// Content exceeded the persistence size cap; carries the byte length.
    #[error("Clipboard data too large: {0} bytes")]
    DataTooLarge(usize),
    /// Content was zero bytes long.
    #[error("Clipboard content is empty")]
    EmptyContent,
    /// The source offered no MIME types at all.
    #[error("No MIME types to offer")]
    NoMimeTypes,
}
/// Clipboard data with all MIME types for persistence.
#[derive(Debug, Clone)]
pub struct ClipboardData {
    /// The actual clipboard content.
    pub content: Vec<u8>,
    /// All MIME types offered by the source. Preserves order.
    pub mime_types: Vec<String>,
    /// The MIME type that was selected for storage.
    pub selected_mime: String,
}

impl ClipboardData {
    /// Bundle clipboard bytes together with their advertised MIME types.
    pub fn new(
        content: Vec<u8>,
        mime_types: Vec<String>,
        selected_mime: String,
    ) -> Self {
        ClipboardData {
            content,
            mime_types,
            selected_mime,
        }
    }

    /// Check whether this data may be handed to the persistence machinery.
    ///
    /// Rejects, in order: empty content, payloads above the 100MB cap, and
    /// offers that advertise no MIME types.
    pub fn is_valid(&self) -> Result<(), PersistenceError> {
        // Hard cap on what we are willing to keep alive in a forked server.
        const MAX_SIZE: usize = 100 * 1024 * 1024; // 100MB

        if self.content.is_empty() {
            Err(PersistenceError::EmptyContent)
        } else if self.content.len() > MAX_SIZE {
            Err(PersistenceError::DataTooLarge(self.content.len()))
        } else if self.mime_types.is_empty() {
            Err(PersistenceError::NoMimeTypes)
        } else {
            Ok(())
        }
    }
}
/// Persist clipboard data by forking a background process that serves it.
///
/// 1. Validates the payload via [`ClipboardData::is_valid`]
/// 2. Prepares a clipboard copy operation (note: only the selected MIME
///    type is offered — see `prepare_clipboard_copy`)
/// 3. Forks a child process that serves clipboard data until
///    `MAX_SERVE_REQUESTS` is reached
/// 4. The parent returns immediately
///
/// # Errors
///
/// Returns a [`PersistenceError`] if validation, copy preparation, or the
/// fork itself fails.
///
/// # Safety
///
/// This function uses `libc::fork()` which is unsafe. The child process
/// must not modify any shared state or file descriptors.
pub unsafe fn persist_clipboard(data: ClipboardData) -> PersistenceResult<()> {
    // Validate data
    data.is_valid()?;
    // Prepare the copy operation
    let prepared = prepare_clipboard_copy(&data)?;
    // Fork and serve
    unsafe { fork_and_serve(prepared) }
}
/// Prepare a clipboard copy operation for the selected MIME type.
///
/// NOTE(review): although `ClipboardData` carries every MIME type offered
/// by the source (`mime_types`), only `selected_mime` is registered on the
/// offer here — `data.mime_types` is never consulted. Confirm whether a
/// multi-MIME offer was intended.
fn prepare_clipboard_copy(
    data: &ClipboardData,
) -> PersistenceResult<PreparedCopy> {
    let mut opts = Options::new();
    opts.clipboard(ClipboardType::Regular);
    // Bound how many paste requests the forked child will answer.
    opts.serve_requests(ServeRequests::Only(MAX_SERVE_REQUESTS));
    opts.foreground(true); // we'll fork manually for better control

    // Determine MIME type for the primary offer
    let mime_type = if data.selected_mime.starts_with("text/") {
        CopyMimeType::Text
    } else {
        CopyMimeType::Specific(data.selected_mime.clone())
    };

    // Prepare the copy
    let prepared = opts
        .prepare_copy(Source::Bytes(data.content.clone().into()), mime_type)
        .map_err(|e| PersistenceError::PrepareFailed(e.to_string()))?;
    Ok(prepared)
}
/// Fork a child process to serve clipboard data.
///
/// The child process will:
///
/// 1. Reset `SERVING_PID` to 0 (the child *is* the server; the PID is only
///    meaningful in the parent for loop detection)
/// 2. Serve clipboard requests until MAX_SERVE_REQUESTS
/// 3. Exit cleanly
///
/// The parent stores the child `PID` in `SERVING_PID` and returns immediately.
///
/// # Safety
///
/// Calls `libc::fork()`. The caller must ensure no locks or other shared
/// resources are held that would be left inconsistent in the child.
unsafe fn fork_and_serve(prepared: PreparedCopy) -> PersistenceResult<()> {
    // Enable automatic child reaping to prevent zombie processes
    // (SIG_IGN on SIGCHLD makes the kernel reap exited children for us).
    unsafe {
        libc::signal(libc::SIGCHLD, libc::SIG_IGN);
    }
    match unsafe { libc::fork() } {
        0 => {
            // Child process - clear serving PID
            // Look at me. I'm the server now.
            SERVING_PID.store(0, Ordering::SeqCst);
            serve_clipboard_child(prepared);
            exit(0);
        },
        -1 => {
            // Oops.
            Err(PersistenceError::ForkFailed(
                "libc::fork() returned -1".to_string(),
            ))
        },
        pid => {
            // Parent process, store child PID for loop detection
            log::debug!("forked clipboard persistence process (pid: {pid})");
            SERVING_PID.store(pid, Ordering::SeqCst);
            Ok(())
        },
    }
}
/// Entry point executed inside the forked child: block on serving
/// clipboard paste requests until the serve loop finishes or fails.
fn serve_clipboard_child(prepared: PreparedCopy) {
    let pid = std::process::id() as i32;
    log::debug!("clipboard persistence child process started (pid: {pid})");

    // PreparedCopy::serve() blocks and drives all the Wayland protocol
    // interactions internally via wl-clipboard-rs.
    if let Err(e) = prepared.serve() {
        log::error!("clipboard persistence: serve failed: {e}");
        exit(1);
    }

    // Only reachable on success: the error branch terminates the process.
    log::debug!("clipboard persistence: serve completed normally");
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `is_valid` must accept well-formed data and map each violation to
    /// the matching `PersistenceError` variant.
    #[test]
    fn test_clipboard_data_validation() {
        // Valid data
        let valid = ClipboardData::new(
            b"hello".to_vec(),
            vec!["text/plain".to_string()],
            "text/plain".to_string(),
        );
        assert!(valid.is_valid().is_ok());

        // Empty content
        let empty = ClipboardData::new(
            vec![],
            vec!["text/plain".to_string()],
            "text/plain".to_string(),
        );
        assert!(matches!(
            empty.is_valid(),
            Err(PersistenceError::EmptyContent)
        ));

        // No MIME types
        let no_mimes =
            ClipboardData::new(b"hello".to_vec(), vec![], "text/plain".to_string());
        assert!(matches!(
            no_mimes.is_valid(),
            Err(PersistenceError::NoMimeTypes)
        ));

        // Too large (one MB over the 100MB cap enforced by is_valid)
        let huge = ClipboardData::new(
            vec![0u8; 101 * 1024 * 1024], // 101MB
            vec!["text/plain".to_string()],
            "text/plain".to_string(),
        );
        assert!(matches!(
            huge.is_valid(),
            Err(PersistenceError::DataTooLarge(_))
        ));
    }

    /// `new` must store all three fields verbatim, preserving MIME order.
    #[test]
    fn test_clipboard_data_creation() {
        let data = ClipboardData::new(
            b"test content".to_vec(),
            vec!["text/plain".to_string(), "text/html".to_string()],
            "text/plain".to_string(),
        );
        assert_eq!(data.content, b"test content");
        assert_eq!(data.mime_types.len(), 2);
        assert_eq!(data.selected_mime, "text/plain");
    }
}

View file

@ -32,7 +32,7 @@ impl DecodeCommand for SqliteClipboardDb {
// If input is empty or whitespace, treat as error and trigger fallback // If input is empty or whitespace, treat as error and trigger fallback
if input_str.trim().is_empty() { if input_str.trim().is_empty() {
log::debug!("No input provided to decode; relaying clipboard to stdout"); log::debug!("no input provided to decode; relaying clipboard to stdout");
if let Ok((mut reader, _mime)) = if let Ok((mut reader, _mime)) =
get_contents(ClipboardType::Regular, Seat::Unspecified, MimeType::Any) get_contents(ClipboardType::Regular, Seat::Unspecified, MimeType::Any)
{ {

View file

@ -9,7 +9,7 @@ pub trait DeleteCommand {
impl DeleteCommand for SqliteClipboardDb { impl DeleteCommand for SqliteClipboardDb {
fn delete(&self, input: impl Read) -> Result<usize, StashError> { fn delete(&self, input: impl Read) -> Result<usize, StashError> {
let deleted = self.delete_entries(input)?; let deleted = self.delete_entries(input)?;
log::info!("Deleted {deleted} entries"); log::info!("deleted {deleted} entries");
Ok(deleted) Ok(deleted)
} }
} }

View file

@ -55,11 +55,11 @@ impl ImportCommand for SqliteClipboardDb {
imported += 1; imported += 1;
} }
log::info!("Imported {imported} records from TSV into SQLite database."); log::info!("imported {imported} records from TSV into SQLite database.");
// Trim database to max_items after import // Trim database to max_items after import
self.trim_db(max_items)?; self.trim_db(max_items)?;
log::info!("Trimmed clipboard database to max_items = {max_items}"); log::info!("trimmed clipboard database to max_items = {max_items}");
Ok(()) Ok(())
} }

View file

@ -11,6 +11,7 @@ pub trait ListCommand {
out: impl Write, out: impl Write,
preview_width: u32, preview_width: u32,
include_expired: bool, include_expired: bool,
reverse: bool,
) -> Result<(), StashError>; ) -> Result<(), StashError>;
} }
@ -20,9 +21,10 @@ impl ListCommand for SqliteClipboardDb {
out: impl Write, out: impl Write,
preview_width: u32, preview_width: u32,
include_expired: bool, include_expired: bool,
reverse: bool,
) -> Result<(), StashError> { ) -> Result<(), StashError> {
self self
.list_entries(out, preview_width, include_expired) .list_entries(out, preview_width, include_expired, reverse)
.map(|_| ()) .map(|_| ())
} }
} }
@ -52,6 +54,12 @@ struct TuiState {
/// Whether we're currently in search input mode. /// Whether we're currently in search input mode.
search_mode: bool, search_mode: bool,
/// Whether to show entries in reverse order (oldest first).
reverse: bool,
/// ID of entry currently being copied.
copying_entry: Option<i64>,
} }
impl TuiState { impl TuiState {
@ -61,6 +69,7 @@ impl TuiState {
include_expired: bool, include_expired: bool,
window_size: usize, window_size: usize,
preview_width: u32, preview_width: u32,
reverse: bool,
) -> Result<Self, StashError> { ) -> Result<Self, StashError> {
let total = db.count_entries(include_expired, None)?; let total = db.count_entries(include_expired, None)?;
let window = if total > 0 { let window = if total > 0 {
@ -70,6 +79,7 @@ impl TuiState {
window_size, window_size,
preview_width, preview_width,
None, None,
reverse,
)? )?
} else { } else {
Vec::new() Vec::new()
@ -83,6 +93,8 @@ impl TuiState {
dirty: false, dirty: false,
search_query: String::new(), search_query: String::new(),
search_mode: false, search_mode: false,
reverse,
copying_entry: None,
}) })
} }
@ -228,6 +240,7 @@ impl TuiState {
self.window_size, self.window_size,
preview_width, preview_width,
search, search,
self.reverse,
)? )?
} else { } else {
Vec::new() Vec::new()
@ -266,6 +279,7 @@ impl SqliteClipboardDb {
&self, &self,
preview_width: u32, preview_width: u32,
include_expired: bool, include_expired: bool,
reverse: bool,
) -> Result<(), StashError> { ) -> Result<(), StashError> {
use std::io::stdout; use std::io::stdout;
@ -316,8 +330,13 @@ impl SqliteClipboardDb {
.unwrap_or(24); .unwrap_or(24);
let initial_height = initial_height.max(1); let initial_height = initial_height.max(1);
let mut tui = let mut tui = TuiState::new(
TuiState::new(self, include_expired, initial_height, preview_width)?; self,
include_expired,
initial_height,
preview_width,
reverse,
)?;
// ratatui ListState; only tracks selection within the *window* slice. // ratatui ListState; only tracks selection within the *window* slice.
let mut list_state = ListState::default(); let mut list_state = ListState::default();
@ -393,7 +412,7 @@ impl SqliteClipboardDb {
}, },
(KeyCode::Enter, _) => actions.copy = true, (KeyCode::Enter, _) => actions.copy = true,
(KeyCode::Char('D'), KeyModifiers::SHIFT) => { (KeyCode::Char('D'), KeyModifiers::SHIFT) => {
actions.delete = true actions.delete = true;
}, },
(KeyCode::Char('/'), _) => actions.toggle_search = true, (KeyCode::Char('/'), _) => actions.toggle_search = true,
_ => {}, _ => {},
@ -663,42 +682,51 @@ impl SqliteClipboardDb {
if actions.copy if actions.copy
&& let Some(&(id, ..)) = tui.selected_entry() && let Some(&(id, ..)) = tui.selected_entry()
{ {
match self.copy_entry(id) { if tui.copying_entry == Some(id) {
Ok((new_id, contents, mime)) => { log::debug!(
if new_id != id { "Skipping duplicate copy for entry {id} (already in \
tui.dirty = true; progress)"
} );
let opts = Options::new(); } else {
let mime_type = match mime { tui.copying_entry = Some(id);
Some(ref m) if m == "text/plain" => MimeType::Text, match self.copy_entry(id) {
Some(ref m) => MimeType::Specific(m.clone().to_owned()), Ok((new_id, contents, mime)) => {
None => MimeType::Text, if new_id != id {
}; tui.dirty = true;
let copy_result = opts }
.copy(Source::Bytes(contents.clone().into()), mime_type); let opts = Options::new();
match copy_result { let mime_type = match mime {
Ok(()) => { Some(ref m) if m == "text/plain" => MimeType::Text,
let _ = Notification::new() Some(ref m) => MimeType::Specific(m.clone().clone()),
.summary("Stash") None => MimeType::Text,
.body("Copied entry to clipboard") };
.show(); let copy_result = opts
}, .copy(Source::Bytes(contents.clone().into()), mime_type);
Err(e) => { match copy_result {
log::error!("Failed to copy entry to clipboard: {e}"); Ok(()) => {
let _ = Notification::new() let _ = Notification::new()
.summary("Stash") .summary("Stash")
.body(&format!("Failed to copy to clipboard: {e}")) .body("Copied entry to clipboard")
.show(); .show();
}, },
} Err(e) => {
}, log::error!("failed to copy entry to clipboard: {e}");
Err(e) => { let _ = Notification::new()
log::error!("Failed to fetch entry {id}: {e}"); .summary("Stash")
let _ = Notification::new() .body(&format!("Failed to copy to clipboard: {e}"))
.summary("Stash") .show();
.body(&format!("Failed to fetch entry: {e}")) },
.show(); }
}, },
Err(e) => {
log::error!("failed to fetch entry {id}: {e}");
let _ = Notification::new()
.summary("Stash")
.body(&format!("Failed to fetch entry: {e}"))
.show();
},
}
tui.copying_entry = None;
} }
} }
} }

View file

@ -5,4 +5,3 @@ pub mod list;
pub mod query; pub mod query;
pub mod store; pub mod store;
pub mod watch; pub mod watch;
pub mod wipe;

View file

@ -2,6 +2,7 @@ use std::io::Read;
use crate::db::{ClipboardDb, SqliteClipboardDb}; use crate::db::{ClipboardDb, SqliteClipboardDb};
#[allow(clippy::too_many_arguments)]
pub trait StoreCommand { pub trait StoreCommand {
fn store( fn store(
&self, &self,
@ -10,6 +11,8 @@ pub trait StoreCommand {
max_items: u64, max_items: u64,
state: Option<String>, state: Option<String>,
excluded_apps: &[String], excluded_apps: &[String],
min_size: Option<usize>,
max_size: usize,
) -> Result<(), crate::db::StashError>; ) -> Result<(), crate::db::StashError>;
} }
@ -21,18 +24,24 @@ impl StoreCommand for SqliteClipboardDb {
max_items: u64, max_items: u64,
state: Option<String>, state: Option<String>,
excluded_apps: &[String], excluded_apps: &[String],
min_size: Option<usize>,
max_size: usize,
) -> Result<(), crate::db::StashError> { ) -> Result<(), crate::db::StashError> {
if let Some("sensitive" | "clear") = state.as_deref() { if let Some("sensitive" | "clear") = state.as_deref() {
self.delete_last()?; self.delete_last()?;
log::info!("Entry deleted"); log::info!("entry deleted");
} else { } else {
self.store_entry( self.store_entry(
input, input,
max_dedupe_search, max_dedupe_search,
max_items, max_items,
Some(excluded_apps), Some(excluded_apps),
min_size,
max_size,
None, // no pre-computed hash for CLI store
None, // no mime types for CLI store
)?; )?;
log::info!("Entry stored"); log::info!("entry stored");
} }
Ok(()) Ok(())
} }

View file

@ -1,9 +1,4 @@
use std::{ use std::{collections::BinaryHeap, hash::Hasher, io::Read, time::Duration};
collections::{BinaryHeap, hash_map::DefaultHasher},
hash::{Hash, Hasher},
io::Read,
time::Duration,
};
use smol::Timer; use smol::Timer;
use wl_clipboard_rs::{ use wl_clipboard_rs::{
@ -17,7 +12,11 @@ use wl_clipboard_rs::{
}, },
}; };
use crate::db::{ClipboardDb, SqliteClipboardDb}; use crate::{
clipboard::{self, ClipboardData, get_serving_pid},
db::{SqliteClipboardDb, nonblocking::AsyncClipboardDb},
hash::Fnv1aHasher,
};
/// Wrapper to provide [`Ord`] implementation for `f64` by negating values. /// Wrapper to provide [`Ord`] implementation for `f64` by negating values.
/// This allows [`BinaryHeap`], which is a max-heap, to function as a min-heap. /// This allows [`BinaryHeap`], which is a max-heap, to function as a min-heap.
@ -59,7 +58,7 @@ impl std::cmp::Ord for Neg {
} }
/// Min-heap for tracking entry expirations with sub-second precision. /// Min-heap for tracking entry expirations with sub-second precision.
/// Uses Neg wrapper to turn BinaryHeap (max-heap) into min-heap behavior. /// Uses Neg wrapper to turn `BinaryHeap` (max-heap) into min-heap behavior.
#[derive(Debug, Default)] #[derive(Debug, Default)]
struct ExpirationQueue { struct ExpirationQueue {
heap: BinaryHeap<(Neg, i64)>, heap: BinaryHeap<(Neg, i64)>,
@ -97,6 +96,16 @@ impl ExpirationQueue {
} }
expired expired
} }
/// Check if the queue is empty
fn is_empty(&self) -> bool {
self.heap.is_empty()
}
/// Get the number of entries in the queue
fn len(&self) -> usize {
self.heap.len()
}
} }
/// Get clipboard contents using the source application's preferred MIME type. /// Get clipboard contents using the source application's preferred MIME type.
@ -118,21 +127,29 @@ impl ExpirationQueue {
/// When `preference` is `"text"`, uses `MimeType::Text` directly (single call). /// When `preference` is `"text"`, uses `MimeType::Text` directly (single call).
/// When `preference` is `"image"`, picks the first offered `image/*` type. /// When `preference` is `"image"`, picks the first offered `image/*` type.
/// Otherwise picks the source's first offered type. /// Otherwise picks the source's first offered type.
///
/// # Returns
///
/// The content reader, the selected MIME type, and ALL offered MIME
/// types.
#[expect(clippy::type_complexity)]
fn negotiate_mime_type( fn negotiate_mime_type(
preference: &str, preference: &str,
) -> Result<(Box<dyn Read>, String), wl_clipboard_rs::paste::Error> { ) -> Result<(Box<dyn Read>, String, Vec<String>), wl_clipboard_rs::paste::Error>
{
// Get all offered MIME types first (needed for persistence)
let offered =
get_mime_types_ordered(ClipboardType::Regular, Seat::Unspecified)?;
if preference == "text" { if preference == "text" {
let (reader, mime_str) = get_contents( let (reader, mime_str) = get_contents(
ClipboardType::Regular, ClipboardType::Regular,
Seat::Unspecified, Seat::Unspecified,
PasteMimeType::Text, PasteMimeType::Text,
)?; )?;
return Ok((Box::new(reader) as Box<dyn Read>, mime_str)); return Ok((Box::new(reader) as Box<dyn Read>, mime_str, offered));
} }
let offered =
get_mime_types_ordered(ClipboardType::Regular, Seat::Unspecified)?;
let chosen = if preference == "image" { let chosen = if preference == "image" {
// Pick the first offered image type, fall back to first overall // Pick the first offered image type, fall back to first overall
offered offered
@ -169,235 +186,286 @@ fn negotiate_mime_type(
Seat::Unspecified, Seat::Unspecified,
PasteMimeType::Specific(mime_str), PasteMimeType::Specific(mime_str),
)?; )?;
Ok((Box::new(reader) as Box<dyn Read>, actual_mime))
Ok((Box::new(reader) as Box<dyn Read>, actual_mime, offered))
}, },
None => Err(wl_clipboard_rs::paste::Error::NoSeats), None => Err(wl_clipboard_rs::paste::Error::NoSeats),
} }
} }
#[allow(clippy::too_many_arguments)]
pub trait WatchCommand { pub trait WatchCommand {
fn watch( async fn watch(
&self, &self,
max_dedupe_search: u64, max_dedupe_search: u64,
max_items: u64, max_items: u64,
excluded_apps: &[String], excluded_apps: &[String],
expire_after: Option<Duration>, expire_after: Option<Duration>,
mime_type_preference: &str, mime_type_preference: &str,
min_size: Option<usize>,
max_size: usize,
persist: bool,
); );
} }
impl WatchCommand for SqliteClipboardDb { impl WatchCommand for SqliteClipboardDb {
fn watch( async fn watch(
&self, &self,
max_dedupe_search: u64, max_dedupe_search: u64,
max_items: u64, max_items: u64,
excluded_apps: &[String], excluded_apps: &[String],
expire_after: Option<Duration>, expire_after: Option<Duration>,
mime_type_preference: &str, mime_type_preference: &str,
min_size: Option<usize>,
max_size: usize,
persist: bool,
) { ) {
smol::block_on(async { let async_db = AsyncClipboardDb::new(self.db_path.clone());
log::info!( log::info!(
"Starting clipboard watch daemon with MIME type preference: \ "Starting clipboard watch daemon with MIME type preference: \
{mime_type_preference}" {mime_type_preference}"
); );
// Build expiration queue from existing entries if persist {
let mut exp_queue = ExpirationQueue::new(); log::info!("clipboard persistence enabled");
if let Ok(Some((expires_at, id))) = self.get_next_expiration() { }
exp_queue.push(expires_at, id);
// Load remaining expirations (exclude already-marked expired entries) // Build expiration queue from existing entries
let mut stmt = self let mut exp_queue = ExpirationQueue::new();
.conn
.prepare( // Load all expirations from database asynchronously
"SELECT expires_at, id FROM clipboard WHERE expires_at IS NOT \ match async_db.load_all_expirations().await {
NULL AND (is_expired IS NULL OR is_expired = 0) ORDER BY \ Ok(expirations) => {
expires_at ASC", for (expires_at, id) in expirations {
) exp_queue.push(expires_at, id);
.ok(); }
if let Some(ref mut stmt) = stmt { if !exp_queue.is_empty() {
let mut rows = stmt.query([]).ok(); log::info!("loaded {} expirations from database", exp_queue.len());
if let Some(ref mut rows) = rows { }
while let Ok(Some(row)) = rows.next() { },
if let (Ok(exp), Ok(row_id)) = Err(e) => {
(row.get::<_, f64>(0), row.get::<_, i64>(1)) log::warn!("failed to load expirations: {e}");
{ },
// Skip first entry which is already added }
if exp_queue
.heap // We use hashes for comparison instead of storing full contents
.iter() let mut last_hash: Option<u64> = None;
.any(|(_, existing_id)| *existing_id == row_id) let mut buf = Vec::with_capacity(4096);
{
continue; // Helper to hash clipboard contents using FNV-1a (deterministic across
} // runs)
exp_queue.push(exp, row_id); let hash_contents = |data: &[u8]| -> u64 {
let mut hasher = Fnv1aHasher::new();
hasher.write(data);
hasher.finish()
};
// Initialize with current clipboard using smart MIME negotiation
if let Ok((mut reader, ..)) = negotiate_mime_type(mime_type_preference) {
buf.clear();
if reader.read_to_end(&mut buf).is_ok() && !buf.is_empty() {
last_hash = Some(hash_contents(&buf));
}
}
let poll_interval = Duration::from_millis(500);
loop {
// Process any pending expirations that are due now
if let Some(next_exp) = exp_queue.peek_next() {
let now = SqliteClipboardDb::now();
if next_exp <= now {
// Expired entries to process
let expired_ids = exp_queue.pop_expired(now);
for id in expired_ids {
// Verify entry still exists and get its content_hash
let expired_hash: Option<i64> =
match async_db.get_content_hash(id).await {
Ok(hash) => hash,
Err(e) => {
log::warn!("failed to get content hash for entry {id}: {e}");
None
},
};
if let Some(stored_hash) = expired_hash {
// Mark as expired
if let Err(e) = async_db.mark_expired(id).await {
log::warn!("failed to mark entry {id} as expired: {e}");
} else {
log::info!("entry {id} marked as expired");
} }
}
}
}
}
// We use hashes for comparison instead of storing full contents // Check if this expired entry is currently in the clipboard
let mut last_hash: Option<u64> = None; if let Ok((mut reader, ..)) =
let mut buf = Vec::with_capacity(4096); negotiate_mime_type(mime_type_preference)
{
// Helper to hash clipboard contents let mut current_buf = Vec::new();
let hash_contents = |data: &[u8]| -> u64 { if reader.read_to_end(&mut current_buf).is_ok()
let mut hasher = DefaultHasher::new(); && !current_buf.is_empty()
data.hash(&mut hasher);
hasher.finish()
};
// Initialize with current clipboard using smart MIME negotiation
if let Ok((mut reader, _)) = negotiate_mime_type(mime_type_preference) {
buf.clear();
if reader.read_to_end(&mut buf).is_ok() && !buf.is_empty() {
last_hash = Some(hash_contents(&buf));
}
}
loop {
// Process any pending expirations
if let Some(next_exp) = exp_queue.peek_next() {
let now = SqliteClipboardDb::now();
if next_exp <= now {
// Expired entries to process
let expired_ids = exp_queue.pop_expired(now);
for id in expired_ids {
// Verify entry still exists and get its content_hash
let expired_hash: Option<i64> = self
.conn
.query_row(
"SELECT content_hash FROM clipboard WHERE id = ?1",
[id],
|row| row.get(0),
)
.ok();
if let Some(stored_hash) = expired_hash {
// Mark as expired
self
.conn
.execute(
"UPDATE clipboard SET is_expired = 1 WHERE id = ?1",
[id],
)
.ok();
log::info!("Entry {id} marked as expired");
// Check if this expired entry is currently in the clipboard
if let Ok((mut reader, _)) =
negotiate_mime_type(mime_type_preference)
{ {
let mut current_buf = Vec::new(); let current_hash = hash_contents(&current_buf);
if reader.read_to_end(&mut current_buf).is_ok() // Convert stored i64 to u64 for comparison (preserves bit
&& !current_buf.is_empty() // pattern)
{ if current_hash == stored_hash as u64 {
let current_hash = hash_contents(&current_buf); // Clear the clipboard since expired content is still
// Compare as i64 (database stores as i64) // there
if current_hash as i64 == stored_hash { let mut opts = Options::new();
// Clear the clipboard since expired content is still opts
// there .clipboard(wl_clipboard_rs::copy::ClipboardType::Regular);
let mut opts = Options::new(); if opts
opts.clipboard( .copy(
wl_clipboard_rs::copy::ClipboardType::Regular, Source::Bytes(Vec::new().into()),
CopyMimeType::Autodetect,
)
.is_ok()
{
log::info!(
"cleared clipboard containing expired entry {id}"
);
last_hash = None; // reset tracked hash
} else {
log::warn!(
"failed to clear clipboard for expired entry {id}"
); );
if opts
.copy(
Source::Bytes(Vec::new().into()),
CopyMimeType::Autodetect,
)
.is_ok()
{
log::info!(
"Cleared clipboard containing expired entry {id}"
);
last_hash = None; // reset tracked hash
} else {
log::warn!(
"Failed to clear clipboard for expired entry {id}"
);
}
} }
} }
} }
} }
} }
} else {
// Sleep *precisely* until next expiration
let sleep_duration = next_exp - now;
Timer::after(Duration::from_secs_f64(sleep_duration)).await;
continue; // skip normal poll, process expirations first
} }
} }
}
// Normal clipboard polling // Normal clipboard polling (always run, even when expirations are
match negotiate_mime_type(mime_type_preference) { // pending)
Ok((mut reader, _mime_type)) => { match negotiate_mime_type(mime_type_preference) {
buf.clear(); Ok((mut reader, _mime_type, _all_mimes)) => {
if let Err(e) = reader.read_to_end(&mut buf) { buf.clear();
log::error!("Failed to read clipboard contents: {e}"); if let Err(e) = reader.read_to_end(&mut buf) {
Timer::after(Duration::from_millis(500)).await; log::error!("failed to read clipboard contents: {e}");
continue; Timer::after(Duration::from_millis(500)).await;
} continue;
}
// Only store if changed and not empty // Only store if changed and not empty
if !buf.is_empty() { if !buf.is_empty() {
let current_hash = hash_contents(&buf); let current_hash = hash_contents(&buf);
if last_hash != Some(current_hash) { if last_hash != Some(current_hash) {
match self.store_entry( // Clone buf for the async operation since it needs 'static
&buf[..], let buf_clone = buf.clone();
#[allow(clippy::cast_possible_wrap)]
let content_hash = Some(current_hash as i64);
// Clone data for persistence after successful store
let buf_for_persist = buf.clone();
let mime_types_for_persist = _all_mimes.clone();
let selected_mime = _mime_type.clone();
match async_db
.store_entry(
buf_clone,
max_dedupe_search, max_dedupe_search,
max_items, max_items,
Some(excluded_apps), Some(excluded_apps.to_vec()),
) { min_size,
Ok(id) => { max_size,
log::info!("Stored new clipboard entry (id: {id})"); content_hash,
last_hash = Some(current_hash); Some(mime_types_for_persist.clone()),
)
.await
{
Ok(id) => {
log::info!("stored new clipboard entry (id: {id})");
last_hash = Some(current_hash);
// Set expiration if configured // Persist clipboard: fork child to serve data
if let Some(duration) = expire_after { // This keeps the clipboard alive when source app closes
let expires_at = // Check if we're already serving to avoid duplicate processes
SqliteClipboardDb::now() + duration.as_secs_f64(); if persist && get_serving_pid().is_none() {
self.set_expiration(id, expires_at).ok(); let clipboard_data = ClipboardData::new(
buf_for_persist,
mime_types_for_persist,
selected_mime,
);
// Validate and persist in blocking task
if clipboard_data.is_valid().is_ok() {
smol::spawn(async move {
// Use blocking task for fork operation
let result = smol::unblock(move || unsafe {
clipboard::persist_clipboard(clipboard_data)
})
.await;
if let Err(e) = result {
log::debug!("clipboard persistence failed: {e}");
}
})
.detach();
}
} else if persist {
log::trace!(
"Already serving clipboard, skipping persistence fork"
);
}
// Set expiration if configured
if let Some(duration) = expire_after {
let expires_at =
SqliteClipboardDb::now() + duration.as_secs_f64();
if let Err(e) =
async_db.set_expiration(id, expires_at).await
{
log::warn!(
"Failed to set expiration for entry {id}: {e}"
);
} else {
exp_queue.push(expires_at, id); exp_queue.push(expires_at, id);
} }
}, }
Err(crate::db::StashError::ExcludedByApp(_)) => { },
log::info!("Clipboard entry excluded by app filter"); Err(crate::db::StashError::ExcludedByApp(_)) => {
last_hash = Some(current_hash); log::info!("clipboard entry excluded by app filter");
}, last_hash = Some(current_hash);
Err(crate::db::StashError::Store(ref msg)) },
if msg.contains("Excluded by app filter") => Err(crate::db::StashError::Store(ref msg))
{ if msg.contains("Excluded by app filter") =>
log::info!("Clipboard entry excluded by app filter"); {
last_hash = Some(current_hash); log::info!("clipboard entry excluded by app filter");
}, last_hash = Some(current_hash);
Err(e) => { },
log::error!("Failed to store clipboard entry: {e}"); Err(e) => {
last_hash = Some(current_hash); log::error!("failed to store clipboard entry: {e}");
}, last_hash = Some(current_hash);
} },
} }
} }
}, }
Err(e) => { },
let error_msg = e.to_string(); Err(e) => {
if !error_msg.contains("empty") { let error_msg = e.to_string();
log::error!("Failed to get clipboard contents: {e}"); if !error_msg.contains("empty") {
} log::error!("failed to get clipboard contents: {e}");
}, }
} },
// Normal poll interval (only if no expirations pending)
if exp_queue.peek_next().is_none() {
Timer::after(Duration::from_millis(500)).await;
}
} }
});
// Calculate sleep time: min of poll interval and time until next
// expiration
let sleep_duration = if let Some(next_exp) = exp_queue.peek_next() {
let now = SqliteClipboardDb::now();
let time_to_exp = (next_exp - now).max(0.0);
poll_interval.min(Duration::from_secs_f64(time_to_exp))
} else {
poll_interval
};
Timer::after(sleep_duration).await;
}
} }
} }
/// Unit-testable helper: given ordered offers and a preference, return the /// Given ordered offers and a preference, return the
/// chosen MIME type. This mirrors the selection logic in /// chosen MIME type. This mirrors the selection logic in
/// [`negotiate_mime_type`] without requiring a Wayland connection. /// [`negotiate_mime_type`] without requiring a Wayland connection.
#[cfg(test)] #[cfg(test)]
@ -500,4 +568,145 @@ mod tests {
let offered = vec!["text/uri-list".to_string(), "text/plain".to_string()]; let offered = vec!["text/uri-list".to_string(), "text/plain".to_string()];
assert_eq!(pick_mime(&offered, "any").unwrap(), "text/uri-list"); assert_eq!(pick_mime(&offered, "any").unwrap(), "text/uri-list");
} }
/// Test that "text" preference is handled separately from pick_mime logic.
/// Documents that "text" preference uses PasteMimeType::Text directly
/// without querying MIME type ordering. This is functionally a regression
/// test for `negotiate_mime_type()`, which is load bearing, to ensure that
/// we don't mess it up.
#[test]
fn test_text_preference_behavior() {
// When preference is "text", negotiate_mime_type() should:
// 1. Use PasteMimeType::Text directly (no ordering query via
// get_mime_types_ordered)
// 2. Return content with text/plain MIME type
//
// Note: "text" is NOT passed to pick_mime() - it's handled separately
// in negotiate_mime_type() before the pick_mime logic.
// This test documents the separation of concerns.
let offered = vec![
"text/html".to_string(),
"image/png".to_string(),
"text/plain".to_string(),
];
// pick_mime is only called for "image" and "any" preferences
// "text" goes through a different code path
assert_eq!(pick_mime(&offered, "any").unwrap(), "image/png");
}
/// Test MIME type selection priority for "any" preference with multiple
/// types. Documents that:
/// 1. Image types are preferred over text/html
/// 2. Non-html text types are preferred over text/html
/// 3. First offered type is used when no special cases match
#[test]
fn test_any_preference_selection_priority() {
// Priority 1: Image over HTML
let offered = vec!["text/html".to_string(), "image/png".to_string()];
assert_eq!(pick_mime(&offered, "any").unwrap(), "image/png");
// Priority 2: Plain text over HTML
let offered = vec!["text/html".to_string(), "text/plain".to_string()];
assert_eq!(pick_mime(&offered, "any").unwrap(), "text/plain");
// Priority 3: First type when no special handling
let offered =
vec!["application/json".to_string(), "text/plain".to_string()];
assert_eq!(pick_mime(&offered, "any").unwrap(), "application/json");
}
/// Test "image" preference behavior.
/// Documents that:
/// 1. First image/* type is selected
/// 2. Falls back to first type if no images
#[test]
fn test_image_preference_selection_behavior() {
// Multiple images - pick first one
let offered = vec![
"image/jpeg".to_string(),
"image/png".to_string(),
"text/plain".to_string(),
];
assert_eq!(pick_mime(&offered, "image").unwrap(), "image/jpeg");
// No images - fall back to first
let offered = vec!["text/html".to_string(), "text/plain".to_string()];
assert_eq!(pick_mime(&offered, "image").unwrap(), "text/html");
}
/// Test edge case: text/html as only option.
/// Documents that text/html is used when it's the only type available.
#[test]
fn test_html_fallback_as_only_option() {
let offered = vec!["text/html".to_string()];
assert_eq!(pick_mime(&offered, "any").unwrap(), "text/html");
assert_eq!(pick_mime(&offered, "image").unwrap(), "text/html");
}
/// Test complex Firefox scenario with all MIME types.
/// Documents expected behavior when source offers many types.
#[test]
fn test_firefox_copy_image_all_types() {
// Firefox "Copy Image" offers:
// text/html, text/_moz_htmlcontext, text/_moz_htmlinfo,
// image/png, image/bmp, image/x-bmp, image/x-ico,
// text/ico, application/ico, image/ico, image/icon,
// text/icon, image/x-win-bitmap, image/x-win-bmp,
// image/x-icon, text/plain
let offered = vec![
"text/html".to_string(),
"text/_moz_htmlcontext".to_string(),
"image/png".to_string(),
"image/bmp".to_string(),
"text/plain".to_string(),
];
// "any" should pick image/png (first image, skipping HTML)
assert_eq!(pick_mime(&offered, "any").unwrap(), "image/png");
// "image" should pick image/png
assert_eq!(pick_mime(&offered, "image").unwrap(), "image/png");
}
/// Test complex Electron app scenario.
#[test]
fn test_electron_app_mime_types() {
// Electron apps often offer: text/html, image/png, text/plain
let offered = vec![
"text/html".to_string(),
"image/png".to_string(),
"text/plain".to_string(),
];
assert_eq!(pick_mime(&offered, "any").unwrap(), "image/png");
assert_eq!(pick_mime(&offered, "image").unwrap(), "image/png");
}
/// Test that the function handles empty offers correctly.
/// Documents that empty offers result in an error (NoSeats equivalent).
#[test]
fn test_empty_offers_behavior() {
let offered: Vec<String> = vec![];
assert!(pick_mime(&offered, "any").is_none());
assert!(pick_mime(&offered, "image").is_none());
assert!(pick_mime(&offered, "text").is_none());
}
/// Test file manager behavior with URI lists.
#[test]
fn test_file_manager_uri_list_behavior() {
// File managers typically offer: text/uri-list, text/plain,
// x-special/gnome-copied-files
let offered = vec![
"text/uri-list".to_string(),
"text/plain".to_string(),
"x-special/gnome-copied-files".to_string(),
];
// "any" should pick text/uri-list (first)
assert_eq!(pick_mime(&offered, "any").unwrap(), "text/uri-list");
// "image" should fall back to text/uri-list
assert_eq!(pick_mime(&offered, "image").unwrap(), "text/uri-list");
}
} }

View file

@ -1,13 +0,0 @@
use crate::db::{ClipboardDb, SqliteClipboardDb, StashError};
pub trait WipeCommand {
fn wipe(&self) -> Result<(), StashError>;
}
impl WipeCommand for SqliteClipboardDb {
fn wipe(&self) -> Result<(), StashError> {
self.wipe_db()?;
log::info!("Database wiped");
Ok(())
}
}

File diff suppressed because it is too large Load diff

375
src/db/nonblocking.rs Normal file
View file

@ -0,0 +1,375 @@
use std::path::PathBuf;
use rusqlite::OptionalExtension;
use crate::db::{ClipboardDb, SqliteClipboardDb, StashError};
/// Async wrapper for database operations that runs blocking operations
/// on a thread pool to avoid blocking the async runtime. Since
/// [`rusqlite::Connection`] is not Send, we store the database path and open a
/// new connection for each operation.
// Debug derived so the wrapper can appear in log/debug output; Clone is
// implemented manually elsewhere in this file, so it must not be derived here.
#[derive(Debug)]
pub struct AsyncClipboardDb {
  // Path to the SQLite database file; a fresh connection is opened from it
  // for every operation.
  db_path: PathBuf,
}
impl AsyncClipboardDb {
  /// Create a new async wrapper for the database at `db_path`.
  ///
  /// No connection is opened here; each operation opens its own.
  pub fn new(db_path: PathBuf) -> Self {
    Self { db_path }
  }

  /// Store a clipboard entry on a blocking thread.
  ///
  /// Mirrors the synchronous `store_entry`; arguments are owned
  /// (`Vec<u8>`, `Vec<String>`) because the closure is moved onto the
  /// blocking pool and must be `'static`.
  ///
  /// # Errors
  ///
  /// Returns any [`StashError`] produced by opening the database or
  /// storing the entry.
  #[expect(clippy::too_many_arguments)]
  pub async fn store_entry(
    &self,
    data: Vec<u8>,
    max_dedupe_search: u64,
    max_items: u64,
    excluded_apps: Option<Vec<String>>,
    min_size: Option<usize>,
    max_size: usize,
    content_hash: Option<i64>,
    mime_types: Option<Vec<String>>,
  ) -> Result<i64, StashError> {
    let path = self.db_path.clone();
    blocking::unblock(move || {
      let db = Self::open_db_internal(&path)?;
      db.store_entry(
        std::io::Cursor::new(data),
        max_dedupe_search,
        max_items,
        excluded_apps.as_deref(),
        min_size,
        max_size,
        content_hash,
        mime_types.as_deref(),
      )
    })
    .await
  }

  /// Set the expiration timestamp (`expires_at`, fractional seconds) for
  /// entry `id` on a blocking thread.
  ///
  /// # Errors
  ///
  /// Returns any [`StashError`] from opening the database or updating the
  /// row.
  pub async fn set_expiration(
    &self,
    id: i64,
    expires_at: f64,
  ) -> Result<(), StashError> {
    let path = self.db_path.clone();
    blocking::unblock(move || {
      let db = Self::open_db_internal(&path)?;
      db.set_expiration(id, expires_at)
    })
    .await
  }

  /// Load all pending `(expires_at, id)` pairs, ordered by soonest
  /// expiration first. Rows already marked expired are excluded.
  ///
  /// # Errors
  ///
  /// Returns [`StashError::ListDecode`] if the query cannot be prepared or
  /// a row cannot be read.
  pub async fn load_all_expirations(
    &self,
  ) -> Result<Vec<(f64, i64)>, StashError> {
    let path = self.db_path.clone();
    blocking::unblock(move || {
      let db = Self::open_db_internal(&path)?;
      let mut stmt = db
        .conn
        .prepare(
          "SELECT expires_at, id FROM clipboard WHERE expires_at IS NOT NULL \
           AND (is_expired IS NULL OR is_expired = 0) ORDER BY expires_at ASC",
        )
        .map_err(|e| StashError::ListDecode(e.to_string().into()))?;
      let mut rows = stmt
        .query([])
        .map_err(|e| StashError::ListDecode(e.to_string().into()))?;
      let mut expirations = Vec::new();
      while let Some(row) = rows
        .next()
        .map_err(|e| StashError::ListDecode(e.to_string().into()))?
      {
        let exp = row
          .get::<_, f64>(0)
          .map_err(|e| StashError::ListDecode(e.to_string().into()))?;
        let id = row
          .get::<_, i64>(1)
          .map_err(|e| StashError::ListDecode(e.to_string().into()))?;
        expirations.push((exp, id));
      }
      Ok(expirations)
    })
    .await
  }

  /// Fetch the stored content hash for entry `id`, or `None` if the row no
  /// longer exists.
  ///
  /// # Errors
  ///
  /// Returns [`StashError::ListDecode`] if the query fails.
  pub async fn get_content_hash(
    &self,
    id: i64,
  ) -> Result<Option<i64>, StashError> {
    let path = self.db_path.clone();
    blocking::unblock(move || {
      let db = Self::open_db_internal(&path)?;
      let result: Option<i64> = db
        .conn
        .query_row(
          "SELECT content_hash FROM clipboard WHERE id = ?1",
          [id],
          |row| row.get(0),
        )
        .optional()
        .map_err(|e| StashError::ListDecode(e.to_string().into()))?;
      Ok(result)
    })
    .await
  }

  /// Mark entry `id` as expired (`is_expired = 1`) without deleting it.
  ///
  /// # Errors
  ///
  /// Returns [`StashError::Store`] if the update fails.
  pub async fn mark_expired(&self, id: i64) -> Result<(), StashError> {
    let path = self.db_path.clone();
    blocking::unblock(move || {
      let db = Self::open_db_internal(&path)?;
      db.conn
        .execute("UPDATE clipboard SET is_expired = 1 WHERE id = ?1", [id])
        .map_err(|e| StashError::Store(e.to_string().into()))?;
      Ok(())
    })
    .await
  }

  /// Open a fresh connection to the database at `path`.
  ///
  /// Takes `&Path` rather than `&PathBuf` (clippy::ptr_arg); existing call
  /// sites passing `&PathBuf` continue to work via deref coercion. A new
  /// connection per operation is required because [`rusqlite::Connection`]
  /// is not `Send`.
  fn open_db_internal(
    path: &std::path::Path,
  ) -> Result<SqliteClipboardDb, StashError> {
    let conn = rusqlite::Connection::open(path).map_err(|e| {
      StashError::Store(format!("Failed to open database: {e}").into())
    })?;
    SqliteClipboardDb::new(conn, path.to_path_buf())
  }
}
impl Clone for AsyncClipboardDb {
fn clone(&self) -> Self {
Self {
db_path: self.db_path.clone(),
}
}
}
#[cfg(test)]
mod tests {
    use std::{collections::HashSet, hash::Hasher};

    use tempfile::tempdir;

    use super::*;
    use crate::hash::Fnv1aHasher;

    /// Creates an `AsyncClipboardDb` backed by a fresh SQLite file inside a
    /// temporary directory. The `TempDir` guard is returned so the caller
    /// keeps the directory (and database file) alive for the test.
    fn setup_test_db() -> (AsyncClipboardDb, tempfile::TempDir) {
        let temp_dir = tempdir().expect("Failed to create temp dir");
        let db_path = temp_dir.path().join("test.db");
        // Create initial database
        {
            let conn =
                rusqlite::Connection::open(&db_path).expect("Failed to open database");
            crate::db::SqliteClipboardDb::new(conn, db_path.clone())
                .expect("Failed to create database");
        }
        let async_db = AsyncClipboardDb::new(db_path);
        (async_db, temp_dir)
    }

    // Storing via the async wrapper returns a positive id and persists a
    // content hash equal to FNV-1a of the raw bytes.
    #[test]
    fn test_async_store_entry() {
        smol::block_on(async {
            let (async_db, _temp_dir) = setup_test_db();
            let data = b"async test data";
            let id = async_db
                .store_entry(
                    data.to_vec(),
                    100,
                    1000,
                    None,
                    None,
                    5_000_000,
                    None,
                    None,
                )
                .await
                .expect("Failed to store entry");
            assert!(id > 0, "Should return positive id");
            // Verify it was stored by checking content hash
            let hash = async_db
                .get_content_hash(id)
                .await
                .expect("Failed to get hash")
                .expect("Hash should exist");
            // Calculate expected hash
            let mut hasher = Fnv1aHasher::new();
            hasher.write(data);
            let expected_hash = hasher.finish() as i64;
            assert_eq!(hash, expected_hash, "Stored hash should match");
        });
    }

    // A set expiration round-trips through `load_all_expirations` with the
    // correct timestamp and id.
    #[test]
    fn test_async_set_expiration_and_load() {
        smol::block_on(async {
            let (async_db, _temp_dir) = setup_test_db();
            let data = b"expiring entry";
            let id = async_db
                .store_entry(
                    data.to_vec(),
                    100,
                    1000,
                    None,
                    None,
                    5_000_000,
                    None,
                    None,
                )
                .await
                .expect("Failed to store entry");
            let expires_at = 1234567890.5;
            async_db
                .set_expiration(id, expires_at)
                .await
                .expect("Failed to set expiration");
            // Load all expirations
            let expirations = async_db
                .load_all_expirations()
                .await
                .expect("Failed to load expirations");
            assert_eq!(expirations.len(), 1, "Should have one expiration");
            assert!(
                // Floating-point comparison with a small tolerance.
                (expirations[0].0 - expires_at).abs() < 0.001,
                "Expiration time should match"
            );
            assert_eq!(expirations[0].1, id, "Expiration id should match");
        });
    }

    // Entries marked expired are filtered out of `load_all_expirations`.
    #[test]
    fn test_async_mark_expired() {
        smol::block_on(async {
            let (async_db, _temp_dir) = setup_test_db();
            let data = b"entry to expire";
            let id = async_db
                .store_entry(
                    data.to_vec(),
                    100,
                    1000,
                    None,
                    None,
                    5_000_000,
                    None,
                    None,
                )
                .await
                .expect("Failed to store entry");
            async_db
                .mark_expired(id)
                .await
                .expect("Failed to mark as expired");
            // Load expirations, this should be empty since entry is now marked
            // expired
            let expirations = async_db
                .load_all_expirations()
                .await
                .expect("Failed to load expirations");
            assert!(
                expirations.is_empty(),
                "Expired entries should not be loaded"
            );
        });
    }

    // Looking up a hash for a non-existent id yields Ok(None), not an error.
    #[test]
    fn test_async_get_content_hash_not_found() {
        smol::block_on(async {
            let (async_db, _temp_dir) = setup_test_db();
            let hash = async_db
                .get_content_hash(999999)
                .await
                .expect("Should not fail on non-existent entry");
            assert!(hash.is_none(), "Hash should be None for non-existent entry");
        });
    }

    // A cloned wrapper operates on the same database file independently of
    // the original.
    #[test]
    fn test_async_clone() {
        let (async_db, _temp_dir) = setup_test_db();
        let cloned = async_db.clone();
        smol::block_on(async {
            // Both should work independently
            let data = b"clone test";
            let id1 = async_db
                .store_entry(
                    data.to_vec(),
                    100,
                    1000,
                    None,
                    None,
                    5_000_000,
                    None,
                    None,
                )
                .await
                .expect("Failed with original");
            let id2 = cloned
                .store_entry(
                    data.to_vec(),
                    100,
                    1000,
                    None,
                    None,
                    5_000_000,
                    None,
                    None,
                )
                .await
                .expect("Failed with clone");
            assert_ne!(id1, id2, "Should store as separate entries");
        });
    }

    // Concurrent stores from spawned tasks all succeed and produce unique
    // ids (each task opens its own connection).
    #[test]
    fn test_async_concurrent_operations() {
        smol::block_on(async {
            let (async_db, _temp_dir) = setup_test_db();
            // Spawn multiple concurrent store operations
            let futures: Vec<_> = (0..5)
                .map(|i| {
                    let db = async_db.clone();
                    let data = format!("concurrent test {}", i).into_bytes();
                    smol::spawn(async move {
                        db.store_entry(data, 100, 1000, None, None, 5_000_000, None, None)
                            .await
                    })
                })
                .collect();
            let results: Result<Vec<_>, _> = futures::future::join_all(futures)
                .await
                .into_iter()
                .collect();
            let ids = results.expect("All stores should succeed");
            assert_eq!(ids.len(), 5, "Should have 5 entries");
            // All IDs should be unique
            let unique_ids: HashSet<_> = ids.iter().collect();
            assert_eq!(unique_ids.len(), 5, "All IDs should be unique");
        });
    }
}

101
src/hash.rs Normal file
View file

@ -0,0 +1,101 @@
/// FNV-1a hasher for deterministic hashing across process runs.
///
/// Unlike `std::collections::hash_map::DefaultHasher` (which uses SipHash
/// with a random seed), this produces stable hashes suitable for persistent
/// storage and cross-process comparison.
///
/// # Example
///
/// ```
/// use std::hash::Hasher;
///
/// use stash::hash::Fnv1aHasher;
///
/// let mut hasher = Fnv1aHasher::new();
/// hasher.write(b"hello");
/// let hash = hasher.finish();
/// ```
#[derive(Clone, Copy, Debug)]
pub struct Fnv1aHasher {
    state: u64,
}

impl Fnv1aHasher {
    // 64-bit FNV offset basis and prime.
    const FNV_OFFSET: u64 = 0xCBF29CE484222325;
    const FNV_PRIME: u64 = 0x100000001B3;

    /// Creates a new hasher initialized with the FNV-1a offset basis.
    #[must_use]
    pub fn new() -> Self {
        Fnv1aHasher {
            state: Self::FNV_OFFSET,
        }
    }
}

impl Default for Fnv1aHasher {
    fn default() -> Self {
        Self::new()
    }
}

impl std::hash::Hasher for Fnv1aHasher {
    fn write(&mut self, bytes: &[u8]) {
        // FNV-1a: XOR each byte into the state, then multiply by the prime
        // with wrapping (mod 2^64) arithmetic.
        self.state = bytes.iter().fold(self.state, |acc, &byte| {
            (acc ^ u64::from(byte)).wrapping_mul(Self::FNV_PRIME)
        });
    }

    fn finish(&self) -> u64 {
        self.state
    }
}
#[cfg(test)]
mod tests {
    use std::hash::Hasher;

    use super::*;

    #[test]
    fn test_fnv1a_basic() {
        // Known FNV-1a 64-bit test vector for the input "hello".
        let mut hasher = Fnv1aHasher::new();
        hasher.write(b"hello");
        assert_eq!(hasher.finish(), 0xA430D84680AABD0B);
    }

    #[test]
    fn test_fnv1a_empty() {
        // A hasher that has seen no input reports the offset basis.
        assert_eq!(Fnv1aHasher::new().finish(), Fnv1aHasher::FNV_OFFSET);
    }

    #[test]
    fn test_fnv1a_deterministic() {
        // Independent hashers over the same input must agree.
        let hash_of = |input: &[u8]| {
            let mut hasher = Fnv1aHasher::new();
            hasher.write(input);
            hasher.finish()
        };
        assert_eq!(hash_of(b"test data"), hash_of(b"test data"));
    }

    #[test]
    fn test_default_trait() {
        // Default::default() must be equivalent to new().
        assert_eq!(
            Fnv1aHasher::new().finish(),
            Fnv1aHasher::default().finish()
        );
    }

    #[test]
    fn test_copy_trait() {
        let mut original = Fnv1aHasher::new();
        original.write(b"data");
        // Copy semantics: the original remains usable after the copy, and
        // both report the same state.
        let duplicate = original;
        assert_eq!(original.finish(), duplicate.finish());
    }
}

View file

@ -1,3 +1,10 @@
mod clipboard;
mod commands;
mod db;
mod hash;
mod mime;
mod multicall;
use std::{ use std::{
env, env,
io::{self, IsTerminal}, io::{self, IsTerminal},
@ -6,24 +13,27 @@ use std::{
}; };
use clap::{CommandFactory, Parser, Subcommand}; use clap::{CommandFactory, Parser, Subcommand};
use color_eyre::eyre;
use humantime::parse_duration; use humantime::parse_duration;
use inquire::Confirm; use inquire::Confirm;
mod commands; // While the module is named "wayland", the Wayland module is *strictly* for the
pub(crate) mod db; // use-toplevel feature as it requires some low-level wayland crates that are
pub(crate) mod mime; // not required *by default*. The module is named that way because "toplevel"
mod multicall; // sounded too silly. Stash is strictly a Wayland clipboard manager.
#[cfg(feature = "use-toplevel")] mod wayland; #[cfg(feature = "use-toplevel")] mod wayland;
use crate::commands::{ use crate::{
decode::DecodeCommand, commands::{
delete::DeleteCommand, decode::DecodeCommand,
import::ImportCommand, delete::DeleteCommand,
list::ListCommand, import::ImportCommand,
query::QueryCommand, list::ListCommand,
store::StoreCommand, query::QueryCommand,
watch::WatchCommand, store::StoreCommand,
wipe::WipeCommand, watch::WatchCommand,
},
db::{ClipboardDb, DEFAULT_MAX_ENTRY_SIZE},
}; };
#[derive(Parser)] #[derive(Parser)]
@ -42,6 +52,16 @@ struct Cli {
#[arg(long, default_value_t = 20)] #[arg(long, default_value_t = 20)]
max_dedupe_search: u64, max_dedupe_search: u64,
/// Minimum size (in bytes) for clipboard entries. Entries smaller than this
/// will not be stored.
#[arg(long, env = "STASH_MIN_SIZE")]
min_size: Option<usize>,
/// Maximum size (in bytes) for clipboard entries. Entries larger than this
/// will not be stored. Defaults to 5MB.
#[arg(long, default_value_t = DEFAULT_MAX_ENTRY_SIZE, env = "STASH_MAX_SIZE")]
max_size: usize,
/// Maximum width (in characters) for clipboard entry previews in list /// Maximum width (in characters) for clipboard entry previews in list
/// output. /// output.
#[arg(long, default_value_t = 100)] #[arg(long, default_value_t = 100)]
@ -78,6 +98,10 @@ enum Command {
/// Show only expired entries (diagnostic, does not remove them) /// Show only expired entries (diagnostic, does not remove them)
#[arg(long)] #[arg(long)]
expired: bool, expired: bool,
/// Reverse the order of entries (oldest first instead of newest first)
#[arg(long)]
reverse: bool,
}, },
/// Decode and output clipboard entry by id /// Decode and output clipboard entry by id
@ -99,16 +123,6 @@ enum Command {
ask: bool, ask: bool,
}, },
/// Wipe all clipboard history
///
/// DEPRECATED: Use `stash db wipe` instead
#[command(hide = true)]
Wipe {
/// Ask for confirmation before wiping
#[arg(long)]
ask: bool,
},
/// Database management operations /// Database management operations
Db { Db {
#[command(subcommand)] #[command(subcommand)]
@ -135,6 +149,10 @@ enum Command {
/// MIME type preference for clipboard reading. /// MIME type preference for clipboard reading.
#[arg(short = 't', long, default_value = "any")] #[arg(short = 't', long, default_value = "any")]
mime_type: String, mime_type: String,
/// Persist clipboard contents after the source application closes.
#[arg(long)]
persist: bool,
}, },
} }
@ -171,9 +189,27 @@ fn report_error<T>(
} }
} }
fn confirm(prompt: &str) -> bool {
Confirm::new(prompt)
.with_default(false)
.prompt()
.unwrap_or_else(|e| {
log::error!("confirmation prompt failed: {e}");
false
})
}
#[allow(clippy::too_many_lines)] // whatever #[allow(clippy::too_many_lines)] // whatever
fn main() -> color_eyre::eyre::Result<()> { fn main() -> eyre::Result<()> {
color_eyre::install()?;
// Check if we're being called as a multicall binary // Check if we're being called as a multicall binary
//
// NOTE: We cannot use clap's multicall here because it requires the main
// command to have no arguments (only subcommands), but our Cli has global
// arguments like --max-items, --db-path, etc. Instead, we manually detect
// the invocation name and route appropriately. While this is ugly, it's
// seemingly the only option.
let program_name = env::args().next().map(|s| { let program_name = env::args().next().map(|s| {
PathBuf::from(s) PathBuf::from(s)
.file_name() .file_name()
@ -199,19 +235,25 @@ fn main() -> color_eyre::eyre::Result<()> {
.filter_level(cli.verbosity.into()) .filter_level(cli.verbosity.into())
.init(); .init();
let db_path = cli.db_path.unwrap_or_else(|| { let db_path = match cli.db_path {
dirs::cache_dir() Some(path) => path,
.unwrap_or_else(|| PathBuf::from("/tmp")) None => {
.join("stash") let cache_dir = dirs::cache_dir().ok_or_else(|| {
.join("db") eyre::eyre!(
}); "Could not determine cache directory. Set --db-path or \
$STASH_DB_PATH explicitly."
)
})?;
cache_dir.join("stash").join("db")
},
};
if let Some(parent) = db_path.parent() { if let Some(parent) = db_path.parent() {
std::fs::create_dir_all(parent)?; std::fs::create_dir_all(parent)?;
} }
let conn = rusqlite::Connection::open(&db_path)?; let conn = rusqlite::Connection::open(&db_path)?;
let db = db::SqliteClipboardDb::new(conn)?; let db = db::SqliteClipboardDb::new(conn, db_path)?;
match cli.command { match cli.command {
Some(Command::Store) => { Some(Command::Store) => {
@ -226,20 +268,26 @@ fn main() -> color_eyre::eyre::Result<()> {
&cli.excluded_apps, &cli.excluded_apps,
#[cfg(not(feature = "use-toplevel"))] #[cfg(not(feature = "use-toplevel"))]
&[], &[],
cli.min_size,
cli.max_size,
), ),
"failed to store entry", "failed to store entry",
); );
}, },
Some(Command::List { format, expired }) => { Some(Command::List {
format,
expired,
reverse,
}) => {
match format.as_deref() { match format.as_deref() {
Some("tsv") => { Some("tsv") => {
report_error( report_error(
db.list(io::stdout(), cli.preview_width, expired), db.list(io::stdout(), cli.preview_width, expired, reverse),
"failed to list entries", "failed to list entries",
); );
}, },
Some("json") => { Some("json") => {
match db.list_json(expired) { match db.list_json(expired, reverse) {
Ok(json) => { Ok(json) => {
println!("{json}"); println!("{json}");
}, },
@ -254,12 +302,12 @@ fn main() -> color_eyre::eyre::Result<()> {
None => { None => {
if std::io::stdout().is_terminal() { if std::io::stdout().is_terminal() {
report_error( report_error(
db.list_tui(cli.preview_width, expired), db.list_tui(cli.preview_width, expired, reverse),
"failed to list entries in TUI", "failed to list entries in TUI",
); );
} else { } else {
report_error( report_error(
db.list(io::stdout(), cli.preview_width, expired), db.list(io::stdout(), cli.preview_width, expired, reverse),
"failed to list entries", "failed to list entries",
); );
} }
@ -276,10 +324,7 @@ fn main() -> color_eyre::eyre::Result<()> {
let mut should_proceed = true; let mut should_proceed = true;
if ask { if ask {
should_proceed = should_proceed =
Confirm::new("Are you sure you want to delete clipboard entries?") confirm("Are you sure you want to delete clipboard entries?");
.with_default(false)
.prompt()
.unwrap_or(false);
if !should_proceed { if !should_proceed {
log::info!("aborted by user."); log::info!("aborted by user.");
@ -330,27 +375,6 @@ fn main() -> color_eyre::eyre::Result<()> {
} }
} }
}, },
Some(Command::Wipe { ask }) => {
eprintln!(
"Warning: The 'stash wipe' command is deprecated. Use 'stash db \
wipe' instead."
);
let mut should_proceed = true;
if ask {
should_proceed = Confirm::new(
"Are you sure you want to wipe all clipboard history?",
)
.with_default(false)
.prompt()
.unwrap_or(false);
if !should_proceed {
log::info!("wipe command aborted by user.");
}
}
if should_proceed {
report_error(db.wipe(), "failed to wipe database");
}
},
Some(Command::Db { action }) => { Some(Command::Db { action }) => {
match action { match action {
@ -362,10 +386,7 @@ fn main() -> color_eyre::eyre::Result<()> {
} else { } else {
"Are you sure you want to wipe ALL clipboard history?" "Are you sure you want to wipe ALL clipboard history?"
}; };
should_proceed = Confirm::new(message) should_proceed = confirm(message);
.with_default(false)
.prompt()
.unwrap_or(false);
if !should_proceed { if !should_proceed {
log::info!("db wipe command aborted by user."); log::info!("db wipe command aborted by user.");
} }
@ -374,21 +395,21 @@ fn main() -> color_eyre::eyre::Result<()> {
if expired { if expired {
match db.cleanup_expired() { match db.cleanup_expired() {
Ok(count) => { Ok(count) => {
log::info!("Wiped {} expired entries", count); log::info!("wiped {count} expired entries");
}, },
Err(e) => { Err(e) => {
log::error!("failed to wipe expired entries: {e}"); log::error!("failed to wipe expired entries: {e}");
}, },
} }
} else { } else {
report_error(db.wipe(), "failed to wipe database"); report_error(db.wipe_db(), "failed to wipe database");
} }
} }
}, },
DbAction::Vacuum => { DbAction::Vacuum => {
match db.vacuum() { match db.vacuum() {
Ok(()) => { Ok(()) => {
log::info!("Database optimized successfully"); log::info!("database optimized successfully");
}, },
Err(e) => { Err(e) => {
log::error!("failed to vacuum database: {e}"); log::error!("failed to vacuum database: {e}");
@ -398,7 +419,7 @@ fn main() -> color_eyre::eyre::Result<()> {
DbAction::Stats => { DbAction::Stats => {
match db.stats() { match db.stats() {
Ok(stats) => { Ok(stats) => {
println!("{}", stats); println!("{stats}");
}, },
Err(e) => { Err(e) => {
log::error!("failed to get database stats: {e}"); log::error!("failed to get database stats: {e}");
@ -411,13 +432,10 @@ fn main() -> color_eyre::eyre::Result<()> {
Some(Command::Import { r#type, ask }) => { Some(Command::Import { r#type, ask }) => {
let mut should_proceed = true; let mut should_proceed = true;
if ask { if ask {
should_proceed = Confirm::new( should_proceed = confirm(
"Are you sure you want to import clipboard data? This may \ "Are you sure you want to import clipboard data? This may \
overwrite existing entries.", overwrite existing entries.",
) );
.with_default(false)
.prompt()
.unwrap_or(false);
if !should_proceed { if !should_proceed {
log::info!("import command aborted by user."); log::info!("import command aborted by user.");
} }
@ -441,6 +459,7 @@ fn main() -> color_eyre::eyre::Result<()> {
Some(Command::Watch { Some(Command::Watch {
expire_after, expire_after,
mime_type, mime_type,
persist,
}) => { }) => {
db.watch( db.watch(
cli.max_dedupe_search, cli.max_dedupe_search,
@ -451,7 +470,11 @@ fn main() -> color_eyre::eyre::Result<()> {
&[], &[],
expire_after, expire_after,
&mime_type, &mime_type,
); cli.min_size,
cli.max_size,
persist,
)
.await;
}, },
None => { None => {

View file

@ -360,7 +360,7 @@ fn execute_watch_command(
/// Select the best MIME type from available types when none is specified. /// Select the best MIME type from available types when none is specified.
/// Prefers specific content types (image/*, application/*) over generic /// Prefers specific content types (image/*, application/*) over generic
/// text representations (TEXT, STRING, UTF8_STRING). /// text representations (TEXT, STRING, `UTF8_STRING`).
fn select_best_mime_type( fn select_best_mime_type(
types: &std::collections::HashSet<String>, types: &std::collections::HashSet<String>,
) -> Option<String> { ) -> Option<String> {
@ -421,7 +421,7 @@ fn handle_regular_paste(
let selected_type = available_types.as_ref().and_then(select_best_mime_type); let selected_type = available_types.as_ref().and_then(select_best_mime_type);
let mime_type = if let Some(ref best) = selected_type { let mime_type = if let Some(ref best) = selected_type {
log::debug!("Auto-selecting MIME type: {}", best); log::debug!("auto-selecting MIME type: {best}");
PasteMimeType::Specific(best) PasteMimeType::Specific(best)
} else { } else {
get_paste_mime_type(args.mime_type.as_deref()) get_paste_mime_type(args.mime_type.as_deref())
@ -461,14 +461,14 @@ fn handle_regular_paste(
// Only add newline for text content, not binary data // Only add newline for text content, not binary data
// Check if the MIME type indicates text content // Check if the MIME type indicates text content
let is_text_content = if !types.is_empty() { let is_text_content = if types.is_empty() {
// If no MIME type, check if content is valid UTF-8
std::str::from_utf8(&buf).is_ok()
} else {
types.starts_with("text/") types.starts_with("text/")
|| types == "application/json" || types == "application/json"
|| types == "application/xml" || types == "application/xml"
|| types == "application/x-sh" || types == "application/x-sh"
} else {
// If no MIME type, check if content is valid UTF-8
std::str::from_utf8(&buf).is_ok()
}; };
if !args.no_newline if !args.no_newline

View file

@ -1,8 +1,9 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
sync::{LazyLock, Mutex}, sync::{Arc, LazyLock, Mutex},
}; };
use arc_swap::ArcSwapOption;
use log::debug; use log::debug;
use wayland_client::{ use wayland_client::{
Connection as WaylandConnection, Connection as WaylandConnection,
@ -17,7 +18,7 @@ use wayland_protocols_wlr::foreign_toplevel::v1::client::{
zwlr_foreign_toplevel_manager_v1::{self, ZwlrForeignToplevelManagerV1}, zwlr_foreign_toplevel_manager_v1::{self, ZwlrForeignToplevelManagerV1},
}; };
static FOCUSED_APP: Mutex<Option<String>> = Mutex::new(None); static FOCUSED_APP: ArcSwapOption<String> = ArcSwapOption::const_empty();
static TOPLEVEL_APPS: LazyLock<Mutex<HashMap<ObjectId, String>>> = static TOPLEVEL_APPS: LazyLock<Mutex<HashMap<ObjectId, String>>> =
LazyLock::new(|| Mutex::new(HashMap::new())); LazyLock::new(|| Mutex::new(HashMap::new()));
@ -32,12 +33,11 @@ pub fn init_wayland_state() {
/// Get the currently focused window application name using Wayland protocols /// Get the currently focused window application name using Wayland protocols
pub fn get_focused_window_app() -> Option<String> { pub fn get_focused_window_app() -> Option<String> {
// Try Wayland protocol first // Load the focused app using lock-free arc-swap
if let Ok(focused) = FOCUSED_APP.lock() let focused = FOCUSED_APP.load();
&& let Some(ref app) = *focused if let Some(app) = focused.as_ref() {
{
debug!("Found focused app via Wayland protocol: {app}"); debug!("Found focused app via Wayland protocol: {app}");
return Some(app.clone()); return Some(app.to_string());
} }
debug!("No focused window detection method worked"); debug!("No focused window detection method worked");
@ -152,12 +152,11 @@ impl Dispatch<ZwlrForeignToplevelHandleV1, ()> for AppState {
}) { }) {
debug!("Toplevel activated"); debug!("Toplevel activated");
// Update focused app to the `app_id` of this handle // Update focused app to the `app_id` of this handle
if let (Ok(apps), Ok(mut focused)) = if let Ok(apps) = TOPLEVEL_APPS.lock()
(TOPLEVEL_APPS.lock(), FOCUSED_APP.lock())
&& let Some(app_id) = apps.get(&handle_id) && let Some(app_id) = apps.get(&handle_id)
{ {
debug!("Setting focused app to: {app_id}"); debug!("Setting focused app to: {app_id}");
*focused = Some(app_id.clone()); FOCUSED_APP.store(Some(Arc::new(app_id.clone())));
} }
} }
}, },