Compare commits

...

27 commits

Author SHA1 Message Date
d40cbb74fc
various: shared HTTP client with connection pooling
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id13c17e9352da970a289f4e3ad909c5b6a6a6964
2026-02-19 00:15:57 +03:00
05c946a155
platform: add mockito HTTP tests to modrinth
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I880c11195559fcfb9701e945a10fe87b6a6a6964
2026-02-13 00:50:13 +03:00
73f881c336
chore: bump dependencies
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ic1fda520473e53d1a584a3dda63ffda86a6a6964
2026-02-13 00:50:12 +03:00
787e93fdaa
cli: add --all, --updates, --no-deps flags to commands
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I25581b8de945284b4ce7c2c85601a86f6a6a6964
2026-02-13 00:50:11 +03:00
1938158b07
infra: add clippy allows; fix PathBuf -> Path
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I07795374f678fa2ec17b4171fa7e32276a6a6964
2026-02-13 00:50:10 +03:00
a58b956374
platform: add CurseForge side detection from categories
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I62c5117ed97bbc2389330720b4761a716a6a6964
2026-02-13 00:50:09 +03:00
d4bc6b3887
cli: wire shelve flag; more clippy fixes
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I694da71afe93bcb33687ff7d8e75f04f6a6a6964
2026-02-13 00:50:08 +03:00
f0ff262643
cli: wire get_site_url in inspect; fix clippy in remote_update
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ifdbc34dd7a5a51edc5dff326eac095516a6a6964
2026-02-13 00:50:07 +03:00
1251255bd5
cli: add version mismatch warning to ls; wire ErrorSeverity in status
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I170f944127333c552e8a230972ed89d66a6a6964
2026-02-13 00:50:06 +03:00
ee63c803ab
cli: add --all flag to update; wire UpdateStrategy enforcement
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9570557396ac46e82cbabbd8e39be0936a6a6964
2026-02-13 00:50:05 +03:00
96c2468097
cli: wire MultiError in add/rm; add typo suggestions
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I98240ec0f9e3932a46e79f82f32cd5d36a6a6964
2026-02-13 00:50:05 +03:00
bb5523303f
cli: make init and cfg interactive by default
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I7596bb9913a8d98133bdf3c531241bf06a6a6964
2026-02-13 00:50:04 +03:00
6b585a5f21
fetch: add parallel downloads and --shelve flag
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id61b7c502923c697599cfb3afed948d56a6a6964
2026-02-13 00:50:03 +03:00
85c69c3372
export: add text replacement and missing projects rules
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I3f404448278e8b1d492fa5d1cf7397736a6a6964
2026-02-13 00:50:02 +03:00
67191b0523
ui: add interactive prompts and typo suggestions
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iec773550dca1f0ddc2f60360e6b7cb956a6a6964
2026-02-13 00:50:01 +03:00
ec92eea46e
model: add cross-provider version mismatch detection
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I71ea6c9141ec6b36edf708af1c8ed53d6a6a6964
2026-02-13 00:50:00 +03:00
31c2664e58
model: add lockfile migration system (v1 -> v2)
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I335406fc4ee4a04071f6dcb6782e1a076a6a6964
2026-02-13 00:49:59 +03:00
fbbce5edbd
error: add MultiError for batch error aggregation
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I468289d7a5c3956fc410b6af1a8d070d6a6a6964
2026-02-13 00:49:58 +03:00
72bd495037
nix: bump nixpkgs
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9f90f6be29fe675c1ee6f03c6bd0046f6a6a6964
2026-02-13 00:49:57 +03:00
95da8911a4
tests: add CLI argument and config tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ibf3751724aff64e6a901c2703d7778d16a6a6964
2026-02-13 00:49:56 +03:00
a7dd825b58
fix: update Config initializers with new field
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I6c1abd66baf81256f73ae03846673ebf6a6a6964
2026-02-13 00:49:55 +03:00
941aa7b304
model: add exportServerSideProjectsToClient config property
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I638f8a1f2eb7d4f40de55aebd884ed9c6a6a6964
2026-02-13 00:49:54 +03:00
b8de6210b4
cli: add --show-io-errors and --no-server to export cmd
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ib3b3818fbd7902678c3a4cecc079f8fb6a6a6964
2026-02-13 00:49:53 +03:00
72fc5158ed
cli: add --deps flag to import command
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I19ac6af4c80fe7e86fe8259ed3bf49166a6a6964
2026-02-13 00:15:07 +03:00
dac803f740
cli: implement rm --all to remove all projects
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9776c31b90336bf33475adb1e36dc3a36a6a6964
2026-02-13 00:15:06 +03:00
69eaa272d9
cli: add Pakku-parity command flags
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ib1afd8838dfbd1b70a35d313ed3505f26a6a6964
2026-02-13 00:15:05 +03:00
5a0a5cedeb
docs: prepare Pakker documentation
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I03df3787081bde6ebf0366a24320307a6a6a6964
2026-02-13 00:15:04 +03:00
41 changed files with 5864 additions and 594 deletions

373
Cargo.lock generated
View file

@ -16,7 +16,7 @@ checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher",
"cpufeatures",
"cpufeatures 0.2.17",
]
[[package]]
@ -80,9 +80,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.100"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
[[package]]
name = "assert-json-diff"
@ -211,6 +211,17 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chacha20"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601"
dependencies = [
"cfg-if",
"cpufeatures 0.3.0",
"rand_core 0.10.0",
]
[[package]]
name = "cipher"
version = "0.4.4"
@ -223,9 +234,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.54"
version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394"
checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806"
dependencies = [
"clap_builder",
"clap_derive",
@ -233,9 +244,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.54"
version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00"
checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2"
dependencies = [
"anstream",
"anstyle",
@ -245,9 +256,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.49"
version = "4.5.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
dependencies = [
"heck",
"proc-macro2",
@ -257,9 +268,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.6"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831"
[[package]]
name = "cmake"
@ -321,9 +332,9 @@ dependencies = [
[[package]]
name = "constant_time_eq"
version = "0.3.1"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b"
[[package]]
name = "core-foundation"
@ -360,6 +371,15 @@ dependencies = [
"libc",
]
[[package]]
name = "cpufeatures"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201"
dependencies = [
"libc",
]
[[package]]
name = "crc"
version = "3.3.0"
@ -498,9 +518,9 @@ dependencies = [
[[package]]
name = "env_filter"
version = "0.1.4"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2"
checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f"
dependencies = [
"log",
"regex",
@ -508,9 +528,9 @@ dependencies = [
[[package]]
name = "env_logger"
version = "0.11.8"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f"
checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d"
dependencies = [
"anstream",
"anstyle",
@ -564,6 +584,12 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.2"
@ -706,10 +732,26 @@ dependencies = [
]
[[package]]
name = "git2"
version = "0.20.3"
name = "getrandom"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2b37e2f62729cdada11f0e6b3b6fe383c69c29fc619e391223e12856af308c"
checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"r-efi",
"rand_core 0.10.0",
"wasip2",
"wasip3",
"wasm-bindgen",
]
[[package]]
name = "git2"
version = "0.20.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b88256088d75a56f8ecfa070513a775dd9107f6530ef14919dac831af9cfe2b"
dependencies = [
"bitflags 2.10.0",
"libc",
@ -720,6 +762,12 @@ dependencies = [
"url",
]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "h2"
version = "0.4.13"
@ -739,6 +787,15 @@ dependencies = [
"tracing",
]
[[package]]
name = "hashbrown"
version = "0.15.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
dependencies = [
"foldhash",
]
[[package]]
name = "hashbrown"
version = "0.16.1"
@ -951,6 +1008,12 @@ dependencies = [
"zerovec",
]
[[package]]
name = "id-arena"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954"
[[package]]
name = "idna"
version = "1.1.0"
@ -979,7 +1042,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
dependencies = [
"equivalent",
"hashbrown",
"hashbrown 0.16.1",
"serde",
"serde_core",
]
[[package]]
@ -1108,6 +1173,12 @@ dependencies = [
"zeroize",
]
[[package]]
name = "leb128fmt"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "libbz2-rs-sys"
version = "0.2.2"
@ -1116,9 +1187,9 @@ checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7"
[[package]]
name = "libc"
version = "0.2.180"
version = "0.2.181"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
checksum = "459427e2af2b9c839b132acb702a1c654d95e10f8c326bfc2ad11310e458b1c5"
[[package]]
name = "libgit2-sys"
@ -1263,9 +1334,9 @@ dependencies = [
[[package]]
name = "mockito"
version = "1.7.1"
version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e0603425789b4a70fcc4ac4f5a46a566c116ee3e2a6b768dc623f7719c611de"
checksum = "90820618712cab19cfc46b274c6c22546a82affcb3c3bdf0f29e3db8e1bb92c0"
dependencies = [
"assert-json-diff",
"bytes",
@ -1278,7 +1349,7 @@ dependencies = [
"hyper-util",
"log",
"pin-project-lite",
"rand",
"rand 0.9.2",
"regex",
"serde_json",
"serde_urlencoded",
@ -1288,9 +1359,9 @@ dependencies = [
[[package]]
name = "num-conv"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050"
[[package]]
name = "once_cell"
@ -1340,16 +1411,17 @@ dependencies = [
"env_logger",
"futures",
"git2",
"glob",
"indicatif",
"keyring",
"libc",
"log",
"md-5",
"mockito",
"once_cell",
"rand",
"rand 0.10.0",
"regex",
"reqwest",
"semver",
"serde",
"serde_json",
"sha1",
@ -1357,7 +1429,7 @@ dependencies = [
"strsim",
"tempfile",
"textwrap",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"walkdir",
"yansi",
@ -1453,9 +1525,9 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "ppmd-rust"
version = "1.3.0"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d558c559f0450f16f2a27a1f017ef38468c1090c9ce63c8e51366232d53717b4"
checksum = "efca4c95a19a79d1c98f791f10aebd5c1363b473244630bb7dbde1dc98455a24"
[[package]]
name = "ppv-lite86"
@ -1466,6 +1538,16 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "prettyplease"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.105"
@ -1489,7 +1571,7 @@ dependencies = [
"rustc-hash",
"rustls",
"socket2",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tracing",
"web-time",
@ -1505,13 +1587,13 @@ dependencies = [
"bytes",
"getrandom 0.3.4",
"lru-slab",
"rand",
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls",
"rustls-pki-types",
"slab",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tinyvec",
"tracing",
"web-time",
@ -1553,7 +1635,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha",
"rand_core",
"rand_core 0.9.3",
]
[[package]]
name = "rand"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8"
dependencies = [
"chacha20",
"getrandom 0.4.1",
"rand_core 0.10.0",
]
[[package]]
@ -1563,7 +1656,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core",
"rand_core 0.9.3",
]
[[package]]
@ -1575,6 +1668,12 @@ dependencies = [
"getrandom 0.3.4",
]
[[package]]
name = "rand_core"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba"
[[package]]
name = "redox_syscall"
version = "0.5.18"
@ -1586,9 +1685,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.12.2"
version = "1.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276"
dependencies = [
"aho-corasick",
"memchr",
@ -1615,9 +1714,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "reqwest"
version = "0.13.1"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04e9018c9d814e5f30cc16a0f03271aeab3571e609612d9fe78c1aa8d11c2f62"
checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801"
dependencies = [
"base64",
"bytes",
@ -1820,6 +1919,12 @@ dependencies = [
"libc",
]
[[package]]
name = "semver"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
[[package]]
name = "serde"
version = "1.0.228"
@ -1882,7 +1987,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"cpufeatures 0.2.17",
"digest",
]
@ -1893,7 +1998,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
"cpufeatures",
"cpufeatures 0.2.17",
"digest",
]
@ -2031,12 +2136,12 @@ dependencies = [
[[package]]
name = "tempfile"
version = "3.24.0"
version = "3.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1"
dependencies = [
"fastrand",
"getrandom 0.3.4",
"getrandom 0.4.1",
"once_cell",
"rustix",
"windows-sys 0.61.2",
@ -2064,11 +2169,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.17"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
dependencies = [
"thiserror-impl 2.0.17",
"thiserror-impl 2.0.18",
]
[[package]]
@ -2084,9 +2189,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.17"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
dependencies = [
"proc-macro2",
"quote",
@ -2095,22 +2200,23 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.44"
version = "0.3.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c"
dependencies = [
"deranged",
"js-sys",
"num-conv",
"powerfmt",
"serde",
"serde_core",
"time-core",
]
[[package]]
name = "time-core"
version = "0.1.6"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
[[package]]
name = "tinystr"
@ -2258,6 +2364,12 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typed-path"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e28f89b80c87b8fb0cf04ab448d5dd0dd0ade2f8891bae878de66a75a28600e"
[[package]]
name = "typenum"
version = "1.19.0"
@ -2288,6 +2400,12 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
[[package]]
name = "unicode-xid"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "unit-prefix"
version = "0.5.2"
@ -2367,7 +2485,16 @@ version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
"wit-bindgen",
"wit-bindgen 0.46.0",
]
[[package]]
name = "wasip3"
version = "0.4.0+wasi-0.3.0-rc-2026-01-06"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5"
dependencies = [
"wit-bindgen 0.51.0",
]
[[package]]
@ -2428,6 +2555,40 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "wasm-encoder"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319"
dependencies = [
"leb128fmt",
"wasmparser",
]
[[package]]
name = "wasm-metadata"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
dependencies = [
"anyhow",
"indexmap",
"wasm-encoder",
"wasmparser",
]
[[package]]
name = "wasmparser"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags 2.10.0",
"hashbrown 0.15.5",
"indexmap",
"semver",
]
[[package]]
name = "web-sys"
version = "0.3.83"
@ -2760,6 +2921,94 @@ version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "wit-bindgen"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
dependencies = [
"wit-bindgen-rust-macro",
]
[[package]]
name = "wit-bindgen-core"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc"
dependencies = [
"anyhow",
"heck",
"wit-parser",
]
[[package]]
name = "wit-bindgen-rust"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
dependencies = [
"anyhow",
"heck",
"indexmap",
"prettyplease",
"syn",
"wasm-metadata",
"wit-bindgen-core",
"wit-component",
]
[[package]]
name = "wit-bindgen-rust-macro"
version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a"
dependencies = [
"anyhow",
"prettyplease",
"proc-macro2",
"quote",
"syn",
"wit-bindgen-core",
"wit-bindgen-rust",
]
[[package]]
name = "wit-component"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags 2.10.0",
"indexmap",
"log",
"serde",
"serde_derive",
"serde_json",
"wasm-encoder",
"wasm-metadata",
"wasmparser",
"wit-parser",
]
[[package]]
name = "wit-parser"
version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
dependencies = [
"anyhow",
"id-arena",
"indexmap",
"log",
"semver",
"serde",
"serde_derive",
"serde_json",
"unicode-xid",
"wasmparser",
]
[[package]]
name = "writeable"
version = "0.6.2"
@ -2891,9 +3140,9 @@ dependencies = [
[[package]]
name = "zip"
version = "7.1.0"
version = "7.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9013f1222db8a6d680f13a7ccdc60a781199cd09c2fa4eff58e728bb181757fc"
checksum = "cc12baa6db2b15a140161ce53d72209dacea594230798c24774139b54ecaa980"
dependencies = [
"aes",
"bzip2",
@ -2901,8 +3150,7 @@ dependencies = [
"crc32fast",
"deflate64",
"flate2",
"generic-array",
"getrandom 0.3.4",
"getrandom 0.4.1",
"hmac",
"indexmap",
"lzma-rust2",
@ -2911,6 +3159,7 @@ dependencies = [
"ppmd-rust",
"sha1",
"time",
"typed-path",
"zeroize",
"zopfli",
"zstd",

View file

@ -1,47 +1,47 @@
[package]
name = "pakker"
version = "0.1.0"
edition = "2024"
authors = [ "NotAShelf <raf@notashelf.dev" ]
name = "pakker"
version = "0.1.0"
edition = "2024"
authors = [ "NotAShelf <raf@notashelf.dev>" ]
rust-version = "1.91.0"
readme = true
[dependencies]
anyhow = "1.0.100"
anyhow = "1.0.101"
async-trait = "0.1.89"
clap = { version = "4.5.54", features = [ "derive" ] }
comfy-table = "7.1"
clap = { version = "4.5.58", features = [ "derive" ] }
comfy-table = "7.2.2"
dialoguer = "0.12.0"
env_logger = "0.11.8"
env_logger = "0.11.9"
futures = "0.3.31"
git2 = "0.20.3"
git2 = "0.20.4"
glob = "0.3.3"
indicatif = "0.18.3"
keyring = "3.6.3"
libc = "0.2.180"
libc = "0.2.181"
log = "0.4.29"
md-5 = "0.10.6"
once_cell = "1.20"
rand = "0.9.2"
regex = "1.12"
reqwest = { version = "0.13.1", features = [ "json" ] }
rand = "0.10.0"
regex = "1.12.3"
reqwest = { version = "0.13.2", features = [ "json" ] }
semver = "1.0.27"
serde = { version = "1.0.228", features = [ "derive" ] }
serde_json = "1.0.149"
sha1 = "0.10.6"
sha2 = "0.10.0"
sha2 = "0.10.9"
strsim = "0.11.1"
tempfile = "3.24.0"
textwrap = "0.16"
thiserror = "2.0.17"
tempfile = "3.25.0"
textwrap = "0.16.2"
thiserror = "2.0.18"
tokio = { version = "1.49.0", features = [ "full" ] }
walkdir = "2.5.0"
yansi = "1.0.1"
zip = "7.1.0"
zip = "7.4.0"
[dev-dependencies]
mockito = "1.7.1"
tempfile = "3.24.0"
[[bin]]
name = "pakker"
path = "src/main.rs"
mockito = "1.7.2"
tempfile = "3.25.0"
# Optimize crypto stuff. Building them with optimizations makes that build script
# run ~5x faster, more than offsetting the additional build time added to the

971
docs/COMMANDS.md Normal file
View file

@ -0,0 +1,971 @@
# Pakker Command Reference
Complete reference for all Pakker commands.
## Table of Contents
- [Global Options](#global-options)
- [Project Management](#project-management)
- [init](#init)
- [import](#import)
- [add](#add)
- [add-prj](#add-prj)
- [rm](#rm)
- [ls](#ls)
- [inspect](#inspect)
- [Updates and Sync](#updates-and-sync)
- [update](#update)
- [fetch](#fetch)
- [sync](#sync)
- [status](#status)
- [Project Configuration](#project-configuration)
- [set](#set)
- [link](#link)
- [unlink](#unlink)
- [Export](#export)
- [export](#export-1)
- [Configuration](#configuration)
- [cfg](#cfg)
- [credentials](#credentials)
- [Remote Management](#remote-management)
- [remote](#remote)
- [remote-update](#remote-update)
- [fork](#fork)
- [Utilities](#utilities)
- [diff](#diff)
---
## Global Options
All commands support these global flags:
```bash
-v, --verbose # Enable verbose output
-vv # Very verbose (debug level)
-vvv # Trace level logging
```
Examples:
```bash
pakker -v add sodium # Info logging
pakker -vv fetch # Debug logging
pakker -vvv export # Trace logging
```
---
## Project Management
### init
Initialize a new modpack project in the current directory.
**Usage:**
```bash
pakker init [OPTIONS]
```
**Options:**
- `-t, --target <TARGET>` - Target platform (default: multiplatform)
- `-m, --mc-version <VERSION>` - Minecraft version (default: 1.20.1)
- `-l, --loader <LOADER>` - Mod loader (default: fabric)
- `--loader-version <VERSION>` - Specific loader version
**Examples:**
```bash
# Initialize Fabric 1.20.1 modpack
pakker init
# Initialize Forge 1.19.4 modpack
pakker init -m 1.19.4 -l forge
# Initialize with specific loader version
pakker init -l fabric --loader-version 0.15.0
# Initialize for CurseForge only
pakker init --target curseforge
```
**Output:**
Creates:
- `pakku.json` - Configuration file
- `pakku-lock.json` - Lockfile
- `overrides/` - Override directory
---
### import
Import an existing modpack from CurseForge or Modrinth.
**Usage:**
```bash
pakker import <SOURCE>
```
**Supported Formats:**
- CurseForge ZIP files
- Modrinth .mrpack files
- Local directories with manifest
- Remote URLs
**Examples:**
```bash
# Import from local file
pakker import modpack.zip
pakker import pack.mrpack
# Import from URL
pakker import https://example.com/modpack.zip
# Import from directory
pakker import ./existing-modpack/
```
---
### add
Interactively add projects to the modpack.
**Usage:**
```bash
pakker add <QUERY>...
```
**Features:**
- Interactive platform selection
- Automatic dependency resolution
- Fuzzy search with suggestions
- Version selection
**Examples:**
```bash
# Add single project
pakker add sodium
# Add multiple projects
pakker add sodium lithium phosphor
# Add with typo correction
pakker add sodum
# Suggests: Did you mean "sodium"?
```
---
### add-prj
Add projects non-interactively with explicit platform specification.
**Usage:**
```bash
pakker add-prj [OPTIONS] <PROJECT>
```
**Platform Options:**
- `--cf, --curseforge <ID>` - CurseForge project ID or slug
- `--mr, --modrinth <ID>` - Modrinth project ID or slug
- `--gh, --github <REPO>` - GitHub repository (format: owner/repo)
**Property Options:**
- `--type <TYPE>` - Project type (mod, resourcepack, shader, modpack)
- `--side <SIDE>` - Project side (client, server, both)
- `--strategy <STRATEGY>` - Update strategy (latest, specific, pinned)
- `--version <VERSION>` - Specific version to use
- `--redistributable <BOOL>` - Allow redistribution (true/false)
- `--subpath <PATH>` - Custom subpath in mods directory
- `--alias <NAME>` - Project alias
- `--export <BOOL>` - Include in exports (true/false)
**Control Options:**
- `--yes, -y` - Skip all confirmations
- `--no-deps` - Don't resolve dependencies
**Examples:**
```bash
# Add from Modrinth
pakker add-prj --mr sodium --yes
# Add from CurseForge
pakker add-prj --cf jei --yes
# Add from multiple platforms (creates unified project)
pakker add-prj --mr sodium --cf sodium --yes
# Add with custom properties
pakker add-prj --mr sodium --side client --strategy pinned --yes
# Add without dependencies (for CI)
pakker add-prj --mr fabric-api --yes --no-deps
# Add with alias
pakker add-prj --mr sodium --alias "sodium-mod" --yes
# Add GitHub release
pakker add-prj --gh "IrisShaders/Iris" --yes
```
**CI/CD Usage:**
```bash
#!/bin/bash
# Batch add mods
mods=(
"fabric-api"
"sodium"
"lithium"
"phosphor"
)
for mod in "${mods[@]}"; do
pakker add-prj --mr "$mod" --yes --no-deps || exit 1
done
# Resolve all dependencies at once
pakker update --yes
```
---
### rm
Remove projects from the modpack.
**Usage:**
```bash
pakker rm <PROJECT>...
```
**Examples:**
```bash
# Remove single project
pakker rm sodium
# Remove multiple projects
pakker rm sodium lithium phosphor
# Remove with confirmation
pakker rm outdated-mod
```
---
### ls
List all projects in the modpack.
**Usage:**
```bash
pakker ls [OPTIONS]
```
**Options:**
- `--type <TYPE>` - Filter by type
- `--side <SIDE>` - Filter by side
- `--platform <PLATFORM>` - Filter by platform
**Examples:**
```bash
# List all projects
pakker ls
# List only mods
pakker ls --type mod
# List client-only projects
pakker ls --side client
# List CurseForge projects
pakker ls --platform curseforge
```
**Output Format:**
```
sodium (v0.5.0) - both - [CF, MR]
lithium (v0.11.2) - both - [CF, MR]
iris (v1.6.4) - client - [CF, MR]
```
---
### inspect
View detailed information about projects including dependencies.
**Usage:**
```bash
pakker inspect [PROJECT]...
```
**Features:**
- Project metadata (name, version, platforms)
- File information (name, size, hashes)
- Dependency trees with visualization
- Circular dependency detection
- Property display
**Examples:**
```bash
# Inspect single project
pakker inspect sodium
# Inspect multiple projects
pakker inspect sodium lithium phosphor
# View modpack overview (no arguments)
pakker inspect
# Fuzzy matching with suggestions
pakker inspect sodum
# Suggests: Did you mean "sodium"?
```
**Example Output:**
```
Sodium v0.5.0
Type: mod
Side: client
Platforms: curseforge, modrinth
Files:
sodium-fabric-0.5.0+mc1.20.jar
Size: 2.3 MB
SHA1: abc123...
Dependencies:
└── fabric-api v0.87.0
└── fabric-loader >=0.14.0
Properties:
Redistributable: true
Update Strategy: latest
Export: true
```
---
## Updates and Sync
### update
Update projects to newer versions.
**Usage:**
```bash
pakker update [PROJECT]... [OPTIONS]
```
**Options:**
- `--yes, -y` - Skip confirmations
- `--all` - Update all projects
**Examples:**
```bash
# Update specific project
pakker update sodium
# Update multiple projects
pakker update sodium lithium
# Update all projects
pakker update --all
# Update without confirmation (CI)
pakker update --all --yes
```
---
### fetch
Download all project files.
**Usage:**
```bash
pakker fetch [OPTIONS]
```
**Options:**
- `--force` - Re-download existing files
**Examples:**
```bash
# Fetch all files
pakker fetch
# Force re-download
pakker fetch --force
```
**Output:**
```
Fetching 42 files...
✓ sodium-fabric-0.5.0.jar (local)
✓ lithium-fabric-0.11.2.jar (download)
✓ fabric-api-0.87.0.jar (download)
...
```
---
### sync
Update projects and fetch files in one command.
**Usage:**
```bash
pakker sync [OPTIONS]
```
**Options:**
- `--yes, -y` - Skip confirmations
**Examples:**
```bash
# Sync everything
pakker sync
# Sync without confirmation
pakker sync --yes
```
With `--yes`, equivalent to running:
```bash
pakker update --all --yes
pakker fetch
```
---
### status
Check for available updates.
**Usage:**
```bash
pakker status
```
**Examples:**
```bash
pakker status
```
**Example Output:**
```
Updates available:
sodium: v0.5.0 → v0.5.1
lithium: v0.11.1 → v0.11.2
No updates:
fabric-api (v0.87.0)
iris (v1.6.4)
```
---
## Project Configuration
### set
Modify project properties.
**Usage:**
```bash
pakker set <PROJECT> [OPTIONS]
```
**Options:**
- `--type <TYPE>` - Set project type
- `--side <SIDE>` - Set project side
- `--strategy <STRATEGY>` - Set update strategy
- `--version <VERSION>` - Set specific version (use with strategy=specific)
- `--redistributable <BOOL>` - Set redistributable flag
- `--subpath <PATH>` - Set custom subpath
- `--alias <NAME>` - Set project alias
- `--export <BOOL>` - Set export flag
**Examples:**
```bash
# Change project side
pakker set sodium --side client
# Pin to specific version
pakker set fabric-api --strategy specific --version 0.87.0
# Always update to latest
pakker set lithium --strategy latest
# Never update
pakker set custom-mod --strategy pinned
# Mark as non-redistributable
pakker set optifine --redistributable false
# Don't include in exports
pakker set dev-mod --export false
# Set custom subpath
pakker set performance-mod --subpath "mods/performance/"
```
---
### link
Link projects together (define relationships).
**Usage:**
```bash
pakker link <PROJECT> <TARGET>
```
**Examples:**
```bash
# Link projects
pakker link sodium fabric-api
```
---
### unlink
Remove project links.
**Usage:**
```bash
pakker unlink <PROJECT> <TARGET>
```
**Examples:**
```bash
# Unlink projects
pakker unlink sodium fabric-api
```
---
## Export
### export
Export modpack for distribution.
**Usage:**
```bash
pakker export [OPTIONS]
```
**Options:**
- `--profile <PROFILE>` - Export profile (curseforge, modrinth, serverpack, or custom)
- `-o, --output <PATH>` - Output file path
**Built-in Profiles:**
- `curseforge` - CurseForge ZIP format with manifest.json
- `modrinth` - Modrinth .mrpack format
- `serverpack` - Server-optimized pack (no client-only mods)
**Examples:**
```bash
# Export for CurseForge
pakker export --profile curseforge
# Export for Modrinth
pakker export --profile modrinth
# Export with custom output path
pakker export --profile curseforge -o dist/modpack-v1.0.0.zip
# Export server pack
pakker export --profile serverpack -o server.zip
# Export all formats
pakker export --profile curseforge -o dist/cf.zip
pakker export --profile modrinth -o dist/mr.mrpack
pakker export --profile serverpack -o dist/server.zip
```
**Profile Configuration:**
Configure profiles in `pakku.json`:
```json
{
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false
},
"production": {
"filterPlatform": "modrinth",
"overrides": ["overrides", "prod-config"]
}
}
}
```
See [EXPORT_PROFILES.md](./EXPORT_PROFILES.md) for complete documentation.
---
## Configuration
### cfg
Configure modpack properties.
**Usage:**
```bash
pakker cfg [OPTIONS]
```
**Options:**
- `--name <NAME>` - Set modpack name
- `--version <VERSION>` - Set modpack version
- `--description <TEXT>` - Set description
- `--author <NAME>` - Set author
**Examples:**
```bash
# Set modpack name
pakker cfg --name "My Awesome Pack"
# Set version
pakker cfg --version "1.0.0"
# Set multiple properties
pakker cfg --name "Performance Pack" --author "YourName" --version "2.0.0"
```
---
### credentials
Manage platform API credentials.
**Usage:**
```bash
# View credentials
pakker credentials
# Set credentials
pakker credentials set --curseforge <KEY>
pakker credentials set --modrinth <TOKEN>
# Remove credentials
pakker credentials remove --curseforge
pakker credentials remove --modrinth
```
**Examples:**
```bash
# Set CurseForge API key
pakker credentials set --curseforge $CF_API_KEY
# Set Modrinth token
pakker credentials set --modrinth $MR_TOKEN
# View current credentials
pakker credentials
# Remove CurseForge key
pakker credentials remove --curseforge
```
**Credential Storage:**
Credentials are stored securely, using the first available of the following (in order of preference):
1. System keyring (if available)
2. Pakker credentials file (encrypted)
3. Environment variables
**Benefits:**
- Higher API rate limits
- Access to private/early-access mods
- Faster downloads
- Better error messages
---
## Remote Management
### remote
Manage Git remote repositories for modpack configuration.
**Usage:**
```bash
# Add remote
pakker remote add <NAME> <URL>
# List remotes
pakker remote list
# Remove remote
pakker remote remove <NAME>
```
**Examples:**
```bash
# Add origin remote
pakker remote add origin https://github.com/user/modpack.git
# Add upstream
pakker remote add upstream https://github.com/original/modpack.git
# List all remotes
pakker remote list
# Remove remote
pakker remote remove origin
```
---
### remote-update
Pull modpack configuration updates from remote Git repository.
**Usage:**
```bash
pakker remote-update [REMOTE]
```
**Examples:**
```bash
# Update from default remote
pakker remote-update
# Update from specific remote
pakker remote-update upstream
# Update and sync
pakker remote-update && pakker sync
```
---
### fork
Manage fork configuration.
**Usage:**
```bash
pakker fork [OPTIONS]
```
**Options:**
- `--ref <REF>` - Set fork reference (branch, tag, commit)
- `--type <TYPE>` - Set reference type
**Examples:**
```bash
# Fork from branch
pakker fork --ref main --type branch
# Fork from tag
pakker fork --ref v1.0.0 --type tag
# Fork from commit
pakker fork --ref abc1234 --type commit
```
---
## Utilities
### diff
Show differences between local and remote modpack configuration.
**Usage:**
```bash
pakker diff [REMOTE]
```
**Examples:**
```bash
# Show diff with default remote
pakker diff
# Show diff with specific remote
pakker diff upstream
# Show diff in detail
pakker -v diff
```
**Example Output:**
```
Changes in remote:
+ sodium v0.5.1 (added)
- old-mod v1.0.0 (removed)
~ lithium v0.11.1 → v0.11.2 (updated)
Local changes:
+ custom-mod v1.0.0 (added)
```
---
## Common Patterns
### CI/CD Pipeline
```bash
#!/bin/bash
set -e
# Add projects non-interactively
pakker add-prj --mr fabric-api --yes --no-deps
pakker add-prj --mr sodium --yes --no-deps
pakker add-prj --mr lithium --yes --no-deps
# Resolve dependencies
pakker update --all --yes
# Fetch files
pakker fetch
# Export for all platforms
pakker export --profile curseforge -o dist/curseforge.zip
pakker export --profile modrinth -o dist/modrinth.mrpack
pakker export --profile serverpack -o dist/server.zip
```
### Batch Updates
```bash
# Update all projects
pakker update --all --yes
# Fetch new versions
pakker fetch
# Export updated pack
pakker export --profile modrinth -o updated-pack.mrpack
```
### Platform Migration
```bash
# Add projects from new platform
pakker add-prj --mr sodium --cf sodium --yes
pakker add-prj --mr lithium --cf lithium --yes
# Export for both platforms
pakker export --profile curseforge -o cf-pack.zip
pakker export --profile modrinth -o mr-pack.mrpack
```
---
## Environment Variables
### Credentials
```bash
export CURSEFORGE_API_KEY="your-key"
export MODRINTH_TOKEN="your-token"
```
### Verbosity
```bash
export PAKKER_LOG=debug # or info, warn, error
```
### Cache
```bash
export PAKKER_CACHE_DIR="$HOME/.cache/pakker"
```
---
## Exit Codes
- `0` - Success
- `1` - General error
- `2` - Invalid arguments
- `3` - Project not found
- `4` - Network error
- `5` - File system error
---
## See Also
- [Main Documentation](./README.md)
- [Export Profiles](./EXPORT_PROFILES.md)
- [Migration Guide](./MIGRATION.md)
- [Examples](../examples/)

348
docs/EXPORT_PROFILES.md Normal file
View file

@ -0,0 +1,348 @@
# Export Profiles Configuration
Pakker supports profile-specific export configurations, allowing you to customize export behavior for different platforms and use cases.
## Overview
Export profiles enable you to:
- Use different override directories per profile
- Filter projects by platform availability
- Control non-redistributable mod inclusion
- Customize client-only mod filtering
- Override settings per project
## Configuration
Add an `exportProfiles` section to your `pakku.json` file:
```json
{
"name": "My Modpack",
"version": "1.0.0",
"overrides": ["overrides"],
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "curseforge-overrides"]
},
"modrinth": {
"filterPlatform": "modrinth",
"includeNonRedistributable": true
},
"serverpack": {
"includeClientOnly": false,
"serverOverrides": ["server-config"]
}
}
}
```
## Profile Configuration Options
### `filterPlatform` (string, optional)
Filter projects to only include those available on the specified platform.
**Supported values**: `"curseforge"`, `"modrinth"`, `"github"`
**Example**:
```json
{
"filterPlatform": "modrinth"
}
```
Projects without a Modrinth version will be excluded from the export.
---
### `includeNonRedistributable` (boolean, optional)
Control whether non-redistributable mods are included in the export.
**Default**: `false` for CurseForge, `true` for others
**Example**:
```json
{
"includeNonRedistributable": false
}
```
---
### `includeClientOnly` (boolean, optional)
Control whether client-only mods are included in server pack exports.
**Default**: `false` for serverpack profile, `true` for others
**Example**:
```json
{
"includeClientOnly": false
}
```
---
### `overrides` (array of strings, optional)
Override directories to include in the export. Replaces global `overrides` setting.
**Example**:
```json
{
"overrides": ["overrides", "platform-specific-overrides"]
}
```
---
### `serverOverrides` (array of strings, optional)
Server-specific override directories. Replaces global `serverOverrides` setting.
**Example**:
```json
{
"serverOverrides": ["server-config", "server-scripts"]
}
```
---
### `clientOverrides` (array of strings, optional)
Client-specific override directories. Replaces global `clientOverrides` setting.
**Example**:
```json
{
"clientOverrides": ["client-config", "shaderpacks"]
}
```
---
### `projectOverrides` (object, optional)
Per-project configuration within this profile. Not yet implemented.
**Future feature**:
```json
{
"projectOverrides": {
"sodium": {
"export": false
},
"optifine": {
"export": true,
"subpath": "mods/performance/"
}
}
}
```
## Default Profiles
Pakker provides sensible defaults for common profiles:
### CurseForge Default
```json
{
"filterPlatform": "curseforge",
"includeNonRedistributable": false
}
```
### Modrinth Default
```json
{
"filterPlatform": "modrinth",
"includeNonRedistributable": true
}
```
### ServerPack Default
```json
{
"includeClientOnly": false
}
```
These defaults are used automatically when exporting with `pakker export --profile <name>` if no custom configuration is specified.
## Usage Examples
### Example 1: Multi-Platform Modpack
```json
{
"name": "Multi-Platform Pack",
"version": "1.0.0",
"overrides": ["overrides"],
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "curseforge-overrides"]
},
"modrinth": {
"filterPlatform": "modrinth",
"includeNonRedistributable": true,
"overrides": ["overrides", "modrinth-overrides"]
}
}
}
```
**Usage**:
```bash
pakker export --profile curseforge
pakker export --profile modrinth
```
---
### Example 2: Server Pack with Custom Config
```json
{
"name": "Survival Server",
"version": "2.0.0",
"overrides": ["overrides"],
"serverOverrides": ["server-base"],
"exportProfiles": {
"serverpack": {
"includeClientOnly": false,
"serverOverrides": ["server-base", "server-production"]
}
}
}
```
**Usage**:
```bash
pakker export --profile serverpack
```
---
### Example 3: Development vs Production
```json
{
"name": "Dev Pack",
"version": "1.0.0-dev",
"overrides": ["overrides"],
"exportProfiles": {
"dev": {
"includeNonRedistributable": true,
"overrides": ["overrides", "dev-config"]
},
"production": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "prod-config"]
}
}
}
```
**Usage**:
```bash
pakker export --profile dev # For testing
pakker export --profile production # For release
```
## How Profile Configuration Works
### Priority Order
1. **Profile-specific settings** from `exportProfiles.<profile>`
2. **Profile defaults** (curseforge/modrinth/serverpack)
3. **Global settings** from root `pakku.json`
### Example Resolution
Given this configuration:
```json
{
"overrides": ["overrides"],
"serverOverrides": ["server-global"],
"exportProfiles": {
"myprofile": {
"overrides": ["custom-overrides"]
}
}
}
```
When exporting with `--profile myprofile`:
- `overrides`: Uses `["custom-overrides"]` (from profile)
- `serverOverrides`: Uses `["server-global"]` (from global, no profile override)
### Filter Execution Order
Export rules are executed in this order:
1. **Copy project files** - Download and copy mod files
2. **Filter by platform** - Remove mods not available on target platform
3. **Copy overrides** - Copy override directories
4. **Generate manifest** - Create platform-specific manifest
5. **Filter non-redistributable** - Remove non-redistributable mods (if configured)
6. **Filter client-only** - Remove client-only mods (for server packs)
## Migration from Pakku
If you're migrating from Pakku, export profiles work the same way. The configuration format is compatible.
### Pakku Configuration
```json
{
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge"
}
}
}
```
This works identically in Pakker.
## Troubleshooting
### Mods Missing from Export
**Problem**: Some mods are missing after export.
**Solution**: Check if `filterPlatform` is excluding them. Use `pakker inspect <project>` to see platform availability.
---
### Override Files Not Copied
**Problem**: Override files aren't appearing in the export.
**Solution**: Verify the override path in your profile config matches the actual directory structure.
---
### Non-Redistributable Mods Included
**Problem**: Non-redistributable mods are in the export when they shouldn't be.
**Solution**: Set `includeNonRedistributable: false` in your profile configuration.
## See Also
- [Export Command Documentation](./COMMANDS.md#export)
- [Configuration Reference](./CONFIGURATION.md)
- [Project Configuration](./PROJECTS.md)

511
docs/MIGRATION.md Normal file
View file

@ -0,0 +1,511 @@
# Migrating from Pakku to Pakker
This guide helps you migrate from Pakku (Kotlin) to Pakker (Rust).
## Overview
Pakker is designed to be largely compatible with Pakku, maintaining the same configuration format and workflow. Most Pakku modpacks can be imported directly into Pakker with minimal changes.
## Key Differences
### Performance
- **Faster execution** - Rust implementation provides significant speed improvements
- **Lower memory usage** - More efficient resource utilization
- **Parallel operations** - Better concurrency for downloads and processing
### Enhanced Features
- **Non-interactive mode** - `add-prj` command for CI/CD workflows
- **Advanced inspection** - Enhanced `inspect` command with dependency trees
- **Export profiles** - More flexible profile-based export configuration
- **Platform filtering** - Automatic platform-specific mod filtering
### Compatibility
✅ **Compatible**:
- Configuration format (`pakku.json`)
- Lockfile format (`pakku-lock.json`)
- Export formats (CurseForge ZIP, Modrinth .mrpack)
- Override directory structure
- Project properties and metadata
⚠️ **Differences**:
- Some CLI flag names (documented below)
- New features not in Pakku
- Performance characteristics
## Quick Migration
### Step 1: Import Existing Modpack
If you have an existing Pakku modpack:
```bash
# Navigate to your modpack directory
cd my-modpack
# Pakker will use existing pakku.json and pakku-lock.json
pakker ls
```
That's it! Pakker reads the same configuration files.
### Step 2: Verify Projects
```bash
# List all projects
pakker ls
# Inspect specific projects
pakker inspect sodium lithium
```
### Step 3: Test Export
```bash
# Export for your target platform
pakker export --profile curseforge
pakker export --profile modrinth
```
## Configuration Migration
### pakku.json Format
Pakker uses the **same format** as Pakku:
```json
{
"name": "My Modpack",
"version": "1.0.0",
"description": "A performance modpack",
"author": "YourName",
"overrides": ["overrides"],
"serverOverrides": ["server-overrides"],
"clientOverrides": ["client-overrides"]
}
```
### Export Profiles
Pakker extends Pakku's configuration with enhanced export profiles:
**Pakku format** (still supported):
```json
{
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge"
}
}
}
```
**Pakker enhancements** (new features):
```json
{
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "curseforge-specific"]
},
"modrinth": {
"filterPlatform": "modrinth",
"includeNonRedistributable": true
},
"serverpack": {
"includeClientOnly": false,
"serverOverrides": ["server-config"]
}
}
}
```
See [EXPORT_PROFILES.md](./EXPORT_PROFILES.md) for complete documentation.
## Command Mapping
Most commands are identical or very similar:
| Pakku | Pakker | Notes |
|-------|--------|-------|
| `pakku init` | `pakker init` | Identical |
| `pakku import` | `pakker import` | Identical |
| `pakku add` | `pakker add` | Identical |
| `pakku add` | `pakker add-prj` | New non-interactive variant |
| `pakku rm` | `pakker rm` | Identical |
| `pakku update` | `pakker update` | Identical |
| `pakku ls` | `pakker ls` | Identical |
| `pakku status` | `pakker status` | Identical |
| `pakku fetch` | `pakker fetch` | Identical |
| `pakku export` | `pakker export` | Enhanced with profiles |
| `pakku link` | `pakker link` | Identical |
| `pakku set` | `pakker set` | Identical |
| `pakku cfg` | `pakker cfg` | Identical |
| `pakku diff` | `pakker diff` | Identical |
### New Commands in Pakker
- `pakker add-prj` - Non-interactive project addition for CI/CD
- `pakker inspect` - Enhanced project inspection with dependency trees
- `pakker sync` - Combined update + fetch operation
- `pakker credentials` - Credential management
- `pakker remote` - Git remote management
- `pakker fork` - Fork configuration
## Feature Comparison
### Project Management
| Feature | Pakku | Pakker | Notes |
|---------|-------|--------|-------|
| Add projects | ✅ | ✅ | Enhanced with `add-prj` |
| Remove projects | ✅ | ✅ | Identical |
| Update projects | ✅ | ✅ | Identical |
| List projects | ✅ | ✅ | Identical |
| Project properties | ✅ | ✅ | Identical |
| Dependencies | ✅ | ✅ | Enhanced inspection |
### Export System
| Feature | Pakku | Pakker | Notes |
|---------|-------|--------|-------|
| CurseForge export | ✅ | ✅ | Identical format |
| Modrinth export | ✅ | ✅ | Identical format |
| Server pack | ✅ | ✅ | Enhanced filtering |
| Export profiles | ✅ | ✅ | Extended features |
| Override dirs | ✅ | ✅ | Identical |
| Platform filtering | ❌ | ✅ | New in Pakker |
| Profile-specific overrides | ❌ | ✅ | New in Pakker |
### Configuration
| Feature | Pakku | Pakker | Notes |
|---------|-------|--------|-------|
| pakku.json | ✅ | ✅ | Same format |
| pakku-lock.json | ✅ | ✅ | Same format |
| Project config | ✅ | ✅ | Identical |
| Export profiles | ✅ | ✅ | Extended in Pakker |
## Migration Scenarios
### Scenario 1: Local Development
**Pakku workflow:**
```bash
pakku init
pakku add sodium lithium
pakku fetch
pakku export
```
**Pakker workflow:**
```bash
pakker init
pakker add sodium lithium
pakker fetch
pakker export --profile modrinth
```
No changes needed!
### Scenario 2: CI/CD Pipeline
**Pakku workflow:**
```bash
# Interactive, requires user input
pakku add sodium
pakku add lithium
pakku fetch
pakku export
```
**Pakker workflow:**
```bash
# Fully automated
pakker add-prj --mr sodium --yes --no-deps
pakker add-prj --mr lithium --yes --no-deps
pakker update --all --yes
pakker fetch
pakker export --profile modrinth -o dist/pack.mrpack
```
Benefits: No interactive prompts, perfect for CI/CD.
### Scenario 3: Multi-Platform Release
**Pakku workflow:**
```bash
# Manual export for each platform
pakku export
# Manually select CurseForge
pakku export
# Manually select Modrinth
```
**Pakker workflow:**
```bash
# Script multiple exports
pakker export --profile curseforge -o dist/cf.zip
pakker export --profile modrinth -o dist/mr.mrpack
pakker export --profile serverpack -o dist/server.zip
```
Benefits: Scriptable, reproducible builds.
## Migrating CI/CD Workflows
### GitHub Actions
**Before (Pakku):**
```yaml
- name: Setup Pakku
run: |
# Install Pakku
- name: Build modpack
run: |
pakku fetch
pakku export
```
**After (Pakker):**
```yaml
- name: Setup Pakker
run: |
# Install Pakker
cargo install pakker
- name: Build modpack
run: |
pakker fetch
pakker export --profile curseforge -o dist/curseforge.zip
pakker export --profile modrinth -o dist/modrinth.mrpack
```
### GitLab CI
**Before (Pakku):**
```yaml
build:
script:
- pakku fetch
- pakku export
```
**After (Pakker):**
```yaml
build:
script:
- pakker fetch
- pakker export --profile curseforge -o curseforge.zip
- pakker export --profile modrinth -o modrinth.mrpack
artifacts:
paths:
- "*.zip"
- "*.mrpack"
```
## Troubleshooting
### Projects Not Found After Migration
**Problem**: Pakker can't find projects that worked in Pakku.
**Solution**: Verify lockfile format:
```bash
# Check lockfile
cat pakku-lock.json
# Re-fetch projects
pakker fetch
# If issues persist, re-add problematic projects
pakker rm problematic-mod
pakker add-prj --mr problematic-mod --yes
```
### Export Format Differences
**Problem**: Exported packs look different.
**Solution**: Pakker may organize files slightly differently. Both formats are valid:
```bash
# Inspect export
unzip -l curseforge.zip
# Verify manifest
jq . manifest.json
```
If needed, configure export profiles to match Pakku behavior exactly.
### Performance Issues
**Problem**: Pakker seems slower than expected.
**Solution**: Pakker should be faster than Pakku. Check:
```bash
# Enable logging
pakker -vv fetch
# Check network connectivity
pakker status
# Verify credentials (for rate limits)
pakker credentials
```
### Configuration Not Recognized
**Problem**: Some configuration options don't work.
**Solution**: Ensure you're using compatible options:
```json
{
"name": "My Pack",
"version": "1.0.0",
"overrides": ["overrides"]
}
```
Pakker supports all Pakku options plus new ones. Check [EXPORT_PROFILES.md](./EXPORT_PROFILES.md) for new features.
## Best Practices
### 1. Test Before Full Migration
```bash
# Copy modpack to test directory
cp -r my-modpack my-modpack-test
cd my-modpack-test
# Test with Pakker
pakker ls
pakker fetch
pakker export --profile modrinth
# Verify exports match expectations
```
### 2. Update CI/CD Gradually
1. Test Pakker locally first
2. Update a single workflow
3. Verify builds succeed
4. Roll out to all workflows
### 3. Use New Features
Take advantage of Pakker enhancements:
```bash
# Non-interactive project addition
pakker add-prj --mr sodium --yes
# Enhanced inspection
pakker inspect sodium
# Profile-based exports
pakker export --profile production -o release.mrpack
```
### 4. Keep Backward Compatibility
If you need to support both Pakku and Pakker:
```json
{
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge"
}
}
}
```
Use the common subset of features that both support.
## Getting Help
### Resources
- [Pakker Documentation](./README.md)
- [Command Reference](./COMMANDS.md)
- [Export Profiles](./EXPORT_PROFILES.md)
### Common Issues
| Issue | Solution |
|-------|----------|
| Project not found | Try explicit platform: `pakker add-prj --mr <name>` |
| Export fails | Check export profile configuration |
| Missing files | Run `pakker fetch` |
| Slow performance | Verify network, add credentials |
| Config not recognized | Check pakku.json syntax with `jq` |
### Support Channels
- GitHub Issues: Report bugs and feature requests
- Documentation: Check docs/ directory
- Examples: See examples/ directory
## Migration Checklist
- [ ] Backup existing modpack directory
- [ ] Install Pakker
- [ ] Verify `pakker ls` shows all projects
- [ ] Test `pakker fetch` downloads files
- [ ] Test export for your target platform
- [ ] Update CI/CD workflows
- [ ] Test automated builds
- [ ] Update documentation
- [ ] Train team members on new commands
- [ ] Monitor for issues
## Feature Roadmap
Pakker aims for full Pakku compatibility plus enhancements. If you find missing features, please report them!
**Current Status:**
- ✅ Core functionality (add, remove, update, export)
- ✅ Multi-platform support
- ✅ Export profiles
- ✅ CI/CD workflows
- ✅ Dependency resolution
**Planned:**
- Custom export rules
- Advanced project filtering
- Enhanced remote sync
- Plugin system
## Summary
Migrating from Pakku to Pakker is straightforward:
1. **No configuration changes** needed for basic usage
2. **Same commands** for most operations
3. **Enhanced features** available when ready
4. **Better performance** out of the box
5. **CI/CD friendly** with non-interactive modes
Most users can start using Pakker immediately with existing Pakku modpacks!

455
docs/README.md Normal file
View file

@ -0,0 +1,455 @@
# Pakker
A fast, reliable multiplatform modpack manager for Minecraft, written in Rust.
## Overview
Pakker is a command-line tool for managing Minecraft modpacks across multiple
platforms including CurseForge, Modrinth, and GitHub. It provides a streamlined
workflow for creating, maintaining, and distributing modpacks with support for
automated dependency resolution, version management, and multi-platform exports.
## Key Features
### Multi-Platform Support
- **CurseForge**, **Modrinth**, and **GitHub** integration
- Unified project management across all platforms
- Platform-specific filtering and optimizations
- Automatic platform-specific manifest generation
### Intelligent Project Management
- **Dependency Resolution** - Automatically resolve and manage project
dependencies
- **Version Management** - Update strategies (latest, specific version, pinned)
- **Project Inspection** - Detailed project information with dependency trees
- **Fuzzy Matching** - Typo-tolerant project searches with suggestions
### Flexible Export System
- **Profile-Based Exports** - Customize exports for different platforms and use
cases
- **Platform Filtering** - Automatically exclude projects not available on
target platform
- **Override Management** - Separate override directories for client/server
configurations
- **Non-Interactive Mode** - Full CI/CD support with scriptable commands
### Advanced Configuration
- **Profile-Specific Settings** - Override paths, filtering rules, and export
behavior per profile
- **Project-Level Customization** - Per-project export settings, aliases, and
properties
- **Side Detection** - Automatic client/server/both classification
- **Redistributable Control** - Manage non-redistributable mod inclusion per
profile
## Installation
### From Source
```bash
git clone https://github.com/yourusername/pakker
cd pakker
cargo build --release
```
The binary will be available at `target/release/pakker`.
### Prerequisites
- Rust 1.70 or later
- Git (for remote repository features)
## Quick Start
### Initialize a New Modpack
```bash
# Create a new modpack for Fabric 1.20.1
pakker init -m 1.20.1 -l fabric
# Or for Forge
pakker init -m 1.20.1 -l forge
```
### Add Mods
```bash
# Interactive mode - search and select
pakker add sodium
# Non-interactive with platform specification
pakker add-prj --mr sodium --yes
# Add with specific properties
pakker add-prj --cf jei --side both --yes
```
### Inspect Projects
```bash
# View project details
pakker inspect sodium
# Multiple projects with dependency trees
pakker inspect sodium lithium phosphor
# View all projects
pakker ls
```
### Fetch Mod Files
```bash
# Download all mod files
pakker fetch
# Or use sync to update and fetch
pakker sync
```
### Export Modpack
```bash
# Export for CurseForge
pakker export --profile curseforge
# Export for Modrinth
pakker export --profile modrinth
# Export server pack
pakker export --profile serverpack
```
## Project Structure
```
my-modpack/
├── pakku.json # Modpack configuration
├── pakku-lock.json # Lockfile with resolved versions
├── mods/ # Downloaded mod files
├── overrides/ # Files to include in all exports
├── server-overrides/ # Server-specific files
└── client-overrides/ # Client-specific files
```
## Configuration Example
### Basic Configuration (`pakku.json`)
```json
{
"name": "My Awesome Modpack",
"version": "1.0.0",
"description": "A performance-focused modpack",
"author": "YourName",
"overrides": ["overrides"],
"serverOverrides": ["server-overrides"],
"clientOverrides": ["client-overrides"]
}
```
### With Export Profiles
```json
{
"name": "Multi-Platform Pack",
"version": "1.0.0",
"overrides": ["overrides"],
"exportProfiles": {
"curseforge": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "curseforge-specific"]
},
"modrinth": {
"filterPlatform": "modrinth",
"includeNonRedistributable": true
},
"serverpack": {
"includeClientOnly": false,
"serverOverrides": ["server-config"]
}
}
}
```
## Common Workflows
### Development Workflow
```bash
# Initialize project
pakker init -m 1.20.1 -l fabric
# Add core mods
pakker add-prj --mr fabric-api --yes
pakker add-prj --mr sodium --yes
pakker add-prj --mr lithium --yes
# Configure and test
pakker fetch
# ... test in game ...
# Add more mods
pakker add create
# Check status
pakker status
# Export for testing
pakker export --profile modrinth
```
### CI/CD Pipeline
```bash
# Non-interactive project addition
pakker add-prj --mr fabric-api --yes --no-deps
pakker add-prj --cf jei --yes
# Update to latest versions
pakker update --yes
# Fetch all files
pakker fetch
# Export for all platforms
pakker export --profile curseforge -o dist/curseforge.zip
pakker export --profile modrinth -o dist/modrinth.mrpack
pakker export --profile serverpack -o dist/server.zip
```
### Multi-Platform Release
```bash
# Add projects from multiple platforms
pakker add-prj --mr sodium --cf sodium --yes
pakker add-prj --mr lithium --cf lithium --yes
# Configure CurseForge export (no non-redistributable)
# Configure Modrinth export (allow all)
# (in pakku.json - see Export Profiles documentation)
# Export for both
pakker export --profile curseforge -o releases/curseforge-pack.zip
pakker export --profile modrinth -o releases/modrinth-pack.mrpack
```
## Documentation
- **[Commands Reference](./COMMANDS.md)** - Complete command documentation
- **[Export Profiles](./EXPORT_PROFILES.md)** - Profile configuration guide
- **[Migration Guide](./MIGRATION.md)** - Migrating from Pakku
- **[Examples](../examples/)** - Configuration examples
## Command Overview
| Command | Description |
| ------------- | ------------------------------------------------------ |
| `init` | Initialize a new modpack project |
| `import` | Import an existing modpack |
| `add` | Add projects interactively |
| `add-prj` | Add projects non-interactively with explicit platforms |
| `rm` | Remove projects |
| `update` | Update projects to newer versions |
| `ls` | List all projects in modpack |
| `inspect` | View detailed project information |
| `fetch` | Download all project files |
| `sync` | Update and fetch in one command |
| `export` | Export modpack for distribution |
| `status` | Check for available updates |
| `set` | Modify project properties |
| `link` | Link related projects |
| `diff` | Show differences with remote |
| `credentials` | Manage API credentials |
| `cfg` | Configure modpack properties |
See [COMMANDS.md](./COMMANDS.md) for detailed usage.
## Features in Detail
### Dependency Resolution
Pakker automatically resolves and manages dependencies:
```bash
pakker add sodium
# Automatically adds fabric-api as a dependency
```
Dependencies are tracked in `pakku-lock.json` and can be inspected:
```bash
pakker inspect sodium
# Shows:
# Dependencies: fabric-api
```
### Project Side Detection
Projects are automatically classified as client-only, server-only, or both:
- **Client-only**: Shaders, client performance mods, minimap mods
- **Server-only**: Server management mods, world generation mods
- **Both**: Most gameplay mods, APIs, libraries
Override this with:
```bash
pakker set sodium --side client
```
### Version Management
Control how projects are updated:
```bash
# Pin to specific version
pakker set sodium --strategy specific --version 0.5.0
# Always use latest
pakker set lithium --strategy latest
# Manual updates only
pakker set fabric-api --strategy pinned
```
### Export Profiles
Customize export behavior per platform:
- **Override directories** - Different overrides per export
- **Platform filtering** - Exclude unavailable mods
- **Redistributable control** - Manage non-redistributable content
- **Client/server filtering** - Remove client-only mods from server packs
See [EXPORT_PROFILES.md](./EXPORT_PROFILES.md) for complete documentation.
### Remote Repositories
Sync modpack configuration with Git repositories:
```bash
# Add remote
pakker remote add origin https://github.com/user/modpack.git
# Pull updates
pakker remote-update
# Configuration is automatically synced
```
### Credentials Management
Store platform API credentials securely:
```bash
# Set credentials
pakker credentials set --curseforge YOUR_CF_KEY
pakker credentials set --modrinth YOUR_MR_TOKEN
# Credentials are used automatically for:
# - Private mods
# - Rate limit increases
# - Authenticated downloads
```
## Platform Compatibility
### CurseForge
- Full project search and metadata
- Automatic manifest generation
- Non-redistributable mod detection
- Curse modpack import support
### Modrinth
- Complete Modrinth API integration
- .mrpack format import/export
- Environment metadata support
- Full modpack search
### GitHub
- GitHub Releases integration
- Direct JAR downloads
- Version tag support
## Advanced Usage
### Fuzzy Project Search
Don't remember exact names? Pakker helps:
```bash
pakker add sodum
# Suggested: Did you mean "sodium"?
```
### Batch Operations
Add multiple projects efficiently:
```bash
pakker add-prj --mr sodium --yes
pakker add-prj --mr lithium --yes
pakker add-prj --mr phosphor --yes
```
Or use a script:
```bash
#!/bin/bash
mods=("sodium" "lithium" "phosphor" "iris")
for mod in "${mods[@]}"; do
pakker add-prj --mr "$mod" --yes
done
```
### Inspect Dependency Trees
Visualize complex dependency relationships:
```bash
pakker inspect create
# Shows:
# create v0.5.1
# ├── flywheel v0.6.10
# │ └── forge-api v1.20.1
# └── registrate v1.3.3
```
### Custom Export Profiles
Create specialized export configurations:
```json
{
"exportProfiles": {
"production": {
"filterPlatform": "curseforge",
"includeNonRedistributable": false,
"overrides": ["overrides", "prod-config"]
},
"development": {
"includeNonRedistributable": true,
"overrides": ["overrides", "dev-config"]
},
"client": {
"includeClientOnly": true,
"overrides": ["overrides", "client-extras"]
}
}
}
```
## Acknowledgments
Pakker is inspired by [Pakku](https://github.com/juraj-hrivnak/Pakku), bringing
similar functionality with improved performance and additional features through
Rust implementation.

6
flake.lock generated
View file

@ -2,11 +2,11 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1769461804,
"narHash": "sha256-msG8SU5WsBUfVVa/9RPLaymvi5bI8edTavbIq3vRlhI=",
"lastModified": 1770197578,
"narHash": "sha256-AYqlWrX09+HvGs8zM6ebZ1pwUqjkfpnv8mewYwAo+iM=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "bfc1b8a4574108ceef22f02bafcf6611380c100d",
"rev": "00c21e4c93d963c50d4c0c89bfa84ed6e0694df2",
"type": "github"
},
"original": {

View file

@ -97,20 +97,20 @@ pub struct InitArgs {
pub version: Option<String>,
/// Target platform
#[clap(short, long, default_value = "multiplatform")]
pub target: String,
#[clap(short, long)]
pub target: Option<String>,
/// Minecraft version
#[clap(short, long, default_value = "1.20.1")]
pub mc_version: String,
/// Minecraft versions (space-separated)
#[clap(short, long = "mc-versions", value_delimiter = ' ', num_args = 1..)]
pub mc_versions: Option<Vec<String>>,
/// Mod loader
#[clap(short, long, default_value = "fabric")]
pub loader: String,
/// Mod loaders (format: name=version, can be specified multiple times)
#[clap(short, long = "loaders", value_delimiter = ',')]
pub loaders: Option<Vec<String>>,
/// Mod loader version
#[clap(short = 'v', long, default_value = "latest")]
pub loader_version: String,
/// Skip interactive prompts (use defaults)
#[clap(short, long)]
pub yes: bool,
}
#[derive(Args)]
@ -118,6 +118,10 @@ pub struct ImportArgs {
/// Path to modpack file
pub file: String,
/// Resolve dependencies
#[clap(short = 'D', long = "deps")]
pub deps: bool,
/// Skip confirmation prompts
#[clap(short, long)]
pub yes: bool,
@ -203,9 +207,17 @@ pub struct RmArgs {
#[clap(required = true)]
pub inputs: Vec<String>,
/// Remove all projects
#[clap(short = 'a', long)]
pub all: bool,
/// Skip confirmation prompt
#[clap(short, long)]
pub yes: bool,
/// Skip removing dependent projects
#[clap(short = 'D', long = "no-deps")]
pub no_deps: bool,
}
#[derive(Args)]
@ -214,6 +226,10 @@ pub struct UpdateArgs {
#[arg(value_name = "PROJECT")]
pub inputs: Vec<String>,
/// Update all projects
#[arg(short, long)]
pub all: bool,
/// Skip confirmation prompts
#[arg(short, long)]
pub yes: bool,
@ -336,7 +352,7 @@ pub struct SyncArgs {
#[clap(short = 'R', long)]
pub removals: bool,
/// Sync updates only
/// Sync updates only (apply pending updates)
#[clap(short = 'U', long)]
pub updates: bool,
}
@ -356,6 +372,16 @@ pub struct ExportArgs {
/// Default is Pakker layout (exports/...)
#[clap(long)]
pub pakker_layout: bool,
/// Show file IO errors during export
#[clap(long = "show-io-errors")]
pub show_io_errors: bool,
/// Export modpack without server content
/// Modrinth: exclude server-overrides and SERVER mods
/// `ServerPack`: skip export
#[clap(long = "no-server")]
pub no_server: bool,
}
#[derive(Args)]

View file

@ -1,7 +1,7 @@
use std::collections::HashMap;
use crate::{
error::{PakkerError, Result},
error::{MultiError, PakkerError, Result},
model::{Config, LockFile, Project},
platform::create_platform,
resolver::DependencyResolver,
@ -139,10 +139,19 @@ pub async fn execute(
let platforms = create_all_platforms()?;
let mut new_projects = Vec::new();
let mut errors = MultiError::new();
// Resolve each input
for input in &args.inputs {
let project = resolve_input(input, &platforms, &lockfile).await?;
let project = match resolve_input(input, &platforms, &lockfile).await {
Ok(p) => p,
Err(e) => {
// Collect error but continue with other inputs
log::warn!("Failed to resolve '{input}': {e}");
errors.push(e);
continue;
},
};
// Check if already exists by matching platform IDs (not pakku_id which is
// random)
@ -174,6 +183,15 @@ pub async fn execute(
continue;
}
// Prompt for confirmation unless --yes flag is set
if !args.yes {
let prompt_msg = format!("Add project '{}'?", project.get_name());
if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
log::info!("Skipping project: {}", project.get_name());
continue;
}
}
new_projects.push(project);
}
@ -213,6 +231,9 @@ pub async fn execute(
new_projects = all_new_projects;
}
// Track count before moving
let added_count = new_projects.len();
// Add projects to lockfile (updates already handled above)
for project in new_projects {
lockfile.add_project(project);
@ -221,7 +242,20 @@ pub async fn execute(
// Save lockfile
lockfile.save(lockfile_dir)?;
log::info!("Successfully added {} project(s)", args.inputs.len());
log::info!("Successfully added {added_count} project(s)");
// Return aggregated errors if any occurred
if !errors.is_empty() {
let error_count = errors.len();
log::warn!(
"{error_count} project(s) failed to resolve (see warnings above)"
);
// Return success if at least some projects were added, otherwise return
// errors
if added_count == 0 && args.inputs.len() == error_count {
return errors.into_result(());
}
}
Ok(())
}

View file

@ -2,7 +2,11 @@ use std::path::Path;
use yansi::Paint;
use crate::{error::Result, model::config::Config};
use crate::{
error::Result,
model::config::Config,
ui_utils::prompt_input_optional,
};
pub fn execute(
config_path: &Path,
@ -85,11 +89,50 @@ pub fn execute(
}
if !changed {
eprintln!(
// Interactive mode: prompt for values if none were specified
println!(
"{}",
"No changes specified. Use --help for options.".yellow()
"No changes specified. Enter values interactively (press Enter to skip):"
.yellow()
);
return Ok(());
println!();
// Prompt for each configurable field
if let Ok(Some(new_name)) = prompt_input_optional(" Name") {
config.name = new_name.clone();
println!("{}", format!(" ✓ 'name' set to '{new_name}'").green());
changed = true;
}
if let Ok(Some(new_version)) = prompt_input_optional(" Version") {
config.version = new_version.clone();
println!(
"{}",
format!(" ✓ 'version' set to '{new_version}'").green()
);
changed = true;
}
if let Ok(Some(new_description)) = prompt_input_optional(" Description") {
config.description = Some(new_description.clone());
println!(
"{}",
format!(" ✓ 'description' set to '{new_description}'").green()
);
changed = true;
}
if let Ok(Some(new_author)) = prompt_input_optional(" Author") {
config.author = Some(new_author.clone());
println!("{}", format!(" ✓ 'author' set to '{new_author}'").green());
changed = true;
}
if !changed {
println!();
println!("{}", "No changes made.".dim());
return Ok(());
}
}
// Config::save expects directory path, not file path

View file

@ -20,11 +20,22 @@ pub async fn execute(
log::info!("Exporting all profiles");
}
// Handle --no-server flag
if args.no_server {
log::info!("Server content will be excluded from export");
}
// Handle --show-io-errors flag
let show_io_errors = args.show_io_errors;
if show_io_errors {
log::info!("IO errors will be shown during export");
}
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
let config_dir = config_path.parent().unwrap_or(Path::new("."));
// IPC coordination - prevent concurrent operations on the same modpack
let ipc = IpcCoordinator::new(&config_dir.to_path_buf())?;
let ipc = IpcCoordinator::new(config_dir)?;
let ipc_timeout = std::time::Duration::from_secs(60);
// Check for conflicting export operations

View file

@ -37,8 +37,8 @@ pub async fn execute(
let operation_id = coordinator.register_operation(OperationType::Fetch)?;
let _guard = OperationGuard::new(coordinator, operation_id);
// Create fetcher
let fetcher = Fetcher::new(".");
// Create fetcher with shelve option
let fetcher = Fetcher::new(".").with_shelve(args.shelve);
// Fetch all projects (progress indicators handled in fetch.rs)
fetcher.fetch_all(&lockfile, &config).await?;

View file

@ -211,13 +211,12 @@ fn execute_init(
.args(["log", "--limit", "1", "--template", ""])
.current_dir(path)
.output()
&& !output.stdout.is_empty()
{
if !output.stdout.is_empty() {
println!(
"Note: Jujutsu repository detected. Make sure to run 'jj git \
push' to sync changes with remote if needed."
);
}
println!(
"Note: Jujutsu repository detected. Make sure to run 'jj git \
push' to sync changes with remote if needed."
);
}
},
VcsType::None => {

View file

@ -13,6 +13,10 @@ pub async fn execute(
config_path: &Path,
) -> Result<()> {
log::info!("Importing modpack from {}", args.file);
log::info!(
"Dependency resolution: {}",
if args.deps { "enabled" } else { "disabled" }
);
let path = Path::new(&args.file);
@ -130,16 +134,19 @@ async fn import_modrinth(
{
log::info!("Fetching project: {project_id}");
match platform
.request_project_with_files(project_id, &lockfile.mc_versions, &[
loader.0.clone(),
])
.request_project_with_files(
project_id,
&lockfile.mc_versions,
std::slice::from_ref(&loader.0),
)
.await
{
Ok(mut project) => {
// Select best file
if let Err(e) =
project.select_file(&lockfile.mc_versions, &[loader.0.clone()])
{
if let Err(e) = project.select_file(
&lockfile.mc_versions,
std::slice::from_ref(&loader.0),
) {
log::warn!(
"Failed to select file for {}: {}",
project.get_name(),
@ -159,24 +166,25 @@ async fn import_modrinth(
// Create config
let config = Config {
name: index["name"]
name: index["name"]
.as_str()
.unwrap_or("Imported Pack")
.to_string(),
version: index["versionId"]
version: index["versionId"]
.as_str()
.unwrap_or("1.0.0")
.to_string(),
description: index["summary"]
description: index["summary"]
.as_str()
.map(std::string::ToString::to_string),
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: Default::default(),
projects: None,
export_profiles: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: Default::default(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
};
// Save files using provided paths
@ -341,24 +349,25 @@ async fn import_curseforge(
// Create config
let config = Config {
name: manifest["name"]
name: manifest["name"]
.as_str()
.unwrap_or("Imported Pack")
.to_string(),
version: manifest["version"]
version: manifest["version"]
.as_str()
.unwrap_or("1.0.0")
.to_string(),
description: None,
author: manifest["author"]
description: None,
author: manifest["author"]
.as_str()
.map(std::string::ToString::to_string),
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: Default::default(),
projects: None,
export_profiles: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: Default::default(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
};
// Save files using provided paths

View file

@ -3,7 +3,13 @@ use std::{collections::HashMap, path::Path};
use crate::{
cli::InitArgs,
error::PakkerError,
model::{Config, LockFile, Target},
model::{Config, LockFile, ResolvedCredentials, Target},
ui_utils::{
prompt_curseforge_api_key,
prompt_input,
prompt_select,
prompt_yes_no,
},
};
pub async fn execute(
@ -17,8 +23,42 @@ pub async fn execute(
));
}
let target = args.target.as_str();
let target_enum = match target {
// Interactive mode: prompt for values not provided via CLI and --yes not set
let is_interactive = !args.yes && args.name.is_none();
// Get modpack name
let name = if let Some(name) = args.name.clone() {
name
} else if is_interactive {
prompt_input("Modpack name", Some("My Modpack"))
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?
} else {
"My Modpack".to_string()
};
// Get modpack version
let version = if let Some(version) = args.version.clone() {
version
} else if is_interactive {
prompt_input("Version", Some("1.0.0"))
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?
} else {
"1.0.0".to_string()
};
// Get target platform
let target = if let Some(target) = args.target.clone() {
target
} else if is_interactive {
let targets = ["multiplatform", "curseforge", "modrinth"];
let idx = prompt_select("Target platform", &targets)
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
targets[idx].to_string()
} else {
"multiplatform".to_string()
};
let target_enum = match target.as_str() {
"curseforge" => Target::CurseForge,
"modrinth" => Target::Modrinth,
"multiplatform" => Target::Multiplatform,
@ -29,17 +69,56 @@ pub async fn execute(
},
};
let mc_versions = vec![args.mc_version];
// Get Minecraft versions (supports multiple)
let mc_versions = if let Some(versions) = args.mc_versions.clone() {
versions
} else if is_interactive {
let input =
prompt_input("Minecraft versions (space-separated)", Some("1.20.1"))
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
input.split_whitespace().map(String::from).collect()
} else {
vec!["1.20.1".to_string()]
};
let mut loaders = HashMap::new();
loaders.insert(args.loader, args.loader_version);
// Get mod loaders (supports multiple in name=version format)
let loaders: HashMap<String, String> = if let Some(loader_strs) = args.loaders
{
let mut map = HashMap::new();
for loader_str in loader_strs {
let parts: Vec<&str> = loader_str.splitn(2, '=').collect();
if parts.len() == 2 {
map.insert(parts[0].to_string(), parts[1].to_string());
} else {
// If no version specified, use "latest"
map.insert(loader_str, "latest".to_string());
}
}
map
} else if is_interactive {
let loader_options = ["fabric", "forge", "neoforge", "quilt"];
let idx = prompt_select("Mod loader", &loader_options)
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
let loader = loader_options[idx].to_string();
let loader_version = prompt_input("Loader version", Some("latest"))
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
let mut map = HashMap::new();
map.insert(loader, loader_version);
map
} else {
let mut map = HashMap::new();
map.insert("fabric".to_string(), "latest".to_string());
map
};
let lockfile = LockFile {
target: Some(target_enum),
mc_versions,
loaders,
projects: Vec::new(),
lockfile_version: 1,
lockfile_version: 2,
};
// Save expects directory path, so get parent directory
@ -47,21 +126,65 @@ pub async fn execute(
lockfile.save(lockfile_dir)?;
let config = Config {
name: args.name.unwrap_or_else(|| "My Modpack".to_string()),
version: args.version.unwrap_or_else(|| "1.0.0".to_string()),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
name: name.clone(),
version: version.clone(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
};
let config_dir = config_path.parent().unwrap_or(Path::new("."));
config.save(config_dir)?;
println!("Initialized new modpack with target: {target}");
println!("Initialized new modpack '{name}' v{version}");
println!(" Target: {target}");
println!(" Minecraft: {}", lockfile.mc_versions.join(", "));
println!(
" Loaders: {}",
lockfile
.loaders
.iter()
.map(|(k, v)| format!("{k}={v}"))
.collect::<Vec<_>>()
.join(", ")
);
// Check if CurseForge API key is needed and prompt if interactive
if is_interactive && (target == "curseforge" || target == "multiplatform") {
let credentials = ResolvedCredentials::load().ok();
let has_cf_key = credentials
.as_ref()
.is_some_and(|c| c.curseforge_api_key().is_some());
if !has_cf_key {
println!();
if prompt_yes_no("Would you like to set up CurseForge API key now?", true)
.map_err(|e| PakkerError::InvalidInput(e.to_string()))?
&& let Ok(Some(api_key)) = prompt_curseforge_api_key()
{
// Save to credentials file
let creds_path = std::env::var("HOME").map_or_else(
|_| Path::new(".pakku").to_path_buf(),
|h| Path::new(&h).join(".pakku"),
);
std::fs::create_dir_all(&creds_path).ok();
let creds_file = creds_path.join("credentials");
let content =
format!("# Pakku/Pakker credentials\nCURSEFORGE_API_KEY={api_key}\n");
if std::fs::write(&creds_file, content).is_ok() {
println!("CurseForge API key saved to ~/.pakku/credentials");
}
}
}
}
Ok(())
}

View file

@ -176,7 +176,7 @@ fn display_project_inspection(
// Display project files
println!();
display_project_files(&project.files)?;
display_project_files(&project.files, project)?;
// Display properties
println!();
@ -228,7 +228,10 @@ fn display_project_header(project: &Project) -> Result<()> {
Ok(())
}
fn display_project_files(files: &[ProjectFile]) -> Result<()> {
fn display_project_files(
files: &[ProjectFile],
project: &Project,
) -> Result<()> {
if files.is_empty() {
println!("{}", "No files available".yellow());
return Ok(());
@ -250,19 +253,31 @@ fn display_project_files(files: &[ProjectFile]) -> Result<()> {
format!(" {status}")
};
// File path line
// File path line with optional site URL
let file_path = format!("{}={}", file.file_type, file.file_name);
table.add_row(vec![
Cell::new(format!("{file_path}:{status_text}")).fg(if idx == 0 {
Color::Green
} else {
Color::White
}),
]);
let file_display = if let Some(site_url) = file.get_site_url(project) {
// Create hyperlink for the file
let hyperlink = crate::ui_utils::hyperlink(&site_url, &file_path);
format!("{hyperlink}:{status_text}")
} else {
format!("{file_path}:{status_text}")
};
table.add_row(vec![Cell::new(file_display).fg(if idx == 0 {
Color::Green
} else {
Color::White
})]);
// Date published
table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]);
// Show site URL if available (for non-hyperlink terminals)
if let Some(site_url) = file.get_site_url(project) {
table
.add_row(vec![Cell::new(format!("URL: {site_url}")).fg(Color::Blue)]);
}
// Empty line
table.add_row(vec![Cell::new("")]);

View file

@ -2,6 +2,18 @@ use std::path::Path;
use crate::{cli::LsArgs, error::Result, model::LockFile};
/// Truncate a name to fit within `max_len` characters, adding "..." if
/// truncated.
///
/// Counts and slices by `char` rather than by byte: the original byte-index
/// slicing (`&name[..max_len - 3]`) panics when the cut lands inside a
/// multi-byte UTF-8 sequence (e.g. accented project names), and `name.len()`
/// over-counts multibyte names. Behavior for pure-ASCII input is unchanged.
fn truncate_name(name: &str, max_len: usize) -> String {
    let char_count = name.chars().count();
    if char_count <= max_len {
        name.to_string()
    } else if max_len > 3 {
        // Reserve three characters for the ellipsis.
        let head: String = name.chars().take(max_len - 3).collect();
        format!("{head}...")
    } else {
        // Too narrow for an ellipsis; hard-cut to `max_len` characters.
        name.chars().take(max_len).collect()
    }
}
pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
// Load expects directory path, so get parent directory
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
@ -15,10 +27,33 @@ pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
println!("Installed projects ({}):", lockfile.projects.len());
println!();
// Calculate max name length for alignment
let max_name_len = args.name_max_length.unwrap_or_else(|| {
lockfile
.projects
.iter()
.map(|p| p.get_name().len())
.max()
.unwrap_or(20)
.min(50)
});
for project in &lockfile.projects {
// Check for version mismatch across providers
let version_warning = if project.versions_match_across_providers() {
""
} else {
// Use the detailed check_version_mismatch for logging
if let Some(mismatch_detail) = project.check_version_mismatch() {
log::warn!("{mismatch_detail}");
}
" [!] versions do not match across providers"
};
if args.detailed {
let id = project.pakku_id.as_deref().unwrap_or("unknown");
println!(" {} ({})", project.get_name(), id);
let name = truncate_name(&project.get_name(), max_name_len);
println!(" {name} ({id}){version_warning}");
println!(" Type: {:?}", project.r#type);
println!(" Side: {:?}", project.side);
@ -30,19 +65,28 @@ pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
);
}
// Show version details if there's a mismatch
if !version_warning.is_empty() {
println!(" Provider versions:");
for file in &project.files {
println!(" {}: {}", file.file_type, file.file_name);
}
}
if !project.pakku_links.is_empty() {
println!(" Dependencies: {}", project.pakku_links.len());
}
println!();
} else {
let name = truncate_name(&project.get_name(), max_name_len);
let file_info = project
.files
.first()
.map(|f| format!(" ({})", f.file_name))
.unwrap_or_default();
println!(" {}{}", project.get_name(), file_info);
println!(" {name}{file_info}{version_warning}");
}
}

View file

@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
@ -71,7 +71,7 @@ pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {
}
/// Sync override files from remote directory to current directory
async fn sync_overrides(remote_dir: &PathBuf) -> Result<(), PakkerError> {
async fn sync_overrides(remote_dir: &Path) -> Result<(), PakkerError> {
let remote_config_path = remote_dir.join("pakku.json");
if !remote_config_path.exists() {
return Ok(());

View file

@ -4,7 +4,7 @@ use crate::{
cli::RmArgs,
error::{PakkerError, Result},
model::LockFile,
ui_utils::prompt_yes_no,
ui_utils::{prompt_typo_suggestion, prompt_yes_no},
};
pub async fn execute(
@ -12,19 +12,84 @@ pub async fn execute(
lockfile_path: &Path,
_config_path: &Path,
) -> Result<()> {
log::info!("Removing projects: {:?}", args.inputs);
// Load expects directory path, so get parent directory
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
let mut lockfile = LockFile::load(lockfile_dir)?;
// Determine which projects to remove
let inputs: Vec<String> = if args.all {
log::info!("Removing all projects from lockfile");
lockfile
.projects
.iter()
.filter_map(|p| {
p.pakku_id
.clone()
.or_else(|| p.slug.values().next().cloned())
})
.collect()
} else {
args.inputs.clone()
};
if inputs.is_empty() {
return if args.all {
Err(PakkerError::ProjectNotFound(
"No projects found in lockfile".to_string(),
))
} else {
Err(PakkerError::ProjectNotFound(
"No projects specified".to_string(),
))
};
}
log::info!("Removing projects: {inputs:?}");
let mut removed_count = 0;
let mut removed_ids = Vec::new();
let mut projects_to_remove = Vec::new();
// Collect all known project identifiers for typo suggestions
let all_slugs: Vec<String> = lockfile
.projects
.iter()
.flat_map(|p| {
let mut ids = Vec::new();
if let Some(ref pakku_id) = p.pakku_id {
ids.push(pakku_id.clone());
}
ids.extend(p.slug.values().cloned());
ids.extend(p.name.values().cloned());
ids.extend(p.aliases.iter().cloned());
ids
})
.collect();
// First, identify all projects to remove
for input in &args.inputs {
let mut resolved_inputs = Vec::new();
for input in &inputs {
// Find project by various identifiers
if lockfile.projects.iter().any(|p| {
p.pakku_id.as_deref() == Some(input)
|| p.slug.values().any(|s| s == input)
|| p.name.values().any(|n| n.eq_ignore_ascii_case(input))
|| p.aliases.contains(input)
}) {
resolved_inputs.push(input.clone());
} else if !args.all {
// Try typo suggestion
if let Ok(Some(suggestion)) = prompt_typo_suggestion(input, &all_slugs) {
log::info!("Using suggested project: {suggestion}");
resolved_inputs.push(suggestion);
} else {
log::warn!("Project not found: {input}");
}
}
}
// Now find the actual projects from resolved inputs
for input in &resolved_inputs {
if let Some(project) = lockfile.projects.iter().find(|p| {
p.pakku_id.as_deref() == Some(input)
|| p.slug.values().any(|s| s == input)
@ -32,18 +97,20 @@ pub async fn execute(
|| p.aliases.contains(input)
}) {
projects_to_remove.push(project.get_name());
} else {
log::warn!("Project not found: {input}");
}
}
// Replace inputs with resolved_inputs for actual removal
let inputs = resolved_inputs;
if projects_to_remove.is_empty() {
return Err(PakkerError::ProjectNotFound(
"None of the specified projects found".to_string(),
));
}
// Ask for confirmation unless --yes flag is provided
// Ask for confirmation unless --yes flag is provided or --all with no
// projects
if !args.yes {
println!("The following projects will be removed:");
for name in &projects_to_remove {
@ -57,7 +124,7 @@ pub async fn execute(
}
// Now actually remove the projects
for input in &args.inputs {
for input in &inputs {
if let Some(pos) = lockfile.projects.iter().position(|p| {
p.pakku_id.as_deref() == Some(input)
|| p.slug.values().any(|s| s == input)

View file

@ -6,7 +6,7 @@ use tokio::sync::Semaphore;
use yansi::Paint;
use crate::{
error::Result,
error::{ErrorSeverity, Result},
model::{Config, LockFile, Project},
platform::create_platform,
};
@ -36,13 +36,42 @@ pub async fn execute(
// Display results
display_update_results(&updates);
// Display errors if any
// Display errors if any, categorized by severity
if !errors.is_empty() {
println!();
println!("{}", "Errors encountered:".red());
for (project, error) in &errors {
println!(" - {}: {}", project.yellow(), error.red());
// Categorize errors by severity
let (warnings, errors_only): (Vec<_>, Vec<_>) =
errors.iter().partition(|(_, err)| {
// Network errors and "not found" are warnings (non-fatal)
err.contains("Failed to check") || err.contains("not found")
});
// Display warnings (ErrorSeverity::Warning)
if !warnings.is_empty() {
let severity = ErrorSeverity::Warning;
println!("{}", format_severity_header(severity, "Warnings"));
for (project, error) in &warnings {
println!(" - {}: {}", project.yellow(), error.dim());
}
}
// Display errors (ErrorSeverity::Error)
if !errors_only.is_empty() {
let severity = ErrorSeverity::Error;
println!("{}", format_severity_header(severity, "Errors"));
for (project, error) in &errors_only {
println!(" - {}: {}", project.yellow(), error.red());
}
}
// Log info level summary
let _info_severity = ErrorSeverity::Info;
log::info!(
"Update check completed with {} warning(s) and {} error(s)",
warnings.len(),
errors_only.len()
);
}
// Prompt to update if there are updates available
@ -52,6 +81,7 @@ pub async fn execute(
// Call update command programmatically (update all projects)
let update_args = crate::cli::UpdateArgs {
inputs: vec![],
all: true,
yes: true, // Auto-yes for status command
};
crate::cli::commands::update::execute(
@ -368,3 +398,12 @@ fn get_api_key(platform: &str) -> Option<String> {
_ => None,
}
}
/// Format severity header with appropriate color
fn format_severity_header(severity: ErrorSeverity, label: &str) -> String {
match severity {
ErrorSeverity::Error => format!("{label}:").red().to_string(),
ErrorSeverity::Warning => format!("{label}:").yellow().to_string(),
ErrorSeverity::Info => format!("{label}:").cyan().to_string(),
}
}

View file

@ -4,10 +4,10 @@ use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::UpdateArgs,
error::PakkerError,
model::{Config, LockFile},
error::{MultiError, PakkerError},
model::{Config, LockFile, UpdateStrategy},
platform::create_platform,
ui_utils::prompt_select,
ui_utils::{prompt_select, prompt_typo_suggestion, prompt_yes_no},
};
pub async fn execute(
@ -33,6 +33,22 @@ pub async fn execute(
platforms.insert("curseforge".to_string(), platform);
}
// Collect all known project identifiers for typo suggestions
let all_slugs: Vec<String> = lockfile
.projects
.iter()
.flat_map(|p| {
let mut ids = Vec::new();
if let Some(ref pakku_id) = p.pakku_id {
ids.push(pakku_id.clone());
}
ids.extend(p.slug.values().cloned());
ids.extend(p.name.values().cloned());
ids.extend(p.aliases.iter().cloned());
ids
})
.collect();
let project_indices: Vec<_> = if args.inputs.is_empty() {
(0..lockfile.projects.len()).collect()
} else {
@ -46,14 +62,29 @@ pub async fn execute(
{
indices.push(idx);
} else {
// Try typo suggestion
if let Ok(Some(suggestion)) = prompt_typo_suggestion(input, &all_slugs)
&& let Some((idx, _)) = lockfile
.projects
.iter()
.enumerate()
.find(|(_, p)| p.matches_input(&suggestion))
{
log::info!("Using suggested project: {suggestion}");
indices.push(idx);
continue;
}
return Err(PakkerError::ProjectNotFound(input.clone()));
}
}
indices
};
// Capture count before consuming the iterator
let total_projects = project_indices.len();
// Create progress bar
let pb = ProgressBar::new(project_indices.len() as u64);
let pb = ProgressBar::new(total_projects as u64);
pb.set_style(
ProgressStyle::default_bar()
.template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
@ -61,8 +92,23 @@ pub async fn execute(
.progress_chars("#>-"),
);
let mut skipped_pinned = 0;
let mut update_errors = MultiError::new();
for idx in project_indices {
let old_project = &lockfile.projects[idx];
// Skip projects with UpdateStrategy::None (pinned)
if old_project.update_strategy == UpdateStrategy::None {
pb.println(format!(
" {} - Skipped (update strategy: NONE)",
old_project.get_name()
));
skipped_pinned += 1;
pb.inc(1);
continue;
}
pb.set_message(format!("Updating {}...", old_project.get_name()));
let slug = old_project
@ -87,54 +133,116 @@ pub async fn execute(
}
}
if updated_project.is_none() {
// Failed to fetch update info from any platform
update_errors.push(PakkerError::PlatformApiError(format!(
"Failed to check updates for '{}'",
old_project.get_name()
)));
pb.inc(1);
continue;
}
if let Some(mut updated_project) = updated_project
&& !updated_project.files.is_empty()
&& let Some(old_file) = lockfile.projects[idx].files.first()
{
let new_file = updated_project.files.first().unwrap();
// Clone data needed for comparisons to avoid borrow issues
let new_file_id = updated_project.files.first().unwrap().id.clone();
let new_file_name =
updated_project.files.first().unwrap().file_name.clone();
let old_file_name = old_file.file_name.clone();
let project_name = old_project.get_name();
if new_file.id == old_file.id {
pb.println(format!(
" {} - Already up to date",
old_project.get_name()
));
if new_file_id == old_file.id {
pb.println(format!(" {project_name} - Already up to date"));
} else {
// Interactive version selection if not using --yes flag
if !args.yes && updated_project.files.len() > 1 {
// Interactive confirmation and version selection if not using --yes
// flag
let mut should_update = args.yes || args.all;
let mut selected_idx: Option<usize> = None;
if !args.yes && !args.all {
pb.suspend(|| {
let choices: Vec<String> = updated_project
.files
.iter()
.map(|f| format!("{} ({})", f.file_name, f.id))
.collect();
// First, confirm the update
let prompt_msg = format!(
"Update '{project_name}' from {old_file_name} to \
{new_file_name}?"
);
should_update = prompt_yes_no(&prompt_msg, true).unwrap_or(false);
let choice_refs: Vec<&str> =
choices.iter().map(std::string::String::as_str).collect();
// If confirmed and multiple versions available, offer selection
if should_update && updated_project.files.len() > 1 {
let choices: Vec<String> = updated_project
.files
.iter()
.map(|f| format!("{} ({})", f.file_name, f.id))
.collect();
if let Ok(selected_idx) = prompt_select(
&format!("Select version for {}:", old_project.get_name()),
&choice_refs,
) {
// Move selected file to front
if selected_idx > 0 {
updated_project.files.swap(0, selected_idx);
let choice_refs: Vec<&str> =
choices.iter().map(std::string::String::as_str).collect();
if let Ok(idx) = prompt_select(
&format!("Select version for {project_name}:"),
&choice_refs,
) {
selected_idx = Some(idx);
}
}
});
}
let selected_file = updated_project.files.first().unwrap();
pb.println(format!(
" {} -> {}",
old_file.file_name, selected_file.file_name
));
lockfile.projects[idx] = updated_project;
// Apply file selection outside the closure
if let Some(idx) = selected_idx
&& idx > 0
{
updated_project.files.swap(0, idx);
}
if should_update {
let selected_file = updated_project.files.first().unwrap();
pb.println(format!(
" {} -> {}",
old_file_name, selected_file.file_name
));
lockfile.projects[idx] = updated_project;
} else {
pb.println(format!(" {project_name} - Skipped by user"));
}
}
}
pb.inc(1);
}
pb.finish_with_message("Update complete");
if skipped_pinned > 0 {
pb.finish_with_message(format!(
"Update complete ({skipped_pinned} pinned projects skipped)"
));
} else {
pb.finish_with_message("Update complete");
}
lockfile.save(lockfile_dir)?;
// Report any errors that occurred during updates
if !update_errors.is_empty() {
let error_list = update_errors.errors();
log::warn!(
"{} project(s) encountered errors during update check",
error_list.len()
);
for err in error_list {
log::warn!(" - {err}");
}
// Extend with any additional collected errors and check if we should fail
let all_errors = update_errors.into_errors();
if all_errors.len() == total_projects {
// All projects failed - return error
let mut multi = MultiError::new();
multi.extend(all_errors);
return multi.into_result(());
}
}
Ok(())
}

257
src/cli/tests.rs Normal file
View file

@ -0,0 +1,257 @@
#[cfg(test)]
mod tests {
    // `parse_from` is provided by the `clap::Parser` trait; it must be in
    // scope for the `*Args::parse_from(...)` calls below to resolve.
    use clap::Parser;

    use crate::{
        cli::{ExportArgs, ImportArgs, RmArgs},
        model::config::Config,
    };

    // NOTE(review): `Parser::parse_from` skips only the FIRST element (the
    // binary name). If these arg structs are plain positional parsers, the
    // subcommand token ("rm"/"import"/"export") in these fixtures would be
    // captured as the first positional value — confirm against the actual
    // derive layout of the CLI before relying on these fixtures.

    /// Common fixture: a minimal valid config with the given
    /// `export_server_side_projects_to_client` value.
    fn config_with_export_flag(flag: Option<bool>) -> Config {
        Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: None,
            author: None,
            overrides: vec!["overrides".to_string()],
            server_overrides: None,
            client_overrides: None,
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: flag,
        }
    }

    #[test]
    fn test_rm_args_parsing_all_flag() {
        let args = RmArgs::parse_from(&["pakker", "rm", "--all"]);
        assert!(args.all);
        assert!(args.inputs.is_empty());
    }

    #[test]
    fn test_rm_args_parsing_multiple_inputs() {
        let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "mod2", "mod3"]);
        assert!(!args.all);
        assert_eq!(args.inputs, vec!["mod1", "mod2", "mod3"]);
    }

    #[test]
    fn test_rm_args_parsing_all_with_yes() {
        let args = RmArgs::parse_from(&["pakker", "rm", "--all", "--yes"]);
        assert!(args.all);
        assert!(args.yes);
        assert!(args.inputs.is_empty());
    }

    #[test]
    fn test_rm_args_parsing_with_inputs_and_yes() {
        let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "--yes"]);
        assert!(!args.all);
        assert!(args.yes);
        assert_eq!(args.inputs, vec!["mod1"]);
    }

    #[test]
    fn test_import_args_parsing_deps_flag() {
        let args =
            ImportArgs::parse_from(&["pakker", "import", "--deps", "pack.zip"]);
        assert!(args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    #[test]
    fn test_import_args_parsing_no_deps_default() {
        let args = ImportArgs::parse_from(&["pakker", "import", "pack.zip"]);
        assert!(!args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    #[test]
    fn test_import_args_parsing_deps_with_yes() {
        let args = ImportArgs::parse_from(&[
            "pakker", "import", "--deps", "--yes", "pack.zip",
        ]);
        assert!(args.deps);
        assert!(args.yes);
        assert_eq!(args.file, "pack.zip");
    }

    #[test]
    fn test_import_args_parsing_short_deps_flag() {
        let args = ImportArgs::parse_from(&["pakker", "import", "-D", "pack.zip"]);
        assert!(args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    #[test]
    fn test_export_args_parsing_show_io_errors() {
        let args =
            ExportArgs::parse_from(&["pakker", "export", "--show-io-errors"]);
        assert!(args.show_io_errors);
        assert!(!args.no_server);
    }

    #[test]
    fn test_export_args_parsing_no_server() {
        let args = ExportArgs::parse_from(&["pakker", "export", "--no-server"]);
        assert!(args.no_server);
        assert!(!args.show_io_errors);
    }

    #[test]
    fn test_export_args_parsing_both_flags() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--show-io-errors",
            "--no-server",
            "--profile",
            "modrinth",
        ]);
        assert!(args.show_io_errors);
        assert!(args.no_server);
        assert_eq!(args.profile, Some("modrinth".to_string()));
    }

    #[test]
    fn test_export_args_parsing_with_output() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--output",
            "/tmp/export",
            "--profile",
            "curseforge",
        ]);
        assert_eq!(args.output, Some("/tmp/export".to_string()));
        assert_eq!(args.profile, Some("curseforge".to_string()));
    }

    #[test]
    fn test_export_args_parsing_pakker_layout() {
        let args = ExportArgs::parse_from(&["pakker", "export", "--pakker-layout"]);
        assert!(args.pakker_layout);
    }

    #[test]
    fn test_config_with_export_server_side_projects_to_client_true() {
        let config = config_with_export_flag(Some(true));
        assert_eq!(config.export_server_side_projects_to_client, Some(true));
    }

    #[test]
    fn test_config_with_export_server_side_projects_to_client_false() {
        let config = config_with_export_flag(Some(false));
        assert_eq!(config.export_server_side_projects_to_client, Some(false));
    }

    #[test]
    fn test_config_without_export_server_side_projects_to_client() {
        let config = config_with_export_flag(None);
        assert!(config.export_server_side_projects_to_client.is_none());
    }

    #[test]
    fn test_config_serialization_with_export_server_side() {
        // Flag set: the camelCase key must round-trip through JSON.
        let config = Config {
            description: Some("A test modpack".to_string()),
            author: Some("Test Author".to_string()),
            server_overrides: Some(vec!["server-overrides".to_string()]),
            client_overrides: Some(vec!["client-overrides".to_string()]),
            ..config_with_export_flag(Some(true))
        };
        let json = serde_json::to_string_pretty(&config).unwrap();
        assert!(json.contains("exportServerSideProjectsToClient"));
        assert!(json.contains("true"));
        let deserialized: Config = serde_json::from_str(&json).unwrap();
        assert_eq!(
            deserialized.export_server_side_projects_to_client,
            Some(true)
        );
    }

    #[test]
    fn test_config_serialization_without_export_server_side() {
        // Flag unset: the key must be omitted from the serialized output.
        let config = config_with_export_flag(None);
        let json = serde_json::to_string_pretty(&config).unwrap();
        assert!(!json.contains("exportServerSideProjectsToClient"));
        let deserialized: Config = serde_json::from_str(&json).unwrap();
        assert!(deserialized.export_server_side_projects_to_client.is_none());
    }

    #[test]
    fn test_config_default_has_no_export_server_side() {
        let config = Config::default();
        assert!(config.export_server_side_projects_to_client.is_none());
    }

    #[test]
    fn test_export_args_all_flags_together() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--profile",
            "modrinth",
            "--output",
            "/tmp/out",
            "--pakker-layout",
            "--show-io-errors",
            "--no-server",
        ]);
        assert_eq!(args.profile, Some("modrinth".to_string()));
        assert_eq!(args.output, Some("/tmp/out".to_string()));
        assert!(args.pakker_layout);
        assert!(args.show_io_errors);
        assert!(args.no_server);
    }
}

View file

@ -2,6 +2,76 @@ use thiserror::Error;
pub type Result<T> = std::result::Result<T, PakkerError>;
/// Severity level for errors
///
/// Defaults to [`ErrorSeverity::Error`], the most severe level.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ErrorSeverity {
    /// Fatal error - operation cannot continue
    #[default]
    Error,
    /// Warning - operation can continue but may have issues
    Warning,
    /// Info - informational message
    Info,
}
/// Container for multiple errors that occurred during an operation
#[derive(Debug)]
pub struct MultiError {
    // Errors in the order they were collected
    errors: Vec<PakkerError>,
}
impl MultiError {
    /// Create an empty error collection.
    pub const fn new() -> Self {
        Self { errors: Vec::new() }
    }
    /// Append a single error.
    pub fn push(&mut self, error: PakkerError) {
        self.errors.push(error);
    }
    /// Append every error yielded by `errors`.
    pub fn extend(&mut self, errors: impl IntoIterator<Item = PakkerError>) {
        self.errors.extend(errors);
    }
    /// `true` when no errors have been collected.
    pub const fn is_empty(&self) -> bool {
        self.errors.is_empty()
    }
    /// Number of collected errors.
    pub const fn len(&self) -> usize {
        self.errors.len()
    }
    /// Consume the collection: `Ok(success_value)` when empty, otherwise
    /// `Err(PakkerError::Multiple(..))` wrapping every collected error.
    pub fn into_result<T>(self, success_value: T) -> Result<T> {
        if self.is_empty() {
            Ok(success_value)
        } else {
            Err(PakkerError::Multiple(self.errors))
        }
    }
    /// Borrow the collected errors without consuming the collection.
    pub fn errors(&self) -> &[PakkerError] {
        &self.errors
    }
    /// Consume the collection and return the underlying error list.
    pub fn into_errors(self) -> Vec<PakkerError> {
        self.errors
    }
}
impl Default for MultiError {
    fn default() -> Self {
        Self::new()
    }
}
impl FromIterator<PakkerError> for MultiError {
    fn from_iter<I: IntoIterator<Item = PakkerError>>(iter: I) -> Self {
        Self {
            errors: iter.into_iter().collect(),
        }
    }
}
#[derive(Error, Debug)]
pub enum PakkerError {
// Network errors
@ -95,6 +165,21 @@ pub enum PakkerError {
#[error("IPC error: {0}")]
IpcError(String),
#[error("{}", format_multiple_errors(.0))]
Multiple(Vec<Self>),
}
/// Render a collection of errors as a single human-readable message.
///
/// A single error is displayed as-is; multiple errors are rendered as a
/// numbered list preceded by a count header.
fn format_multiple_errors(errors: &[PakkerError]) -> String {
    use std::fmt::Write;
    // A lone error needs no "N errors occurred" framing.
    if let [only] = errors {
        return only.to_string();
    }
    let mut msg = format!("{} errors occurred:\n", errors.len());
    for (idx, error) in errors.iter().enumerate() {
        // Writing into a String is infallible; `writeln!` avoids the
        // per-iteration allocation that `push_str(&format!(..))` incurs.
        let _ = writeln!(msg, " {}. {}", idx + 1, error);
    }
    msg
}
impl From<git2::Error> for PakkerError {
@ -108,3 +193,96 @@ impl From<crate::ipc::IpcError> for PakkerError {
Self::IpcError(err.to_string())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Shorthand for building a `ProjectNotFound` error from a slug.
    fn not_found(slug: &str) -> PakkerError {
        PakkerError::ProjectNotFound(slug.to_string())
    }

    #[test]
    fn test_multi_error_empty() {
        let multi = MultiError::new();
        assert!(multi.is_empty());
        assert_eq!(multi.len(), 0);
    }

    #[test]
    fn test_multi_error_push() {
        let mut multi = MultiError::new();
        for slug in ["mod1", "mod2"] {
            multi.push(not_found(slug));
        }
        assert!(!multi.is_empty());
        assert_eq!(multi.len(), 2);
    }

    #[test]
    fn test_multi_error_into_result_empty() {
        // With no errors collected, the success value passes straight through.
        let result: Result<i32> = MultiError::new().into_result(42);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), 42);
    }

    #[test]
    fn test_multi_error_into_result_with_errors() {
        let mut multi = MultiError::new();
        multi.push(not_found("mod1"));
        let result: Result<i32> = multi.into_result(42);
        assert!(result.is_err());
    }

    #[test]
    fn test_multi_error_from_iterator() {
        let multi: MultiError =
            ["mod1", "mod2"].into_iter().map(not_found).collect();
        assert_eq!(multi.len(), 2);
    }

    #[test]
    fn test_multi_error_extend() {
        let mut multi = MultiError::new();
        multi.push(not_found("mod1"));
        multi.extend(["mod2", "mod3"].into_iter().map(not_found));
        assert_eq!(multi.len(), 3);
    }

    #[test]
    fn test_multiple_errors_formatting() {
        let error =
            PakkerError::Multiple(vec![not_found("mod1"), not_found("mod2")]);
        let msg = error.to_string();
        for needle in ["2 errors occurred", "mod1", "mod2"] {
            assert!(msg.contains(needle));
        }
    }

    #[test]
    fn test_single_multiple_error_formatting() {
        let msg = PakkerError::Multiple(vec![not_found("mod1")]).to_string();
        // A single wrapped error should render as that error alone,
        // without the "N errors occurred" header.
        assert!(msg.contains("mod1"));
        assert!(!msg.contains("errors occurred"));
    }

    #[test]
    fn test_error_severity_default() {
        assert_eq!(ErrorSeverity::default(), ErrorSeverity::Error);
    }
}

View file

@ -69,6 +69,17 @@ impl ProfileConfig {
.or(global_server_overrides.map(std::vec::Vec::as_slice))
}
/// Get effective client override paths, falling back to global config
pub fn get_client_overrides<'a>(
    &'a self,
    global_client_overrides: Option<&'a Vec<String>>,
) -> Option<&'a [String]> {
    // Profile-level overrides win; otherwise borrow the global list.
    match self.client_overrides.as_deref() {
        Some(own) => Some(own),
        None => global_client_overrides.map(Vec::as_slice),
    }
}
/// Get default config for `CurseForge` profile
pub fn curseforge_default() -> Self {
Self {

View file

@ -19,9 +19,15 @@ impl ExportProfile for CurseForgeProfile {
vec![
Box::new(super::rules::CopyProjectFilesRule),
Box::new(super::rules::FilterByPlatformRule),
Box::new(super::rules::MissingProjectsAsOverridesRule::new(
"curseforge",
)),
Box::new(super::rules::CopyOverridesRule),
Box::new(super::rules::CopyClientOverridesRule),
Box::new(super::rules::FilterServerOnlyRule),
Box::new(super::rules::GenerateManifestRule::curseforge()),
Box::new(super::rules::FilterNonRedistributableRule),
Box::new(super::rules::TextReplacementRule),
]
}
}
@ -35,8 +41,14 @@ impl ExportProfile for ModrinthProfile {
vec![
Box::new(super::rules::CopyProjectFilesRule),
Box::new(super::rules::FilterByPlatformRule),
Box::new(super::rules::MissingProjectsAsOverridesRule::new(
"modrinth",
)),
Box::new(super::rules::CopyOverridesRule),
Box::new(super::rules::CopyClientOverridesRule),
Box::new(super::rules::FilterServerOnlyRule),
Box::new(super::rules::GenerateManifestRule::modrinth()),
Box::new(super::rules::TextReplacementRule),
]
}
}
@ -51,6 +63,7 @@ impl ExportProfile for ServerPackProfile {
Box::new(super::rules::CopyProjectFilesRule),
Box::new(super::rules::CopyServerOverridesRule),
Box::new(super::rules::FilterClientOnlyRule),
Box::new(super::rules::TextReplacementRule),
]
}
}

View file

@ -1,10 +1,11 @@
use std::{fs, path::PathBuf};
use async_trait::async_trait;
use glob::glob;
use crate::{
error::Result,
model::{Config, LockFile, ProjectSide},
model::{Config, LockFile, ProjectSide, ProjectType},
};
#[derive(Clone)]
@ -46,7 +47,7 @@ pub struct CopyProjectFilesEffect;
#[async_trait]
impl Effect for CopyProjectFilesEffect {
fn name(&self) -> &'static str {
"Downloading and copying mod files"
"Downloading and copying project files"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
@ -58,17 +59,27 @@ impl Effect for CopyProjectFilesEffect {
credentials.curseforge_api_key().map(ToOwned::to_owned);
let modrinth_token = credentials.modrinth_token().map(ToOwned::to_owned);
let mods_dir = context.export_path.join("mods");
fs::create_dir_all(&mods_dir)?;
for project in &context.lockfile.projects {
if !project.export {
continue;
}
if let Some(file) = project.files.first() {
let source = context.base_path.join("mods").join(&file.file_name);
let dest = mods_dir.join(&file.file_name);
// Get the target directory based on project type and paths config
let type_dir = get_project_type_dir(&project.r#type, &context.config);
// Handle subpath if specified
let target_subdir = if let Some(subpath) = &project.subpath {
PathBuf::from(&type_dir).join(subpath)
} else {
PathBuf::from(&type_dir)
};
let export_dir = context.export_path.join(&target_subdir);
fs::create_dir_all(&export_dir)?;
let source = context.base_path.join(&type_dir).join(&file.file_name);
let dest = export_dir.join(&file.file_name);
if source.exists() {
fs::copy(&source, &dest)?;
@ -79,6 +90,7 @@ impl Effect for CopyProjectFilesEffect {
} else if !file.url.is_empty() {
download_file(
&context.base_path,
&type_dir,
&file.file_name,
&file.url,
curseforge_key.as_deref(),
@ -86,8 +98,9 @@ impl Effect for CopyProjectFilesEffect {
)
.await?;
// Copy into export mods/ after ensuring it is present in base mods/
let downloaded = context.base_path.join("mods").join(&file.file_name);
// Copy into export dir after ensuring it is present in base dir
let downloaded =
context.base_path.join(&type_dir).join(&file.file_name);
if downloaded.exists() {
fs::copy(&downloaded, &dest)?;
if let Some(ui) = &context.ui {
@ -102,7 +115,7 @@ impl Effect for CopyProjectFilesEffect {
}
} else {
return Err(crate::error::PakkerError::InternalError(format!(
"missing mod file and no download url: {}",
"missing project file and no download url: {}",
file.file_name
)));
}
@ -157,6 +170,7 @@ fn classify_reqwest_error(err: &reqwest::Error) -> DownloadFailure {
async fn download_file(
base_path: &std::path::Path,
type_dir: &str,
file_name: &str,
url: &str,
curseforge_key: Option<&str>,
@ -195,9 +209,9 @@ async fn download_file(
match response {
Ok(resp) if resp.status().is_success() => {
let bytes = resp.bytes().await?;
let mods_dir = base_path.join("mods");
fs::create_dir_all(&mods_dir)?;
let dest = mods_dir.join(file_name);
let target_dir = base_path.join(type_dir);
fs::create_dir_all(&target_dir)?;
let dest = target_dir.join(file_name);
std::fs::write(&dest, &bytes)?;
return Ok(());
},
@ -287,13 +301,16 @@ impl Effect for CopyOverridesEffect {
&context.config.overrides
};
for override_path in overrides {
let source = context.base_path.join(override_path);
// Expand any glob patterns in override paths
let expanded_paths = expand_override_globs(&context.base_path, overrides);
for override_path in expanded_paths {
let source = context.base_path.join(&override_path);
if !source.exists() {
continue;
}
let dest = context.export_path.join(override_path);
let dest = context.export_path.join(&override_path);
copy_recursive(&source, &dest)?;
}
@ -334,13 +351,16 @@ impl Effect for CopyServerOverridesEffect {
};
if let Some(overrides) = server_overrides {
for override_path in overrides {
let source = context.base_path.join(override_path);
// Expand any glob patterns in override paths
let expanded_paths = expand_override_globs(&context.base_path, overrides);
for override_path in expanded_paths {
let source = context.base_path.join(&override_path);
if !source.exists() {
continue;
}
let dest = context.export_path.join(override_path);
let dest = context.export_path.join(&override_path);
copy_recursive(&source, &dest)?;
}
}
@ -349,7 +369,58 @@ impl Effect for CopyServerOverridesEffect {
}
}
// Rule: Filter client-only projects
// Rule: Copy client overrides
pub struct CopyClientOverridesRule;
impl Rule for CopyClientOverridesRule {
    fn matches(&self, context: &RuleContext) -> bool {
        // The effect falls back from profile-level to global client
        // overrides, so the rule must fire when EITHER source is set.
        // Checking only the global config would silently skip packs that
        // declare client overrides solely on the export profile.
        context.config.client_overrides.is_some()
            || context
                .profile_config
                .as_ref()
                .is_some_and(|profile| profile.client_overrides.is_some())
    }
    fn effects(&self) -> Vec<Box<dyn Effect>> {
        vec![Box::new(CopyClientOverridesEffect)]
    }
}
pub struct CopyClientOverridesEffect;
#[async_trait]
impl Effect for CopyClientOverridesEffect {
    fn name(&self) -> &'static str {
        "Copying client override files"
    }
    /// Copy client-side override paths into the export directory.
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        // Profile-specific client overrides take precedence over the
        // global config entry.
        let client_overrides = context.profile_config.as_ref().map_or_else(
            || context.config.client_overrides.as_deref(),
            |profile| {
                profile.get_client_overrides(context.config.client_overrides.as_ref())
            },
        );
        let Some(overrides) = client_overrides else {
            return Ok(());
        };
        // Override entries may contain glob patterns; expand them first.
        for relative in expand_override_globs(&context.base_path, overrides) {
            let source = context.base_path.join(&relative);
            if source.exists() {
                copy_recursive(&source, &context.export_path.join(&relative))?;
            }
        }
        Ok(())
    }
}
// Rule: Filter client-only projects (for server packs)
pub struct FilterClientOnlyRule;
impl Rule for FilterClientOnlyRule {
@ -367,7 +438,7 @@ pub struct FilterClientOnlyEffect;
#[async_trait]
impl Effect for FilterClientOnlyEffect {
fn name(&self) -> &'static str {
"Filtering client-only mods"
"Filtering client-only projects"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
@ -383,15 +454,77 @@ impl Effect for FilterClientOnlyEffect {
return Ok(());
}
let mods_dir = context.export_path.join("mods");
for project in &context.lockfile.projects {
if project.side == ProjectSide::Client
&& let Some(file) = project.files.first()
{
let file_path = mods_dir.join(&file.file_name);
// Get the target directory based on project type and paths config
let type_dir = get_project_type_dir(&project.r#type, &context.config);
let project_dir = context.export_path.join(&type_dir);
let file_path = project_dir.join(&file.file_name);
if file_path.exists() {
fs::remove_file(file_path)?;
fs::remove_file(&file_path)?;
log::info!("Filtered client-only project: {}", file.file_name);
}
}
}
Ok(())
}
}
// Rule: Filter server-only projects (for client packs)
// This rule respects the `export_server_side_projects_to_client` config option
pub struct FilterServerOnlyRule;
impl Rule for FilterServerOnlyRule {
    // Unconditional: the effect itself consults the config flag and becomes
    // a no-op when server-side projects should be kept in the client pack.
    fn matches(&self, _context: &RuleContext) -> bool {
        true
    }
    fn effects(&self) -> Vec<Box<dyn Effect>> {
        vec![Box::new(FilterServerOnlyEffect)]
    }
}
pub struct FilterServerOnlyEffect;
#[async_trait]
impl Effect for FilterServerOnlyEffect {
fn name(&self) -> &'static str {
"Filtering server-only projects"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
// Check config option: if true, include server-side projects in client
// exports
let export_server_to_client = context
.config
.export_server_side_projects_to_client
.unwrap_or(false);
if export_server_to_client {
// Don't filter server-only mods - include them in client pack
return Ok(());
}
for project in &context.lockfile.projects {
if project.side == ProjectSide::Server
&& let Some(file) = project.files.first()
{
// Get the target directory based on project type and paths config
let type_dir = get_project_type_dir(&project.r#type, &context.config);
let project_dir = context.export_path.join(&type_dir);
let file_path = project_dir.join(&file.file_name);
if file_path.exists() {
fs::remove_file(&file_path)?;
log::info!(
"Filtered server-only project: {} \
(export_server_side_projects_to_client=false)",
file.file_name
);
}
}
}
@ -418,7 +551,7 @@ pub struct FilterNonRedistributableEffect;
#[async_trait]
impl Effect for FilterNonRedistributableEffect {
fn name(&self) -> &'static str {
"Filtering non-redistributable mods"
"Filtering non-redistributable projects"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
@ -435,15 +568,17 @@ impl Effect for FilterNonRedistributableEffect {
return Ok(());
}
let mods_dir = context.export_path.join("mods");
for project in &context.lockfile.projects {
if !project.redistributable
&& let Some(file) = project.files.first()
{
let file_path = mods_dir.join(&file.file_name);
// Get the target directory based on project type and paths config
let type_dir = get_project_type_dir(&project.r#type, &context.config);
let project_dir = context.export_path.join(&type_dir);
let file_path = project_dir.join(&file.file_name);
if file_path.exists() {
fs::remove_file(file_path)?;
fs::remove_file(&file_path)?;
log::info!("Filtered non-redistributable: {}", file.file_name);
}
}
@ -644,6 +779,69 @@ fn copy_recursive(
Ok(())
}
/// Get the target directory for a project type, respecting the paths config.
/// Falls back to default directories if not configured.
fn get_project_type_dir(project_type: &ProjectType, config: &Config) -> String {
// Check if there's a custom path configured for this project type
let type_key = project_type.to_string();
if let Some(custom_path) = config.paths.get(&type_key) {
return custom_path.clone();
}
// Fall back to default paths
match project_type {
ProjectType::Mod => "mods".to_string(),
ProjectType::ResourcePack => "resourcepacks".to_string(),
ProjectType::DataPack => "datapacks".to_string(),
ProjectType::Shader => "shaderpacks".to_string(),
ProjectType::World => "saves".to_string(),
}
}
/// Expand glob patterns in override paths and return all matching paths.
/// If a path contains no glob characters, it's returned as-is (if it exists).
/// Glob patterns are relative to the `base_path`.
fn expand_override_globs(
    base_path: &std::path::Path,
    override_paths: &[String],
) -> Vec<PathBuf> {
    let mut expanded = Vec::new();
    for raw in override_paths {
        // A path is treated as a glob only when it contains a metacharacter.
        let is_pattern = raw.chars().any(|c| matches!(c, '*' | '?' | '['));
        if !is_pattern {
            // Plain path - pass through untouched.
            expanded.push(PathBuf::from(raw));
            continue;
        }
        // Resolve the pattern relative to base_path before matching.
        let absolute = base_path.join(raw);
        match glob(&absolute.to_string_lossy()) {
            Ok(matches) => {
                for hit in matches.flatten() {
                    // Prefer paths relative to base_path so callers can join
                    // them against either the base or the export root.
                    match hit.strip_prefix(base_path) {
                        Ok(relative) => expanded.push(relative.to_path_buf()),
                        Err(_) => expanded.push(hit),
                    }
                }
            },
            Err(e) => {
                log::warn!("Invalid glob pattern '{raw}': {e}");
            },
        }
    }
    expanded
}
// Rule: Filter projects by platform
pub struct FilterByPlatformRule;
@ -674,8 +872,6 @@ impl Effect for FilterByPlatformEffect {
if let Some(profile_config) = &context.profile_config
&& let Some(platform) = &profile_config.filter_platform
{
let mods_dir = context.export_path.join("mods");
for project in &context.lockfile.projects {
// Check if project is available on the target platform
let has_platform = project.get_platform_id(platform).is_some();
@ -683,9 +879,14 @@ impl Effect for FilterByPlatformEffect {
if !has_platform {
// Remove the file if it was copied
if let Some(file) = project.files.first() {
let file_path = mods_dir.join(&file.file_name);
// Get the target directory based on project type and paths config
let type_dir =
get_project_type_dir(&project.r#type, &context.config);
let project_dir = context.export_path.join(&type_dir);
let file_path = project_dir.join(&file.file_name);
if file_path.exists() {
fs::remove_file(file_path)?;
fs::remove_file(&file_path)?;
log::info!(
"Filtered {} (not available on {})",
file.file_name,
@ -701,6 +902,301 @@ impl Effect for FilterByPlatformEffect {
}
}
// Rule: Export missing projects as overrides
// When a project is not available on the target platform, download it and
// include as an override file instead
pub struct MissingProjectsAsOverridesRule {
    target_platform: String,
}
impl MissingProjectsAsOverridesRule {
    pub fn new(target_platform: &str) -> Self {
        Self {
            target_platform: target_platform.to_owned(),
        }
    }
}
impl Rule for MissingProjectsAsOverridesRule {
    // Applies to every export; the effect itself skips projects that do
    // exist on the target platform.
    fn matches(&self, _context: &RuleContext) -> bool {
        true
    }
    fn effects(&self) -> Vec<Box<dyn Effect>> {
        let effect = MissingProjectsAsOverridesEffect {
            target_platform: self.target_platform.clone(),
        };
        vec![Box::new(effect)]
    }
}
pub struct MissingProjectsAsOverridesEffect {
    // Platform slug (e.g. "modrinth"/"curseforge") the export targets
    target_platform: String,
}
#[async_trait]
impl Effect for MissingProjectsAsOverridesEffect {
    fn name(&self) -> &'static str {
        "Exporting missing projects as overrides"
    }
    /// Download every exported project that has no identifier on the target
    /// platform into `overrides/<type-dir>/` so the exported pack still
    /// ships it. Individual download failures are logged and skipped;
    /// filesystem errors abort the export.
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        use crate::model::ResolvedCredentials;
        let credentials = ResolvedCredentials::load().ok();
        let curseforge_key = credentials
            .as_ref()
            .and_then(|c| c.curseforge_api_key().map(ToOwned::to_owned));
        let modrinth_token = credentials
            .as_ref()
            .and_then(|c| c.modrinth_token().map(ToOwned::to_owned));
        // Build the HTTP client ONCE, outside the loop: a reqwest client
        // owns a connection pool, and recreating it per project defeats
        // connection reuse.
        let client = reqwest::Client::new();
        for project in &context.lockfile.projects {
            if !project.export {
                continue;
            }
            // Check if project is available on target platform
            let has_target_platform =
                project.get_platform_id(&self.target_platform).is_some();
            if has_target_platform {
                // Project is available on target platform, skip
                continue;
            }
            // Project is missing on target platform - export as override
            if let Some(file) = project.files.first() {
                // Find a download URL from any available platform
                if file.url.is_empty() {
                    log::warn!(
                        "Missing project '{}' has no download URL, skipping",
                        project.get_name()
                    );
                    continue;
                }
                // Download to overrides directory
                let overrides_dir = context.export_path.join("overrides");
                let type_dir = get_project_type_dir(&project.r#type, &context.config);
                let target_dir = overrides_dir.join(&type_dir);
                fs::create_dir_all(&target_dir)?;
                let dest = target_dir.join(&file.file_name);
                let mut request = client.get(&file.url);
                // Add auth headers if needed
                if file.url.contains("curseforge") {
                    if let Some(ref key) = curseforge_key {
                        request = request.header("x-api-key", key);
                    }
                } else if file.url.contains("modrinth")
                    && let Some(ref token) = modrinth_token
                {
                    request = request.header("Authorization", token);
                }
                match request.send().await {
                    Ok(resp) if resp.status().is_success() => {
                        let bytes = resp.bytes().await?;
                        fs::write(&dest, &bytes)?;
                        log::info!(
                            "Exported missing project '{}' as override (not on {})",
                            project.get_name(),
                            self.target_platform
                        );
                    },
                    Ok(resp) => {
                        log::warn!(
                            "Failed to download missing project '{}': HTTP {}",
                            project.get_name(),
                            resp.status()
                        );
                    },
                    Err(e) => {
                        log::warn!(
                            "Failed to download missing project '{}': {}",
                            project.get_name(),
                            e
                        );
                    },
                }
            }
        }
        Ok(())
    }
}
// Rule: Text replacement in exported files
// Replaces template variables like ${MC_VERSION}, ${PACK_NAME}, etc.
pub struct TextReplacementRule;
impl Rule for TextReplacementRule {
    // Unconditional: the effect only touches known text-file extensions,
    // so running it on every export is safe.
    fn matches(&self, _context: &RuleContext) -> bool {
        true
    }
    fn effects(&self) -> Vec<Box<dyn Effect>> {
        vec![Box::new(TextReplacementEffect)]
    }
}
pub struct TextReplacementEffect;
#[async_trait]
impl Effect for TextReplacementEffect {
    fn name(&self) -> &'static str {
        "Applying text replacements"
    }
    /// Substitute `${...}` template variables (pack metadata, Minecraft
    /// versions, loader info, project counts) in exported text files.
    async fn execute(&self, context: &RuleContext) -> Result<()> {
        let config = &context.config;
        let lockfile = &context.lockfile;
        // Derived values computed up front to keep the table below flat.
        let mod_count = lockfile
            .projects
            .iter()
            .filter(|p| p.r#type == ProjectType::Mod)
            .count();
        let loaders_joined = lockfile
            .loaders
            .iter()
            .map(|(k, v)| format!("{k}={v}"))
            .collect::<Vec<_>>()
            .join(", ");
        // Static table of template variable -> replacement value.
        let mut replacements: std::collections::HashMap<&str, String> = [
            ("${PACK_NAME}", config.name.clone()),
            ("${PACK_VERSION}", config.version.clone()),
            ("${PACK_AUTHOR}", config.author.clone().unwrap_or_default()),
            (
                "${PACK_DESCRIPTION}",
                config.description.clone().unwrap_or_default(),
            ),
            (
                "${MC_VERSION}",
                lockfile.mc_versions.first().cloned().unwrap_or_default(),
            ),
            ("${MC_VERSIONS}", lockfile.mc_versions.join(", ")),
            ("${LOADERS}", loaders_joined),
            ("${PROJECT_COUNT}", lockfile.projects.len().to_string()),
            ("${MOD_COUNT}", mod_count.to_string()),
        ]
        .into_iter()
        .collect();
        // Single-loader variables are only present when a loader exists.
        if let Some((name, version)) = lockfile.loaders.iter().next() {
            replacements.insert("${LOADER}", name.clone());
            replacements.insert("${LOADER_VERSION}", version.clone());
        }
        // Process text files in the export directory
        process_text_files(&context.export_path, &replacements)?;
        Ok(())
    }
}
/// Process text files in a directory, applying replacements.
///
/// Recursively walks `dir`, and for every file whose extension marks it
/// as text, substitutes each template key with its value and rewrites
/// the file in place. Missing directories and unreadable files are
/// skipped silently; only write failures propagate as errors.
fn process_text_files(
    dir: &std::path::Path,
    replacements: &std::collections::HashMap<&str, String>,
) -> Result<()> {
    // Extensions treated as text and therefore eligible for substitution.
    const TEXT_EXTENSIONS: &[&str] = &[
        "txt", "md", "json", "toml", "yaml", "yml", "cfg", "conf",
        "properties", "lang", "mcmeta", "html", "htm", "xml",
    ];

    if !dir.exists() {
        return Ok(());
    }

    let files = walkdir::WalkDir::new(dir)
        .into_iter()
        .filter_map(std::result::Result::ok)
        .filter(|e| e.file_type().is_file());

    for entry in files {
        let path = entry.path();

        // Only touch files whose (lowercased) extension is allow-listed.
        let is_text = path
            .extension()
            .and_then(|ext| ext.to_str())
            .is_some_and(|ext| {
                TEXT_EXTENSIONS.contains(&ext.to_lowercase().as_str())
            });
        if !is_text {
            continue;
        }

        // Binary or otherwise unreadable files are skipped on purpose.
        let Ok(content) = fs::read_to_string(path) else {
            continue;
        };

        // Avoid the rewrite entirely when no template key occurs.
        if !replacements.keys().any(|key| content.contains(*key)) {
            continue;
        }

        let mut updated = content;
        for (pattern, value) in replacements {
            updated = updated.replace(*pattern, value);
        }

        fs::write(path, updated)?;
        log::debug!("Applied text replacements to: {}", path.display());
    }
    Ok(())
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
@ -721,16 +1217,21 @@ mod tests {
lockfile_version: 1,
},
config: Config {
name: "Test Pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: Some(vec!["server-overrides".to_string()]),
client_overrides: Some(vec!["client-overrides".to_string()]),
paths: HashMap::new(),
projects: None,
export_profiles: None,
name: "Test Pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: Some(vec![
"server-overrides".to_string(),
]),
client_overrides: Some(vec![
"client-overrides".to_string(),
]),
paths: HashMap::new(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
},
profile_config,
export_path: PathBuf::from("/tmp/export"),
@ -846,4 +1347,183 @@ mod tests {
assert!(context.profile_config.is_none());
assert_eq!(context.config.overrides, vec!["overrides"]);
}
#[test]
fn test_get_project_type_dir_default_paths() {
    // With no custom `paths`, every project type maps to its default dir.
    let config = Config {
        name: "Test".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec![],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
    };

    let expected = [
        (ProjectType::Mod, "mods"),
        (ProjectType::ResourcePack, "resourcepacks"),
        (ProjectType::DataPack, "datapacks"),
        (ProjectType::Shader, "shaderpacks"),
        (ProjectType::World, "saves"),
    ];
    for (ty, dir) in expected {
        assert_eq!(get_project_type_dir(&ty, &config), dir);
    }
}
#[test]
fn test_get_project_type_dir_custom_paths() {
    // Custom `paths` entries override the default directory per type.
    let paths: HashMap<String, String> =
        [("mod", "custom-mods"), ("resource-pack", "custom-rp")]
            .into_iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect();

    let config = Config {
        name: "Test".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec![],
        server_overrides: None,
        client_overrides: None,
        paths,
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
    };

    assert_eq!(
        get_project_type_dir(&ProjectType::Mod, &config),
        "custom-mods"
    );
    assert_eq!(
        get_project_type_dir(&ProjectType::ResourcePack, &config),
        "custom-rp"
    );
    // A type without a custom path still falls back to its default.
    assert_eq!(
        get_project_type_dir(&ProjectType::Shader, &config),
        "shaderpacks"
    );
}
#[test]
fn test_expand_override_globs_no_globs() {
    // Plain paths (no glob metacharacters) pass through unchanged.
    let base_path = PathBuf::from("/tmp/test");
    let plain = vec!["overrides".to_string(), "config".to_string()];
    let expanded = expand_override_globs(&base_path, &plain);
    assert_eq!(
        expanded,
        vec![PathBuf::from("overrides"), PathBuf::from("config")]
    );
}
#[test]
fn test_expand_override_globs_detects_glob_characters() {
    // Only glob *detection* is checked here — real expansion needs files
    // on disk, so patterns under a nonexistent base match nothing.
    let base_path = PathBuf::from("/nonexistent");
    let overrides: Vec<String> = [
        "overrides/*.txt",
        "config/**/*.json",
        "data/[abc].txt",
        "simple",
    ]
    .iter()
    .map(|s| (*s).to_string())
    .collect();

    let result = expand_override_globs(&base_path, &overrides);
    // Non-matching glob patterns drop out; the plain path survives as-is.
    assert!(result.contains(&PathBuf::from("simple")));
}
#[test]
fn test_client_overrides_rule_matches() {
    // The rule fires only while `client_overrides` is populated.
    let mut config = Config {
        name: "Test".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec![],
        server_overrides: None,
        client_overrides: Some(vec!["client-data".to_string()]),
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
    };

    let rule = CopyClientOverridesRule;
    let mut context = create_test_context(None);
    context.config = config.clone();
    assert!(rule.matches(&context));

    // Clearing client_overrides must stop the rule from matching.
    config.client_overrides = None;
    context.config = config;
    assert!(!rule.matches(&context));
}
#[test]
fn test_server_overrides_rule_matches() {
    // The rule fires only while `server_overrides` is populated.
    let mut config = Config {
        name: "Test".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec![],
        server_overrides: Some(vec!["server-data".to_string()]),
        client_overrides: None,
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
    };

    let rule = CopyServerOverridesRule;
    let mut context = create_test_context(None);
    context.config = config.clone();
    assert!(rule.matches(&context));

    // Clearing server_overrides must stop the rule from matching.
    config.server_overrides = None;
    context.config = config;
    assert!(!rule.matches(&context));
}
#[test]
fn test_filter_server_only_rule_always_matches() {
    // This rule is unconditional: it applies to every export context.
    assert!(FilterServerOnlyRule.matches(&create_test_context(None)));
}
#[test]
fn test_text_replacement_rule_always_matches() {
    // Text replacement is unconditional for every export context.
    assert!(TextReplacementRule.matches(&create_test_context(None)));
}
#[test]
fn test_missing_projects_rule_always_matches() {
    // Unconditional rule: matches regardless of context contents.
    let rule = MissingProjectsAsOverridesRule::new("modrinth");
    assert!(rule.matches(&create_test_context(None)));
}
}

View file

@ -1,10 +1,13 @@
use std::{
fs,
path::{Path, PathBuf},
sync::Arc,
};
use indicatif::{ProgressBar, ProgressStyle};
use futures::future::join_all;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use reqwest::Client;
use tokio::sync::Semaphore;
use crate::{
error::{PakkerError, Result},
@ -12,14 +15,19 @@ use crate::{
utils::verify_hash,
};
/// Maximum number of concurrent downloads
const MAX_CONCURRENT_DOWNLOADS: usize = 8;
pub struct Fetcher {
client: Client,
base_path: PathBuf,
shelve: bool,
}
pub struct FileFetcher {
client: Client,
base_path: PathBuf,
shelve: bool,
}
impl Fetcher {
@ -27,9 +35,15 @@ impl Fetcher {
Self {
client: Client::new(),
base_path: base_path.as_ref().to_path_buf(),
shelve: false,
}
}
pub const fn with_shelve(mut self, shelve: bool) -> Self {
self.shelve = shelve;
self
}
pub async fn fetch_all(
&self,
lockfile: &LockFile,
@ -38,6 +52,7 @@ impl Fetcher {
let fetcher = FileFetcher {
client: self.client.clone(),
base_path: self.base_path.clone(),
shelve: self.shelve,
};
fetcher.fetch_all(lockfile, config).await
}
@ -48,7 +63,7 @@ impl Fetcher {
}
impl FileFetcher {
/// Fetch all project files according to lockfile
/// Fetch all project files according to lockfile with parallel downloads
pub async fn fetch_all(
&self,
lockfile: &LockFile,
@ -58,25 +73,104 @@ impl FileFetcher {
lockfile.projects.iter().filter(|p| p.export).collect();
let total = exportable_projects.len();
let spinner = ProgressBar::new(total as u64);
spinner.set_style(
ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")
.unwrap(),
);
for (idx, project) in exportable_projects.iter().enumerate() {
let name = project
.name
.values()
.next()
.map_or("unknown", std::string::String::as_str);
spinner.set_message(format!("Fetching {} ({}/{})", name, idx + 1, total));
self.fetch_project(project, lockfile, config).await?;
if total == 0 {
log::info!("No projects to fetch");
return Ok(());
}
spinner.finish_with_message("All projects fetched");
// Set up multi-progress for parallel download tracking
let multi_progress = MultiProgress::new();
let overall_bar = multi_progress.add(ProgressBar::new(total as u64));
overall_bar.set_style(
ProgressStyle::default_bar()
.template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
.unwrap()
.progress_chars("#>-"),
);
overall_bar.set_message("Fetching projects...");
// Use a semaphore to limit concurrent downloads
let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_DOWNLOADS));
// Prepare download tasks
let download_tasks: Vec<_> = exportable_projects
.iter()
.map(|project| {
let semaphore = Arc::clone(&semaphore);
let client = self.client.clone();
let base_path = self.base_path.clone();
let lockfile = lockfile.clone();
let config = config.clone();
let project = (*project).clone();
let overall_bar = overall_bar.clone();
async move {
// Acquire semaphore permit to limit concurrency
let _permit = semaphore.acquire().await.map_err(|_| {
PakkerError::InternalError("Semaphore acquisition failed".into())
})?;
let name = project
.name
.values()
.next()
.map_or("unknown".to_string(), std::clone::Clone::clone);
let fetcher = Self {
client,
base_path,
shelve: false, // Shelving happens at sync level, not per-project
};
let result =
fetcher.fetch_project(&project, &lockfile, &config).await;
// Update progress bar
overall_bar.inc(1);
match &result {
Ok(()) => {
log::debug!("Successfully fetched: {name}");
},
Err(e) => {
log::error!("Failed to fetch {name}: {e}");
},
}
result.map(|()| name)
}
})
.collect();
// Execute all downloads in parallel (limited by semaphore)
let results = join_all(download_tasks).await;
overall_bar.finish_with_message("All projects fetched");
// Collect and report errors
let mut errors = Vec::new();
let mut success_count = 0;
for result in results {
match result {
Ok(_) => success_count += 1,
Err(e) => errors.push(e),
}
}
log::info!("Fetch complete: {success_count}/{total} successful");
if !errors.is_empty() {
// Return the first error, but log all of them
for (idx, error) in errors.iter().enumerate() {
log::error!("Download error {}: {}", idx + 1, error);
}
return Err(errors.remove(0));
}
// Handle unknown files (shelve or delete)
self.handle_unknown_files(lockfile, config)?;
// Sync overrides
self.sync_overrides(config)?;
@ -84,6 +178,117 @@ impl FileFetcher {
Ok(())
}
/// Handle unknown project files that aren't in the lockfile.
/// If shelve is true, moves them to a shelf directory.
/// Otherwise, deletes them.
///
/// Scans the default per-type directories plus every custom path from
/// `config.paths`. Only `.jar` files are candidates for removal; anything
/// else (configs, resource files, …) is left alone. Unreadable
/// directories are skipped best-effort; individual rename/delete failures
/// propagate as errors.
fn handle_unknown_files(
    &self,
    lockfile: &LockFile,
    config: &Config,
) -> Result<()> {
    // Collect all expected file names from lockfile.
    // Only exported projects count; the first file of each project is
    // treated as the installed artifact.
    let expected_files: std::collections::HashSet<String> = lockfile
        .projects
        .iter()
        .filter(|p| p.export)
        .filter_map(|p| p.files.first().map(|f| f.file_name.clone()))
        .collect();
    // Check each project type directory
    let project_dirs = [
        (
            "mod",
            self.get_default_path(&crate::model::ProjectType::Mod),
        ),
        (
            "resource-pack",
            self.get_default_path(&crate::model::ProjectType::ResourcePack),
        ),
        (
            "shader",
            self.get_default_path(&crate::model::ProjectType::Shader),
        ),
        (
            "data-pack",
            self.get_default_path(&crate::model::ProjectType::DataPack),
        ),
        (
            "world",
            self.get_default_path(&crate::model::ProjectType::World),
        ),
    ];
    // Also check custom paths from config
    let mut dirs_to_check: Vec<PathBuf> = project_dirs
        .iter()
        .map(|(_, dir)| self.base_path.join(dir))
        .collect();
    for custom_path in config.paths.values() {
        dirs_to_check.push(self.base_path.join(custom_path));
    }
    let shelf_dir = self.base_path.join(".pakker-shelf");
    let mut shelved_count = 0;
    let mut deleted_count = 0;
    for dir in dirs_to_check {
        if !dir.exists() {
            continue;
        }
        // Best-effort: an unreadable directory is skipped, not fatal.
        let entries = match fs::read_dir(&dir) {
            Ok(e) => e,
            Err(_) => continue,
        };
        for entry in entries.flatten() {
            let path = entry.path();
            // Only plain files are candidates; subdirectories are ignored.
            if !path.is_file() {
                continue;
            }
            // Skip names that aren't valid UTF-8 — they can't be compared
            // against lockfile file names.
            let file_name = match path.file_name().and_then(|n| n.to_str()) {
                Some(name) => name.to_string(),
                None => continue,
            };
            // Skip if file is expected
            if expected_files.contains(&file_name) {
                continue;
            }
            // Skip non-jar files (might be configs, etc.)
            if !file_name.ends_with(".jar") {
                continue;
            }
            if self.shelve {
                // Move to shelf.
                // NOTE(review): fs::rename overwrites an existing shelf
                // entry with the same name — confirm that's intended.
                fs::create_dir_all(&shelf_dir)?;
                let shelf_path = shelf_dir.join(&file_name);
                fs::rename(&path, &shelf_path)?;
                log::info!("Shelved unknown file: {file_name} -> .pakker-shelf/");
                shelved_count += 1;
            } else {
                // Delete unknown file
                fs::remove_file(&path)?;
                log::info!("Deleted unknown file: {file_name}");
                deleted_count += 1;
            }
        }
    }
    // Summaries are only emitted when something actually happened.
    if shelved_count > 0 {
        log::info!("Shelved {shelved_count} unknown file(s) to .pakker-shelf/");
    }
    if deleted_count > 0 {
        log::info!("Deleted {deleted_count} unknown file(s)");
    }
    Ok(())
}
/// Fetch files for a single project
pub async fn fetch_project(
&self,

View file

@ -288,10 +288,9 @@ pub fn detect_vcs_type<P: AsRef<Path>>(path: P) -> VcsType {
.args(["root"])
.current_dir(path)
.output()
&& output.status.success()
{
if output.status.success() {
return VcsType::Jujutsu;
}
return VcsType::Jujutsu;
}
// Check for git
@ -299,10 +298,9 @@ pub fn detect_vcs_type<P: AsRef<Path>>(path: P) -> VcsType {
.args(["rev-parse", "--show-toplevel"])
.current_dir(path)
.output()
&& output.status.success()
{
if output.status.success() {
return VcsType::Git;
}
return VcsType::Git;
}
VcsType::None
@ -333,7 +331,7 @@ pub fn repo_has_uncommitted_changes<P: AsRef<Path>>(path: P) -> Result<bool> {
.current_dir(path)
.output()
.map_err(|e| {
PakkerError::GitError(format!("Failed to run jj status: {}", e))
PakkerError::GitError(format!("Failed to run jj status: {e}"))
})?;
let output_str = String::from_utf8_lossy(&output.stdout);

16
src/http.rs Normal file
View file

@ -0,0 +1,16 @@
use std::time::Duration;
use reqwest::Client;
/// Build the shared HTTP client used across the application.
///
/// Connection pooling, keep-alive, and timeout policy are centralized
/// here so every caller gets identical transport behavior.
///
/// # Panics
/// Panics if the underlying TLS/client setup fails (startup-time only).
pub fn create_http_client() -> Client {
    let thirty_secs = Duration::from_secs(30);
    let builder = Client::builder()
        // NOTE(review): version is hardcoded; consider deriving it from
        // the crate version so the UA tracks releases automatically.
        .user_agent("Pakker/0.1.0")
        .connect_timeout(Duration::from_secs(15))
        .timeout(thirty_secs)
        .tcp_nodelay(true)
        .tcp_keepalive(Duration::from_secs(60))
        .pool_idle_timeout(thirty_secs)
        .pool_max_idle_per_host(10);
    builder.build().expect("Failed to build HTTP client")
}

View file

@ -12,7 +12,7 @@ use std::{
fs::{self, File, OpenOptions},
io::Write,
os::unix::{fs::PermissionsExt, io::AsRawFd},
path::PathBuf,
path::{Path, PathBuf},
time::{Duration, SystemTime},
};
@ -108,7 +108,7 @@ impl IpcCoordinator {
/// Extract modpack hash from pakku.json's parentLockHash field.
/// This is the authoritative content hash for the modpack (Nix-style).
fn get_modpack_hash(working_dir: &PathBuf) -> Result<String, IpcError> {
fn get_modpack_hash(working_dir: &Path) -> Result<String, IpcError> {
let pakku_path = working_dir.join("pakku.json");
if !pakku_path.exists() {
@ -147,7 +147,7 @@ impl IpcCoordinator {
/// Create a new IPC coordinator for the given modpack directory.
/// Uses parentLockHash from pakku.json to identify the modpack.
pub fn new(working_dir: &PathBuf) -> Result<Self, IpcError> {
pub fn new(working_dir: &Path) -> Result<Self, IpcError> {
let modpack_hash = Self::get_modpack_hash(working_dir)?;
let ipc_base = Self::get_ipc_base_dir();
let ipc_dir = ipc_base.join(&modpack_hash);
@ -187,6 +187,7 @@ impl IpcCoordinator {
.read(true)
.write(true)
.create(true)
.truncate(false)
.open(&self.ops_file)
.map_err(|e| IpcError::InvalidFormat(e.to_string()))?;

View file

@ -1,8 +1,15 @@
// Allow pre-existing clippy warnings for functions with many arguments
// and complex types that would require significant refactoring
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![allow(clippy::large_enum_variant)]
mod cli;
mod error;
mod export;
mod fetch;
mod git;
mod http;
mod ipc;
mod model;
mod platform;
@ -17,8 +24,6 @@ use clap::Parser;
use cli::{Cli, Commands};
use error::PakkerError;
use crate::rate_limiter::RateLimiter;
#[tokio::main]
async fn main() -> Result<(), PakkerError> {
let cli = Cli::parse();
@ -42,8 +47,6 @@ async fn main() -> Result<(), PakkerError> {
let lockfile_path = working_dir.join("pakker-lock.json");
let config_path = working_dir.join("pakker.json");
let _rate_limiter = std::sync::Arc::new(RateLimiter::new(None));
match cli.command {
Commands::Init(args) => {
cli::commands::init::execute(args, &lockfile_path, &config_path).await

View file

@ -7,8 +7,6 @@ use crate::error::{PakkerError, Result};
const CONFIG_NAME: &str = "pakker.json";
// Pakker config wrapper - supports both Pakker (direct) and Pakku (wrapped)
// formats
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ConfigWrapper {
@ -43,39 +41,45 @@ pub struct ParentConfig {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
pub name: String,
pub version: String,
pub name: String,
pub version: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub author: Option<String>,
pub author: Option<String>,
#[serde(default)]
pub overrides: Vec<String>,
pub overrides: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub server_overrides: Option<Vec<String>>,
pub server_overrides: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub client_overrides: Option<Vec<String>>,
pub client_overrides: Option<Vec<String>>,
#[serde(default)]
pub paths: HashMap<String, String>,
pub paths: HashMap<String, String>,
#[serde(default)]
pub projects: Option<HashMap<String, ProjectConfig>>,
pub projects: Option<HashMap<String, ProjectConfig>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub export_profiles: Option<HashMap<String, crate::export::ProfileConfig>>,
pub export_profiles: Option<HashMap<String, crate::export::ProfileConfig>>,
#[serde(
skip_serializing_if = "Option::is_none",
rename = "exportServerSideProjectsToClient"
)]
pub export_server_side_projects_to_client: Option<bool>,
}
impl Default for Config {
fn default() -> Self {
Self {
name: String::new(),
version: String::new(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: Some(HashMap::new()),
export_profiles: None,
name: String::new(),
version: String::new(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: Some(HashMap::new()),
export_profiles: None,
export_server_side_projects_to_client: None,
}
}
}
@ -105,21 +109,16 @@ impl Config {
let content =
std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;
// Try to parse as ConfigWrapper (supports both Pakker and Pakku formats)
match serde_json::from_str::<ConfigWrapper>(&content) {
Ok(ConfigWrapper::Pakker(config)) => {
config.validate()?;
Ok(config)
},
Ok(ConfigWrapper::Pakku { pakku }) => {
// Convert Pakku format to Pakker format
// Pakku format doesn't have name/version, use parent repo info as
// fallback
let name = pakku
.parent
.as_ref()
.map(|p| {
// Extract repo name from URL
p.id
.split('/')
.next_back()
@ -145,6 +144,7 @@ impl Config {
paths: HashMap::new(),
projects: Some(pakku.projects),
export_profiles: None,
export_server_side_projects_to_client: None,
})
},
Err(e) => Err(PakkerError::InvalidConfigFile(e.to_string())),
@ -153,17 +153,12 @@ impl Config {
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
self.validate()?;
let path = path.as_ref().join(CONFIG_NAME);
// Write to temporary file first (atomic write)
let temp_path = path.with_extension("tmp");
let content = serde_json::to_string_pretty(self)
.map_err(PakkerError::SerializationError)?;
std::fs::write(&temp_path, content)?;
std::fs::rename(temp_path, path)?;
Ok(())
}
@ -175,27 +170,39 @@ impl Config {
}
Ok(())
}
/// Look up the per-project configuration for `project_id`, if any exists.
pub fn get_project_config(&self, project_id: &str) -> Option<&ProjectConfig> {
    self.projects.as_ref().and_then(|map| map.get(project_id))
}
/// Insert or replace the configuration for `project_id`, creating the
/// projects map on first use.
pub fn set_project_config(
    &mut self,
    project_id: String,
    project_config: ProjectConfig,
) {
    self.projects
        .get_or_insert_with(HashMap::new)
        .insert(project_id, project_config);
}
}
#[cfg(test)]
mod tests {
use tempfile::TempDir;
use super::*;
#[test]
fn test_config_new() {
let config = Config {
name: "test-pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
name: "test-pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
};
assert_eq!(config.name, "test-pack");
assert_eq!(config.version, "1.0.0");
@ -206,178 +213,26 @@ mod tests {
#[test]
fn test_config_serialization() {
let mut config = Config {
name: "test-pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
name: "test-pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
export_server_side_projects_to_client: None,
};
config.description = Some("A test modpack".to_string());
config.author = Some("Test Author".to_string());
let json = serde_json::to_string(&config).unwrap();
let deserialized: Config = serde_json::from_str(&json).unwrap();
assert_eq!(deserialized.name, config.name);
assert_eq!(deserialized.version, config.version);
assert_eq!(deserialized.description, config.description);
assert_eq!(deserialized.author, config.author);
}
#[test]
fn test_config_save_and_load() {
let temp_dir = TempDir::new().unwrap();
let mut config = Config {
name: "test-pack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
};
config.description = Some("Test description".to_string());
config.save(temp_dir.path()).unwrap();
let loaded = Config::load(temp_dir.path()).unwrap();
assert_eq!(loaded.name, config.name);
assert_eq!(loaded.version, config.version);
assert_eq!(loaded.description, config.description);
}
#[test]
fn test_config_compatibility_with_pakku() {
// Test basic config loading with projects
let config = Config {
name: "test-modpack".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
};
assert_eq!(config.name, "test-modpack");
assert_eq!(config.version, "1.0.0");
assert!(config.projects.is_none());
}
#[test]
fn test_config_wrapped_format() {
let mut projects = HashMap::new();
projects.insert("sodium".to_string(), ProjectConfig {
r#type: Some(ProjectType::Mod),
side: Some(ProjectSide::Client),
update_strategy: None,
redistributable: None,
subpath: None,
aliases: None,
export: None,
});
let wrapped = PakkerWrappedConfig {
parent: None,
parent_lock_hash: String::new(),
patches: vec![],
projects,
};
let json = serde_json::to_string(&wrapped).unwrap();
assert!(json.contains("\"projects\""));
let deserialized: PakkerWrappedConfig =
serde_json::from_str(&json).unwrap();
assert_eq!(deserialized.projects.len(), 1);
}
#[test]
fn test_config_wrapped_format_old() {
use crate::model::fork::{LocalConfig, LocalProjectConfig};
let mut projects = HashMap::new();
projects.insert("sodium".to_string(), LocalProjectConfig {
version: None,
r#type: Some(ProjectType::Mod),
side: Some(ProjectSide::Client),
update_strategy: None,
redistributable: None,
subpath: None,
aliases: None,
export: None,
});
let wrapped_inner = LocalConfig {
parent: None,
projects,
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
// Just verify we can create the struct
assert_eq!(wrapped_inner.projects.len(), 1);
}
#[test]
fn test_config_validate() {
let config = Config {
name: "test".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
};
assert!(config.validate().is_ok());
let invalid = Config {
name: "".to_string(),
version: "1.0.0".to_string(),
description: None,
author: None,
overrides: vec![],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: None,
export_profiles: None,
};
assert!(invalid.validate().is_err());
}
}
impl Config {
pub fn get_project_config(&self, identifier: &str) -> Option<&ProjectConfig> {
self.projects.as_ref()?.get(identifier)
}
pub fn set_project_config(
&mut self,
identifier: String,
config: ProjectConfig,
) {
if self.projects.is_none() {
self.projects = Some(HashMap::new());
}
if let Some(ref mut projects) = self.projects {
projects.insert(identifier, config);
}
assert_eq!(deserialized.name, "test-pack");
assert_eq!(deserialized.version, "1.0.0");
assert_eq!(deserialized.description, Some("A test modpack".to_string()));
assert_eq!(deserialized.author, Some("Test Author".to_string()));
}
}

View file

@ -384,7 +384,8 @@ mod tests {
assert_eq!(loaded.mc_versions, mc_versions);
assert_eq!(loaded.loaders, loaders);
assert_eq!(loaded.projects.len(), 2);
assert_eq!(loaded.lockfile_version, 1);
// Lockfile should be migrated from v1 to v2 on load
assert_eq!(loaded.lockfile_version, 2);
}
#[test]
@ -423,6 +424,95 @@ mod tests {
assert!(lockfile.validate().is_ok());
}
#[test]
fn test_lockfile_migration_v1_to_v2() {
    // A v1 lockfile on disk must be transparently migrated to v2 on load,
    // and the migrated form must be persisted back to disk.
    //
    // Fix: removed a `loaders` HashMap that was built but never used —
    // the fixture below is the sole input to the test.
    let temp_dir = TempDir::new().unwrap();

    // Hand-written v1 lockfile (the version being migrated).
    let v1_content = r#"{
        "target": "modrinth",
        "mc_versions": ["1.20.1"],
        "loaders": {"fabric": "0.15.0"},
        "projects": [],
        "lockfile_version": 1
    }"#;
    let lockfile_path = temp_dir.path().join("pakku-lock.json");
    std::fs::write(&lockfile_path, v1_content).unwrap();

    // Load should trigger migration.
    let loaded = LockFile::load(temp_dir.path()).unwrap();
    assert_eq!(loaded.lockfile_version, 2);

    // The migrated file must have been written back, so a fresh load
    // also sees v2.
    let reloaded = LockFile::load(temp_dir.path()).unwrap();
    assert_eq!(reloaded.lockfile_version, 2);
}
#[test]
fn test_lockfile_migration_preserves_projects() {
    // Test that migration preserves all project data:
    // a v1 lockfile containing one project must load as v2 with the
    // project — including its pakku_id — intact.
    let temp_dir = TempDir::new().unwrap();
    // Create a v1 lockfile with projects (using correct enum case).
    // Field casing ("MOD", "BOTH", "LATEST") matches the serde enum
    // representation expected by the deserializer.
    let v1_content = r#"{
        "target": "modrinth",
        "mc_versions": ["1.20.1"],
        "loaders": {"fabric": "0.15.0"},
        "projects": [
            {
                "pakku_id": "test-id-1",
                "type": "MOD",
                "side": "BOTH",
                "name": {"modrinth": "Test Mod"},
                "slug": {"modrinth": "test-mod"},
                "id": {"modrinth": "abc123"},
                "files": [],
                "pakku_links": [],
                "aliases": [],
                "update_strategy": "LATEST",
                "redistributable": true,
                "export": true
            }
        ],
        "lockfile_version": 1
    }"#;
    let lockfile_path = temp_dir.path().join("pakku-lock.json");
    std::fs::write(&lockfile_path, v1_content).unwrap();
    let loaded = LockFile::load(temp_dir.path()).unwrap();
    // Version bumped by migration...
    assert_eq!(loaded.lockfile_version, 2);
    // ...while the project list and identifiers survive unchanged.
    assert_eq!(loaded.projects.len(), 1);
    assert_eq!(loaded.projects[0].pakku_id, Some("test-id-1".to_string()));
}
#[test]
fn test_lockfile_rejects_future_version() {
    // A lockfile stamped with a version newer than this binary supports
    // must fail to load with a descriptive error message.
    let temp_dir = TempDir::new().unwrap();
    let future_content = r#"{
        "target": "modrinth",
        "mc_versions": ["1.20.1"],
        "loaders": {"fabric": "0.15.0"},
        "projects": [],
        "lockfile_version": 999
    }"#;
    std::fs::write(temp_dir.path().join("pakku-lock.json"), future_content)
        .unwrap();

    let err = LockFile::load(temp_dir.path())
        .expect_err("future lockfile version should be rejected");
    assert!(err.to_string().contains("newer than supported"));
}
#[test]
fn test_lockfile_pretty_json_format() {
// Test that saved JSON is pretty-printed
@ -472,7 +562,10 @@ mod tests {
}
}
const LOCKFILE_VERSION: u32 = 1;
/// Current lockfile version - bump this when making breaking changes
const LOCKFILE_VERSION: u32 = 2;
/// Minimum supported lockfile version for migration
const MIN_SUPPORTED_VERSION: u32 = 1;
const LOCKFILE_NAME: &str = "pakku-lock.json";
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -494,13 +587,26 @@ impl LockFile {
path: P,
validate: bool,
) -> Result<Self> {
let path = path.as_ref().join(LOCKFILE_NAME);
let path_ref = path.as_ref();
let lockfile_path = path_ref.join(LOCKFILE_NAME);
let content =
std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;
std::fs::read_to_string(&lockfile_path).map_err(PakkerError::IoError)?;
let mut lockfile: Self = serde_json::from_str(&content)
.map_err(|e| PakkerError::InvalidLockFile(e.to_string()))?;
// Check if migration is needed
if lockfile.lockfile_version < LOCKFILE_VERSION {
lockfile = lockfile.migrate()?;
// Save migrated lockfile
lockfile.save_without_validation(path_ref)?;
log::info!(
"Migrated lockfile from version {} to {}",
lockfile.lockfile_version,
LOCKFILE_VERSION
);
}
if validate {
lockfile.validate()?;
}
@ -509,6 +615,42 @@ impl LockFile {
Ok(lockfile)
}
/// Migrate lockfile from older version to current version.
///
/// Applies each version-step migration in sequence until the lockfile
/// reaches `LOCKFILE_VERSION`. Versions below `MIN_SUPPORTED_VERSION`
/// cannot be migrated and produce an `InvalidLockFile` error.
fn migrate(mut self) -> Result<Self> {
    if self.lockfile_version < MIN_SUPPORTED_VERSION {
        return Err(PakkerError::InvalidLockFile(format!(
            "Lockfile version {} is too old to migrate. Minimum supported: {}",
            self.lockfile_version, MIN_SUPPORTED_VERSION
        )));
    }

    // v1 -> v2:
    //  - projects gain an explicit `export` flag
    //  - side detection becomes more granular
    if self.lockfile_version == 1 {
        log::info!("Migrating lockfile from v1 to v2...");
        for project in self.projects.iter_mut() {
            // v1 didn't always carry the flag; force it on explicitly
            // (equivalent to the previous conditional assignment).
            project.export = true;
        }
        self.lockfile_version = 2;
    }

    // Future migrations would go here:
    // if self.lockfile_version == 2 {
    //     // migrate v2 -> v3
    //     self.lockfile_version = 3;
    // }

    Ok(self)
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
self.validate()?;
let path = path.as_ref().join(LOCKFILE_NAME);
@ -525,10 +667,17 @@ impl LockFile {
}
pub fn validate(&self) -> Result<()> {
if self.lockfile_version != LOCKFILE_VERSION {
if self.lockfile_version > LOCKFILE_VERSION {
return Err(PakkerError::InvalidLockFile(format!(
"Unsupported lockfile version: {}",
self.lockfile_version
"Lockfile version {} is newer than supported version {}. Please \
upgrade Pakker.",
self.lockfile_version, LOCKFILE_VERSION
)));
}
if self.lockfile_version < MIN_SUPPORTED_VERSION {
return Err(PakkerError::InvalidLockFile(format!(
"Lockfile version {} is too old. Minimum supported: {}",
self.lockfile_version, MIN_SUPPORTED_VERSION
)));
}

View file

@ -168,6 +168,79 @@ impl Project {
self.aliases.extend(other.aliases);
}
/// Check if versions match across all providers.
/// Returns true if all provider files have the same version/file,
/// or if there's only one provider.
pub fn versions_match_across_providers(&self) -> bool {
if self.files.len() <= 1 {
return true;
}
// Group files by provider (using parent_id as proxy)
let mut versions_by_provider: HashMap<String, Vec<&str>> = HashMap::new();
for file in &self.files {
// Extract provider from file type or use parent_id
let provider = &file.file_type;
versions_by_provider
.entry(provider.clone())
.or_default()
.push(&file.file_name);
}
// If only one provider, versions match
if versions_by_provider.len() <= 1 {
return true;
}
// Compare semantic versions extracted from file names
let parse_version = |name: &str| {
// Try to extract version from patterns like "mod-1.0.0.jar" or
// "mod_v1.0.0"
let version_str = name
.rsplit_once('-')
.and_then(|(_, v)| v.strip_suffix(".jar"))
.or_else(|| {
name
.rsplit_once('_')
.and_then(|(_, v)| v.strip_suffix(".jar"))
})
.unwrap_or(name);
semver::Version::parse(version_str).ok()
};
let versions: Vec<_> = versions_by_provider
.values()
.filter_map(|files| files.first().copied().and_then(parse_version))
.collect();
// All versions should be the same
versions.windows(2).all(|w| w[0] == w[1])
}
/// Check if versions do NOT match across providers.
/// Returns Some with details if there's a mismatch, None if versions match.
pub fn check_version_mismatch(&self) -> Option<String> {
if self.versions_match_across_providers() {
return None;
}
// Collect version info by provider
let mut provider_versions: Vec<(String, String)> = Vec::new();
for file in &self.files {
provider_versions.push((file.file_type.clone(), file.file_name.clone()));
}
Some(format!(
"Version mismatch for {}: {}",
self.get_name(),
provider_versions
.iter()
.map(|(p, v)| format!("{p}={v}"))
.collect::<Vec<_>>()
.join(", ")
))
}
pub fn select_file(
&mut self,
mc_versions: &[String],
@ -254,6 +327,39 @@ impl ProjectFile {
mc_compatible && loader_compatible
}
/// Generate a viewable URL for this file based on its provider.
/// Returns None if the URL cannot be determined.
pub fn get_site_url(&self, project: &Project) -> Option<String> {
// Determine provider from file type
match self.file_type.as_str() {
"modrinth" => {
// Format: https://modrinth.com/mod/{slug}/version/{file_id}
let slug = project.slug.get("modrinth")?;
Some(format!(
"https://modrinth.com/mod/{}/version/{}",
slug, self.id
))
},
"curseforge" => {
// Format: https://www.curseforge.com/minecraft/mc-mods/{slug}/files/{file_id}
let slug = project.slug.get("curseforge")?;
Some(format!(
"https://www.curseforge.com/minecraft/mc-mods/{}/files/{}",
slug, self.id
))
},
"github" => {
// Format: https://github.com/{owner}/{repo}/releases/tag/{tag}
// parent_id contains owner/repo, id contains the tag/version
Some(format!(
"https://github.com/{}/releases/tag/{}",
self.parent_id, self.id
))
},
_ => None,
}
}
}
#[cfg(test)]
@ -436,4 +542,230 @@ mod tests {
let result = project.select_file(&lockfile_mc, &lockfile_loaders);
assert!(result.is_ok());
}
#[test]
fn test_versions_match_across_providers_single_file() {
let mut project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
project.files.push(ProjectFile {
file_type: "modrinth".to_string(),
file_name: "test-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://example.com/test.jar".to_string(),
id: "file1".to_string(),
parent_id: "mod123".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
});
assert!(project.versions_match_across_providers());
assert!(project.check_version_mismatch().is_none());
}
#[test]
fn test_versions_match_across_providers_same_file() {
let mut project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
// Same file name from different providers
project.files.push(ProjectFile {
file_type: "modrinth".to_string(),
file_name: "test-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://modrinth.com/test.jar".to_string(),
id: "mr-file1".to_string(),
parent_id: "mod123".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
});
project.files.push(ProjectFile {
file_type: "curseforge".to_string(),
file_name: "test-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://curseforge.com/test.jar".to_string(),
id: "cf-file1".to_string(),
parent_id: "mod456".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
});
assert!(project.versions_match_across_providers());
}
#[test]
fn test_versions_mismatch_across_providers() {
let mut project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
project
.name
.insert("test".to_string(), "Test Mod".to_string());
// Different file names from different providers
project.files.push(ProjectFile {
file_type: "modrinth".to_string(),
file_name: "test-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://modrinth.com/test.jar".to_string(),
id: "mr-file1".to_string(),
parent_id: "mod123".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
});
project.files.push(ProjectFile {
file_type: "curseforge".to_string(),
file_name: "test-0.9.0.jar".to_string(), // Different version
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://curseforge.com/test.jar".to_string(),
id: "cf-file1".to_string(),
parent_id: "mod456".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
});
assert!(!project.versions_match_across_providers());
let mismatch = project.check_version_mismatch();
assert!(mismatch.is_some());
let msg = mismatch.unwrap();
assert!(msg.contains("Version mismatch"));
}
#[test]
fn test_get_site_url_modrinth() {
let mut project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
project
.slug
.insert("modrinth".to_string(), "sodium".to_string());
let file = ProjectFile {
file_type: "modrinth".to_string(),
file_name: "sodium-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://modrinth.com/sodium.jar".to_string(),
id: "abc123".to_string(),
parent_id: "sodium".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
};
let url = file.get_site_url(&project);
assert!(url.is_some());
let url = url.unwrap();
assert!(url.contains("modrinth.com"));
assert!(url.contains("sodium"));
assert!(url.contains("abc123"));
}
#[test]
fn test_get_site_url_curseforge() {
let mut project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
project
.slug
.insert("curseforge".to_string(), "jei".to_string());
let file = ProjectFile {
file_type: "curseforge".to_string(),
file_name: "jei-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["forge".to_string()],
release_type: ReleaseType::Release,
url: "https://curseforge.com/jei.jar".to_string(),
id: "12345".to_string(),
parent_id: "jei".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
};
let url = file.get_site_url(&project);
assert!(url.is_some());
let url = url.unwrap();
assert!(url.contains("curseforge.com"));
assert!(url.contains("jei"));
assert!(url.contains("12345"));
}
#[test]
fn test_get_site_url_github() {
let project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
let file = ProjectFile {
file_type: "github".to_string(),
file_name: "mod-1.0.0.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url:
"https://github.com/owner/repo/releases/download/v1.0.0/mod.jar"
.to_string(),
id: "v1.0.0".to_string(),
parent_id: "owner/repo".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
};
let url = file.get_site_url(&project);
assert!(url.is_some());
let url = url.unwrap();
assert!(url.contains("github.com"));
assert!(url.contains("owner/repo"));
assert!(url.contains("v1.0.0"));
}
#[test]
fn test_get_site_url_unknown_type() {
let project =
Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
let file = ProjectFile {
file_type: "unknown".to_string(),
file_name: "mod.jar".to_string(),
mc_versions: vec!["1.20.1".to_string()],
loaders: vec!["fabric".to_string()],
release_type: ReleaseType::Release,
url: "https://example.com/mod.jar".to_string(),
id: "123".to_string(),
parent_id: "mod".to_string(),
hashes: HashMap::new(),
required_dependencies: vec![],
size: 1024,
date_published: "2024-01-01T00:00:00Z".to_string(),
};
let url = file.get_site_url(&project);
assert!(url.is_none());
}
}

View file

@ -8,13 +8,19 @@ use std::sync::Arc;
pub use curseforge::CurseForgePlatform;
pub use github::GitHubPlatform;
pub use modrinth::ModrinthPlatform;
use once_cell::sync::Lazy;
pub use traits::PlatformClient;
use crate::{error::Result, rate_limiter::RateLimiter};
use crate::{error::Result, http, rate_limiter::RateLimiter};
static RATE_LIMITER: Lazy<Arc<RateLimiter>> =
Lazy::new(|| Arc::new(RateLimiter::new(None)));
static HTTP_CLIENT: std::sync::LazyLock<Arc<reqwest::Client>> =
std::sync::LazyLock::new(|| Arc::new(http::create_http_client()));
static RATE_LIMITER: std::sync::LazyLock<Arc<RateLimiter>> =
std::sync::LazyLock::new(|| Arc::new(RateLimiter::new(None)));
/// Hand out a reference-counted handle to the shared HTTP client.
pub fn get_http_client() -> Arc<reqwest::Client> {
    Arc::clone(&*HTTP_CLIENT)
}
pub fn create_platform(
platform: &str,
@ -34,9 +40,21 @@ fn create_client(
api_key: Option<String>,
) -> Result<Box<dyn PlatformClient>> {
match platform {
"modrinth" => Ok(Box::new(ModrinthPlatform::new())),
"curseforge" => Ok(Box::new(CurseForgePlatform::new(api_key))),
"github" => Ok(Box::new(GitHubPlatform::new(api_key))),
"modrinth" => {
Ok(Box::new(ModrinthPlatform::with_client(get_http_client())))
},
"curseforge" => {
Ok(Box::new(CurseForgePlatform::with_client(
get_http_client(),
api_key,
)))
},
"github" => {
Ok(Box::new(GitHubPlatform::with_client(
get_http_client(),
api_key,
)))
},
_ => {
Err(crate::error::PakkerError::ConfigError(format!(
"Unknown platform: {platform}"

View file

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::{collections::HashMap, sync::Arc};
use async_trait::async_trait;
use reqwest::Client;
@ -12,21 +12,30 @@ use crate::{
};
const CURSEFORGE_API_BASE: &str = "https://api.curseforge.com/v1";
/// CurseForge game version type ID for loader versions (e.g., "fabric",
/// "forge")
const LOADER_VERSION_TYPE_ID: i32 = 68441;
/// CurseForge relation type ID for "required dependency" (mod embeds or
/// requires another mod)
const DEPENDENCY_RELATION_TYPE_REQUIRED: u32 = 3;
pub struct CurseForgePlatform {
client: Client,
client: Arc<Client>,
api_key: Option<String>,
}
impl CurseForgePlatform {
pub fn new(api_key: Option<String>) -> Self {
Self {
client: Client::new(),
client: Arc::new(Client::new()),
api_key,
}
}
    /// Create a platform client that reuses a shared HTTP client
    /// (and therefore its connection pool).
    pub fn with_client(client: Arc<Client>, api_key: Option<String>) -> Self {
        Self { client, api_key }
    }
fn get_headers(&self) -> Result<reqwest::header::HeaderMap> {
let mut headers = reqwest::header::HeaderMap::new();
@ -66,11 +75,81 @@ impl CurseForgePlatform {
}
}
/// Determine project side based on `CurseForge` categories.
/// `CurseForge` doesn't have explicit client/server fields like Modrinth,
/// so we infer from category names and IDs.
fn detect_side_from_categories(
categories: &[CurseForgeCategory],
) -> ProjectSide {
// Known client-only category indicators (slugs and partial name matches)
const CLIENT_INDICATORS: &[&str] = &[
"client",
"hud",
"gui",
"cosmetic",
"shader",
"optifine",
"resource-pack",
"texture",
"minimap",
"tooltip",
"inventory",
"quality-of-life", // Often client-side QoL
];
// Known server-only category indicators
const SERVER_INDICATORS: &[&str] = &[
"server-utility",
"bukkit",
"spigot",
"paper",
"admin-tools",
"anti-grief",
"economy",
"permissions",
"chat",
];
let mut client_score = 0;
let mut server_score = 0;
for category in categories {
let slug_lower = category.slug.to_lowercase();
let name_lower = category.name.to_lowercase();
for indicator in CLIENT_INDICATORS {
if slug_lower.contains(indicator) || name_lower.contains(indicator) {
client_score += 1;
}
}
for indicator in SERVER_INDICATORS {
if slug_lower.contains(indicator) || name_lower.contains(indicator) {
server_score += 1;
}
}
}
// Only assign a specific side if there's clear indication
// and not conflicting signals
if client_score > 0 && server_score == 0 {
ProjectSide::Client
} else if server_score > 0 && client_score == 0 {
ProjectSide::Server
} else {
// Default to Both - works on both client and server
ProjectSide::Both
}
}
fn convert_project(&self, cf_project: CurseForgeProject) -> Project {
let pakku_id = generate_pakku_id();
let project_type = Self::map_class_id(cf_project.class_id.unwrap_or(6));
let mut project = Project::new(pakku_id, project_type, ProjectSide::Both);
// Detect side from categories
let side = Self::detect_side_from_categories(&cf_project.categories);
let mut project = Project::new(pakku_id, project_type, side);
project.add_platform(
"curseforge".to_string(),
@ -124,7 +203,7 @@ impl CurseForgePlatform {
required_dependencies: cf_file
.dependencies
.iter()
.filter(|d| d.relation_type == 3)
.filter(|d| d.relation_type == DEPENDENCY_RELATION_TYPE_REQUIRED)
.map(|d| d.mod_id.to_string())
.collect(),
size: cf_file.file_length,
@ -317,11 +396,20 @@ impl PlatformClient for CurseForgePlatform {
// CurseForge API models
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeProject {
id: u32,
name: String,
slug: String,
id: u32,
name: String,
slug: String,
#[serde(rename = "classId")]
class_id: Option<u32>,
class_id: Option<u32>,
#[serde(default)]
categories: Vec<CurseForgeCategory>,
}
/// A CurseForge category entry (e.g. name "Server Utility",
/// slug "server-utility"); consumed by `detect_side_from_categories`.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeCategory {
    id: u32,
    name: String,
    slug: String,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
@ -381,3 +469,112 @@ struct CurseForgeFilesResponse {
struct CurseForgeSearchResponse {
data: Vec<CurseForgeProject>,
}
#[cfg(test)]
mod tests {
use super::*;
fn make_category(id: u32, name: &str, slug: &str) -> CurseForgeCategory {
CurseForgeCategory {
id,
name: name.to_string(),
slug: slug.to_string(),
}
}
#[test]
fn test_detect_side_client_only() {
// HUD mod should be client-only
let categories = vec![
make_category(1, "HUD Mods", "hud"),
make_category(2, "Fabric", "fabric"),
];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Client);
}
#[test]
fn test_detect_side_server_only() {
// Server utility should be server-only
let categories = vec![
make_category(1, "Server Utility", "server-utility"),
make_category(2, "Bukkit Plugins", "bukkit"),
];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Server);
}
#[test]
fn test_detect_side_both() {
// Generic mod categories should be both
let categories = vec![
make_category(1, "Technology", "technology"),
make_category(2, "Fabric", "fabric"),
];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Both);
}
#[test]
fn test_detect_side_conflicting_signals() {
// Mixed categories should default to both
let categories = vec![
make_category(1, "Client HUD", "client-hud"),
make_category(2, "Server Utility", "server-utility"),
];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Both);
}
#[test]
fn test_detect_side_empty_categories() {
let categories = vec![];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Both);
}
#[test]
fn test_detect_side_gui_client() {
let categories =
vec![make_category(1, "GUI Enhancement", "gui-enhancement")];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Client);
}
#[test]
fn test_detect_side_permissions_server() {
let categories = vec![make_category(1, "Permissions", "permissions")];
let side = CurseForgePlatform::detect_side_from_categories(&categories);
assert_eq!(side, ProjectSide::Server);
}
#[test]
fn test_map_class_id() {
assert_eq!(CurseForgePlatform::map_class_id(6), ProjectType::Mod);
assert_eq!(
CurseForgePlatform::map_class_id(12),
ProjectType::ResourcePack
);
assert_eq!(
CurseForgePlatform::map_class_id(6945),
ProjectType::DataPack
);
assert_eq!(CurseForgePlatform::map_class_id(6552), ProjectType::Shader);
assert_eq!(CurseForgePlatform::map_class_id(17), ProjectType::World);
assert_eq!(CurseForgePlatform::map_class_id(9999), ProjectType::Mod); // Unknown
}
#[test]
fn test_map_release_type() {
assert_eq!(
CurseForgePlatform::map_release_type(1),
ReleaseType::Release
);
assert_eq!(CurseForgePlatform::map_release_type(2), ReleaseType::Beta);
assert_eq!(CurseForgePlatform::map_release_type(3), ReleaseType::Alpha);
assert_eq!(
CurseForgePlatform::map_release_type(99),
ReleaseType::Release
); // Unknown
}
}

View file

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::{collections::HashMap, sync::Arc};
use async_trait::async_trait;
use regex::Regex;
@ -20,9 +20,9 @@ pub struct GitHubPlatform {
}
impl GitHubPlatform {
pub fn new(token: Option<String>) -> Self {
pub fn with_client(client: Arc<Client>, token: Option<String>) -> Self {
Self {
client: Client::new(),
client: (*client).clone(),
token,
}
}

View file

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::{collections::HashMap, sync::Arc};
use async_trait::async_trait;
use reqwest::Client;
@ -14,16 +14,76 @@ use crate::{
const MODRINTH_API_BASE: &str = "https://api.modrinth.com/v2";
pub struct ModrinthPlatform {
client: Client,
client: Arc<Client>,
}
impl ModrinthPlatform {
pub fn new() -> Self {
Self {
client: Client::new(),
client: Arc::new(Client::new()),
}
}
    /// Create a platform client that reuses a shared HTTP client
    /// (and therefore its connection pool).
    pub fn with_client(client: Arc<Client>) -> Self {
        Self { client }
    }
async fn request_project_url(&self, url: &str) -> Result<Project> {
let response = self.client.get(url).send().await?;
if !response.status().is_success() {
return Err(PakkerError::ProjectNotFound(url.to_string()));
}
let mr_project: ModrinthProject = response.json().await?;
Ok(self.convert_project(mr_project))
}
async fn request_project_files_url(
&self,
url: &str,
) -> Result<Vec<ProjectFile>> {
let response = self.client.get(url).send().await?;
if !response.status().is_success() {
return Err(PakkerError::ProjectNotFound(url.to_string()));
}
let mr_versions: Vec<ModrinthVersion> = response.json().await?;
let project_id = url
.split('/')
.nth(4)
.ok_or_else(|| {
PakkerError::InvalidResponse(
"Cannot parse project ID from URL".to_string(),
)
})?
.to_string();
Ok(
mr_versions
.into_iter()
.map(|v| self.convert_version(v, &project_id))
.collect(),
)
}
async fn lookup_by_hash_url(&self, url: &str) -> Result<Option<Project>> {
let response = self.client.get(url).send().await?;
if response.status().as_u16() == 404 {
return Ok(None);
}
if !response.status().is_success() {
return Err(PakkerError::PlatformApiError(format!(
"Modrinth API error: {}",
response.status()
)));
}
let version_data: serde_json::Value = response.json().await?;
let project_id = version_data["project_id"].as_str().ok_or_else(|| {
PakkerError::InvalidResponse("Missing project_id".to_string())
})?;
self
.request_project_with_files(project_id, &[], &[])
.await
.map(Some)
}
fn map_project_type(type_str: &str) -> ProjectType {
match type_str {
"mod" => ProjectType::Mod,
@ -123,15 +183,7 @@ impl PlatformClient for ModrinthPlatform {
_loaders: &[String],
) -> Result<Project> {
let url = format!("{MODRINTH_API_BASE}/project/{identifier}");
let response = self.client.get(&url).send().await?;
if !response.status().is_success() {
return Err(PakkerError::ProjectNotFound(identifier.to_string()));
}
let mr_project: ModrinthProject = response.json().await?;
Ok(self.convert_project(mr_project))
self.request_project_url(&url).await
}
async fn request_project_files(
@ -170,20 +222,7 @@ impl PlatformClient for ModrinthPlatform {
url.push_str(&params.join("&"));
}
let response = self.client.get(&url).send().await?;
if !response.status().is_success() {
return Err(PakkerError::ProjectNotFound(project_id.to_string()));
}
let mr_versions: Vec<ModrinthVersion> = response.json().await?;
Ok(
mr_versions
.into_iter()
.map(|v| self.convert_version(v, project_id))
.collect(),
)
self.request_project_files_url(&url).await
}
async fn request_project_with_files(
@ -213,30 +252,7 @@ impl PlatformClient for ModrinthPlatform {
async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
// Modrinth uses SHA-1 hash for file lookups
let url = format!("{MODRINTH_API_BASE}/version_file/{hash}");
let response = self.client.get(&url).send().await?;
if response.status().as_u16() == 404 {
return Ok(None);
}
if !response.status().is_success() {
return Err(PakkerError::PlatformApiError(format!(
"Modrinth API error: {}",
response.status()
)));
}
let version_data: serde_json::Value = response.json().await?;
let project_id = version_data["project_id"].as_str().ok_or_else(|| {
PakkerError::InvalidResponse("Missing project_id".to_string())
})?;
self
.request_project_with_files(project_id, &[], &[])
.await
.map(Some)
self.lookup_by_hash_url(&url).await
}
}
@ -280,3 +296,128 @@ struct ModrinthDependency {
project_id: Option<String>,
dependency_type: String,
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use reqwest::Client;
use super::*;
    impl ModrinthPlatform {
        /// Test-only constructor that wraps a plain `reqwest::Client`
        /// instead of the shared pooled client.
        fn with_raw_client(client: Client) -> Self {
            Self {
                client: Arc::new(client),
            }
        }
    }
    /// Start a mockito server and build a platform with a fresh client.
    /// Tests construct their request URLs from `server.url()` themselves.
    async fn create_platform_with_mock()
    -> (ModrinthPlatform, mockito::ServerGuard) {
        let server = mockito::Server::new_async().await;
        let client = Client::new();
        let platform = ModrinthPlatform::with_raw_client(client);
        (platform, server)
    }
    /// A 200 response with a minimal project body converts successfully
    /// and records the Modrinth platform ID.
    #[tokio::test]
    async fn test_request_project_success() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/test-mod", server.url());

        let _mock = server
            .mock("GET", "/project/test-mod")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"{
                "id": "abc123",
                "slug": "test-mod",
                "title": "Test Mod",
                "project_type": "mod",
                "client_side": "required",
                "server_side": "required"
            }"#,
            )
            .create();

        let result = platform.request_project_url(&url).await;
        assert!(result.is_ok());
        let project = result.unwrap();
        assert!(project.get_platform_id("modrinth").is_some());
    }
    /// A 404 response surfaces as an error from `request_project_url`.
    #[tokio::test]
    async fn test_request_project_not_found() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/nonexistent", server.url());

        let _mock = server
            .mock("GET", "/project/nonexistent")
            .with_status(404)
            .create();

        let result = platform.request_project_url(&url).await;
        assert!(result.is_err());
    }
    /// A version list with one file converts into one `ProjectFile`
    /// carrying the expected file name.
    #[tokio::test]
    async fn test_request_project_files() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/project/abc123/version", server.url());

        let _mock = server
            .mock("GET", "/project/abc123/version")
            .with_status(200)
            .with_header("content-type", "application/json")
            .with_body(
                r#"[
                {
                    "id": "v1",
                    "project_id": "abc123",
                    "name": "Test Mod v1.0.0",
                    "version_number": "1.0.0",
                    "game_versions": ["1.20.1"],
                    "version_type": "release",
                    "loaders": ["fabric"],
                    "date_published": "2024-01-01T00:00:00Z",
                    "files": [{
                        "hashes": {"sha1": "abc123def456"},
                        "url": "https://example.com/mod.jar",
                        "filename": "test-mod-1.0.0.jar",
                        "primary": true,
                        "size": 1024
                    }],
                    "dependencies": []
                }
            ]"#,
            )
            .create();

        let result = platform.request_project_files_url(&url).await;
        assert!(result.is_ok());
        let files = result.unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(files[0].file_name, "test-mod-1.0.0.jar");
    }
    /// A 404 on hash lookup is not an error: it maps to `Ok(None)`.
    #[tokio::test]
    async fn test_lookup_by_hash_not_found() {
        let (platform, mut server) = create_platform_with_mock().await;
        let url = format!("{}/version_file/unknownhash123", server.url());

        let _mock = server
            .mock("GET", "/version_file/unknownhash123")
            .with_status(404)
            .create();

        let result = platform.lookup_by_hash_url(&url).await;
        assert!(result.is_ok());
        assert!(result.unwrap().is_none());
    }
}

View file

@ -80,13 +80,13 @@ impl RateLimiter {
platform_requests
.retain(|t| now.duration_since(*t) < Duration::from_secs(60));
if platform_requests.len() >= burst as usize {
if let Some(oldest) = platform_requests.first() {
let wait_time = interval.saturating_sub(now.duration_since(*oldest));
if wait_time > Duration::ZERO {
drop(inner);
tokio::time::sleep(wait_time).await;
}
if platform_requests.len() >= burst as usize
&& let Some(oldest) = platform_requests.first()
{
let wait_time = interval.saturating_sub(now.duration_since(*oldest));
if wait_time > Duration::ZERO {
drop(inner);
tokio::time::sleep(wait_time).await;
}
}

View file

@ -2,7 +2,7 @@
use std::io;
use dialoguer::{Confirm, MultiSelect, Select, theme::ColorfulTheme};
use dialoguer::{Confirm, Input, MultiSelect, Select, theme::ColorfulTheme};
/// Creates a terminal hyperlink using OSC 8 escape sequence
/// Format: \x1b]8;;<URL>\x1b\\<TEXT>\x1b]8;;\x1b\\
@ -58,6 +58,136 @@ pub fn curseforge_project_url(project_id: &str) -> String {
format!("https://www.curseforge.com/minecraft/mc-mods/{project_id}")
}
/// Calculate the case-insensitive Levenshtein edit distance between two
/// strings, operating on Unicode scalar values.
///
/// Uses a single-row dynamic-programming table, so memory is
/// O(len(b)) instead of O(len(a) * len(b)); results are identical to the
/// classic full-matrix formulation.
fn levenshtein_distance(a: &str, b: &str) -> usize {
    let a_chars: Vec<char> = a.to_lowercase().chars().collect();
    let b_chars: Vec<char> = b.to_lowercase().chars().collect();

    if a_chars.is_empty() {
        return b_chars.len();
    }
    if b_chars.is_empty() {
        return a_chars.len();
    }

    // row[j] holds the distance between a[..i] and b[..j] for the row
    // currently being replaced; it starts as the distance from "" to b[..j].
    let mut row: Vec<usize> = (0..=b_chars.len()).collect();

    for (i, &ac) in a_chars.iter().enumerate() {
        let mut diag = row[0]; // matrix[i][0]
        row[0] = i + 1; // matrix[i+1][0]: i+1 deletions
        for (j, &bc) in b_chars.iter().enumerate() {
            let cost = usize::from(ac != bc);
            let next = (row[j + 1] + 1) // deletion
                .min(row[j] + 1) // insertion
                .min(diag + cost); // substitution
            diag = row[j + 1];
            row[j + 1] = next;
        }
    }

    row[b_chars.len()]
}
/// Find similar strings to the input using Levenshtein distance.
/// Returns suggestions sorted by similarity (most similar first).
/// Only returns suggestions with distance <= `max_distance`.
pub fn suggest_similar<'a>(
    input: &str,
    candidates: &'a [String],
    max_distance: usize,
) -> Vec<&'a str> {
    let mut scored = Vec::new();
    for candidate in candidates {
        let distance = levenshtein_distance(input, candidate);
        // distance 0 is an exact match, not a suggestion
        if distance > 0 && distance <= max_distance {
            scored.push((distance, candidate.as_str()));
        }
    }
    // Stable sort keeps the candidate order among equal distances.
    scored.sort_by_key(|&(distance, _)| distance);
    scored.into_iter().map(|(_, name)| name).collect()
}
/// Prompt user if they meant a similar project name.
/// Returns `Some(suggested_name)` if user confirms, None otherwise.
pub fn prompt_typo_suggestion(
input: &str,
candidates: &[String],
) -> io::Result<Option<String>> {
// Use a max distance based on input length for reasonable suggestions
let max_distance = (input.len() / 2).clamp(2, 4);
let suggestions = suggest_similar(input, candidates, max_distance);
if let Some(first_suggestion) = suggestions.first()
&& prompt_yes_no(&format!("Did you mean '{first_suggestion}'?"), true)?
{
return Ok(Some((*first_suggestion).to_string()));
}
Ok(None)
}
/// Prompt for text input with optional default value
pub fn prompt_input(prompt: &str, default: Option<&str>) -> io::Result<String> {
    // Theme must outlive the builder, which borrows it.
    let theme = ColorfulTheme::default();
    let builder = Input::<String>::with_theme(&theme).with_prompt(prompt);
    let builder = match default {
        Some(value) => builder.default(value.to_string()),
        None => builder,
    };
    builder.interact_text().map_err(io::Error::other)
}
/// Prompt for text input, returning None if empty
pub fn prompt_input_optional(prompt: &str) -> io::Result<Option<String>> {
    let theme = ColorfulTheme::default();
    let text: String = Input::with_theme(&theme)
        .with_prompt(prompt)
        .allow_empty(true)
        .interact_text()
        .map_err(io::Error::other)?;
    // Empty input means "no value".
    Ok(Some(text).filter(|t| !t.is_empty()))
}
/// Prompt for `CurseForge` API key when authentication fails.
/// Returns the API key if provided, None if cancelled.
pub fn prompt_curseforge_api_key() -> io::Result<Option<String>> {
    use dialoguer::Password;

    println!();
    println!("CurseForge API key is required but not configured.");
    println!("Get your API key from: https://console.curseforge.com/");
    println!();

    let wants_entry =
        prompt_yes_no("Would you like to enter your API key now?", true)?;
    if !wants_entry {
        return Ok(None);
    }

    let key: String = Password::with_theme(&ColorfulTheme::default())
        .with_prompt("CurseForge API key")
        .interact()
        .map_err(io::Error::other)?;
    // An empty entry counts as cancelling.
    Ok((!key.is_empty()).then_some(key))
}
#[cfg(test)]
mod tests {
use super::*;
@ -74,4 +204,38 @@ mod tests {
let url = modrinth_project_url("sodium");
assert_eq!(url, "https://modrinth.com/mod/sodium");
}
#[test]
fn test_levenshtein_distance() {
assert_eq!(levenshtein_distance("kitten", "sitting"), 3);
assert_eq!(levenshtein_distance("saturday", "sunday"), 3);
assert_eq!(levenshtein_distance("", "abc"), 3);
assert_eq!(levenshtein_distance("abc", ""), 3);
assert_eq!(levenshtein_distance("abc", "abc"), 0);
assert_eq!(levenshtein_distance("ABC", "abc"), 0); // case insensitive
}
#[test]
fn test_suggest_similar() {
let candidates = vec![
"sodium".to_string(),
"lithium".to_string(),
"phosphor".to_string(),
"iris".to_string(),
"fabric-api".to_string(),
];
// Close typo should be suggested
let suggestions = suggest_similar("sodim", &candidates, 2);
assert!(!suggestions.is_empty());
assert_eq!(suggestions[0], "sodium");
// Complete mismatch should return empty
let suggestions = suggest_similar("xyz123", &candidates, 2);
assert!(suggestions.is_empty());
// Exact match returns empty (distance 0 filtered out)
let suggestions = suggest_similar("sodium", &candidates, 2);
assert!(suggestions.is_empty());
}
}

View file

@ -1,4 +1,4 @@
use rand::Rng;
use rand::RngExt;
/// Alphanumeric alphabet (0-9, a-z, A-Z) that generated IDs are drawn from.
const CHARSET: &[u8] =
    b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";