commit ef28bdaeb4683cb415208930f2c2d4297334947a Author: NotAShelf Date: Thu Jan 29 19:36:25 2026 +0300 initial commit Signed-off-by: NotAShelf Change-Id: Ife1391ed23a1e7f388b1b5eca90b9ea76a6a6964 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..50407da --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +/target +ai-docs/sessions/*/ +.opencode/sessions/ diff --git a/.rustfmt.toml b/.rustfmt.toml new file mode 100644 index 0000000..9d5c77e --- /dev/null +++ b/.rustfmt.toml @@ -0,0 +1,26 @@ +condense_wildcard_suffixes = true +doc_comment_code_block_width = 80 +edition = "2024" # Keep in sync with Cargo.toml. +enum_discrim_align_threshold = 60 +force_explicit_abi = false +force_multiline_blocks = true +format_code_in_doc_comments = true +format_macro_matchers = true +format_strings = true +group_imports = "StdExternalCrate" +hex_literal_case = "Upper" +imports_granularity = "Crate" +imports_layout = "HorizontalVertical" +inline_attribute_width = 60 +match_block_trailing_comma = true +max_width = 80 +newline_style = "Unix" +normalize_comments = true +normalize_doc_attributes = true +overflow_delimited_expr = true +struct_field_align_threshold = 60 +tab_spaces = 2 +unstable_features = true +use_field_init_shorthand = true +use_try_shorthand = true +wrap_comments = true diff --git a/.taplo.toml b/.taplo.toml new file mode 100644 index 0000000..fae0c57 --- /dev/null +++ b/.taplo.toml @@ -0,0 +1,13 @@ +[formatting] +align_entries = true +column_width = 110 +compact_arrays = false +reorder_inline_tables = false +reorder_keys = true + +[[rule]] +include = [ "**/Cargo.toml" ] +keys = [ "package" ] + +[rule.formatting] +reorder_keys = false diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..5e76806 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,2969 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + 
"windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "aws-lc-rs" +version = "1.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a88aab2464f1f25453baa7a07c84c5b7684e274054ba06817f382357f77a288" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45afffdee1e7c9126814751f88dddc747f41d91da16c9551a0f1e8a11e788a1" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" + +[[package]] +name = "bytes" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" + +[[package]] +name = "bzip2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a53fac24f34a81bc9954b5d6cfce0c21e18ec6959f44f56e8e90e4bb7c346c" +dependencies = [ + "libbz2-rs-sys", +] + +[[package]] +name = "cc" +version = "1.2.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" 
+dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "clap" +version = "4.5.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" + +[[package]] +name = "cmake" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "colored" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "comfy-table" 
+version = "7.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" +dependencies = [ + "crossterm", + "unicode-segmentation", + "unicode-width", +] + +[[package]] +name = "console" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.61.2", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version 
= "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossterm" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.10.0", + "crossterm_winapi", + "document-features", + "parking_lot", + "rustix", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "deflate64" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26bf8fc351c5ed29b5c2f0cbbac1b209b74f60ecd62e675a998df72c49af5204" + +[[package]] +name = "deranged" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "dialoguer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f104b501bf2364e78d0d3974cbc774f738f5865306ed128e1e0d7499c0ad96" +dependencies = [ + "console", + "shell-words", + "tempfile", + "zeroize", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "document-features" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" +dependencies = [ + "litrs", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_filter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "jiff", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "find-msvc-tools" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41" + +[[package]] +name = "flate2" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" +dependencies = [ + "crc32fast", + "libz-rs-sys", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = 
"futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + 
+[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "git2" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2b37e2f62729cdada11f0e6b3b6fe383c69c29fc619e391223e12856af308c" +dependencies = [ + "bitflags 2.10.0", + "libc", + "libgit2-sys", + "log", + "openssl-probe 0.1.6", + "openssl-sys", + "url", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hmac" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + 
"http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + 
"icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "indicatif" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" +dependencies = [ + "console", + "portable-atomic", + "unicode-width", + "unit-prefix", + "web-time", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "ipnet" 
+version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jiff" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" +dependencies = [ + "jiff-static", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", +] + +[[package]] +name = "jiff-static" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "keyring" +version = "3.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eebcc3aff044e5944a8fbaf69eb277d11986064cba30c468730e8b9909fb551c" +dependencies = [ + "log", + "zeroize", +] + +[[package]] +name = "libbz2-rs-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" + +[[package]] +name = "libc" +version = "0.2.180" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" + +[[package]] +name = "libgit2-sys" +version = "0.18.3+1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libssh2-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-rs-sys" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c10501e7805cee23da17c7790e59df2870c0d4043ec6d03f67d31e2b53e77415" +dependencies = [ + "zlib-rs", +] + +[[package]] +name = "libz-sys" +version = "1.1.23" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "litrs" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "lzma-rust2" +version = "0.15.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f7337d278fec032975dc884152491580dd23750ee957047856735fe0e61ede" +dependencies = [ + "crc", + "sha2", +] + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "mockito" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e0603425789b4a70fcc4ac4f5a46a566c116ee3e2a6b768dc623f7719c611de" +dependencies = [ + "assert-json-diff", + "bytes", + "colored", + "futures-core", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "log", + "pin-project-lite", + "rand", + "regex", + "serde_json", + "serde_urlencoded", + "similar", + "tokio", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-probe" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "pakker" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "clap", + "comfy-table", + "dialoguer", + "env_logger", + "futures", + "git2", + "indicatif", + "keyring", + "libc", + "log", + "md-5", + "mockito", + "once_cell", + "rand", + "regex", + "reqwest", + "serde", + "serde_json", + "sha1", + "sha2", + "strsim", + "tempfile", + "textwrap", + "thiserror 2.0.17", + "tokio", + "walkdir", + "yansi", + "zip", +] + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest", + "hmac", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" + +[[package]] +name = "portable-atomic-util" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +dependencies = [ + "portable-atomic", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppmd-rust" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d558c559f0450f16f2a27a1f017ef38468c1090c9ce63c8e51366232d53717b4" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", 
+] + +[[package]] +name = "proc-macro2" +version = "1.0.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.17", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "aws-lc-rs", + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.17", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "reqwest" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04e9018c9d814e5f30cc16a0f03271aeab3571e609612d9fe78c1aa8d11c2f62" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + 
"js-sys", + "log", + "mime", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier", + "serde", + "serde_json", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustix" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +dependencies = [ + "bitflags 2.10.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "aws-lc-rs", + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" +dependencies = [ + "openssl-probe 0.2.0", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pki-types" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" +dependencies = 
[ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-platform-verifier" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.103.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" 
+dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shell-words" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc6fe69c597f9c37bfeeeeeb33da3530379845f10be461a66d16d03eca2ded77" + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "smawk" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" + +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "smawk", + "unicode-linebreak", + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +dependencies = [ + "deranged", + "num-conv", + "powerfmt", + "serde", + "time-core", +] + +[[package]] +name = "time-core" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags 2.10.0", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-width" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + +[[package]] +name = "unit-prefix" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", 
+] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" +dependencies = [ + "cfg-if", + "once_cell", + 
"rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "winapi" +version = 
"0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + 
"windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yansi" +version = "1.0.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" 
+version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zip" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9013f1222db8a6d680f13a7ccdc60a781199cd09c2fa4eff58e728bb181757fc" +dependencies = [ + "aes", + "bzip2", + "constant_time_eq", + "crc32fast", + "deflate64", + "flate2", + "generic-array", + "getrandom 0.3.4", + "hmac", + "indexmap", + "lzma-rust2", + "memchr", + "pbkdf2", + "ppmd-rust", + "sha1", + "time", + "zeroize", + "zopfli", + "zstd", +] + +[[package]] +name = "zlib-rs" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" + +[[package]] +name = "zmij" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc5a66a20078bf1251bde995aa2fdcc4b800c70b5d92dd2c62abc5c60f679f8" + +[[package]] +name = "zopfli" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249" +dependencies = [ + "bumpalo", + "crc32fast", + "log", + "simd-adler32", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..71acc35 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "pakker" +version = "0.1.0" +edition = "2024" +authors = [ "NotAShelf , + + /// Modpack version + #[clap(short = 'V', long)] + pub version: Option, + + /// Target platform + #[clap(short, long, default_value = "multiplatform")] + pub target: String, + + /// Minecraft version + #[clap(short, long, default_value = "1.20.1")] + pub mc_version: String, + + /// Mod loader + #[clap(short, long, default_value = "fabric")] + pub loader: String, + + /// Mod loader version + #[clap(short = 'v', long, default_value = "latest")] + pub loader_version: String, +} + +#[derive(Args)] +pub struct ImportArgs { + /// Path to modpack file + pub file: String, + + /// Skip confirmation prompts + #[clap(short, long)] + pub yes: bool, +} + +#[derive(Args)] +pub struct AddArgs { + /// Project identifiers to add + #[clap(required = true)] + pub inputs: Vec, + + /// Project type (mod, resourcepack, shader, datapack, world) + #[clap(short = 't', long = "type")] + pub project_type: Option, + + /// Skip 
resolving dependencies + #[clap(short = 'D', long)] + pub no_deps: bool, + + /// Update if already exists + #[clap(short, long)] + pub update: bool, + + /// Skip confirmation prompts + #[clap(short, long)] + pub yes: bool, +} + +#[derive(Args)] +pub struct AddPrjArgs { + /// `CurseForge` project slug or ID (optional file ID: `slug#file_id`) + #[clap(long = "cf", alias = "curseforge")] + pub curseforge: Option, + + /// Modrinth project slug or ID (optional file ID: `slug#file_id`) + #[clap(long = "mr", alias = "modrinth")] + pub modrinth: Option, + + /// GitHub repository (format: owner/repo or owner/repo#tag) + #[clap(long = "gh", alias = "github")] + pub github: Option, + + /// Project type (mod, resourcepack, shader, datapack, world) + #[clap(short = 't', long = "type")] + pub project_type: Option, + + /// Project side (client, server, both) + #[clap(long)] + pub side: Option, + + /// Update strategy (latest, none) + #[clap(long)] + pub strategy: Option, + + /// Redistributable flag + #[clap(long)] + pub redistributable: Option, + + /// Subpath for project file placement + #[clap(long)] + pub subpath: Option, + + /// Project aliases (can be specified multiple times) + #[clap(long = "alias")] + pub aliases: Vec, + + /// Export flag (whether to include in exports) + #[clap(long)] + pub export: Option, + + /// Skip resolving dependencies + #[clap(short = 'D', long = "no-deps")] + pub no_deps: bool, + + /// Skip confirmation prompts + #[clap(short, long)] + pub yes: bool, +} + +#[derive(Args)] +pub struct RmArgs { + /// Project identifiers to remove + #[clap(required = true)] + pub inputs: Vec, + + /// Skip confirmation prompt + #[clap(short, long)] + pub yes: bool, +} + +#[derive(Args)] +pub struct UpdateArgs { + /// Projects to update (empty = all) + #[arg(value_name = "PROJECT")] + pub inputs: Vec, + + /// Skip confirmation prompts + #[arg(short, long)] + pub yes: bool, +} + +#[derive(Args)] +pub struct LsArgs { + /// Show detailed information + #[clap(short, 
long)] + pub detailed: bool, + + /// Add update information for projects + #[clap(short = 'c', long = "check-updates")] + pub check_updates: bool, + + /// Maximum length for project names + #[clap(long = "name-max-length")] + pub name_max_length: Option, +} + +#[derive(Args)] +pub struct SetArgs { + /// Project identifier (optional for lockfile properties) + pub input: Option, + + /// Project type + #[clap(long)] + pub r#type: Option, + + /// Project side (client/server/both) + #[clap(long)] + pub side: Option, + + /// Update strategy (latest/none) + #[clap(long)] + pub strategy: Option, + + /// Redistributable flag + #[clap(long)] + pub redistributable: Option, + + /// Change the target of the pack (curseforge, modrinth, multiplatform) + #[clap(short = 't', long)] + pub target: Option, + + /// Change the minecraft versions (comma-separated) + #[clap(short = 'v', long)] + pub mc_versions: Option, + + /// Change the mod loaders (format: name=version,name=version) + #[clap(short = 'l', long)] + pub loaders: Option, +} + +#[derive(Args)] +pub struct LinkArgs { + /// Source project + pub from: String, + + /// Target project + pub to: String, +} + +#[derive(Args)] +pub struct UnlinkArgs { + /// Source project + pub from: String, + + /// Target project + pub to: String, +} + +#[derive(Args)] +pub struct DiffArgs { + /// Path to old lockfile + pub old_lockfile: String, + + /// Path to current lockfile (optional, defaults to pakku-lock.json) + pub current_lockfile: Option, + + /// Export markdown diff + #[clap(long)] + pub markdown_diff: Option, + + /// Export markdown (formatted) + #[clap(long)] + pub markdown: Option, + + /// Verbose output (show file changes) + #[clap(short, long)] + pub verbose: bool, + + /// Header size for markdown (0-5) + #[clap(short = 'H', long, default_value = "2")] + pub header_size: usize, +} + +#[derive(Args)] +pub struct FetchArgs { + /// Timeout for waiting on conflicting operations (seconds) + #[clap(short, long)] + pub timeout: Option, + + 
/// Number of retry attempts for failed downloads + #[clap(short = 'r', long, default_value = "2")] + pub retry: u32, + + /// Move unknown files to shelf instead of deleting + #[clap(long)] + pub shelve: bool, +} + +#[derive(Args)] +pub struct SyncArgs { + /// Sync additions only + #[clap(short = 'A', long)] + pub additions: bool, + + /// Sync removals only + #[clap(short = 'R', long)] + pub removals: bool, + + /// Sync updates only + #[clap(short = 'U', long)] + pub updates: bool, +} + +#[derive(Args)] +pub struct ExportArgs { + /// Export profile (curseforge, modrinth, serverpack) + /// If not specified, all profiles will be exported + #[clap(short, long)] + pub profile: Option, + + /// Output directory + #[clap(short, long)] + pub output: Option, + + /// Use Pakker-compatible output layout (build//...) + /// Default is Pakker layout (exports/...) + #[clap(long)] + pub pakker_layout: bool, +} + +#[derive(Args)] +pub struct RemoteArgs { + /// Git URL to install from (if empty, shows status) + pub url: Option, + + /// Branch to checkout (instead of remote's HEAD) + #[clap(short, long)] + pub branch: Option, + + /// Install server pack + #[clap(short = 'S', long)] + pub server_pack: bool, + + /// Retry count for downloads + #[clap(short, long, default_value = "2")] + pub retry: u32, + + /// Remove remote from modpack + #[clap(long = "rm", long = "remove")] + pub remove: bool, +} + +#[derive(Args)] +pub struct RemoteUpdateArgs { + /// Branch to checkout instead of remote's HEAD + #[clap(short, long)] + pub branch: Option, + + /// Install server pack instead of full modpack + #[clap(short, long)] + pub server_pack: bool, +} + +#[derive(Args)] +pub struct StatusArgs { + /// Check updates in parallel + #[clap(short, long)] + pub parallel: bool, +} + +#[derive(Args)] +pub struct InspectArgs { + /// Project identifiers to inspect + #[clap(required = true)] + pub projects: Vec, +} + +#[derive(Args)] +pub struct CredentialsArgs { + /// Delete stored credentials (defaults to 
deleting both file and keyring) + #[clap(short, long)] + pub delete: bool, + + /// Delete credentials file (~/.pakku/credentials) + #[clap(long)] + pub delete_file: bool, + + /// Delete credentials from keyring (service: pakker) + #[clap(long)] + pub delete_keyring: bool, + + #[clap(subcommand)] + pub subcommand: Option, +} + +#[derive(Subcommand)] +pub enum CredentialsSubcommand { + /// Set API credentials + Set(CredentialsSetArgs), +} + +#[derive(Args)] +pub struct CredentialsSetArgs { + /// `CurseForge` API key + #[clap(long)] + pub cf_api_key: Option, + + /// Modrinth API token + #[clap(long)] + pub modrinth_token: Option, + + /// GitHub access token + #[clap(long)] + pub gh_access_token: Option, +} + +#[derive(Args)] +pub struct CfgArgs { + /// Modpack name + #[clap(long)] + pub name: Option, + + /// Modpack version + #[clap(long)] + pub version: Option, + + /// Modpack description + #[clap(long)] + pub description: Option, + + /// Modpack author + #[clap(long)] + pub author: Option, + + /// Path for mods + #[clap(long)] + pub mods_path: Option, + + /// Path for resource packs + #[clap(long)] + pub resource_packs_path: Option, + + /// Path for data packs + #[clap(long)] + pub data_packs_path: Option, + + /// Path for worlds + #[clap(long)] + pub worlds_path: Option, + + /// Path for shaders + #[clap(long)] + pub shaders_path: Option, + + #[clap(subcommand)] + pub subcommand: Option, +} + +#[derive(Subcommand)] +pub enum CfgSubcommand { + /// Configure per-project settings + Prj(CfgPrjArgs), +} + +#[derive(Args)] +pub struct CfgPrjArgs { + /// Project identifier + pub project: String, + + /// Project type + #[clap(long)] + pub r#type: Option, + + /// Project side (client/server/both) + #[clap(long)] + pub side: Option, + + /// Update strategy (latest/none) + #[clap(long)] + pub update_strategy: Option, + + /// Redistributable flag + #[clap(long)] + pub redistributable: Option, + + /// Subpath for project + #[clap(long)] + pub subpath: Option, + + /// Add alias 
+ #[clap(long)] + pub add_alias: Option, + + /// Remove alias + #[clap(long)] + pub remove_alias: Option, + + /// Export flag + #[clap(long)] + pub export: Option, +} + +/// Fork subcommand arguments +#[derive(Debug, Args)] +#[command(args_conflicts_with_subcommands = true)] +pub struct ForkArgs { + #[clap(subcommand)] + pub subcommand: ForkSubcommand, +} + +#[derive(Debug, Subcommand)] +pub enum ForkSubcommand { + /// Initialize fork from parent repository + Init { + /// Git URL of parent repository + #[clap(long, conflicts_with = "from_path")] + git_url: Option, + + /// Use current repository as parent + #[clap(long, conflicts_with = "from_path")] + from_current: bool, + + /// Use an already-cloned repository as parent (path to worktree or .git) + #[clap(long, value_parser, conflicts_with_all = &["git_url", "from_current"])] + from_path: Option, + + /// Branch/tag/commit to track + #[clap(long)] + ref_name: Option, + + /// Type of ref (branch/tag/commit) + #[clap(long, value_enum)] + ref_type: Option, + + /// Remote name + #[clap(long, default_value = "origin")] + remote: Option, + }, + + /// Update fork configuration + Set { + /// New git URL (optional) + #[clap(long)] + git_url: Option, + + /// Branch/tag/commit to track + #[clap(long)] + ref_name: String, + + /// Type of ref (branch/tag/commit) + #[clap(long, value_enum)] + ref_type: Option, + + /// Remote name + #[clap(long)] + remote: Option, + }, + + /// Show fork configuration + Show, + + /// Remove fork configuration + Unset, + + /// Sync with parent repository + Sync, + + /// Promote projects to parent (legacy) + Promote { + /// Project identifiers to promote + projects: Vec, + }, +} diff --git a/src/cli/commands/add.rs b/src/cli/commands/add.rs new file mode 100644 index 0000000..751c941 --- /dev/null +++ b/src/cli/commands/add.rs @@ -0,0 +1,227 @@ +use std::collections::HashMap; + +use crate::{ + error::{PakkerError, Result}, + model::{Config, LockFile, Project}, + platform::create_platform, + 
resolver::DependencyResolver, +}; + +fn get_loaders(lockfile: &LockFile) -> Vec { + lockfile.loaders.keys().cloned().collect() +} + +pub fn create_all_platforms() +-> Result>> { + let mut platforms = HashMap::new(); + + if let Ok(platform) = create_platform("modrinth", None) { + platforms.insert("modrinth".to_string(), platform); + } + if let Ok(platform) = + create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok()) + { + platforms.insert("curseforge".to_string(), platform); + } + + Ok(platforms) +} + +async fn resolve_input( + input: &str, + platforms: &HashMap>, + lockfile: &LockFile, +) -> Result { + for platform in platforms.values() { + if let Ok(project) = platform + .request_project_with_files( + input, + &lockfile.mc_versions, + &get_loaders(lockfile), + ) + .await + { + return Ok(project); + } + } + + Err(PakkerError::ProjectNotFound(input.to_string())) +} + +use std::path::Path; + +use crate::{cli::AddArgs, model::fork::LocalConfig}; + +pub async fn execute( + args: AddArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + log::info!("Adding projects: {:?}", args.inputs); + + // Load lockfile + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + // Check if lockfile exists (try both pakker-lock.json and pakku-lock.json) + let lockfile_exists = + lockfile_path.exists() || lockfile_dir.join("pakku-lock.json").exists(); + + if !lockfile_exists { + // Try to load config from both pakker.json and pakku.json + let local_config = LocalConfig::load(config_dir).or_else(|_| { + let legacy_config_path = config_dir.join("pakku.json"); + if legacy_config_path.exists() { + LocalConfig::load(&config_dir.join("pakku.json")) + } else { + Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "No pakker.json found", + ))) + } + })?; + + if local_config.has_parent() { + 
log::info!("Creating minimal fork lockfile with parent metadata..."); + + // Check for parent lockfile (try both pakker-lock.json and + // pakku-lock.json) + let parent_paths = [ + lockfile_dir.join(".pakku/parent/pakker-lock.json"), + lockfile_dir.join(".pakku/parent/pakku-lock.json"), + ]; + + let parent_found = parent_paths.iter().any(|path| path.exists()); + if !parent_found { + return Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "Fork configured but parent lockfile not found at \ + .pakku/parent/pakker-lock.json or .pakku/parent/pakku-lock.json", + ))); + } + + // Load parent lockfile to get metadata + let parent_lockfile = parent_paths + .iter() + .find(|path| path.exists()) + .and_then(|path| LockFile::load(path.parent().unwrap()).ok()) + .ok_or_else(|| { + PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "Failed to load parent lockfile metadata", + )) + })?; + + let minimal_lockfile = LockFile { + target: parent_lockfile.target, + mc_versions: parent_lockfile.mc_versions, + loaders: parent_lockfile.loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + minimal_lockfile.save_without_validation(lockfile_dir)?; + } else { + return Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "pakker-lock.json not found and no fork configured. 
Run 'pakker init' \ + first.", + ))); + } + } + + let mut lockfile = LockFile::load_with_validation(lockfile_dir, false)?; + + // Load config if available + let _config = Config::load(config_dir).ok(); + + // Create platforms + let platforms = create_all_platforms()?; + + let mut new_projects = Vec::new(); + + // Resolve each input + for input in &args.inputs { + let project = resolve_input(input, &platforms, &lockfile).await?; + + // Check if already exists by matching platform IDs (not pakku_id which is + // random) + let already_exists = lockfile.projects.iter().any(|p| { + // Check if any platform ID matches + project.id.iter().any(|(platform, id)| { + p.id + .get(platform) + .is_some_and(|existing_id| existing_id == id) + }) + }); + + if already_exists { + if args.update { + log::info!("Updating existing project: {}", project.get_name()); + // Find and replace the existing project + if let Some(pos) = lockfile.projects.iter().position(|p| { + project.id.iter().any(|(platform, id)| { + p.id + .get(platform) + .is_some_and(|existing_id| existing_id == id) + }) + }) { + lockfile.projects[pos] = project; + } + continue; + } + log::info!("Project already exists: {}", project.get_name()); + continue; + } + + new_projects.push(project); + } + + // Resolve dependencies unless --no-deps is specified + if !args.no_deps { + log::info!("Resolving dependencies..."); + + let mut resolver = DependencyResolver::new(); + let mut all_new_projects = new_projects.clone(); + + for project in &mut new_projects { + let deps = resolver.resolve(project, &mut lockfile, &platforms).await?; + + for dep in deps { + if !lockfile.projects.iter().any(|p| p.pakku_id == dep.pakku_id) + && !all_new_projects.iter().any(|p| p.pakku_id == dep.pakku_id) + { + // Prompt user for confirmation unless --yes flag is set + if !args.yes { + let prompt_msg = format!( + "Add dependency '{}' required by '{}'?", + dep.get_name(), + project.get_name() + ); + if !crate::ui_utils::prompt_yes_no(&prompt_msg, 
true)? { + log::info!("Skipping dependency: {}", dep.get_name()); + continue; + } + } + + log::info!("Adding dependency: {}", dep.get_name()); + all_new_projects.push(dep); + } + } + } + + new_projects = all_new_projects; + } + + // Add projects to lockfile (updates already handled above) + for project in new_projects { + lockfile.add_project(project); + } + + // Save lockfile + lockfile.save(lockfile_dir)?; + + log::info!("Successfully added {} project(s)", args.inputs.len()); + + Ok(()) +} diff --git a/src/cli/commands/add_prj.rs b/src/cli/commands/add_prj.rs new file mode 100644 index 0000000..f111331 --- /dev/null +++ b/src/cli/commands/add_prj.rs @@ -0,0 +1,386 @@ +use std::{collections::HashMap, path::Path}; + +use crate::{ + error::{PakkerError, Result}, + model::{ + Config, + LockFile, + Project, + enums::{ProjectSide, ProjectType, UpdateStrategy}, + }, + platform::create_platform, + resolver::DependencyResolver, +}; + +/// Parse a common project argument (slug or ID with optional file ID) +/// Format: "input" or "`input#file_id`" +fn parse_common_arg(input: &str) -> (String, Option) { + if let Some((project_input, file_id)) = input.split_once('#') { + (project_input.to_string(), Some(file_id.to_string())) + } else { + (input.to_string(), None) + } +} + +/// Parse a GitHub argument (owner/repo with optional tag) +/// Format: "owner/repo" or "owner/repo#tag" +fn parse_github_arg(input: &str) -> Result<(String, String, Option)> { + let (repo_part, tag) = if let Some((r, t)) = input.split_once('#') { + (r, Some(t.to_string())) + } else { + (input, None) + }; + + if let Some((owner, repo)) = repo_part.split_once('/') { + Ok((owner.to_string(), repo.to_string(), tag)) + } else { + Err(PakkerError::InvalidInput(format!( + "Invalid GitHub format '{input}'. 
Expected: owner/repo or owner/repo#tag" + ))) + } +} + +fn get_loaders(lockfile: &LockFile) -> Vec { + lockfile.loaders.keys().cloned().collect() +} + +pub async fn execute( + cf_arg: Option, + mr_arg: Option, + gh_arg: Option, + project_type: Option, + project_side: Option, + update_strategy: Option, + redistributable: Option, + subpath: Option, + aliases: Vec, + export: Option, + no_deps: bool, + yes: bool, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + // At least one platform must be specified + if cf_arg.is_none() && mr_arg.is_none() && gh_arg.is_none() { + return Err(PakkerError::InvalidInput( + "At least one platform must be specified (--cf, --mr, or --gh)" + .to_string(), + )); + } + + log::info!("Adding project with explicit platform specification"); + + // Load lockfile + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let mut lockfile = LockFile::load(lockfile_dir)?; + + // Load config if available + let _config = Config::load(config_dir).ok(); + + // Get MC versions and loaders from lockfile + let mc_versions = &lockfile.mc_versions; + let loaders = get_loaders(&lockfile); + + // Fetch projects from each specified platform + let mut projects_to_merge: Vec = Vec::new(); + + // CurseForge + if let Some(cf_input) = cf_arg { + log::info!("Fetching from CurseForge: {cf_input}"); + let (input, file_id) = parse_common_arg(&cf_input); + + let cf_api_key = std::env::var("CURSEFORGE_API_KEY").ok(); + let platform = create_platform("curseforge", cf_api_key)?; + + let mut project = platform + .request_project_with_files(&input, mc_versions, &loaders) + .await + .map_err(|e| { + PakkerError::ProjectNotFound(format!( + "CurseForge project '{input}': {e}" + )) + })?; + + // If file_id specified, filter to that file + if let Some(fid) = file_id { + project.files.retain(|f| f.id == fid); + if project.files.is_empty() { + return 
Err(PakkerError::FileSelectionError(format!( + "File ID '{fid}' not found for CurseForge project '{input}'" + ))); + } + } + + projects_to_merge.push(project); + } + + // Modrinth + if let Some(mr_input) = mr_arg { + log::info!("Fetching from Modrinth: {mr_input}"); + let (input, file_id) = parse_common_arg(&mr_input); + + let platform = create_platform("modrinth", None)?; + + let mut project = platform + .request_project_with_files(&input, mc_versions, &loaders) + .await + .map_err(|e| { + PakkerError::ProjectNotFound(format!("Modrinth project '{input}': {e}")) + })?; + + // If file_id specified, filter to that file + if let Some(fid) = file_id { + project.files.retain(|f| f.id == fid); + if project.files.is_empty() { + return Err(PakkerError::FileSelectionError(format!( + "File ID '{fid}' not found for Modrinth project '{input}'" + ))); + } + } + + projects_to_merge.push(project); + } + + // GitHub + if let Some(gh_input) = gh_arg { + log::info!("Fetching from GitHub: {gh_input}"); + let (owner, repo, tag) = parse_github_arg(&gh_input)?; + + let gh_token = std::env::var("GITHUB_TOKEN").ok(); + let platform = create_platform("github", gh_token)?; + + let repo_path = format!("{owner}/{repo}"); + let mut project = platform + .request_project_with_files(&repo_path, mc_versions, &loaders) + .await + .map_err(|e| { + PakkerError::ProjectNotFound(format!( + "GitHub repository '{owner}/{repo}': {e}" + )) + })?; + + // If tag specified, filter to that tag + if let Some(t) = tag { + project.files.retain(|f| f.id == t); + if project.files.is_empty() { + return Err(PakkerError::FileSelectionError(format!( + "Tag '{t}' not found for GitHub repository '{owner}/{repo}'" + ))); + } + } + + projects_to_merge.push(project); + } + + // Merge all fetched projects into one + if projects_to_merge.is_empty() { + return Err(PakkerError::ProjectNotFound( + "No projects could be fetched from specified platforms".to_string(), + )); + } + + let mut combined_project = 
projects_to_merge.remove(0); + for project in projects_to_merge { + combined_project.merge(project); + } + + // Apply user-specified properties + if let Some(pt) = project_type { + combined_project.r#type = pt; + } + if let Some(ps) = project_side { + combined_project.side = ps; + } + if let Some(us) = update_strategy { + combined_project.update_strategy = us; + } + if let Some(r) = redistributable { + combined_project.redistributable = r; + } + if let Some(sp) = subpath { + combined_project.subpath = Some(sp); + } + if let Some(e) = export { + combined_project.export = e; + } + + // Add aliases + for alias in aliases { + combined_project.aliases.insert(alias); + } + + // Check if project already exists + let existing_pos = lockfile.projects.iter().position(|p| { + // Check if any platform ID matches + combined_project.id.iter().any(|(platform, id)| { + p.id + .get(platform) + .is_some_and(|existing_id| existing_id == id) + }) + }); + + let project_name = combined_project.get_name(); + + if let Some(pos) = existing_pos { + let existing_project = &lockfile.projects[pos]; + let existing_name = existing_project.get_name(); + + if !yes { + let prompt_msg = format!( + "Project '{existing_name}' already exists. Replace with \ + '{project_name}'?" + ); + if !crate::ui_utils::prompt_yes_no(&prompt_msg, false)? { + log::info!("Operation cancelled by user"); + return Ok(()); + } + } + + log::info!("Replacing existing project: {existing_name}"); + lockfile.projects[pos] = combined_project.clone(); + println!("✓ Replaced '{existing_name}' with '{project_name}'"); + } else { + if !yes { + let prompt_msg = format!("Add project '{project_name}'?"); + if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? 
{ + log::info!("Operation cancelled by user"); + return Ok(()); + } + } + + lockfile.add_project(combined_project.clone()); + println!("✓ Added '{project_name}'"); + } + + // Resolve dependencies unless --no-deps is specified + if !no_deps { + log::info!("Resolving dependencies..."); + + let platforms = create_all_platforms()?; + let mut resolver = DependencyResolver::new(); + + let deps = resolver + .resolve(&mut combined_project, &mut lockfile, &platforms) + .await?; + + for dep in deps { + // Skip if already in lockfile + if lockfile.projects.iter().any(|p| { + dep.id.iter().any(|(platform, id)| { + p.id + .get(platform) + .is_some_and(|existing_id| existing_id == id) + }) + }) { + continue; + } + + let dep_name = dep.get_name(); + + // Prompt user for confirmation unless --yes flag is set + if !yes { + let prompt_msg = + format!("Add dependency '{dep_name}' required by '{project_name}'?"); + if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? { + log::info!("Skipping dependency: {dep_name}"); + continue; + } + } + + log::info!("Adding dependency: {dep_name}"); + lockfile.add_project(dep); + println!(" ✓ Added dependency '{dep_name}'"); + } + } + + // Save lockfile + lockfile.save(lockfile_dir)?; + + log::info!("Successfully completed add-prj operation"); + + Ok(()) +} + +fn create_all_platforms() +-> Result>> { + let mut platforms = HashMap::new(); + + if let Ok(platform) = create_platform("modrinth", None) { + platforms.insert("modrinth".to_string(), platform); + } + if let Ok(platform) = + create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok()) + { + platforms.insert("curseforge".to_string(), platform); + } + if let Ok(platform) = + create_platform("github", std::env::var("GITHUB_TOKEN").ok()) + { + platforms.insert("github".to_string(), platform); + } + + Ok(platforms) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_common_arg_without_file_id() { + let (input, file_id) = parse_common_arg("fabric-api"); + 
assert_eq!(input, "fabric-api"); + assert_eq!(file_id, None); + } + + #[test] + fn test_parse_common_arg_with_file_id() { + let (input, file_id) = parse_common_arg("fabric-api#12345"); + assert_eq!(input, "fabric-api"); + assert_eq!(file_id, Some("12345".to_string())); + } + + #[test] + fn test_parse_github_arg_owner_repo() { + let result = parse_github_arg("FabricMC/fabric"); + assert!(result.is_ok()); + let (owner, repo, tag) = result.unwrap(); + assert_eq!(owner, "FabricMC"); + assert_eq!(repo, "fabric"); + assert_eq!(tag, None); + } + + #[test] + fn test_parse_github_arg_with_tag() { + let result = parse_github_arg("FabricMC/fabric#v0.15.0"); + assert!(result.is_ok()); + let (owner, repo, tag) = result.unwrap(); + assert_eq!(owner, "FabricMC"); + assert_eq!(repo, "fabric"); + assert_eq!(tag, Some("v0.15.0".to_string())); + } + + #[test] + fn test_parse_github_arg_invalid() { + let result = parse_github_arg("invalid-format"); + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Invalid GitHub format") + ); + } + + #[test] + fn test_parse_github_arg_missing_repo() { + let result = parse_github_arg("FabricMC/"); + assert!(result.is_ok()); + let (owner, repo, tag) = result.unwrap(); + assert_eq!(owner, "FabricMC"); + assert_eq!(repo, ""); + assert_eq!(tag, None); + } +} diff --git a/src/cli/commands/cfg.rs b/src/cli/commands/cfg.rs new file mode 100644 index 0000000..0e1068a --- /dev/null +++ b/src/cli/commands/cfg.rs @@ -0,0 +1,101 @@ +use std::path::Path; + +use yansi::Paint; + +use crate::{error::Result, model::config::Config}; + +pub fn execute( + config_path: &Path, + name: Option, + version: Option, + description: Option, + author: Option, + mods_path: Option, + resource_packs_path: Option, + data_packs_path: Option, + worlds_path: Option, + shaders_path: Option, +) -> Result<()> { + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + let mut config = Config::load(config_dir)?; + let mut changed = false; + 
+ // Modpack properties + if let Some(new_name) = name { + config.name = new_name.clone(); + println!("{}", format!("✓ 'name' set to '{new_name}'").green()); + changed = true; + } + + if let Some(new_version) = version { + config.version = new_version.clone(); + println!("{}", format!("✓ 'version' set to '{new_version}'").green()); + changed = true; + } + + if let Some(new_description) = description { + config.description = Some(new_description.clone()); + println!( + "{}", + format!("✓ 'description' set to '{new_description}'").green() + ); + changed = true; + } + + if let Some(new_author) = author { + config.author = Some(new_author.clone()); + println!("{}", format!("✓ 'author' set to '{new_author}'").green()); + changed = true; + } + + // Project type paths + if let Some(path) = mods_path { + config.paths.insert("mod".to_string(), path.clone()); + println!("{}", format!("✓ 'paths.mod' set to '{path}'").green()); + changed = true; + } + + if let Some(path) = resource_packs_path { + config + .paths + .insert("resource-pack".to_string(), path.clone()); + println!( + "{}", + format!("✓ 'paths.resource-pack' set to '{path}'").green() + ); + changed = true; + } + + if let Some(path) = data_packs_path { + config.paths.insert("data-pack".to_string(), path.clone()); + println!("{}", format!("✓ 'paths.data-pack' set to '{path}'").green()); + changed = true; + } + + if let Some(path) = worlds_path { + config.paths.insert("world".to_string(), path.clone()); + println!("{}", format!("✓ 'paths.world' set to '{path}'").green()); + changed = true; + } + + if let Some(path) = shaders_path { + config.paths.insert("shader".to_string(), path.clone()); + println!("{}", format!("✓ 'paths.shader' set to '{path}'").green()); + changed = true; + } + + if !changed { + eprintln!( + "{}", + "No changes specified. 
Use --help for options.".yellow() + ); + return Ok(()); + } + + // Config::save expects directory path, not file path + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + config.save(config_dir)?; + println!("\n{}", "Configuration updated successfully".green().bold()); + + Ok(()) +} diff --git a/src/cli/commands/cfg_prj.rs b/src/cli/commands/cfg_prj.rs new file mode 100644 index 0000000..3ad7346 --- /dev/null +++ b/src/cli/commands/cfg_prj.rs @@ -0,0 +1,201 @@ +use std::path::Path; + +use yansi::Paint; + +use crate::{ + error::{PakkerError, Result}, + model::{ + config::Config, + enums::{ProjectSide, ProjectType, UpdateStrategy}, + lockfile::LockFile, + }, +}; + +pub fn execute( + config_path: &Path, + lockfile_path: &Path, + project: String, + r#type: Option<&str>, + side: Option<&str>, + update_strategy: Option<&str>, + redistributable: Option, + subpath: Option, + add_alias: Option, + remove_alias: Option, + export: Option, +) -> Result<()> { + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + let mut config = Config::load(config_dir)?; + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let lockfile = LockFile::load(lockfile_dir)?; + + // Find the project in lockfile to get its pakku_id + // Try multiple lookup strategies: pakku_id first, then slug, then name + let found_project = lockfile + .find_project(&project) + .or_else(|| { + // Try to find by slug on any platform + lockfile + .projects + .iter() + .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(&project))) + }) + .or_else(|| { + // Try to find by name on any platform + lockfile + .projects + .iter() + .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(&project))) + }) + .ok_or_else(|| PakkerError::ProjectNotFound(project.clone()))?; + + let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| { + PakkerError::InvalidProject("Project has no pakku_id".to_string()) + })?; + + // Get or create project config + let mut project_config = 
config + .get_project_config(pakku_id) + .cloned() + .unwrap_or_default(); + + let mut changed = false; + + if let Some(type_str) = r#type { + let parsed_type = match type_str.to_uppercase().as_str() { + "MOD" => ProjectType::Mod, + "RESOURCE_PACK" | "RESOURCEPACK" => ProjectType::ResourcePack, + "DATA_PACK" | "DATAPACK" => ProjectType::DataPack, + "SHADER" => ProjectType::Shader, + "WORLD" => ProjectType::World, + _ => { + return Err(PakkerError::InvalidProject(format!( + "Invalid type: {type_str}" + ))); + }, + }; + project_config.r#type = Some(parsed_type); + println!( + "{}", + format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green() + ); + changed = true; + } + + if let Some(side_str) = side { + let parsed_side = match side_str.to_uppercase().as_str() { + "CLIENT" => ProjectSide::Client, + "SERVER" => ProjectSide::Server, + "BOTH" => ProjectSide::Both, + _ => { + return Err(PakkerError::InvalidProject(format!( + "Invalid side: {side_str}" + ))); + }, + }; + project_config.side = Some(parsed_side); + println!( + "{}", + format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green() + ); + changed = true; + } + + if let Some(strategy_str) = update_strategy { + let parsed_strategy = match strategy_str.to_uppercase().as_str() { + "LATEST" => UpdateStrategy::Latest, + "NONE" => UpdateStrategy::None, + _ => { + return Err(PakkerError::InvalidProject(format!( + "Invalid update strategy: {strategy_str}" + ))); + }, + }; + project_config.update_strategy = Some(parsed_strategy); + println!( + "{}", + format!( + "✓ 'updateStrategy' set to '{parsed_strategy:?}' for '{pakku_id}'" + ) + .green() + ); + changed = true; + } + + if let Some(new_redistributable) = redistributable { + project_config.redistributable = Some(new_redistributable); + println!( + "{}", + format!( + "✓ 'redistributable' set to '{new_redistributable}' for '{pakku_id}'" + ) + .green() + ); + changed = true; + } + + if let Some(new_subpath) = subpath { + project_config.subpath = 
Some(new_subpath.clone()); + println!( + "{}", + format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green() + ); + changed = true; + } + + if let Some(alias_to_add) = add_alias { + let mut aliases = project_config.aliases.clone().unwrap_or_default(); + if !aliases.contains(&alias_to_add) { + aliases.push(alias_to_add.clone()); + project_config.aliases = Some(aliases); + println!( + "{}", + format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green() + ); + changed = true; + } + } + + if let Some(alias_to_remove) = remove_alias + && let Some(mut aliases) = project_config.aliases.clone() + { + aliases.retain(|a| a != &alias_to_remove); + project_config.aliases = Some(aliases); + println!( + "{}", + format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green() + ); + changed = true; + } + + if let Some(new_export) = export { + project_config.export = Some(new_export); + println!( + "{}", + format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green() + ); + changed = true; + } + + if !changed { + eprintln!( + "{}", + "No changes specified. 
Use --help for options.".yellow() + ); + return Ok(()); + } + + config.set_project_config(pakku_id.clone(), project_config); + // Config::save expects directory path, not file path + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + config.save(config_dir)?; + + println!( + "\n{}", + format!("Project configuration updated for '{pakku_id}'") + .green() + .bold() + ); + + Ok(()) +} diff --git a/src/cli/commands/credentials.rs b/src/cli/commands/credentials.rs new file mode 100644 index 0000000..7540668 --- /dev/null +++ b/src/cli/commands/credentials.rs @@ -0,0 +1,112 @@ +use yansi::Paint; + +use crate::{ + error::Result, + model::{ + PakkerCredentialsFile, + credentials::{CredentialsSource, ResolvedCredentials}, + }, +}; + +pub fn execute( + delete: bool, + delete_file: bool, + delete_keyring: bool, +) -> Result<()> { + let delete_effective = delete || delete_file || delete_keyring; + + if delete_effective { + // Pakker must never delete or modify Pakku's credentials file + // (~/.pakku/credentials). Deletion here only affects Pakker-managed + // storage (keyring + Pakker-owned file). 
+ let delete_keyring = delete_keyring || delete; + let delete_pakker_file = delete_file || delete; + + if delete_pakker_file { + PakkerCredentialsFile::delete()?; + } + if delete_keyring { + ResolvedCredentials::delete_keyring()?; + } + + println!("Credentials deleted."); + return Ok(()); + } + + let creds = ResolvedCredentials::load()?; + + let has_any = creds.curseforge_api_key().is_some() + || creds.modrinth_token().is_some() + || creds.github_access_token().is_some(); + + if !has_any { + println!("{}", "No credentials stored".yellow()); + println!("\nUse 'pakker credentials set' to add credentials"); + return Ok(()); + } + + println!("{}", "Stored Credentials:".cyan().bold()); + println!(); + + print_credential( + "CurseForge API Key", + creds.curseforge_api_key(), + creds.curseforge_source(), + ); + + print_credential( + "Modrinth Token", + creds.modrinth_token(), + creds.modrinth_source(), + ); + + print_credential( + "GitHub Access Token", + creds.github_access_token(), + creds.github_source(), + ); + + println!(); + println!( + "{}", + format!( + "Credentials file: {}", + PakkerCredentialsFile::get_path()?.display() + ) + .cyan() + ); + + Ok(()) +} + +fn print_credential( + label: &str, + value: Option<&str>, + source: Option, +) { + if let Some(v) = value { + let masked = mask_key(v); + let source = source.map_or("unknown", source_label); + println!(" {} {} ({})", format!("{label}:").yellow(), masked, source); + } +} + +const fn source_label(source: CredentialsSource) -> &'static str { + match source { + CredentialsSource::Env => "env", + CredentialsSource::Keyring => "keyring", + CredentialsSource::PakkerFile => "pakker-file", + } +} + +fn mask_key(key: &str) -> String { + if key.len() <= 12 { + return "*".repeat(key.len()); + } + + let start = &key[..8]; + let end = &key[key.len() - 4..]; + let middle = "*".repeat(key.len() - 12); + + format!("{start}{middle}{end}") +} diff --git a/src/cli/commands/credentials_set.rs b/src/cli/commands/credentials_set.rs 
new file mode 100644 index 0000000..60672b4 --- /dev/null +++ b/src/cli/commands/credentials_set.rs @@ -0,0 +1,74 @@ +use crate::{ + error::{PakkerError, Result}, + model::{PakkerCredentialsFile, set_keyring_secret}, +}; + +pub fn execute( + curseforge_api_key: Option, + modrinth_token: Option, + github_access_token: Option, +) -> Result<()> { + let mut creds = PakkerCredentialsFile::load()?; + let mut updated_any = false; + + if let Some(key) = curseforge_api_key { + let key = key.trim().to_string(); + if key.is_empty() { + return Err(PakkerError::InternalError( + "CurseForge API key cannot be empty".to_string(), + )); + } + + println!("Setting CurseForge API key..."); + set_keyring_secret("curseforge_api_key", &key)?; + creds.curseforge_api_key = Some(key); + updated_any = true; + } + + if let Some(token) = modrinth_token { + let token = token.trim().to_string(); + if token.is_empty() { + return Err(PakkerError::InternalError( + "Modrinth token cannot be empty".to_string(), + )); + } + + println!("Setting Modrinth token..."); + set_keyring_secret("modrinth_token", &token)?; + creds.modrinth_token = Some(token); + updated_any = true; + } + + if let Some(token) = github_access_token { + let token = token.trim().to_string(); + if token.is_empty() { + return Err(PakkerError::InternalError( + "GitHub access token cannot be empty".to_string(), + )); + } + + println!("Setting GitHub access token..."); + set_keyring_secret("github_access_token", &token)?; + creds.github_access_token = Some(token); + updated_any = true; + } + + if !updated_any { + println!( + "No credentials provided. Use --cf-api-key, --modrinth-token, or \ + --gh-access-token." 
+ ); + return Ok(()); + } + + creds.save()?; + + println!("Credentials saved."); + println!( + "Credentials file: {}", + PakkerCredentialsFile::get_path()?.display() + ); + println!("Keyring service: pakker"); + + Ok(()) +} diff --git a/src/cli/commands/diff.rs b/src/cli/commands/diff.rs new file mode 100644 index 0000000..b2b71a6 --- /dev/null +++ b/src/cli/commands/diff.rs @@ -0,0 +1,457 @@ +use std::{ + collections::{HashMap, HashSet}, + fs, + path::Path, +}; + +use crate::{cli::DiffArgs, error::Result, model::LockFile}; + +#[derive(Debug)] +enum ChangeType { + Added, + Removed, + Updated, +} + +#[derive(Debug)] +struct ProjectChange { + name: String, + change_type: ChangeType, + old_file: Option, + new_file: Option, +} + +pub fn execute(args: DiffArgs, _lockfile_path: &Path) -> Result<()> { + log::info!("Comparing lockfiles"); + + // Load old lockfile + let old_path = Path::new(&args.old_lockfile); + let old_dir = old_path.parent().unwrap_or(Path::new(".")); + let old_lockfile = LockFile::load(old_dir)?; + + // Load current lockfile + let current_path = args + .current_lockfile + .as_ref() + .map_or(Path::new("pakku-lock.json"), Path::new); + let current_dir = current_path.parent().unwrap_or(Path::new(".")); + let current_lockfile = LockFile::load(current_dir)?; + + // Compare metadata + let mut changes = Vec::new(); + + // Check MC versions + let old_mc: HashSet<_> = old_lockfile.mc_versions.iter().collect(); + let new_mc: HashSet<_> = current_lockfile.mc_versions.iter().collect(); + let mc_added: Vec<_> = new_mc.difference(&old_mc).collect(); + let mc_removed: Vec<_> = old_mc.difference(&new_mc).collect(); + + // Check loaders + let old_loaders = &old_lockfile.loaders; + let new_loaders = ¤t_lockfile.loaders; + + // Compare projects + let old_projects: HashMap<_, _> = old_lockfile + .projects + .iter() + .map(|p| (&p.pakku_id, p)) + .collect(); + let new_projects: HashMap<_, _> = current_lockfile + .projects + .iter() + .map(|p| (&p.pakku_id, p)) + 
.collect(); + + // Find added, removed, updated projects + for (id, new_proj) in &new_projects { + if !old_projects.contains_key(id) { + changes.push(ProjectChange { + name: new_proj.name.values().next().cloned().unwrap_or_default(), + change_type: ChangeType::Added, + old_file: None, + new_file: new_proj.files.first().map(|f| f.file_name.clone()), + }); + } else if let Some(old_proj) = old_projects.get(id) { + let old_file_name = old_proj.files.first().map(|f| &f.file_name); + let new_file_name = new_proj.files.first().map(|f| &f.file_name); + + if old_file_name != new_file_name { + changes.push(ProjectChange { + name: new_proj + .name + .values() + .next() + .cloned() + .unwrap_or_default(), + change_type: ChangeType::Updated, + old_file: old_file_name.cloned(), + new_file: new_file_name.cloned(), + }); + } + } + } + + for (id, old_proj) in &old_projects { + if !new_projects.contains_key(id) { + changes.push(ProjectChange { + name: old_proj.name.values().next().cloned().unwrap_or_default(), + change_type: ChangeType::Removed, + old_file: old_proj.files.first().map(|f| f.file_name.clone()), + new_file: None, + }); + } + } + + // Output results + if let Some(path) = &args.markdown_diff { + write_markdown_diff( + path, + &old_lockfile, + ¤t_lockfile, + &changes, + &mc_added, + &mc_removed, + old_loaders, + new_loaders, + args.verbose, + args.header_size, + )?; + } else if let Some(path) = &args.markdown { + write_markdown( + path, + &old_lockfile, + ¤t_lockfile, + &changes, + &mc_added, + &mc_removed, + old_loaders, + new_loaders, + args.verbose, + args.header_size, + )?; + } else { + print_terminal_diff( + &old_lockfile, + ¤t_lockfile, + &changes, + &mc_added, + &mc_removed, + old_loaders, + new_loaders, + args.verbose, + ); + } + + Ok(()) +} + +fn print_terminal_diff( + old: &LockFile, + new: &LockFile, + changes: &[ProjectChange], + mc_added: &[&&String], + mc_removed: &[&&String], + old_loaders: &HashMap, + new_loaders: &HashMap, + verbose: bool, +) { + 
println!("## Lockfile Comparison\n"); + + // Target + if old.target != new.target { + println!("Target: {:?} -> {:?}", old.target, new.target); + } + + // MC versions + if !mc_removed.is_empty() || !mc_added.is_empty() { + println!("Minecraft Versions:"); + for v in mc_removed { + println!(" - {v}"); + } + for v in mc_added { + println!(" + {v}"); + } + } + + // Loaders + let mut loader_changes = false; + for (name, old_ver) in old_loaders { + if let Some(new_ver) = new_loaders.get(name) { + if old_ver != new_ver { + if !loader_changes { + println!("\nLoaders:"); + loader_changes = true; + } + println!(" ~ {name}: {old_ver} -> {new_ver}"); + } + } else { + if !loader_changes { + println!("\nLoaders:"); + loader_changes = true; + } + println!(" - {name}: {old_ver}"); + } + } + for (name, new_ver) in new_loaders { + if !old_loaders.contains_key(name) { + if !loader_changes { + println!("\nLoaders:"); + loader_changes = true; + } + println!(" + {name}: {new_ver}"); + } + } + + // Projects + if !changes.is_empty() { + println!("\nProjects:"); + for change in changes { + match change.change_type { + ChangeType::Added => { + print!(" + {}", change.name); + if verbose && let Some(file) = &change.new_file { + print!(" ({file})"); + } + println!(); + }, + ChangeType::Removed => { + print!(" - {}", change.name); + if verbose && let Some(file) = &change.old_file { + print!(" ({file})"); + } + println!(); + }, + ChangeType::Updated => { + print!(" ~ {}", change.name); + if verbose + && let (Some(old), Some(new)) = (&change.old_file, &change.new_file) + { + print!(" ({old} -> {new})"); + } + println!(); + }, + } + } + } + + if mc_removed.is_empty() + && mc_added.is_empty() + && !loader_changes + && changes.is_empty() + { + println!("✓ No differences found"); + } +} + +fn write_markdown_diff( + path: &str, + old: &LockFile, + new: &LockFile, + changes: &[ProjectChange], + mc_added: &[&&String], + mc_removed: &[&&String], + old_loaders: &HashMap, + new_loaders: &HashMap, + 
verbose: bool, + _header_size: usize, +) -> Result<()> { + let mut content = String::new(); + content.push_str("```diff\n"); + + // Metadata changes + if old.target != new.target { + content.push_str(&format!("- Target: {:?}\n", old.target)); + content.push_str(&format!("+ Target: {:?}\n", new.target)); + } + + if !mc_removed.is_empty() || !mc_added.is_empty() { + content.push_str("\nMinecraft Versions:\n"); + for v in mc_removed { + content.push_str(&format!("- {v}\n")); + } + for v in mc_added { + content.push_str(&format!("+ {v}\n")); + } + } + + // Loaders + for (name, old_ver) in old_loaders { + if let Some(new_ver) = new_loaders.get(name) { + if old_ver != new_ver { + content.push_str(&format!("- {name}: {old_ver}\n")); + content.push_str(&format!("+ {name}: {new_ver}\n")); + } + } else { + content.push_str(&format!("- {name}: {old_ver}\n")); + } + } + for (name, new_ver) in new_loaders { + if !old_loaders.contains_key(name) { + content.push_str(&format!("+ {name}: {new_ver}\n")); + } + } + + // Projects + if !changes.is_empty() { + content.push_str("\nProjects:\n"); + for change in changes { + match change.change_type { + ChangeType::Added => { + content.push_str(&format!("+ {}", change.name)); + if verbose && let Some(file) = &change.new_file { + content.push_str(&format!(" ({file})")); + } + content.push('\n'); + }, + ChangeType::Removed => { + content.push_str(&format!("- {}", change.name)); + if verbose && let Some(file) = &change.old_file { + content.push_str(&format!(" ({file})")); + } + content.push('\n'); + }, + ChangeType::Updated => { + if verbose { + if let (Some(old), Some(new)) = (&change.old_file, &change.new_file) + { + content.push_str(&format!("- {} ({})\n", change.name, old)); + content.push_str(&format!("+ {} ({})\n", change.name, new)); + } + } else { + content.push_str(&format!("~ {}\n", change.name)); + } + }, + } + } + } + + content.push_str("```\n"); + fs::write(path, content)?; + println!("Diff exported to {path}"); + Ok(()) +} + +fn 
write_markdown( + path: &str, + old: &LockFile, + new: &LockFile, + changes: &[ProjectChange], + mc_added: &[&&String], + mc_removed: &[&&String], + old_loaders: &HashMap, + new_loaders: &HashMap, + verbose: bool, + header_size: usize, +) -> Result<()> { + let header = "#".repeat(header_size.min(5)); + let mut content = String::new(); + + content.push_str(&format!("{header} Lockfile Comparison\n\n")); + + // Target + if old.target != new.target { + content.push_str(&format!( + "**Target:** {:?} → {:?}\n\n", + old.target, new.target + )); + } + + // MC versions + if !mc_removed.is_empty() || !mc_added.is_empty() { + content.push_str(&format!("{header} Minecraft Versions\n\n")); + for v in mc_removed { + content.push_str(&format!("- ~~{v}~~\n")); + } + for v in mc_added { + content.push_str(&format!("- **{v}** (new)\n")); + } + content.push('\n'); + } + + // Loaders + let mut has_loader_changes = false; + let mut loader_content = String::new(); + for (name, old_ver) in old_loaders { + if let Some(new_ver) = new_loaders.get(name) { + if old_ver != new_ver { + has_loader_changes = true; + loader_content + .push_str(&format!("- **{name}:** {old_ver} → {new_ver}\n")); + } + } else { + has_loader_changes = true; + loader_content.push_str(&format!("- ~~{name}: {old_ver}~~\n")); + } + } + for (name, new_ver) in new_loaders { + if !old_loaders.contains_key(name) { + has_loader_changes = true; + loader_content.push_str(&format!("- **{name}: {new_ver}** (new)\n")); + } + } + if has_loader_changes { + content.push_str(&format!("{header} Loaders\n\n")); + content.push_str(&loader_content); + content.push('\n'); + } + + // Projects + if !changes.is_empty() { + content.push_str(&format!("{header} Projects\n\n")); + + let added: Vec<_> = changes + .iter() + .filter(|c| matches!(c.change_type, ChangeType::Added)) + .collect(); + let removed: Vec<_> = changes + .iter() + .filter(|c| matches!(c.change_type, ChangeType::Removed)) + .collect(); + let updated: Vec<_> = changes + .iter() 
+ .filter(|c| matches!(c.change_type, ChangeType::Updated)) + .collect(); + + if !added.is_empty() { + content.push_str(&format!("{}# Added ({})\n\n", header, added.len())); + for change in added { + content.push_str(&format!("- **{}**", change.name)); + if verbose && let Some(file) = &change.new_file { + content.push_str(&format!(" ({file})")); + } + content.push('\n'); + } + content.push('\n'); + } + + if !removed.is_empty() { + content.push_str(&format!("{}# Removed ({})\n\n", header, removed.len())); + for change in removed { + content.push_str(&format!("- ~~{}~~", change.name)); + if verbose && let Some(file) = &change.old_file { + content.push_str(&format!(" ({file})")); + } + content.push('\n'); + } + content.push('\n'); + } + + if !updated.is_empty() { + content.push_str(&format!("{}# Updated ({})\n\n", header, updated.len())); + for change in updated { + content.push_str(&format!("- **{}**", change.name)); + if verbose + && let (Some(old), Some(new)) = (&change.old_file, &change.new_file) + { + content.push_str(&format!(" ({old} → {new})")); + } + content.push('\n'); + } + content.push('\n'); + } + } + + fs::write(path, content)?; + println!("Diff exported to {path}"); + Ok(()) +} diff --git a/src/cli/commands/export.rs b/src/cli/commands/export.rs new file mode 100644 index 0000000..4c28adb --- /dev/null +++ b/src/cli/commands/export.rs @@ -0,0 +1,291 @@ +use std::path::Path; + +use crate::{ + cli::ExportArgs, + error::{PakkerError, Result}, + export::Exporter, + ipc::{IpcCoordinator, OperationType}, + model::{Config, LockFile, fork::LocalConfig}, + utils::hash::compute_sha256_bytes, +}; + +pub async fn execute( + args: ExportArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + if let Some(ref profile) = args.profile { + log::info!("Exporting with profile: {profile}"); + } else { + log::info!("Exporting all profiles"); + } + + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = 
config_path.parent().unwrap_or(Path::new(".")); + + // IPC coordination - prevent concurrent operations on the same modpack + let ipc = IpcCoordinator::new(&config_dir.to_path_buf())?; + let ipc_timeout = std::time::Duration::from_secs(60); + + // Check for conflicting export operations + let conflicting = ipc.get_running_operations(OperationType::Export); + if !conflicting.is_empty() { + log::info!( + "Waiting for conflicting operations to complete: {:?}", + conflicting + .iter() + .map(|op| (op.id.clone(), op.pid)) + .collect::>() + ); + ipc + .wait_for_conflicts(OperationType::Export, ipc_timeout) + .await?; + } + + // Register this export operation + let _op_guard = ipc.register_operation(OperationType::Export)?; + + // Load config to check for fork configuration + let config = Config::load(config_dir)?; + let local_config = LocalConfig::load(config_dir).ok(); + + // Check if this is a fork with parent + let lockfile = if let Some(local_cfg) = &local_config { + if local_cfg.parent.is_some() { + log::info!("Fork detected - merging parent and local lockfiles"); + + // Try parent's lockfile + let parent_paths = [".pakku/parent", ".pakker/parent"]; + let mut parent_lockfile_path = None; + let mut lockfile_name = "pakku-lock.json"; + + for parent_dir in &parent_paths { + // Try pakker-lock.json first + let check_path = Path::new(parent_dir).join("pakker-lock.json"); + if check_path.exists() { + parent_lockfile_path = Some(parent_dir); + lockfile_name = "pakker-lock.json"; + break; + } + // Fall back to pakku-lock.json + let check_path = Path::new(parent_dir).join("pakku-lock.json"); + if check_path.exists() { + parent_lockfile_path = Some(parent_dir); + lockfile_name = "pakku-lock.json"; + break; + } + } + + if let Some(parent_dir) = parent_lockfile_path { + // Load parent lockfile + let parent_lockfile = LockFile::load(Path::new(parent_dir))?; + + // Verify parent lockfile hash for integrity + if let Some(stored_hash) = &local_cfg.parent_lock_hash { + let 
parent_lock_path = Path::new(parent_dir).join(lockfile_name); + let parent_lock_content = std::fs::read(&parent_lock_path)?; + let computed_hash = compute_sha256_bytes(&parent_lock_content); + + if &computed_hash != stored_hash { + log::warn!( + "Parent lockfile hash mismatch - parent may have changed since \ + last sync" + ); + log::warn!("Expected: {stored_hash}, Got: {computed_hash}"); + } + } + + // Load local lockfile if it exists + if lockfile_path.exists() { + log::info!("Merging parent lockfile with local overrides"); + let local_lockfile = + LockFile::load_with_validation(lockfile_dir, false)?; + + // Merge: start with parent, override with local + merge_lockfiles(parent_lockfile, local_lockfile, local_cfg)? + } else { + log::info!("No local lockfile - using parent lockfile"); + parent_lockfile + } + } else { + return Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "Fork configured but parent lockfile not found", + ))); + } + } else { + // No fork, use local lockfile + if lockfile_path.exists() { + LockFile::load(lockfile_dir)? + } else { + return Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "No lockfile found", + ))); + } + } + } else { + // No local config, try local lockfile or fall back to parent + if lockfile_path.exists() { + LockFile::load(lockfile_dir)? 
+ } else { + // Try parent's lockfile as fallback + let parent_paths = [".pakku/parent", ".pakker/parent"]; + let mut parent_lockfile = None; + let mut lockfile_name = "pakku-lock.json"; + + for parent_dir in &parent_paths { + // Try pakker-lock.json first + let lockfile_path_check = + Path::new(parent_dir).join("pakker-lock.json"); + if lockfile_path_check.exists() { + parent_lockfile = Some(parent_dir); + lockfile_name = "pakker-lock.json"; + break; + } + // Fall back to pakku-lock.json + let lockfile_path_check = Path::new(parent_dir).join("pakku-lock.json"); + if lockfile_path_check.exists() { + parent_lockfile = Some(parent_dir); + lockfile_name = "pakku-lock.json"; + break; + } + } + + match parent_lockfile { + Some(parent_dir) => { + log::info!( + "Using parent's lockfile ({lockfile_name}) from {parent_dir}" + ); + LockFile::load(Path::new(parent_dir))? + }, + None => { + return Err(PakkerError::IoError(std::io::Error::new( + std::io::ErrorKind::NotFound, + "No lockfile found (neither local nor parent's)", + ))); + }, + } + } + }; + + // Determine output path + let output_path = if args.pakker_layout { + "build" + } else { + args.output.as_deref().unwrap_or("exports") + }; + + // Create exporter + let mut exporter = Exporter::new("."); + + // Export based on profile argument + if let Some(profile_name) = args.profile { + // Single profile export (backwards compatible) + let output_file = exporter + .export(&profile_name, &lockfile, &config, Path::new(output_path)) + .await?; + + println!("Export complete: {output_file:?}"); + } else { + // Multi-profile export (Pakker-compatible default behavior) + let output_files = exporter + .export_all_profiles(&lockfile, &config, Path::new(output_path)) + .await?; + + println!("\nExported {} files:", output_files.len()); + for output_file in output_files { + println!(" - {output_file:?}"); + } + } + + Ok(()) +} + +/// Merges parent lockfile with local lockfile +/// Parent projects are used as base, local projects 
override parent projects +/// with same slug +fn merge_lockfiles( + parent: LockFile, + local: LockFile, + local_config: &LocalConfig, +) -> Result { + let mut merged = LockFile { + target: parent.target, // Use parent target + mc_versions: parent.mc_versions, // Use parent MC versions + loaders: parent.loaders, // Use parent loaders + projects: Vec::new(), + lockfile_version: parent.lockfile_version, + }; + + // Collect local project slugs for override detection + let mut local_slugs = std::collections::HashSet::new(); + for project in &local.projects { + // Add all slugs from all platforms + for slug in project.slug.values() { + local_slugs.insert(slug.clone()); + } + } + + // Add parent projects that are NOT overridden by local + let parent_projects_count = parent.projects.len(); + + for parent_project in &parent.projects { + let is_overridden = parent_project + .slug + .values() + .any(|slug| local_slugs.contains(slug)); + + if !is_overridden { + // Check if project has local config overrides + let mut project = parent_project.clone(); + + // Apply local config overrides if they exist + for (key, local_proj_cfg) in &local_config.projects { + // Match by slug, name, or pakku_id + let matches = project.slug.values().any(|s| s == key) + || project.name.values().any(|n| n == key) + || project.pakku_id.as_ref() == Some(key); + + if matches { + if let Some(t) = local_proj_cfg.r#type { + project.r#type = t; + } + if let Some(s) = local_proj_cfg.side { + project.side = s; + } + if let Some(us) = local_proj_cfg.update_strategy { + project.update_strategy = us; + } + if let Some(r) = local_proj_cfg.redistributable { + project.redistributable = r; + } + if let Some(ref sp) = local_proj_cfg.subpath { + project.subpath = Some(sp.clone()); + } + if let Some(ref aliases) = local_proj_cfg.aliases { + project.aliases = aliases.iter().cloned().collect(); + } + if let Some(e) = local_proj_cfg.export { + project.export = e; + } + break; + } + } + + merged.projects.push(project); + 
} + } + + // Add all local projects + merged.projects.extend(local.projects.clone()); + + println!( + "Merged fork: {} parent projects + {} local projects = {} total projects", + parent_projects_count - local_config.projects.len(), + local.projects.len(), + merged.projects.len() + ); + + Ok(merged) +} diff --git a/src/cli/commands/fetch.rs b/src/cli/commands/fetch.rs new file mode 100644 index 0000000..bdea8f7 --- /dev/null +++ b/src/cli/commands/fetch.rs @@ -0,0 +1,49 @@ +use std::path::{Path, PathBuf}; + +use crate::{ + cli::FetchArgs, + error::Result, + fetch::Fetcher, + ipc::{IpcCoordinator, OperationGuard, OperationType}, + model::{Config, LockFile}, +}; + +pub async fn execute( + args: FetchArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let lockfile = LockFile::load(lockfile_dir)?; + let config = Config::load(config_dir)?; + + // Create IPC coordinator for this modpack + let working_dir = PathBuf::from("."); + let coordinator = IpcCoordinator::new(&working_dir)?; + + // Check for conflicting operations + if coordinator.has_running_operation(OperationType::Fetch) { + // Wait for conflicting operations to complete with timeout + let timeout = std::time::Duration::from_secs(args.timeout.unwrap_or(300)); + coordinator + .wait_for_conflicts(OperationType::Fetch, timeout) + .await?; + } + + // Register this fetch operation + let operation_id = coordinator.register_operation(OperationType::Fetch)?; + let _guard = OperationGuard::new(coordinator, operation_id); + + // Create fetcher + let fetcher = Fetcher::new("."); + + // Fetch all projects (progress indicators handled in fetch.rs) + fetcher.fetch_all(&lockfile, &config).await?; + + println!("Fetch complete"); + + Ok(()) +} diff --git a/src/cli/commands/fork.rs b/src/cli/commands/fork.rs new 
file mode 100644 index 0000000..1627dc3 --- /dev/null +++ b/src/cli/commands/fork.rs @@ -0,0 +1,677 @@ +use std::{fs, io::Write, path::Path}; + +use crate::{ + cli::ForkArgs, + error::PakkerError, + git::{self, VcsType}, + model::{ + config::Config, + fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content}, + }, +}; + +const PAKKU_DIR: &str = ".pakku"; +const PARENT_DIR_NAME: &str = "parent"; + +fn parent_dir() -> String { + format!("{PAKKU_DIR}/{PARENT_DIR_NAME}") +} + +/// Main entry point for fork commands +pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> { + match &args.subcommand { + crate::cli::ForkSubcommand::Init { + git_url, + from_current, + from_path, + ref_name, + ref_type, + remote, + } => { + execute_init( + git_url.clone(), + *from_current, + from_path.clone(), + ref_name.clone(), + *ref_type, + remote.clone(), + ) + }, + crate::cli::ForkSubcommand::Set { + git_url, + ref_name, + ref_type, + remote, + } => { + execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone()) + }, + crate::cli::ForkSubcommand::Show => execute_show(), + crate::cli::ForkSubcommand::Unset => execute_unset(), + crate::cli::ForkSubcommand::Sync => execute_sync(), + crate::cli::ForkSubcommand::Promote { projects } => { + execute_promote(projects.clone()) + }, + } +} + +fn validate_git_url(url: &str) -> Result<(), PakkerError> { + // Allow network URLs, SSH-style URLs, or local filesystem paths (tests use + // local bare repos) + if url.starts_with("https://") + || url.starts_with("git@") + || url.starts_with("ssh://") + || url.starts_with("file://") + || url.starts_with('/') + { + Ok(()) + } else { + Err(PakkerError::Fork(format!( + "Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \ + absolute filesystem path." 
+ ))) + } +} + +fn execute_init( + git_url: Option, + from_current: bool, + from_path: Option, + ref_name: Option, + ref_type: Option, + remote: Option, +) -> Result<(), PakkerError> { + let config_dir = Path::new("."); + + // Validate that pakker.json exists for fork operations + let pakker_json_path = config_dir.join("pakker.json"); + let pakku_json_path = config_dir.join("pakku.json"); + + if !pakker_json_path.exists() && pakku_json_path.exists() { + return Err(PakkerError::Fork( + "Forking is a pakker-specific feature and requires pakker.json. \nFound \ + pakku.json but not pakker.json. Please migrate to pakker.json to use \ + fork functionality.\nYou can convert your pakku.json to pakker.json by \ + renaming the file." + .to_string(), + )); + } + + let mut local_config = LocalConfig::load(config_dir).unwrap_or_default(); + + // Check if parent already configured + if local_config.parent.is_some() + && let Some(parent) = &local_config.parent + { + return Err(PakkerError::Fork(format!( + "Parent already configured: {}", + parent.id + ))); + } + + // Resolve defaults early to avoid shadowing/confusion + let resolved_remote = remote.unwrap_or_else(|| "origin".to_string()); + let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string()); + + // Parent path (where we keep the cloned parent) + let parent_path_str = parent_dir(); + + // Branch: from_current, from_path, or git_url + let mut cloned_from_local = false; + let url = if from_current { + // Detect git URL from current directory + if !git::is_git_repository(config_dir) { + return Err(PakkerError::Fork( + "Not a git repository. Use --git-url or run 'git init' first." + .to_string(), + )); + } + git::get_remote_url(config_dir, &resolved_remote)? 
+ } else if let Some(fp) = from_path { + // Use provided local path as source; infer upstream remote from it + let path = Path::new(&fp); + if !git::is_git_repository(path) { + return Err(PakkerError::Fork(format!( + "Provided path is not a git repository: {}", + path.display() + ))); + } + + // Infer upstream remote URL from the existing local clone + let upstream_url = git::get_primary_remote_url(path)?; + + // Reject file:// or non-network remotes + validate_git_url(&upstream_url)?; + + // Ensure working tree is clean + let vcs_type = git::detect_vcs_type(path); + if git::repo_has_uncommitted_changes(path)? { + let error_msg = match vcs_type { + VcsType::Git => { + "Local repository at --from-path has uncommitted changes. Commit or \ + stash them before proceeding." + }, + VcsType::Jujutsu => { + "Local repository at --from-path has uncommitted changes. Run 'jj \ + commit' to save changes before proceeding." + }, + VcsType::None => { + "Local repository at --from-path has uncommitted changes. Please \ + clean the directory before proceeding." + }, + }; + return Err(PakkerError::Fork(error_msg.to_string())); + } + + // VCS-specific validation + match vcs_type { + VcsType::Git => { + // Attempt lightweight fetch of remote refs to refresh remote tracking + match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) { + Ok(()) => println!("Fetched remote refs for verification"), + Err(e) => { + log::warn!("Lightweight fetch from upstream failed: {e}"); + println!( + "Warning: could not perform lightweight fetch from upstream. \ + Proceeding with local clone; subsequent sync may require \ + network." + ); + }, + } + + // Compare local ref vs remote ref + let remote_ref = format!("{resolved_remote}/{resolved_ref}"); + match git::ahead_behind(path, &resolved_ref, &remote_ref) { + Ok((ahead, _behind)) => { + if ahead > 0 { + return Err(PakkerError::Fork(format!( + "Local repository at {} has {} commits not present on \ + upstream {}. 
Push or use --git-url if you intend to use an \ + upstream that contains these commits.", + path.display(), + ahead, + upstream_url + ))); + } + }, + Err(e) => { + log::warn!("Could not compute ahead/behind: {e}"); + }, + } + }, + VcsType::Jujutsu => { + // For jujutsu, we skip git-specific remote validation since jj has + // different synchronization patterns + println!( + "Warning: Skipping remote validation for jujutsu repository. Ensure \ + your jj repo is in sync with remote before proceeding." + ); + + // Check if there are any changes that haven't been pushed to the remote + if let Ok(output) = std::process::Command::new("jj") + .args(["log", "--limit", "1", "--template", ""]) + .current_dir(path) + .output() + { + if !output.stdout.is_empty() { + println!( + "Note: Jujutsu repository detected. Make sure to run 'jj git \ + push' to sync changes with remote if needed." + ); + } + } + }, + VcsType::None => { + // No VCS-specific validation needed + }, + } + + // Compute parent lock/config hashes for reproducibility + let parent_lock_path = if path.join("pakker-lock.json").exists() { + path.join("pakker-lock.json") + } else { + path.join("pakku-lock.json") + }; + + if parent_lock_path.exists() { + let lock_content = + fs::read_to_string(&parent_lock_path).map_err(|e| { + PakkerError::Fork(format!("Failed to read parent lock file: {e}")) + })?; + let lock_hash = hash_content(&lock_content); + local_config.parent_lock_hash = Some(lock_hash); + } + + let parent_config_path = if path.join("pakker.json").exists() { + path.join("pakker.json") + } else { + path.join("pakku.json") + }; + + if parent_config_path.exists() { + let config_content = + fs::read_to_string(&parent_config_path).map_err(|e| { + PakkerError::Fork(format!("Failed to read parent config: {e}")) + })?; + let config_hash = hash_content(&config_content); + local_config.parent_config_hash = Some(config_hash); + } + + // Now clone from the local path into .pakku/parent — this avoids + // re-downloading 
objects + let parent_path = Path::new(&parent_path_str); + + println!( + "Cloning parent repository from local path {}...", + path.display() + ); + git::clone_repository(&fp, parent_path, &resolved_ref, None)?; + + // Ensure the cloned repo's origin is set to the upstream URL (not the local + // path) + git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?; + + // Mark that we've already cloned from local + cloned_from_local = true; + + // We will persist upstream_url as the canonical parent id + upstream_url + } else if let Some(url) = git_url { + url + } else { + return Err(PakkerError::Fork( + "Either --git-url, --from-current or --from-path must be specified" + .to_string(), + )); + }; + + let parent_path = Path::new(&parent_path_str); + + // If we did not already clone from local, perform network clone and checks + if cloned_from_local { + println!( + "Parent repository was cloned from local path; skipping network clone." + ); + } else { + // Check if parent directory already exists and is not empty + if parent_path.exists() { + let is_empty = parent_path + .read_dir() + .map(|mut entries| entries.next().is_none()) + .unwrap_or(false); + + if !is_empty { + return Err(PakkerError::Fork(format!( + "Directory not empty: {}", + parent_path.display() + ))); + } + } + + println!("Cloning parent repository..."); + println!(" URL: {url}"); + println!(" Ref: {resolved_ref}"); + + git::clone_repository(&url, parent_path, &resolved_ref, None)?; + } + + let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?; + + // Detect ref type if not specified + let resolved_ref_type = if let Some(rt) = ref_type { + rt + } else { + git::resolve_ref_type(parent_path, &resolved_ref)? 
+ }; + + let parent_config = ParentConfig { + type_: "git".to_string(), + id: url.clone(), + version: Some(commit_sha[..8].to_string()), + ref_: resolved_ref.clone(), + ref_type: resolved_ref_type, + remote_name: resolved_remote, + }; + + local_config.parent = Some(parent_config); + local_config.save(config_dir)?; + + // Add .pakku/parent to .gitignore + add_to_gitignore()?; + + println!(); + println!("✓ Fork initialized successfully"); + println!(" Parent: {url}"); + println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type { + RefType::Branch => "branch", + RefType::Tag => "tag", + RefType::Commit => "commit", + }); + println!(" Commit: {}", &commit_sha[..8]); + println!(); + println!("Run 'pakku fork sync' to sync with parent."); + + Ok(()) +} + +fn execute_set( + git_url: Option, + ref_name: String, + ref_type: Option, + remote: Option, +) -> Result<(), PakkerError> { + let config_dir = Path::new("."); + let mut local_config = LocalConfig::load(config_dir)?; + + if local_config.parent.is_none() { + return Err(PakkerError::Fork( + "No parent configured. 
Run 'pakku fork init' first.".to_string(), + )); + } + + let mut parent = local_config.parent.unwrap(); + + if let Some(url) = git_url { + validate_git_url(&url)?; + parent.id = url; + } + + parent.ref_ = ref_name; + + if let Some(rt) = ref_type { + parent.ref_type = rt; + } + + if let Some(remote_name) = remote { + parent.remote_name = remote_name; + } + + local_config.parent = Some(parent.clone()); + local_config.save(config_dir)?; + + println!("✓ Fork configuration updated"); + println!(" Parent: {}", parent.id); + println!(" Ref: {} ({})", parent.ref_, match parent.ref_type { + RefType::Branch => "branch", + RefType::Tag => "tag", + RefType::Commit => "commit", + }); + println!(); + println!("Run 'pakku fork sync' to sync with new configuration."); + + Ok(()) +} + +fn execute_show() -> Result<(), PakkerError> { + let config_dir = Path::new("."); + let local_config = LocalConfig::load(config_dir)?; + + if let Some(parent) = local_config.parent { + println!("Fork Configuration:"); + println!(" Parent URL: {}", parent.id); + println!(" Type: {}", match parent.ref_type { + RefType::Branch => "branch", + RefType::Tag => "tag", + RefType::Commit => "commit", + }); + println!(" Ref: {}", parent.ref_); + println!(" Remote: {}", parent.remote_name); + + if let Some(version) = parent.version { + println!(" Last synced commit: {version}"); + } else { + println!(" Last synced commit: never synced"); + } + + if !local_config.projects.is_empty() { + println!(); + println!("Project Overrides ({}):", local_config.projects.len()); + for (slug, proj_config) in &local_config.projects { + print!(" - {slug}"); + let mut details = Vec::new(); + if let Some(version) = &proj_config.version { + details.push(format!("version={version}")); + } + if let Some(side) = &proj_config.side { + details.push(format!("side={side}")); + } + if let Some(strategy) = &proj_config.update_strategy { + details.push(format!("updateStrategy={strategy}")); + } + if !details.is_empty() { + print!(" ({})", 
details.join(", ")); + } + println!(); + } + } + } else { + println!("No fork configured."); + println!("Run 'pakku fork init' to initialize a fork."); + } + + Ok(()) +} + +fn execute_unset() -> Result<(), PakkerError> { + let config_dir = Path::new("."); + let mut local_config = LocalConfig::load(config_dir)?; + + if local_config.parent.is_none() { + println!("No fork configured."); + return Ok(()); + } + + // Prompt for confirmation + print!("Are you sure you want to remove fork configuration? [y/N] "); + std::io::stdout().flush().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + + if !input.trim().eq_ignore_ascii_case("y") { + println!("Cancelled."); + return Ok(()); + } + + // Remove parent directory + let parent_path_str = parent_dir(); + let parent_path = Path::new(&parent_path_str); + if parent_path.exists() { + fs::remove_dir_all(parent_path).map_err(|e| { + PakkerError::Fork(format!("Failed to remove parent directory: {e}")) + })?; + } + + // Clear parent configuration + local_config.parent = None; + local_config.parent_lock_hash = None; + local_config.parent_config_hash = None; + local_config.save(config_dir)?; + + println!("✓ Fork configuration removed"); + + Ok(()) +} + +fn execute_sync() -> Result<(), PakkerError> { + let config_dir = Path::new("."); + let mut local_config = LocalConfig::load(config_dir)?; + + let parent = local_config.parent.as_ref().ok_or_else(|| { + PakkerError::Fork( + "No parent configured. Run 'pakku fork init' first.".to_string(), + ) + })?; + + let parent_path_str = parent_dir(); + let parent_path = Path::new(&parent_path_str); + + if parent_path.exists() { + println!("Fetching parent updates..."); + git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?; + git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?; + } else { + println!("Parent repository not found. 
/// Syncs the cached parent checkout (`.pakku/parent`) with its upstream and
/// records the new commit plus content hashes of the parent's lock/config
/// files in the local config. Warns (does not fail) when either hash
/// changed since the last sync.
fn execute_sync() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let mut local_config = LocalConfig::load(config_dir)?;

  let parent = local_config.parent.as_ref().ok_or_else(|| {
    PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    )
  })?;

  let parent_path_str = parent_dir();
  let parent_path = Path::new(&parent_path_str);

  // Refresh the checkout: fetch + reset when present, clone otherwise.
  if parent_path.exists() {
    println!("Fetching parent updates...");
    git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
    git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
  } else {
    println!("Parent repository not found. Cloning...");
    git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
  }

  let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;

  let mut integrity = None;

  // Try pakker files first, fall back to pakku files
  let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
    parent_path.join("pakker-lock.json")
  } else {
    parent_path.join("pakku-lock.json")
  };

  let parent_config_path = if parent_path.join("pakker.json").exists() {
    parent_path.join("pakker.json")
  } else {
    parent_path.join("pakku.json")
  };

  // NOTE(review): the config hash (and the integrity record) are only
  // refreshed when a parent lock file exists — both live inside this
  // branch. Presumably intentional (no lock file means nothing to verify);
  // confirm before restructuring.
  if parent_lock_path.exists() {
    let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
      PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
    })?;

    let lock_hash = hash_content(&lock_content);

    if let Some(prev_hash) = &local_config.parent_lock_hash
      && prev_hash != &lock_hash
    {
      log::warn!("Parent lock file has changed since last sync");
      log::warn!(" Previous hash: {prev_hash}");
      log::warn!(" Current hash: {lock_hash}");
    }

    local_config.parent_lock_hash = Some(lock_hash);

    // A missing parent config hashes as the empty string rather than
    // erroring out.
    let config_content = if parent_config_path.exists() {
      fs::read_to_string(&parent_config_path).map_err(|e| {
        PakkerError::Fork(format!("Failed to read parent config: {e}"))
      })?
    } else {
      String::new()
    };

    let config_hash = hash_content(&config_content);

    if let Some(prev_hash) = &local_config.parent_config_hash
      && prev_hash != &config_hash
    {
      log::warn!("Parent config file has changed since last sync");
      log::warn!(" Previous hash: {prev_hash}");
      log::warn!(" Current hash: {config_hash}");
    }

    local_config.parent_config_hash = Some(config_hash);

    integrity = Some(ForkIntegrity::new(
      local_config.parent_lock_hash.clone().unwrap_or_default(),
      commit_sha.clone(),
      local_config.parent_config_hash.clone().unwrap_or_default(),
    ));
  }

  if let Some(ref integrity_data) = integrity {
    log::info!(
      "Parent integrity verified at timestamp {}",
      integrity_data.verified_at
    );
  }

  // Record the short sha of the commit we synced to.
  if let Some(parent) = local_config.parent.as_mut() {
    parent.version = Some(commit_sha[..8].to_string());
  }

  local_config.save(config_dir)?;

  println!();
  println!("✓ Parent sync complete");
  println!(" Commit: {}", &commit_sha[..8]);
  println!();
  println!("Run 'pakku export' to merge changes from parent.");

  Ok(())
}
+ .to_string(), + )); + } + + // Load current config + let config = Config::load(config_dir)?; + + // Verify all projects exist + for project_arg in &projects { + let found = config + .projects + .as_ref() + .and_then(|projs| projs.get(project_arg)) + .is_some(); + + if !found { + return Err(PakkerError::Fork(format!( + "Project not found: {project_arg}" + ))); + } + } + + println!("Note: In the current architecture, projects in pakku.json are"); + println!("automatically merged with parent projects during export."); + println!(); + println!("The following projects are already in pakku.json:"); + for project in &projects { + println!(" - {project}"); + } + println!(); + println!("These will be included in exports automatically."); + + Ok(()) +} + +fn add_to_gitignore() -> Result<(), PakkerError> { + let gitignore_path = Path::new(".gitignore"); + let parent_dir = parent_dir(); + + // Check if .gitignore exists and already contains the entry + if gitignore_path.exists() { + let content = fs::read_to_string(gitignore_path).map_err(|e| { + PakkerError::Fork(format!("Failed to read .gitignore: {e}")) + })?; + + if content.lines().any(|line| line.trim() == parent_dir) { + return Ok(()); + } + } + + // Append to .gitignore + let mut file = fs::OpenOptions::new() + .create(true) + .append(true) + .open(gitignore_path) + .map_err(|e| { + PakkerError::Fork(format!("Failed to open .gitignore: {e}")) + })?; + + writeln!(file, "{parent_dir}").map_err(|e| { + PakkerError::Fork(format!("Failed to write to .gitignore: {e}")) + })?; + + Ok(()) +} diff --git a/src/cli/commands/import.rs b/src/cli/commands/import.rs new file mode 100644 index 0000000..1079d73 --- /dev/null +++ b/src/cli/commands/import.rs @@ -0,0 +1,395 @@ +use std::path::Path; + +use crate::{ + cli::ImportArgs, + error::{PakkerError, Result}, + model::{Config, LockFile, Target}, + ui_utils::prompt_yes_no, +}; + +pub async fn execute( + args: ImportArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> 
{ + log::info!("Importing modpack from {}", args.file); + + let path = Path::new(&args.file); + + if !path.exists() { + return Err(PakkerError::FileNotFound( + path.to_string_lossy().to_string(), + )); + } + + // Check if lockfile or config already exist + if (lockfile_path.exists() || config_path.exists()) && !args.yes { + let msg = if lockfile_path.exists() && config_path.exists() { + "Both pakku-lock.json and pakku.json exist. Importing will overwrite \ + them. Continue?" + } else if lockfile_path.exists() { + "pakku-lock.json exists. Importing will overwrite it. Continue?" + } else { + "pakku.json exists. Importing will overwrite it. Continue?" + }; + + if !prompt_yes_no(msg, false)? { + log::info!("Import cancelled by user"); + return Ok(()); + } + } + + // Detect format by checking file contents + let file = std::fs::File::open(path)?; + let mut archive = zip::ZipArchive::new(file)?; + + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + if archive.by_name("modrinth.index.json").is_ok() { + drop(archive); + import_modrinth(path, lockfile_dir, config_dir).await + } else if archive.by_name("manifest.json").is_ok() { + drop(archive); + import_curseforge(path, lockfile_dir, config_dir).await + } else { + Err(PakkerError::InvalidImportFile( + "Unknown pack format".to_string(), + )) + } +} + +async fn import_modrinth( + path: &Path, + lockfile_dir: &Path, + config_dir: &Path, +) -> Result<()> { + use std::{fs::File, io::Read}; + + use zip::ZipArchive; + + use crate::platform::create_platform; + + let file = File::open(path)?; + let mut archive = ZipArchive::new(file)?; + + let index_content = { + let mut index_file = archive.by_name("modrinth.index.json")?; + let mut content = String::new(); + index_file.read_to_string(&mut content)?; + content + }; + + let index: serde_json::Value = serde_json::from_str(&index_content)?; + + // Create lockfile + let mc_version = 
index["dependencies"]["minecraft"] + .as_str() + .unwrap_or("1.20.1") + .to_string(); + + let loader = + if let Some(fabric) = index["dependencies"]["fabric-loader"].as_str() { + ("fabric".to_string(), fabric.to_string()) + } else if let Some(forge) = index["dependencies"]["forge"].as_str() { + ("forge".to_string(), forge.to_string()) + } else { + ("fabric".to_string(), "latest".to_string()) + }; + + let mut loaders = std::collections::HashMap::new(); + loaders.insert(loader.0.clone(), loader.1); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec![mc_version.clone()], + loaders: loaders.clone(), + projects: Vec::new(), + lockfile_version: 1, + }; + + // Import projects from files list + if let Some(files) = index["files"].as_array() { + log::info!("Importing {} projects from modpack", files.len()); + + // Create platform client + let creds = crate::model::credentials::ResolvedCredentials::load().ok(); + let platform = create_platform( + "modrinth", + creds + .as_ref() + .and_then(|c| c.modrinth_token().map(std::string::ToString::to_string)), + )?; + + for file_entry in files { + if let Some(project_id) = file_entry["downloads"] + .as_array() + .and_then(|downloads| downloads.first()) + .and_then(|url| url.as_str()) + .and_then(|url| url.split('/').rev().nth(1)) + { + log::info!("Fetching project: {project_id}"); + match platform + .request_project_with_files(project_id, &lockfile.mc_versions, &[ + loader.0.clone(), + ]) + .await + { + Ok(mut project) => { + // Select best file + if let Err(e) = + project.select_file(&lockfile.mc_versions, &[loader.0.clone()]) + { + log::warn!( + "Failed to select file for {}: {}", + project.get_name(), + e + ); + continue; + } + lockfile.add_project(project); + }, + Err(e) => { + log::warn!("Failed to fetch project {project_id}: {e}"); + }, + } + } + } + } + + // Create config + let config = Config { + name: index["name"] + .as_str() + .unwrap_or("Imported Pack") + .to_string(), + version: 
index["versionId"] + .as_str() + .unwrap_or("1.0.0") + .to_string(), + description: index["summary"] + .as_str() + .map(std::string::ToString::to_string), + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: Default::default(), + projects: None, + export_profiles: None, + }; + + // Save files using provided paths + lockfile.save(lockfile_dir)?; + config.save(config_dir)?; + + log::info!("Imported {} projects", lockfile.projects.len()); + + // Extract overrides + for i in 0..archive.len() { + let mut file = archive.by_index(i)?; + let outpath = file.enclosed_name().ok_or_else(|| { + PakkerError::InternalError("Invalid file path in archive".to_string()) + })?; + + if outpath.starts_with("overrides/") { + let target = outpath.strip_prefix("overrides/").unwrap(); + + if file.is_dir() { + std::fs::create_dir_all(target)?; + } else { + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent)?; + } + let mut outfile = File::create(target)?; + std::io::copy(&mut file, &mut outfile)?; + } + } + } + + Ok(()) +} + +async fn import_curseforge( + path: &Path, + lockfile_dir: &Path, + config_dir: &Path, +) -> Result<()> { + use std::{fs::File, io::Read}; + + use zip::ZipArchive; + + let file = File::open(path)?; + let mut archive = ZipArchive::new(file)?; + + let manifest_content = { + let mut manifest_file = archive.by_name("manifest.json")?; + let mut content = String::new(); + manifest_file.read_to_string(&mut content)?; + content + }; + + let manifest: serde_json::Value = serde_json::from_str(&manifest_content)?; + + // Create lockfile + let mc_version = manifest["minecraft"]["version"] + .as_str() + .unwrap_or("1.20.1") + .to_string(); + + let mod_loaders = + manifest["minecraft"]["modLoaders"] + .as_array() + .ok_or_else(|| { + PakkerError::InvalidImportFile("Missing modLoaders".to_string()) + })?; + + let loader_info = mod_loaders + .first() + .and_then(|l| l["id"].as_str()) + 
.ok_or_else(|| { + PakkerError::InvalidImportFile("Missing loader id".to_string()) + })?; + + let parts: Vec<&str> = loader_info.split('-').collect(); + let loader_name = (*parts.first().unwrap_or(&"fabric")).to_string(); + let loader_version = (*parts.get(1).unwrap_or(&"latest")).to_string(); + + let mut loaders = std::collections::HashMap::new(); + loaders.insert(loader_name, loader_version); + + let mut lockfile = LockFile { + target: Some(Target::CurseForge), + mc_versions: vec![mc_version.clone()], + loaders: loaders.clone(), + projects: Vec::new(), + lockfile_version: 1, + }; + + // Import projects from files list + if let Some(files) = manifest["files"].as_array() { + log::info!("Importing {} projects from modpack", files.len()); + + // Create platform client + use crate::platform::create_platform; + let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok(); + let platform = create_platform("curseforge", curseforge_token)?; + + for file_entry in files { + if let Some(project_id) = file_entry["projectID"].as_u64() { + let project_id_str = project_id.to_string(); + log::info!("Fetching project: {project_id_str}"); + + match platform + .request_project_with_files( + &project_id_str, + &lockfile.mc_versions, + &loaders.keys().cloned().collect::>(), + ) + .await + { + Ok(mut project) => { + // Try to select the specific file if fileID is provided + if let Some(file_id) = file_entry["fileID"].as_u64() { + let file_id_str = file_id.to_string(); + // Try to find the file with matching ID + if let Some(file) = + project.files.iter().find(|f| f.id == file_id_str).cloned() + { + project.files = vec![file]; + } else { + log::warn!( + "Could not find file {} for project {}, selecting best match", + file_id, + project.get_name() + ); + if let Err(e) = project.select_file( + &lockfile.mc_versions, + &loaders.keys().cloned().collect::>(), + ) { + log::warn!( + "Failed to select file for {}: {}", + project.get_name(), + e + ); + continue; + } + } + } else { + // No 
specific file ID, select best match + if let Err(e) = project.select_file( + &lockfile.mc_versions, + &loaders.keys().cloned().collect::>(), + ) { + log::warn!( + "Failed to select file for {}: {}", + project.get_name(), + e + ); + continue; + } + } + lockfile.add_project(project); + }, + Err(e) => { + log::warn!("Failed to fetch project {project_id_str}: {e}"); + }, + } + } + } + } + + // Create config + let config = Config { + name: manifest["name"] + .as_str() + .unwrap_or("Imported Pack") + .to_string(), + version: manifest["version"] + .as_str() + .unwrap_or("1.0.0") + .to_string(), + description: None, + author: manifest["author"] + .as_str() + .map(std::string::ToString::to_string), + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: Default::default(), + projects: None, + export_profiles: None, + }; + + // Save files using provided paths + lockfile.save(lockfile_dir)?; + config.save(config_dir)?; + + log::info!("Imported {} projects", lockfile.projects.len()); + + // Extract overrides + let overrides_prefix = manifest["overrides"].as_str().unwrap_or("overrides"); + + for i in 0..archive.len() { + let mut file = archive.by_index(i)?; + let outpath = file.enclosed_name().ok_or_else(|| { + PakkerError::InternalError("Invalid file path in archive".to_string()) + })?; + + if outpath.starts_with(overrides_prefix) { + let target = outpath.strip_prefix(overrides_prefix).unwrap(); + + if file.is_dir() { + std::fs::create_dir_all(target)?; + } else { + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent)?; + } + let mut outfile = File::create(target)?; + std::io::copy(&mut file, &mut outfile)?; + } + } + } + + Ok(()) +} diff --git a/src/cli/commands/init.rs b/src/cli/commands/init.rs new file mode 100644 index 0000000..5107a38 --- /dev/null +++ b/src/cli/commands/init.rs @@ -0,0 +1,67 @@ +use std::{collections::HashMap, path::Path}; + +use crate::{ + cli::InitArgs, + error::PakkerError, + 
model::{Config, LockFile, Target}, +}; + +pub async fn execute( + args: InitArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<(), PakkerError> { + if lockfile_path.exists() { + return Err(PakkerError::AlreadyExists( + "Lock file already exists".into(), + )); + } + + let target = args.target.as_str(); + let target_enum = match target { + "curseforge" => Target::CurseForge, + "modrinth" => Target::Modrinth, + "multiplatform" => Target::Multiplatform, + _ => { + return Err(PakkerError::InvalidInput(format!( + "Invalid target: {target}" + ))); + }, + }; + + let mc_versions = vec![args.mc_version]; + + let mut loaders = HashMap::new(); + loaders.insert(args.loader, args.loader_version); + + let lockfile = LockFile { + target: Some(target_enum), + mc_versions, + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + // Save expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + lockfile.save(lockfile_dir)?; + + let config = Config { + name: args.name.unwrap_or_else(|| "My Modpack".to_string()), + version: args.version.unwrap_or_else(|| "1.0.0".to_string()), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + config.save(config_dir)?; + + println!("Initialized new modpack with target: {target}"); + Ok(()) +} diff --git a/src/cli/commands/inspect.rs b/src/cli/commands/inspect.rs new file mode 100644 index 0000000..7ed263d --- /dev/null +++ b/src/cli/commands/inspect.rs @@ -0,0 +1,596 @@ +use std::{collections::HashSet, path::Path}; + +use comfy_table::{Cell, Color, ContentArrangement, Table, presets}; +use strsim::levenshtein; +use yansi::Paint; + +use crate::{ + error::Result, + model::{Config, LockFile, Project, ProjectFile}, +}; + +pub async fn execute( + 
projects: Vec, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let lockfile = LockFile::load(lockfile_dir)?; + let _config = Config::load(config_dir)?; + + let mut found_any = false; + let total_projects = projects.len(); + + for (idx, project_input) in projects.iter().enumerate() { + if let Some(project) = find_project(&lockfile, project_input) { + display_project_inspection(project, &lockfile)?; + found_any = true; + + // Add separator between projects (but not after the last one) + if idx < total_projects - 1 { + let width = 80; // Default terminal width + println!("{}", "─".repeat(width)); + println!(); + } + } else { + eprintln!( + "{}: {}", + "Error".red(), + format!("Project '{project_input}' not found in lockfile.").red() + ); + + // Suggest similar projects + if let Some(suggestions) = + find_similar_projects(&lockfile, project_input, 5) + { + eprintln!(); + eprintln!("{}", "Did you mean one of these?".yellow()); + for suggestion in suggestions { + eprintln!(" - {}", suggestion.cyan()); + } + } + eprintln!(); + } + } + + if !found_any && !projects.is_empty() { + return Err(crate::error::PakkerError::ProjectNotFound( + "No projects found".to_string(), + )); + } + + Ok(()) +} + +fn find_project<'a>( + lockfile: &'a LockFile, + query: &str, +) -> Option<&'a Project> { + lockfile.projects.iter().find(|p| project_matches(p, query)) +} + +fn project_matches(project: &Project, query: &str) -> bool { + // Check slugs + for slug in project.slug.values() { + if slug.eq_ignore_ascii_case(query) { + return true; + } + } + + // Check names + for name in project.name.values() { + if name.eq_ignore_ascii_case(query) { + return true; + } + } + + // Check pakku_id + if let Some(ref pakku_id) = project.pakku_id + && pakku_id.eq_ignore_ascii_case(query) + { + return true; + } + + // Check aliases + for alias in 
&project.aliases { + if alias.eq_ignore_ascii_case(query) { + return true; + } + } + + false +} + +fn find_similar_projects( + lockfile: &LockFile, + query: &str, + max_results: usize, +) -> Option> { + // Calculate similarity scores for all projects + let mut candidates: Vec<(String, usize)> = lockfile + .projects + .iter() + .flat_map(|p| { + let mut scores = Vec::new(); + + // Check slug similarity + for slug in p.slug.values() { + let distance = levenshtein(slug, query); + if distance <= 3 { + scores.push((slug.clone(), distance)); + } + } + + // Check name similarity (case-insensitive) + for name in p.name.values() { + let distance = levenshtein(&name.to_lowercase(), &query.to_lowercase()); + if distance <= 3 { + scores.push((name.clone(), distance)); + } + } + + // Check aliases + for alias in &p.aliases { + let distance = levenshtein(alias, query); + if distance <= 3 { + scores.push((alias.clone(), distance)); + } + } + + scores + }) + .collect(); + + if candidates.is_empty() { + return None; + } + + // Sort by distance (closest first) + candidates.sort_by_key(|(_, dist)| *dist); + + // Deduplicate and take top N + let mut seen = HashSet::new(); + let suggestions: Vec = candidates + .into_iter() + .filter_map(|(name, _)| { + if seen.insert(name.clone()) { + Some(name) + } else { + None + } + }) + .take(max_results) + .collect(); + + Some(suggestions) +} + +fn display_project_inspection( + project: &Project, + lockfile: &LockFile, +) -> Result<()> { + // Display project header panel + display_project_header(project)?; + + // Display project files + println!(); + display_project_files(&project.files)?; + + // Display properties + println!(); + display_properties(project)?; + + // Display dependency tree + println!(); + display_dependencies(project, lockfile)?; + + println!(); + + Ok(()) +} + +fn display_project_header(project: &Project) -> Result<()> { + let name = get_project_name(project); + let default_slug = String::from("N/A"); + let slug = 
project.slug.values().next().unwrap_or(&default_slug); + + // Create header table with comfy-table + let mut table = Table::new(); + table + .load_preset(presets::UTF8_FULL) + .set_content_arrangement(ContentArrangement::Dynamic); + + // Title row with name + table.add_row(vec![ + Cell::new(name) + .fg(Color::Cyan) + .set_alignment(comfy_table::CellAlignment::Left), + ]); + + // Second row with slug, type, side + let metadata = format!( + "{} ({}) • {} • {}", + slug, + project.id.keys().next().unwrap_or(&"unknown".to_string()), + format!("{:?}", project.r#type).to_lowercase(), + format!("{:?}", project.side).to_lowercase() + ); + table.add_row(vec![ + Cell::new(metadata) + .fg(Color::DarkGrey) + .set_alignment(comfy_table::CellAlignment::Left), + ]); + + println!("{table}"); + + Ok(()) +} + +fn display_project_files(files: &[ProjectFile]) -> Result<()> { + if files.is_empty() { + println!("{}", "No files available".yellow()); + return Ok(()); + } + + println!("{}", "Project Files".cyan().bold()); + + for (idx, file) in files.iter().enumerate() { + let mut table = Table::new(); + table + .load_preset(presets::UTF8_FULL) + .set_content_arrangement(ContentArrangement::Dynamic); + + // Mark the first file as "current" + let status = if idx == 0 { "current" } else { "" }; + let status_text = if status.is_empty() { + String::new() + } else { + format!(" {status}") + }; + + // File path line + let file_path = format!("{}={}", file.file_type, file.file_name); + table.add_row(vec![ + Cell::new(format!("{file_path}:{status_text}")).fg(if idx == 0 { + Color::Green + } else { + Color::White + }), + ]); + + // Date published + table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]); + + // Empty line + table.add_row(vec![Cell::new("")]); + + // Hashes (truncated) + if !file.hashes.is_empty() { + for (hash_type, hash_value) in &file.hashes { + let display_hash = if hash_value.len() > 32 { + format!( + "{}...{}", + &hash_value[..16], + 
&hash_value[hash_value.len() - 16..] + ) + } else { + hash_value.clone() + }; + table.add_row(vec![ + Cell::new(format!("{hash_type}={display_hash}")).fg(Color::DarkGrey), + ]); + } + } + + println!("{table}"); + println!(); + } + + Ok(()) +} + +fn display_properties(project: &Project) -> Result<()> { + println!("{}", "Properties".cyan().bold()); + + println!( + " {}={}", + "type".yellow(), + format!("{:?}", project.r#type).to_lowercase() + ); + println!( + " {}={}", + "side".yellow(), + format!("{:?}", project.side).to_lowercase() + ); + println!( + " {}={}", + "update_strategy".yellow(), + format!("{:?}", project.update_strategy).to_lowercase() + ); + println!( + " {}={}", + "redistributable".yellow(), + project.redistributable + ); + + if let Some(subpath) = &project.subpath { + println!(" {}={}", "subpath".yellow(), subpath); + } + + if !project.aliases.is_empty() { + let aliases: Vec<_> = project.aliases.iter().cloned().collect(); + println!(" {}={}", "aliases".yellow(), aliases.join(", ")); + } + + Ok(()) +} + +fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> { + println!("{}", "Dependencies".cyan().bold()); + + // Collect all dependencies from all files + let mut all_deps = HashSet::new(); + for file in &project.files { + for dep in &file.required_dependencies { + all_deps.insert(dep.clone()); + } + } + + if all_deps.is_empty() { + println!(" {}", "No dependencies".dim()); + return Ok(()); + } + + // Display dependency tree + let mut visited = HashSet::new(); + for dep_id in all_deps { + display_dependency_tree(&dep_id, lockfile, 1, &mut visited)?; + } + + Ok(()) +} + +fn display_dependency_tree( + dep_id: &str, + lockfile: &LockFile, + depth: usize, + visited: &mut HashSet, +) -> Result<()> { + let indent = " ".repeat(depth); + let tree_char = if depth == 1 { "└─" } else { "├─" }; + + // Find the project in lockfile + let project = lockfile.projects.iter().find(|p| { + // Check if any ID matches + p.id.values().any(|id| id == 
dep_id) + || p.slug.values().any(|slug| slug == dep_id) + || p.pakku_id.as_ref() == Some(&dep_id.to_string()) + }); + + if let Some(proj) = project { + let name = get_project_name(proj); + + // Check for circular dependency + if visited.contains(&name) { + println!("{}{} {} {}", indent, tree_char, name, "(circular)".red()); + return Ok(()); + } + + println!("{}{} {} (required)", indent, tree_char, name.green()); + visited.insert(name); + + // Recursively display nested dependencies (limit depth to avoid infinite + // loops) + if depth < 5 { + for file in &proj.files { + for nested_dep in &file.required_dependencies { + display_dependency_tree(nested_dep, lockfile, depth + 1, visited)?; + } + } + } + } else { + // Dependency not found in lockfile + println!( + "{}{} {} {}", + indent, + tree_char, + dep_id, + "(not in lockfile)".yellow() + ); + } + + Ok(()) +} + +fn get_project_name(project: &Project) -> String { + project + .name + .values() + .next() + .or_else(|| project.slug.values().next()) + .cloned() + .unwrap_or_else(|| "Unknown".to_string()) +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + use crate::model::enums::{ + ProjectSide, + ProjectType, + ReleaseType, + UpdateStrategy, + }; + + fn create_test_project(pakku_id: &str, slug: &str, name: &str) -> Project { + let mut slug_map = HashMap::new(); + slug_map.insert("modrinth".to_string(), slug.to_string()); + + let mut name_map = HashMap::new(); + name_map.insert("modrinth".to_string(), name.to_string()); + + let mut id_map = HashMap::new(); + id_map.insert("modrinth".to_string(), pakku_id.to_string()); + + Project { + pakku_id: Some(pakku_id.to_string()), + pakku_links: HashSet::new(), + r#type: ProjectType::Mod, + side: ProjectSide::Both, + slug: slug_map, + name: name_map, + id: id_map, + update_strategy: UpdateStrategy::Latest, + redistributable: true, + subpath: None, + aliases: HashSet::new(), + export: true, + files: vec![], + } + } + + fn 
create_test_lockfile(projects: Vec) -> LockFile { + use crate::model::enums::Target; + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + for project in projects { + lockfile.add_project(project); + } + + lockfile + } + + #[test] + fn test_find_project_by_slug() { + let project = create_test_project("test-id", "test-slug", "Test Mod"); + let lockfile = create_test_lockfile(vec![project]); + + let found = find_project(&lockfile, "test-slug"); + assert!(found.is_some()); + assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string())); + } + + #[test] + fn test_find_project_by_name() { + let project = create_test_project("test-id", "test-slug", "Test Mod"); + let lockfile = create_test_lockfile(vec![project]); + + let found = find_project(&lockfile, "test mod"); // Case-insensitive + assert!(found.is_some()); + assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string())); + } + + #[test] + fn test_find_project_by_pakku_id() { + let project = create_test_project("test-id", "test-slug", "Test Mod"); + let lockfile = create_test_lockfile(vec![project]); + + let found = find_project(&lockfile, "test-id"); + assert!(found.is_some()); + assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string())); + } + + #[test] + fn test_find_project_not_found() { + let project = create_test_project("test-id", "test-slug", "Test Mod"); + let lockfile = create_test_lockfile(vec![project]); + + let found = find_project(&lockfile, "nonexistent"); + assert!(found.is_none()); + } + + #[test] + fn test_fuzzy_matching_close_match() { + let project1 = create_test_project("id1", "fabric-api", "Fabric API"); + let project2 = create_test_project("id2", "sodium", "Sodium"); + let lockfile = create_test_lockfile(vec![project1, project2]); + + // Typo: "fabrc-api" should 
#[test]
fn test_fuzzy_matching_no_match() {
  let lockfile = create_test_lockfile(vec![create_test_project(
    "test-id", "test-slug", "Test Mod",
  )]);

  // Every known identifier is more than 3 edits away, so no suggestions.
  let suggestions =
    find_similar_projects(&lockfile, "completely-different-xyz", 5);
  assert!(suggestions.is_none() || suggestions.unwrap().is_empty());
}

#[test]
fn test_project_matches_alias() {
  let mut project = create_test_project("test-id", "test-slug", "Test Mod");
  project.aliases.insert("test-alias".to_string());

  // Aliases participate in identifier matching.
  assert!(project_matches(&project, "test-alias"));
}
"https://example.com/dep2.jar".to_string(), + id: "file2".to_string(), + parent_id: "dep2".to_string(), + hashes: HashMap::new(), + required_dependencies: vec!["dep1".to_string()], + size: 1000, + date_published: "2024-01-01T00:00:00Z".to_string(), + }; + + project1.files.push(file1); + project2.files.push(file2); + + let lockfile = create_test_lockfile(vec![project1, project2]); + + // Test that display_dependency_tree handles circular deps gracefully + let mut visited = HashSet::new(); + let result = display_dependency_tree("dep1", &lockfile, 1, &mut visited); + assert!(result.is_ok()); + } +} diff --git a/src/cli/commands/link.rs b/src/cli/commands/link.rs new file mode 100644 index 0000000..8f7fb6e --- /dev/null +++ b/src/cli/commands/link.rs @@ -0,0 +1,47 @@ +use std::path::Path; + +use crate::{ + cli::LinkArgs, + error::{PakkerError, Result}, + model::LockFile, +}; + +pub fn execute(args: LinkArgs, lockfile_path: &Path) -> Result<()> { + log::info!("Linking {} -> {}", args.from, args.to); + + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let mut lockfile = LockFile::load(lockfile_dir)?; + + // Find projects + let from_project = lockfile + .projects + .iter() + .find(|p| p.matches_input(&args.from)) + .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?; + let from_id = from_project.pakku_id.clone().ok_or_else(|| { + PakkerError::InvalidProject("From project has no pakku_id".to_string()) + })?; + + let to_project = lockfile + .projects + .iter_mut() + .find(|p| p.matches_input(&args.to)) + .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?; + + // Check if link already exists + if to_project.pakku_links.contains(&from_id) { + log::info!("Link already exists"); + return Ok(()); + } + + // Add link + to_project.pakku_links.insert(from_id); + + // Save lockfile + lockfile.save(lockfile_dir)?; + + log::info!("Successfully linked projects"); + + Ok(()) +} diff 
--git a/src/cli/commands/ls.rs b/src/cli/commands/ls.rs new file mode 100644 index 0000000..d225e04 --- /dev/null +++ b/src/cli/commands/ls.rs @@ -0,0 +1,50 @@ +use std::path::Path; + +use crate::{cli::LsArgs, error::Result, model::LockFile}; + +pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> { + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let lockfile = LockFile::load(lockfile_dir)?; + + if lockfile.projects.is_empty() { + println!("No projects installed"); + return Ok(()); + } + + println!("Installed projects ({}):", lockfile.projects.len()); + println!(); + + for project in &lockfile.projects { + if args.detailed { + let id = project.pakku_id.as_deref().unwrap_or("unknown"); + println!(" {} ({})", project.get_name(), id); + println!(" Type: {:?}", project.r#type); + println!(" Side: {:?}", project.side); + + if let Some(file) = project.files.first() { + println!(" File: {}", file.file_name); + println!( + " Version: {} ({})", + file.release_type, file.date_published + ); + } + + if !project.pakku_links.is_empty() { + println!(" Dependencies: {}", project.pakku_links.len()); + } + + println!(); + } else { + let file_info = project + .files + .first() + .map(|f| format!(" ({})", f.file_name)) + .unwrap_or_default(); + + println!(" {}{}", project.get_name(), file_info); + } + } + + Ok(()) +} diff --git a/src/cli/commands/mod.rs b/src/cli/commands/mod.rs new file mode 100644 index 0000000..17cf859 --- /dev/null +++ b/src/cli/commands/mod.rs @@ -0,0 +1,23 @@ +pub mod add; +pub mod add_prj; +pub mod cfg; +pub mod cfg_prj; +pub mod credentials; +pub mod credentials_set; +pub mod diff; +pub mod export; +pub mod fetch; +pub mod fork; +pub mod import; +pub mod init; +pub mod inspect; +pub mod link; +pub mod ls; +pub mod remote; +pub mod remote_update; +pub mod rm; +pub mod set; +pub mod status; +pub mod sync; +pub mod unlink; +pub mod update; diff --git 
a/src/cli/commands/remote.rs b/src/cli/commands/remote.rs new file mode 100644 index 0000000..393011a --- /dev/null +++ b/src/cli/commands/remote.rs @@ -0,0 +1,151 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use crate::{ + cli::RemoteArgs, + error::{PakkerError, Result}, + fetch::Fetcher, + git, + model::{config::Config, lockfile::LockFile}, +}; + +const REMOTE_DIR: &str = ".pakku-remote"; + +pub async fn execute(args: RemoteArgs) -> Result<()> { + let remote_path = PathBuf::from(REMOTE_DIR); + + // Handle --remove flag + if args.remove { + if remote_path.exists() { + fs::remove_dir_all(&remote_path)?; + log::info!("Removed remote from modpack"); + } else { + log::warn!("No remote configured"); + } + return Ok(()); + } + + // If no URL provided, show status + if args.url.is_none() { + show_remote_status(&remote_path)?; + return Ok(()); + } + + let url = args.url.unwrap(); + log::info!("Installing modpack from: {url}"); + + // Clone or update repository + if remote_path.exists() { + log::info!("Remote directory exists, updating..."); + let remote_name = "origin"; + let ref_name = args.branch.as_deref().unwrap_or("HEAD"); + + git::fetch_updates(&remote_path, remote_name, ref_name, None)?; + git::reset_to_ref(&remote_path, remote_name, ref_name)?; + } else { + log::info!("Cloning repository..."); + let ref_name = args.branch.as_deref().unwrap_or("HEAD"); + git::clone_repository(&url, &remote_path, ref_name, None)?; + } + + // Load lockfile and config from remote + let remote_lockfile_path = remote_path.join("pakku-lock.json"); + if !remote_lockfile_path.exists() { + return Err(PakkerError::ConfigError( + "Remote repository does not contain pakku-lock.json".to_string(), + )); + } + + let remote_lockfile = LockFile::load(&remote_path)?; + let remote_config = Config::load(&remote_path).ok(); + + // Copy lockfile to current directory + let current_lockfile_path = PathBuf::from("pakku-lock.json"); + fs::copy(&remote_lockfile_path, ¤t_lockfile_path)?; + 
log::info!("Copied lockfile from remote"); + + // Copy config if exists + if remote_config.is_some() { + let remote_config_path = remote_path.join("pakku.json"); + let current_config_path = PathBuf::from("pakku.json"); + if remote_config_path.exists() { + fs::copy(&remote_config_path, ¤t_config_path)?; + log::info!("Copied config from remote"); + } + } + + // Fetch project files + log::info!("Fetching project files..."); + let fetcher = Fetcher::new(&remote_path); + fetcher + .fetch_all(&remote_lockfile, &remote_config.unwrap_or_default()) + .await?; + + // Sync overrides + sync_overrides(&remote_path, args.server_pack)?; + + log::info!("Successfully installed modpack from remote"); + Ok(()) +} + +fn show_remote_status(remote_path: &Path) -> Result<()> { + if !remote_path.exists() { + println!("No remote configured"); + return Ok(()); + } + + println!("Remote status:"); + println!(" Directory: {}", remote_path.display()); + + if git::is_git_repository(remote_path) { + if let Ok(url) = git::get_remote_url(remote_path, "origin") { + println!(" URL: {url}"); + } + if let Ok(sha) = git::get_current_commit_sha(remote_path, None) { + println!(" Commit: {}", &sha[..8]); + } + } + + Ok(()) +} + +fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> { + let override_dirs = if server_pack { + vec!["overrides", "server_overrides"] + } else { + vec!["overrides", "client_overrides"] + }; + + for dir_name in override_dirs { + let src_dir = remote_path.join(dir_name); + if src_dir.exists() && src_dir.is_dir() { + log::info!("Syncing {dir_name} directory..."); + copy_dir_recursive(&src_dir, Path::new("."))?; + } + } + + Ok(()) +} + +fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> { + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? 
{ + let entry = entry?; + let src_path = entry.path(); + let file_name = entry.file_name(); + let dst_path = dst.join(file_name); + + if src_path.is_dir() { + copy_dir_recursive(&src_path, &dst_path)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + + Ok(()) +} diff --git a/src/cli/commands/remote_update.rs b/src/cli/commands/remote_update.rs new file mode 100644 index 0000000..a4ddbba --- /dev/null +++ b/src/cli/commands/remote_update.rs @@ -0,0 +1,121 @@ +use std::path::PathBuf; + +use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config}; + +/// Update modpack from remote Git repository +/// +/// This command updates the current modpack from its remote Git repository. +/// It fetches the latest changes from the remote and syncs overrides. +pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> { + // Check if lockfile exists in current directory - if it does, we're in a + // modpack directory and should not update remote (use regular update + // instead) + let lockfile_path = PathBuf::from("pakku-lock.json"); + if lockfile_path.exists() { + return Err(PakkerError::InvalidInput( + "Cannot update remote from a modpack directory. Use 'update' command \ + instead." + .to_string(), + )); + } + + // Remote directory for the cloned modpack + let remote_dir = PathBuf::from(".pakku-remote"); + + // Check if remote directory exists + if !remote_dir.exists() { + return Err(PakkerError::RemoteNotFound( + "No remote found. Use 'remote' command to install a modpack first." 
+ .to_string(), + )); + } + + // Fetch updates from remote repository + println!("Updating remote repository..."); + let remote_name = "origin"; + let ref_name = args.branch.as_deref().unwrap_or("HEAD"); + git::fetch_updates(&remote_dir, remote_name, ref_name, None)?; + + // Read remote lockfile + let remote_lockfile_path = remote_dir.join("pakku-lock.json"); + if !remote_lockfile_path.exists() { + return Err(PakkerError::FileNotFound( + "Remote lockfile not found".to_string(), + )); + } + + // Read remote config if it exists + let remote_config_path = remote_dir.join("pakku.json"); + let _remote_config = if remote_config_path.exists() { + match Config::load(&remote_config_path) { + Ok(config) => Some(config), + Err(e) => { + eprintln!("Warning: Could not read remote config: {e}"); + None + }, + } + } else { + None + }; + + // Sync overrides from remote directory + println!("Syncing overrides..."); + sync_overrides(&remote_dir).await?; + + // Clean up remote directory + std::fs::remove_dir_all(&remote_dir)?; + + println!("Remote modpack updated successfully."); + + Ok(()) +} + +/// Sync override files from remote directory to current directory +async fn sync_overrides(remote_dir: &PathBuf) -> Result<(), PakkerError> { + let remote_config_path = remote_dir.join("pakku.json"); + if !remote_config_path.exists() { + return Ok(()); + } + + let config = Config::load(&remote_config_path)?; + + // Get override directories from config + let overrides = config.overrides; + if overrides.is_empty() { + return Ok(()); + } + + for override_path in overrides { + let source = remote_dir.join(&override_path); + let dest = PathBuf::from(&override_path); + + if source.exists() { + // Copy override directory + copy_directory(&source, &dest)?; + println!(" Synced: {override_path}"); + } + } + + Ok(()) +} + +/// Recursively copy a directory +fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<(), PakkerError> { + if !dest.exists() { + std::fs::create_dir_all(dest)?; + } + + for 
entry in std::fs::read_dir(src)? { + let entry = entry?; + let path = entry.path(); + let dest_path = dest.join(entry.file_name()); + + if path.is_dir() { + copy_directory(&path, &dest_path)?; + } else { + std::fs::copy(&path, &dest_path)?; + } + } + + Ok(()) +} diff --git a/src/cli/commands/rm.rs b/src/cli/commands/rm.rs new file mode 100644 index 0000000..2c72283 --- /dev/null +++ b/src/cli/commands/rm.rs @@ -0,0 +1,89 @@ +use std::path::Path; + +use crate::{ + cli::RmArgs, + error::{PakkerError, Result}, + model::LockFile, + ui_utils::prompt_yes_no, +}; + +pub async fn execute( + args: RmArgs, + lockfile_path: &Path, + _config_path: &Path, +) -> Result<()> { + log::info!("Removing projects: {:?}", args.inputs); + + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let mut lockfile = LockFile::load(lockfile_dir)?; + + let mut removed_count = 0; + let mut removed_ids = Vec::new(); + let mut projects_to_remove = Vec::new(); + + // First, identify all projects to remove + for input in &args.inputs { + // Find project by various identifiers + if let Some(project) = lockfile.projects.iter().find(|p| { + p.pakku_id.as_deref() == Some(input) + || p.slug.values().any(|s| s == input) + || p.name.values().any(|n| n.eq_ignore_ascii_case(input)) + || p.aliases.contains(input) + }) { + projects_to_remove.push(project.get_name()); + } else { + log::warn!("Project not found: {input}"); + } + } + + if projects_to_remove.is_empty() { + return Err(PakkerError::ProjectNotFound( + "None of the specified projects found".to_string(), + )); + } + + // Ask for confirmation unless --yes flag is provided + if !args.yes { + println!("The following projects will be removed:"); + for name in &projects_to_remove { + println!(" - {name}"); + } + + if !prompt_yes_no("Do you want to continue?", false)? 
{ + println!("Removal cancelled."); + return Ok(()); + } + } + + // Now actually remove the projects + for input in &args.inputs { + if let Some(pos) = lockfile.projects.iter().position(|p| { + p.pakku_id.as_deref() == Some(input) + || p.slug.values().any(|s| s == input) + || p.name.values().any(|n| n.eq_ignore_ascii_case(input)) + || p.aliases.contains(input) + }) { + let project = lockfile.projects.remove(pos); + log::info!("Removed: {}", project.get_name()); + if let Some(pakku_id) = project.pakku_id.clone() { + removed_ids.push(pakku_id); + } + removed_count += 1; + } + } + + // Clean up pakku_links from all remaining projects + for project in &mut lockfile.projects { + project + .pakku_links + .retain(|link| !removed_ids.contains(link)); + } + + // Save lockfile + lockfile.save(lockfile_dir)?; + + log::info!("Successfully removed {removed_count} project(s)"); + + Ok(()) +} diff --git a/src/cli/commands/set.rs b/src/cli/commands/set.rs new file mode 100644 index 0000000..11111c6 --- /dev/null +++ b/src/cli/commands/set.rs @@ -0,0 +1,156 @@ +use std::{collections::HashMap, path::Path, str::FromStr}; + +use crate::{ + cli::SetArgs, + error::PakkerError, + model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy}, +}; + +pub async fn execute( + args: SetArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<(), PakkerError> { + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let mut lockfile = LockFile::load(lockfile_dir)?; + let config = Config::load(config_dir)?; + + // Check if we're modifying lockfile properties or project properties + let is_lockfile_modification = args.target.is_some() + || args.mc_versions.is_some() + || args.loaders.is_some(); + + if is_lockfile_modification { + // Modify lockfile properties + if let Some(target_str) = &args.target { + let target = 
Target::from_str(target_str).map_err(|e| { + PakkerError::InvalidInput(format!("Invalid target: {e}")) + })?; + lockfile.target = Some(target); + println!("Set target to: {target:?}"); + } + + if let Some(mc_versions_str) = &args.mc_versions { + let mc_versions: Vec = mc_versions_str + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + + if mc_versions.is_empty() { + return Err(PakkerError::InvalidInput( + "At least one Minecraft version is required".to_string(), + )); + } + + // Validate that all projects are compatible with new MC versions + for project in &lockfile.projects { + let compatible = project + .files + .iter() + .any(|file| file.mc_versions.iter().any(|v| mc_versions.contains(v))); + if !compatible { + eprintln!( + "Warning: Project '{}' has no files compatible with new MC \ + versions", + project.get_name() + ); + } + } + + lockfile.mc_versions = mc_versions.clone(); + println!("Set Minecraft versions to: {mc_versions:?}"); + } + + if let Some(loaders_str) = &args.loaders { + let mut loaders: HashMap = HashMap::new(); + + for pair in loaders_str.split(',') { + let parts: Vec<&str> = pair.split('=').collect(); + if parts.len() != 2 { + return Err(PakkerError::InvalidInput(format!( + "Invalid loader format '{pair}'. 
Expected 'name=version'" + ))); + } + loaders + .insert(parts[0].trim().to_string(), parts[1].trim().to_string()); + } + + if loaders.is_empty() { + return Err(PakkerError::InvalidInput( + "At least one loader is required".to_string(), + )); + } + + let loader_names: Vec = loaders.keys().cloned().collect(); + + // Validate that all projects are compatible with new loaders + for project in &lockfile.projects { + let compatible = project.files.iter().any(|file| { + file.loaders.is_empty() + || file.loaders.iter().any(|l| loader_names.contains(l)) + }); + if !compatible { + eprintln!( + "Warning: Project '{}' has no files compatible with new loaders", + project.get_name() + ); + } + } + + lockfile.loaders = loaders.clone(); + println!("Set loaders to: {loaders:?}"); + } + + lockfile.save(lockfile_dir)?; + println!("Lockfile properties updated successfully"); + } else if let Some(input) = &args.input { + // Modify project properties + let project_name = { + let project = lockfile + .projects + .iter_mut() + .find(|p| p.matches_input(input)) + .ok_or_else(|| PakkerError::ProjectNotFound(input.clone()))?; + + if let Some(type_str) = &args.r#type { + let project_type = + ProjectType::from_str(type_str).map_err(PakkerError::InvalidInput)?; + project.r#type = project_type; + } + + if let Some(side_str) = &args.side { + let side = + ProjectSide::from_str(side_str).map_err(PakkerError::InvalidInput)?; + project.side = side; + } + + if let Some(strategy_str) = &args.strategy { + let strategy = UpdateStrategy::from_str(strategy_str) + .map_err(PakkerError::InvalidInput)?; + project.update_strategy = strategy; + } + + if let Some(redistributable) = args.redistributable { + project.redistributable = redistributable; + } + + project.get_name() + }; + + lockfile.save(lockfile_dir)?; + config.save(config_dir)?; + + println!("Updated project: {project_name}"); + } else { + return Err(PakkerError::InvalidInput( + "Either provide a project identifier or lockfile properties to modify" + 
.to_string(), + )); + } + + Ok(()) +} diff --git a/src/cli/commands/status.rs b/src/cli/commands/status.rs new file mode 100644 index 0000000..41f84d7 --- /dev/null +++ b/src/cli/commands/status.rs @@ -0,0 +1,370 @@ +use std::{collections::HashMap, path::Path, sync::Arc}; + +use futures::stream::{FuturesUnordered, StreamExt}; +use indicatif::{ProgressBar, ProgressStyle}; +use tokio::sync::Semaphore; +use yansi::Paint; + +use crate::{ + error::Result, + model::{Config, LockFile, Project}, + platform::create_platform, +}; + +pub async fn execute( + parallel: bool, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let lockfile = LockFile::load(lockfile_dir)?; + let config = Config::load(config_dir)?; + + // Display modpack metadata + display_modpack_info(&lockfile, &config); + println!(); + + // Check for updates (sequential or parallel) + let (updates, errors) = if parallel { + check_updates_parallel(&lockfile).await? + } else { + check_updates_sequential(&lockfile).await? + }; + + // Display results + display_update_results(&updates); + + // Display errors if any + if !errors.is_empty() { + println!(); + println!("{}", "Errors encountered:".red()); + for (project, error) in &errors { + println!(" - {}: {}", project.yellow(), error.red()); + } + } + + // Prompt to update if there are updates available + if !updates.is_empty() { + println!(); + if crate::ui_utils::prompt_yes_no("Update now?", false)? 
{ + // Call update command programmatically (update all projects) + let update_args = crate::cli::UpdateArgs { + inputs: vec![], + yes: true, // Auto-yes for status command + }; + crate::cli::commands::update::execute( + update_args, + lockfile_path, + config_path, + ) + .await?; + } + } + + Ok(()) +} + +fn display_modpack_info(lockfile: &LockFile, config: &Config) { + let author = config.author.as_deref().unwrap_or("Unknown"); + println!( + "Managing {} modpack, version {}, by {}", + config.name.cyan(), + config.version.cyan(), + author.cyan() + ); + + let mc_versions = lockfile.mc_versions.join(", "); + let loaders: Vec = lockfile + .loaders + .iter() + .map(|(loader, version)| format!("{loader}-{version}")) + .collect(); + let loaders_str = loaders.join(", "); + + println!( + "on Minecraft version {}, loader {}, targeting platform {:?}.", + mc_versions.cyan(), + loaders_str.cyan(), + lockfile.target + ); +} + +#[derive(Debug)] +struct ProjectUpdate { + slug: HashMap, + name: String, + project_type: String, + side: String, + file_updates: Vec, +} + +#[derive(Debug)] +struct FileUpdate { + platform: String, + old_filename: String, + new_filename: String, +} + +async fn check_updates_sequential( + lockfile: &LockFile, +) -> Result<(Vec, Vec<(String, String)>)> { + let total = lockfile.projects.len(); + let mut updates = Vec::new(); + let mut errors = Vec::new(); + + // Create progress bar + let pb = ProgressBar::new(total as u64); + pb.set_style( + ProgressStyle::default_bar() + .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}") + .unwrap() + .progress_chars("#>-"), + ); + pb.set_message("Checking for updates..."); + + for project in &lockfile.projects { + let project_name = project + .name + .values() + .next() + .unwrap_or(&"Unknown".to_string()) + .clone(); + pb.set_message(format!("Checking {project_name}...")); + + match check_project_update(project, lockfile).await { + Ok(update_opt) => { + if let Some(update) = update_opt { + 
updates.push(update); + } + }, + Err(e) => { + errors.push((project_name.clone(), e.to_string())); + }, + } + + pb.inc(1); + } + + pb.finish_with_message(format!("Checked {total} projects")); + println!(); // Add blank line after progress bar + + Ok((updates, errors)) +} + +async fn check_updates_parallel( + lockfile: &LockFile, +) -> Result<(Vec, Vec<(String, String)>)> { + let total = lockfile.projects.len(); + let semaphore = Arc::new(Semaphore::new(10)); + let mut futures = FuturesUnordered::new(); + + // Create progress bar + let pb = Arc::new(ProgressBar::new(total as u64)); + pb.set_style( + ProgressStyle::default_bar() + .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}") + .unwrap() + .progress_chars("#>-"), + ); + pb.set_message("Checking for updates (parallel)..."); + + for project in &lockfile.projects { + let project = project.clone(); + let sem = semaphore.clone(); + let pb_clone = pb.clone(); + let lockfile_clone = lockfile.clone(); + + futures.push(async move { + let _permit = sem.acquire().await.unwrap(); + let result = check_project_update(&project, &lockfile_clone).await; + pb_clone.inc(1); + (project, result) + }); + } + + let mut updates = Vec::new(); + let mut errors = Vec::new(); + + while let Some((project, result)) = futures.next().await { + match result { + Ok(update_opt) => { + if let Some(update) = update_opt { + updates.push(update); + } + }, + Err(e) => { + let project_name = project + .name + .values() + .next() + .unwrap_or(&"Unknown".to_string()) + .clone(); + errors.push((project_name, e.to_string())); + }, + } + } + + pb.finish_with_message(format!("Checked {total} projects")); + println!(); // Add blank line after progress bar + + Ok((updates, errors)) +} + +async fn check_project_update( + project: &Project, + lockfile: &LockFile, +) -> Result> { + // Get primary slug + let slug = project + .slug + .values() + .next() + .ok_or_else(|| { + crate::error::PakkerError::InvalidProject("No slug found".to_string()) + 
})? + .clone(); + + // Try each platform in project + for platform_name in project.id.keys() { + let api_key = get_api_key(platform_name); + let platform = match create_platform(platform_name, api_key) { + Ok(p) => p, + Err(_) => continue, + }; + + let loaders: Vec = lockfile.loaders.keys().cloned().collect(); + + match platform + .request_project_with_files(&slug, &lockfile.mc_versions, &loaders) + .await + { + Ok(updated_project) => { + // Compare files to detect updates + let file_updates = detect_file_updates(project, &updated_project); + + if !file_updates.is_empty() { + return Ok(Some(ProjectUpdate { + slug: project.slug.clone(), + name: project.name.values().next().cloned().unwrap_or_default(), + project_type: format!("{:?}", project.r#type), + side: format!("{:?}", project.side), + file_updates, + })); + } + + return Ok(None); // No updates + }, + Err(_) => { + // Try next platform + continue; + }, + } + } + + Err(crate::error::PakkerError::PlatformApiError( + "Failed to check for updates on any platform".to_string(), + )) +} + +fn detect_file_updates( + current: &Project, + updated: &Project, +) -> Vec { + let mut updates = Vec::new(); + + for old_file in ¤t.files { + if let Some(new_file) = updated + .files + .iter() + .find(|f| f.file_type == old_file.file_type) + { + // Check if file ID changed (indicates update) + if new_file.id != old_file.id { + updates.push(FileUpdate { + platform: old_file.file_type.clone(), + old_filename: old_file.file_name.clone(), + new_filename: new_file.file_name.clone(), + }); + } + } + } + + updates +} + +fn display_update_results(updates: &[ProjectUpdate]) { + if updates.is_empty() { + println!("{}", "✓ All projects are up to date".green()); + return; + } + + println!(); + println!("{}", "📦 Updates Available:".cyan().bold()); + println!(); + + for update in updates { + // Create hyperlink for project name using ui_utils + let project_url = if let Some((platform, slug)) = update.slug.iter().next() + { + match 
platform.as_str() { + "modrinth" => crate::ui_utils::modrinth_project_url(slug), + "curseforge" => crate::ui_utils::curseforge_project_url(slug), + _ => String::new(), + } + } else { + String::new() + }; + + if project_url.is_empty() { + println!( + "{} ({}, {})", + update.name.yellow(), + update.project_type, + update.side + ); + } else { + let hyperlinked = crate::ui_utils::hyperlink( + &project_url, + &update.name.yellow().to_string(), + ); + println!("{} ({}, {})", hyperlinked, update.project_type, update.side); + } + + for file_update in &update.file_updates { + println!( + " • {}: {} → {}", + file_update.platform.cyan(), + file_update.old_filename.dim(), + file_update.new_filename.green() + ); + } + + println!(); + } + + println!( + "{}", + format!("{} project(s) need updates", updates.len()).yellow() + ); +} + +#[allow(dead_code)] +fn get_project_display_name(project: &Project) -> String { + project + .name + .values() + .next() + .or_else(|| project.slug.values().next()) + .cloned() + .unwrap_or_else(|| "Unknown".to_string()) +} + +fn get_api_key(platform: &str) -> Option { + match platform { + "modrinth" => std::env::var("MODRINTH_TOKEN").ok(), + "curseforge" => std::env::var("CURSEFORGE_API_KEY").ok(), + _ => None, + } +} diff --git a/src/cli/commands/sync.rs b/src/cli/commands/sync.rs new file mode 100644 index 0000000..c5404cf --- /dev/null +++ b/src/cli/commands/sync.rs @@ -0,0 +1,216 @@ +use std::{ + collections::{HashMap, HashSet}, + fs, + io::{self, Write}, + path::{Path, PathBuf}, +}; + +use indicatif::{ProgressBar, ProgressStyle}; + +use crate::{ + cli::SyncArgs, + error::{PakkerError, Result}, + fetch::Fetcher, + model::{Config, LockFile}, + platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient}, +}; + +enum SyncChange { + Addition(PathBuf, String), // (file_path, project_name) + Removal(String), // project_pakku_id +} + +pub async fn execute( + args: SyncArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<()> { + 
log::info!("Synchronizing with lockfile");

  // Load expects directory paths, not the file paths themselves.
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));

  let mut lockfile = LockFile::load(lockfile_dir)?;
  let config = Config::load(config_dir)?;

  // Detect changes
  let changes = detect_changes(&lockfile, &config)?;

  if changes.is_empty() {
    println!("✓ Everything is in sync");
    return Ok(());
  }

  // Filter changes based on flags
  // (split the mixed change list into additions and removals so each can be
  // gated independently by --additions / --removals below).
  let mut additions = Vec::new();
  let mut removals = Vec::new();

  for change in changes {
    match change {
      SyncChange::Addition(path, name) => additions.push((path, name)),
      SyncChange::Removal(id) => removals.push(id),
    }
  }

  // Apply filters
  // When neither flag is given, process both kinds of change.
  let no_filter = !args.additions && !args.removals;

  let spinner = ProgressBar::new_spinner();
  spinner.set_style(
    ProgressStyle::default_spinner()
      .template("{spinner:.green} {msg}")
      .unwrap(),
  );

  if no_filter || args.additions {
    for (file_path, _) in &additions {
      spinner
        .set_message(format!("Processing addition: {}", file_path.display()));
      // Each addition is confirmed interactively before being looked up.
      if prompt_user(&format!("Add {} to lockfile?", file_path.display()))? {
        add_file_to_lockfile(&mut lockfile, file_path, &config).await?;
      }
    }
  }

  if no_filter || args.removals {
    for pakku_id in &removals {
      if let Some(project) = lockfile
        .projects
        .iter()
        .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
      {
        // Prefer the display name, fall back to the pakku id.
        let name = project
          .name
          .values()
          .next()
          .map(std::string::String::as_str)
          .or(project.pakku_id.as_deref())
          .unwrap_or("unknown");
        spinner.set_message(format!("Processing removal: {name}"));
        if prompt_user(&format!("Remove {name} from lockfile?"))?
{ + lockfile + .remove_project(pakku_id) + .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?; + } + } + } + } + + spinner.finish_and_clear(); + + // Save changes + lockfile.save(lockfile_dir)?; + + // Fetch missing files + let fetcher = Fetcher::new("."); + fetcher.sync(&lockfile, &config).await?; + + println!("✓ Sync complete"); + Ok(()) +} + +fn detect_changes( + lockfile: &LockFile, + config: &Config, +) -> Result> { + let mut changes = Vec::new(); + + // Get paths for each project type + let paths = config.paths.clone(); + let mods_path = paths + .get("mods") + .map_or("mods", std::string::String::as_str); + + // Build map of lockfile projects by file path + let mut lockfile_files: HashMap = HashMap::new(); + for project in &lockfile.projects { + for file in &project.files { + let file_path = PathBuf::from(mods_path).join(&file.file_name); + if let Some(ref pakku_id) = project.pakku_id { + lockfile_files.insert(file_path, pakku_id.clone()); + } + } + } + + // Scan filesystem for additions + if let Ok(entries) = fs::read_dir(mods_path) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_file() + && let Some(ext) = path.extension() + && ext == "jar" + && !lockfile_files.contains_key(&path) + { + let name = path.file_name().unwrap().to_string_lossy().to_string(); + changes.push(SyncChange::Addition(path, name)); + } + } + } + + // Check for removals (projects in lockfile but files missing) + let filesystem_files: HashSet<_> = + if let Ok(entries) = fs::read_dir(mods_path) { + entries + .flatten() + .map(|e| e.path()) + .filter(|p| p.is_file()) + .collect() + } else { + HashSet::new() + }; + + for (lockfile_path, pakku_id) in &lockfile_files { + if !filesystem_files.contains(lockfile_path) { + changes.push(SyncChange::Removal(pakku_id.clone())); + } + } + + Ok(changes) +} + +async fn add_file_to_lockfile( + lockfile: &mut LockFile, + file_path: &Path, + _config: &Config, +) -> Result<()> { + // Try to identify the file by hash 
lookup + let _modrinth = ModrinthPlatform::new(); + let curseforge = CurseForgePlatform::new(None); + + // Compute file hash + let file_data = fs::read(file_path)?; + // Compute SHA-1 hash from file bytes + use sha1::Digest; + let mut hasher = sha1::Sha1::new(); + hasher.update(&file_data); + let hash = format!("{:x}", hasher.finalize()); + + // Try Modrinth first (SHA-1 hash) + if let Ok(Some(project)) = _modrinth.lookup_by_hash(&hash).await { + lockfile.add_project(project); + println!("✓ Added {} (from Modrinth)", file_path.display()); + return Ok(()); + } + + // Try CurseForge (Murmur2 hash computed from file) + if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await { + lockfile.add_project(project); + println!("✓ Added {} (from CurseForge)", file_path.display()); + return Ok(()); + } + + println!("âš  Could not identify {}, skipping", file_path.display()); + Ok(()) +} + +fn prompt_user(message: &str) -> Result { + print!("{message} [y/N] "); + io::stdout().flush().map_err(PakkerError::IoError)?; + + let mut input = String::new(); + io::stdin() + .read_line(&mut input) + .map_err(PakkerError::IoError)?; + + Ok(input.trim().eq_ignore_ascii_case("y")) +} diff --git a/src/cli/commands/unlink.rs b/src/cli/commands/unlink.rs new file mode 100644 index 0000000..f43951d --- /dev/null +++ b/src/cli/commands/unlink.rs @@ -0,0 +1,41 @@ +use std::path::Path; + +use crate::{ + cli::UnlinkArgs, + error::{PakkerError, Result}, + model::LockFile, +}; + +pub fn execute(args: UnlinkArgs, lockfile_path: &Path) -> Result<()> { + log::info!("Unlinking {} -> {}", args.from, args.to); + + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let mut lockfile = LockFile::load(lockfile_dir)?; + + // Find projects + let from_project = lockfile + .projects + .iter() + .find(|p| p.matches_input(&args.from)) + .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?; + let from_id = 
from_project.pakku_id.clone().ok_or_else(|| { + PakkerError::InvalidProject("From project has no pakku_id".to_string()) + })?; + + let to_project = lockfile + .projects + .iter_mut() + .find(|p| p.matches_input(&args.to)) + .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?; + + // Remove link + to_project.pakku_links.remove(&from_id); + + // Save lockfile + lockfile.save(lockfile_dir)?; + + log::info!("Successfully unlinked projects"); + + Ok(()) +} diff --git a/src/cli/commands/update.rs b/src/cli/commands/update.rs new file mode 100644 index 0000000..88f5a43 --- /dev/null +++ b/src/cli/commands/update.rs @@ -0,0 +1,140 @@ +use std::{collections::HashMap, path::Path}; + +use indicatif::{ProgressBar, ProgressStyle}; + +use crate::{ + cli::UpdateArgs, + error::PakkerError, + model::{Config, LockFile}, + platform::create_platform, + ui_utils::prompt_select, +}; + +pub async fn execute( + args: UpdateArgs, + lockfile_path: &Path, + config_path: &Path, +) -> Result<(), PakkerError> { + // Load expects directory path, so get parent directory + let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new(".")); + let config_dir = config_path.parent().unwrap_or(Path::new(".")); + + let mut lockfile = LockFile::load(lockfile_dir)?; + let _config = Config::load(config_dir)?; + + // Create platforms + let mut platforms = HashMap::new(); + if let Ok(platform) = create_platform("modrinth", None) { + platforms.insert("modrinth".to_string(), platform); + } + if let Ok(platform) = + create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok()) + { + platforms.insert("curseforge".to_string(), platform); + } + + let project_indices: Vec<_> = if args.inputs.is_empty() { + (0..lockfile.projects.len()).collect() + } else { + let mut indices = Vec::new(); + for input in &args.inputs { + if let Some((idx, _)) = lockfile + .projects + .iter() + .enumerate() + .find(|(_, p)| p.matches_input(input)) + { + indices.push(idx); + } else { + return 
Err(PakkerError::ProjectNotFound(input.clone())); + } + } + indices + }; + + // Create progress bar + let pb = ProgressBar::new(project_indices.len() as u64); + pb.set_style( + ProgressStyle::default_bar() + .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}") + .unwrap() + .progress_chars("#>-"), + ); + + for idx in project_indices { + let old_project = &lockfile.projects[idx]; + pb.set_message(format!("Updating {}...", old_project.get_name())); + + let slug = old_project + .slug + .values() + .next() + .ok_or_else(|| PakkerError::InvalidProject("No slug found".into()))?; + + // Find updated project from one of the platforms + let mut updated_project = None; + for platform in platforms.values() { + if let Ok(project) = platform + .request_project_with_files( + slug, + &lockfile.mc_versions, + &lockfile.loaders.keys().cloned().collect::>(), + ) + .await + { + updated_project = Some(project); + break; + } + } + + if let Some(mut updated_project) = updated_project + && !updated_project.files.is_empty() + && let Some(old_file) = lockfile.projects[idx].files.first() + { + let new_file = updated_project.files.first().unwrap(); + + if new_file.id == old_file.id { + pb.println(format!( + " {} - Already up to date", + old_project.get_name() + )); + } else { + // Interactive version selection if not using --yes flag + if !args.yes && updated_project.files.len() > 1 { + pb.suspend(|| { + let choices: Vec = updated_project + .files + .iter() + .map(|f| format!("{} ({})", f.file_name, f.id)) + .collect(); + + let choice_refs: Vec<&str> = + choices.iter().map(std::string::String::as_str).collect(); + + if let Ok(selected_idx) = prompt_select( + &format!("Select version for {}:", old_project.get_name()), + &choice_refs, + ) { + // Move selected file to front + if selected_idx > 0 { + updated_project.files.swap(0, selected_idx); + } + } + }); + } + + let selected_file = updated_project.files.first().unwrap(); + pb.println(format!( + " {} -> {}", + 
old_file.file_name, selected_file.file_name + )); + lockfile.projects[idx] = updated_project; + } + } + pb.inc(1); + } + + pb.finish_with_message("Update complete"); + lockfile.save(lockfile_dir)?; + Ok(()) +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..4c1eca5 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,110 @@ +use thiserror::Error; + +pub type Result = std::result::Result; + +#[derive(Error, Debug)] +pub enum PakkerError { + // Network errors + #[error("Network request failed: {0}")] + NetworkError(#[from] reqwest::Error), + + #[error("Platform API error: {0}")] + PlatformApiError(String), + + // Validation errors + #[error("Invalid lock file: {0}")] + InvalidLockFile(String), + + #[error("Invalid config file: {0}")] + InvalidConfigFile(String), + + #[error("Project not found: {0}")] + ProjectNotFound(String), + + #[error("File selection error: {0}")] + FileSelectionError(String), + + #[error("File not found: {0}")] + FileNotFound(String), + + // Conflict errors + #[error("Circular dependency detected: {0}")] + CircularDependency(String), + + // File I/O errors + #[error("IO error: {0}")] + IoError(#[from] std::io::Error), + + #[error("Serialization error: {0}")] + SerializationError(#[from] serde_json::Error), + + #[error("Hash mismatch for file {file}: expected {expected}, got {actual}")] + HashMismatch { + file: String, + expected: String, + actual: String, + }, + + #[error("Download failed: {0}")] + DownloadFailed(String), + + // Export errors + #[error("Export failed: {0}")] + ExportFailed(String), + + #[error("Invalid export profile: {0}")] + InvalidExportProfile(String), + + // General errors + #[error("Configuration error: {0}")] + ConfigError(String), + + #[error("Internal error: {0}")] + InternalError(String), + + #[error("Already exists: {0}")] + AlreadyExists(String), + + #[error("Invalid input: {0}")] + InvalidInput(String), + + #[error("Invalid project: {0}")] + InvalidProject(String), + + #[error("Invalid import 
file: {0}")] + InvalidImportFile(String), + + #[error("Zip error: {0}")] + ZipError(#[from] zip::result::ZipError), + + // Git and Fork errors + #[error("Git error: {0}")] + GitError(String), + + #[error("Remote not found: {0}")] + RemoteNotFound(String), + + #[error("Fork error: {0}")] + Fork(String), + + #[error("Invalid hash: {0}")] + InvalidHash(String), + + #[error("Invalid response: {0}")] + InvalidResponse(String), + + #[error("IPC error: {0}")] + IpcError(String), +} + +impl From for PakkerError { + fn from(err: git2::Error) -> Self { + Self::GitError(err.to_string()) + } +} + +impl From for PakkerError { + fn from(err: crate::ipc::IpcError) -> Self { + Self::IpcError(err.to_string()) + } +} diff --git a/src/export.rs b/src/export.rs new file mode 100644 index 0000000..2bd1bbf --- /dev/null +++ b/src/export.rs @@ -0,0 +1,266 @@ +mod profile_config; +mod profiles; +mod rules; + +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use indicatif::{ProgressBar, ProgressStyle}; +pub use profile_config::ProfileConfig; +pub use profiles::{ExportProfile, create_profile}; +pub use rules::{Effect, Rule, RuleContext}; + +use crate::{ + error::{PakkerError, Result}, + model::{Config, LockFile}, +}; + +pub struct Exporter { + base_path: PathBuf, +} + +impl Exporter { + pub fn new>(base_path: P) -> Self { + Self { + base_path: base_path.as_ref().to_path_buf(), + } + } + + /// Export all default profiles (curseforge, modrinth, serverpack). + /// + /// In multi-profile mode we try each profile independently. + /// - Profiles that can't run due to missing required credentials are skipped. + /// - Profiles that fail for other reasons are recorded and reported. + /// + /// Returns successfully exported files. If any profile failed (non-skip), + /// returns an error after attempting all profiles. 
+ pub async fn export_all_profiles( + &mut self, + lockfile: &LockFile, + config: &Config, + output_path: &Path, + ) -> Result> { + let profiles = vec!["curseforge", "modrinth", "serverpack"]; + let mut output_files = Vec::new(); + let mut failures: Vec<(String, String)> = Vec::new(); + + println!("Exporting {} profiles...", profiles.len()); + + for profile_name in profiles { + match self + .export(profile_name, lockfile, config, output_path) + .await + { + Ok(output_file) => output_files.push(output_file), + Err(err) => { + if Self::is_auth_error(&err) { + eprintln!( + "{profile_name} export skipped (authentication required)" + ); + continue; + } + + eprintln!("{profile_name} export failed: {err}"); + failures.push((profile_name.to_string(), err.to_string())); + }, + } + } + + if !failures.is_empty() { + return Err(PakkerError::ExportFailed(format!( + "{} profile(s) failed", + failures.len() + ))); + } + + if output_files.is_empty() { + return Err(PakkerError::ExportFailed( + "No export profiles produced an output file".to_string(), + )); + } + + println!("All profiles exported successfully."); + Ok(output_files) + } + + fn is_auth_error(err: &PakkerError) -> bool { + // Auth/token/API-key issues should not abort multi-profile export as a + // whole. We detect these by messages emitted from the downloader. 
+ match err { + PakkerError::InternalError(msg) => { + msg.contains("authentication error") + || msg.contains("unauthorized") + || msg.contains("forbidden") + }, + _ => false, + } + } + + /// Export modpack using specified profile + pub async fn export( + &mut self, + profile_name: &str, + lockfile: &LockFile, + config: &Config, + output_path: &Path, + ) -> Result { + let spinner = ProgressBar::new_spinner(); + spinner.set_style( + ProgressStyle::default_spinner() + .template("{spinner:.cyan} {msg}") + .unwrap(), + ); + spinner.set_message(format!("Preparing {profile_name} export...")); + + spinner.enable_steady_tick(std::time::Duration::from_millis(80)); + let spinner = &spinner; + + // Get export profile + let profile = create_profile(profile_name)?; + + log::info!( + "Exporting with profile: {} ({})", + profile_name, + profile.name() + ); + + // Load profile-specific configuration if available, otherwise use defaults + let profile_config = config + .export_profiles + .as_ref() + .and_then(|profiles| profiles.get(profile_name)) + .cloned() + .or_else(|| { + // Use defaults based on profile name + match profile_name { + "curseforge" => Some(ProfileConfig::curseforge_default()), + "modrinth" => Some(ProfileConfig::modrinth_default()), + "serverpack" => Some(ProfileConfig::serverpack_default()), + _ => None, + } + }); + + // Create temporary export directory + let temp_dir = tempfile::tempdir()?; + let export_dir = temp_dir.path(); + + // Build rule context + let context = RuleContext { + lockfile: lockfile.clone(), + config: config.clone(), + profile_config, + export_path: export_dir.to_path_buf(), + base_path: self.base_path.clone(), + ui: Some(spinner.clone()), + }; + + spinner.set_message("Collecting export rules..."); + // Apply rules and collect effects + let mut effects = Vec::new(); + for rule in profile.rules() { + if rule.matches(&context) { + effects.extend(rule.effects()); + } + } + + // Execute effects with descriptive messages + for effect in &effects 
{ + let effect_name = effect.name(); + spinner.set_message(format!("Exporting: {effect_name}...")); + effect.execute(&context).await?; + } + + spinner.set_message("Creating archive..."); + // Package export + let output_file = + self.package_export(export_dir, output_path, profile_name, config)?; + + // Cleanup + drop(temp_dir); + + spinner.finish_and_clear(); + println!("Exported to: {}", output_file.display()); + Ok(output_file) + } + + /// Package export directory into final format + fn package_export( + &self, + export_dir: &Path, + output_path: &Path, + profile_name: &str, + config: &Config, + ) -> Result { + // Pakku layout support: if output_path ends with "build" (set by CLI), + // create build//. + let profile_output_path = + if output_path.file_name().and_then(|n| n.to_str()) == Some("build") { + output_path.join(profile_name) + } else { + output_path.to_path_buf() + }; + + fs::create_dir_all(&profile_output_path)?; + + // Use .mrpack extension for Modrinth, .zip for others + let extension = if profile_name == "modrinth" { + "mrpack" + } else { + "zip" + }; + let output_file = profile_output_path.join(format!( + "{}-{}-{}.{}", + config.name, config.version, profile_name, extension + )); + + // Create zip archive + let file = fs::File::create(&output_file)?; + let mut zip = zip::ZipWriter::new(file); + + let options = zip::write::FileOptions::default() + .compression_method(zip::CompressionMethod::Deflated) + .unix_permissions(0o755); + + // Add all files from export directory + self.add_directory_to_zip(&mut zip, export_dir, export_dir, options)?; + + zip.finish()?; + + Ok(output_file) + } + + /// Recursively add directory to zip + fn add_directory_to_zip( + &self, + zip: &mut zip::ZipWriter, + base_path: &Path, + current_path: &Path, + options: zip::write::SimpleFileOptions, + ) -> Result<()> { + for entry in fs::read_dir(current_path)? 
{ + let entry = entry?; + let path = entry.path(); + let relative_path = path + .strip_prefix(base_path) + .map_err(|e| PakkerError::InternalError(e.to_string()))?; + + if path.is_file() { + zip.start_file(relative_path.to_string_lossy().to_string(), options)?; + let content = fs::read(&path)?; + zip.write_all(&content)?; + } else if path.is_dir() { + zip.add_directory( + relative_path.to_string_lossy().to_string(), + options, + )?; + self.add_directory_to_zip(zip, base_path, &path, options)?; + } + } + + Ok(()) + } +} + +use std::io::Write; diff --git a/src/export/cache.rs b/src/export/cache.rs new file mode 100644 index 0000000..d62e009 --- /dev/null +++ b/src/export/cache.rs @@ -0,0 +1,66 @@ +use crate::error::Result; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct CacheEntry { + hash: String, + path: PathBuf, +} + +pub struct ExportCache { + cache_dir: PathBuf, + entries: HashMap, +} + +impl ExportCache { + pub fn new(cache_dir: PathBuf) -> Self { + let entries = Self::load_cache(&cache_dir).unwrap_or_default(); + + Self { cache_dir, entries } + } + + fn load_cache(cache_dir: &Path) -> Result> { + let cache_file = cache_dir.join("export-cache.json"); + + if !cache_file.exists() { + return Ok(HashMap::new()); + } + + let content = fs::read_to_string(cache_file)?; + let entries = serde_json::from_str(&content)?; + + Ok(entries) + } + + pub fn get(&self, key: &str) -> Option<&CacheEntry> { + self.entries.get(key) + } + + pub fn put(&mut self, key: String, hash: String, path: PathBuf) { + self.entries.insert(key, CacheEntry { hash, path }); + } + + pub fn save(&self) -> Result<()> { + fs::create_dir_all(&self.cache_dir)?; + + let cache_file = self.cache_dir.join("export-cache.json"); + let content = serde_json::to_string_pretty(&self.entries)?; + + fs::write(cache_file, content)?; + + Ok(()) + } + + pub fn clear(&mut self) -> Result<()> { 
+ self.entries.clear(); + + if self.cache_dir.exists() { + fs::remove_dir_all(&self.cache_dir)?; + } + + Ok(()) + } +} diff --git a/src/export/profile_config.rs b/src/export/profile_config.rs new file mode 100644 index 0000000..411c80a --- /dev/null +++ b/src/export/profile_config.rs @@ -0,0 +1,161 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; + +/// Profile-specific export configuration +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ProfileConfig { + /// Custom override paths for this profile + #[serde(skip_serializing_if = "Option::is_none")] + pub overrides: Option>, + + /// Custom server override paths for this profile + #[serde(skip_serializing_if = "Option::is_none")] + pub server_overrides: Option>, + + /// Custom client override paths for this profile + #[serde(skip_serializing_if = "Option::is_none")] + pub client_overrides: Option>, + + /// Platform filter - only include projects available on this platform + #[serde(skip_serializing_if = "Option::is_none")] + pub filter_platform: Option, + + /// Include non-redistributable projects (default: false for `CurseForge`, + /// true for others) + #[serde(skip_serializing_if = "Option::is_none")] + pub include_non_redistributable: Option, + + /// Include client-only mods in server exports (default: false) + #[serde(skip_serializing_if = "Option::is_none")] + pub include_client_only: Option, + + /// Custom project-specific settings for this profile + #[serde(skip_serializing_if = "Option::is_none")] + pub project_overrides: Option>, +} + +/// Project-specific overrides for a profile +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ProjectOverride { + /// Whether to export this project in this profile + #[serde(skip_serializing_if = "Option::is_none")] + pub export: Option, + + /// Custom subpath for this project in this profile + #[serde(skip_serializing_if = 
"Option::is_none")] + pub subpath: Option, +} + +impl ProfileConfig { + /// Get effective override paths, falling back to global config + pub fn get_overrides<'a>( + &'a self, + global_overrides: &'a [String], + ) -> &'a [String] { + self.overrides.as_deref().unwrap_or(global_overrides) + } + + /// Get effective server override paths, falling back to global config + pub fn get_server_overrides<'a>( + &'a self, + global_server_overrides: Option<&'a Vec>, + ) -> Option<&'a [String]> { + self + .server_overrides + .as_deref() + .or(global_server_overrides.map(std::vec::Vec::as_slice)) + } + + /// Get default config for `CurseForge` profile + pub fn curseforge_default() -> Self { + Self { + filter_platform: Some("curseforge".to_string()), + include_non_redistributable: Some(false), + ..Default::default() + } + } + + /// Get default config for Modrinth profile + pub fn modrinth_default() -> Self { + Self { + filter_platform: Some("modrinth".to_string()), + include_non_redistributable: Some(true), + ..Default::default() + } + } + + /// Get default config for `ServerPack` profile + pub fn serverpack_default() -> Self { + Self { + include_client_only: Some(false), + ..Default::default() + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_default_profile_config() { + let config = ProfileConfig::default(); + assert!(config.overrides.is_none()); + assert!(config.filter_platform.is_none()); + } + + #[test] + fn test_curseforge_default() { + let config = ProfileConfig::curseforge_default(); + assert_eq!(config.filter_platform, Some("curseforge".to_string())); + assert_eq!(config.include_non_redistributable, Some(false)); + } + + #[test] + fn test_modrinth_default() { + let config = ProfileConfig::modrinth_default(); + assert_eq!(config.filter_platform, Some("modrinth".to_string())); + assert_eq!(config.include_non_redistributable, Some(true)); + } + + #[test] + fn test_serverpack_default() { + let config = ProfileConfig::serverpack_default(); + 
assert_eq!(config.include_client_only, Some(false)); + } + + #[test] + fn test_get_overrides_with_custom() { + let mut config = ProfileConfig::default(); + config.overrides = Some(vec!["custom-overrides".to_string()]); + + let global = vec!["overrides".to_string()]; + assert_eq!(config.get_overrides(&global), &["custom-overrides"]); + } + + #[test] + fn test_get_overrides_fallback_to_global() { + let config = ProfileConfig::default(); + let global = vec!["overrides".to_string()]; + assert_eq!(config.get_overrides(&global), &["overrides"]); + } + + #[test] + fn test_serialization() { + let mut config = ProfileConfig::default(); + config.filter_platform = Some("modrinth".to_string()); + config.include_non_redistributable = Some(true); + + let json = serde_json::to_string(&config).unwrap(); + let deserialized: ProfileConfig = serde_json::from_str(&json).unwrap(); + + assert_eq!(deserialized.filter_platform, config.filter_platform); + assert_eq!( + deserialized.include_non_redistributable, + config.include_non_redistributable + ); + } +} diff --git a/src/export/profiles.rs b/src/export/profiles.rs new file mode 100644 index 0000000..f5a272c --- /dev/null +++ b/src/export/profiles.rs @@ -0,0 +1,65 @@ +use super::rules::Rule; +use crate::error::{PakkerError, Result}; + +pub trait ExportProfile { + fn name(&self) -> &str; + fn rules(&self) -> Vec>; +} + +pub struct CurseForgeProfile; +pub struct ModrinthProfile; +pub struct ServerPackProfile; + +impl ExportProfile for CurseForgeProfile { + fn name(&self) -> &'static str { + "curseforge" + } + + fn rules(&self) -> Vec> { + vec![ + Box::new(super::rules::CopyProjectFilesRule), + Box::new(super::rules::FilterByPlatformRule), + Box::new(super::rules::CopyOverridesRule), + Box::new(super::rules::GenerateManifestRule::curseforge()), + Box::new(super::rules::FilterNonRedistributableRule), + ] + } +} + +impl ExportProfile for ModrinthProfile { + fn name(&self) -> &'static str { + "modrinth" + } + + fn rules(&self) -> Vec> { + 
vec![ + Box::new(super::rules::CopyProjectFilesRule), + Box::new(super::rules::FilterByPlatformRule), + Box::new(super::rules::CopyOverridesRule), + Box::new(super::rules::GenerateManifestRule::modrinth()), + ] + } +} + +impl ExportProfile for ServerPackProfile { + fn name(&self) -> &'static str { + "serverpack" + } + + fn rules(&self) -> Vec> { + vec![ + Box::new(super::rules::CopyProjectFilesRule), + Box::new(super::rules::CopyServerOverridesRule), + Box::new(super::rules::FilterClientOnlyRule), + ] + } +} + +pub fn create_profile(name: &str) -> Result> { + match name { + "curseforge" => Ok(Box::new(CurseForgeProfile)), + "modrinth" => Ok(Box::new(ModrinthProfile)), + "serverpack" => Ok(Box::new(ServerPackProfile)), + _ => Err(PakkerError::InvalidExportProfile(name.to_string())), + } +} diff --git a/src/export/rules.rs b/src/export/rules.rs new file mode 100644 index 0000000..da61951 --- /dev/null +++ b/src/export/rules.rs @@ -0,0 +1,849 @@ +use std::{fs, path::PathBuf}; + +use async_trait::async_trait; + +use crate::{ + error::Result, + model::{Config, LockFile, ProjectSide}, +}; + +#[derive(Clone)] +pub struct RuleContext { + pub lockfile: LockFile, + pub config: Config, + pub profile_config: Option, + pub export_path: PathBuf, + pub base_path: PathBuf, + pub ui: Option, +} + +pub trait Rule: Send + Sync { + fn matches(&self, context: &RuleContext) -> bool; + fn effects(&self) -> Vec>; +} + +#[async_trait] +pub trait Effect: Send + Sync { + fn name(&self) -> &str; + async fn execute(&self, context: &RuleContext) -> Result<()>; +} + +// Rule: Copy project files +pub struct CopyProjectFilesRule; + +impl Rule for CopyProjectFilesRule { + fn matches(&self, _context: &RuleContext) -> bool { + true + } + + fn effects(&self) -> Vec> { + vec![Box::new(CopyProjectFilesEffect)] + } +} + +pub struct CopyProjectFilesEffect; + +#[async_trait] +impl Effect for CopyProjectFilesEffect { + fn name(&self) -> &'static str { + "Downloading and copying mod files" + } + + async fn 
  execute(&self, context: &RuleContext) -> Result<()> {
    use crate::model::ResolvedCredentials;

    // Resolve credentials (env -> keyring -> Pakker file -> Pakku file).
    let credentials = ResolvedCredentials::load()?;
    let curseforge_key =
      credentials.curseforge_api_key().map(ToOwned::to_owned);
    let modrinth_token = credentials.modrinth_token().map(ToOwned::to_owned);

    let mods_dir = context.export_path.join("mods");
    fs::create_dir_all(&mods_dir)?;

    for project in &context.lockfile.projects {
      // Projects flagged as non-exportable are skipped entirely.
      if !project.export {
        continue;
      }

      // NOTE(review): only the first file of each project is handled here —
      // confirm multi-file projects are not expected in the lockfile.
      if let Some(file) = project.files.first() {
        let source = context.base_path.join("mods").join(&file.file_name);
        let dest = mods_dir.join(&file.file_name);

        if source.exists() {
          // Local copy already present in the pack's mods/ directory.
          fs::copy(&source, &dest)?;
          if let Some(ui) = &context.ui {
            ui.println(format!("fetched {} (local)", file.file_name));
          }
          log::info!("fetched {} (local)", file.file_name);
        } else if !file.url.is_empty() {
          // Not present locally: download into base mods/ first, then copy
          // into the export staging directory.
          download_file(
            &context.base_path,
            &file.file_name,
            &file.url,
            curseforge_key.as_deref(),
            modrinth_token.as_deref(),
          )
          .await?;

          // Copy into export mods/ after ensuring it is present in base mods/
          let downloaded = context.base_path.join("mods").join(&file.file_name);
          if downloaded.exists() {
            fs::copy(&downloaded, &dest)?;
            if let Some(ui) = &context.ui {
              ui.println(format!("fetched {} (download)", file.file_name));
            }
            log::info!("fetched {} (download)", file.file_name);
          } else {
            // Defensive: download_file returned Ok but left nothing on disk.
            return Err(crate::error::PakkerError::InternalError(format!(
              "download reported success but file is missing: {}",
              file.file_name
            )));
          }
        } else {
          // No local file and no URL to fetch it from — unrecoverable.
          return Err(crate::error::PakkerError::InternalError(format!(
            "missing mod file and no download url: {}",
            file.file_name
          )));
        }
      }
    }

    Ok(())
  }
}

/// How a failed download attempt should be handled.
#[derive(Debug)]
enum DownloadFailure {
  // 401/403 — do not retry; credentials are wrong or missing.
  Auth(String),
  // 429 or 5xx (or transport timeout/connect failure) — worth retrying.
  Retryable(String),
  // Anything else — give up immediately.
  Fatal(String),
}

/// Map an HTTP status (plus response body) onto a `DownloadFailure` class.
fn classify_response(
  status: reqwest::StatusCode,
  body: &str,
) -> DownloadFailure {
  if status
== reqwest::StatusCode::UNAUTHORIZED + || status == reqwest::StatusCode::FORBIDDEN + { + return DownloadFailure::Auth(format!( + "http {}: {}", + status.as_u16(), + body + )); + } + + if status == reqwest::StatusCode::TOO_MANY_REQUESTS + || status.is_server_error() + { + return DownloadFailure::Retryable(format!( + "http {}: {}", + status.as_u16(), + body + )); + } + + DownloadFailure::Fatal(format!("http {}: {}", status.as_u16(), body)) +} + +fn classify_reqwest_error(err: &reqwest::Error) -> DownloadFailure { + if err.is_timeout() || err.is_connect() { + return DownloadFailure::Retryable(err.to_string()); + } + + DownloadFailure::Fatal(err.to_string()) +} + +async fn download_file( + base_path: &std::path::Path, + file_name: &str, + url: &str, + curseforge_key: Option<&str>, + modrinth_token: Option<&str>, +) -> Result<()> { + if url.is_empty() { + return Err(crate::error::PakkerError::InternalError(format!( + "cannot download empty url for {file_name}" + ))); + } + + let client = reqwest::ClientBuilder::new() + .redirect(reqwest::redirect::Policy::default()) + .build()?; + + let mut request_builder = client.get(url); + + // Credentials are optional for direct file downloads; only attach them when + // available. Hard failures are determined via HTTP status codes (401/403) + // during the request. 
+ if url.contains("curseforge") { + if let Some(key) = curseforge_key { + request_builder = request_builder.header("x-api-key", key); + } + } else if url.contains("modrinth") + && let Some(token) = modrinth_token + { + request_builder = request_builder.header("Authorization", token); + } + + let attempts: usize = 5; + + for attempt in 1..=attempts { + let response = request_builder.try_clone().unwrap().send().await; + + match response { + Ok(resp) if resp.status().is_success() => { + let bytes = resp.bytes().await?; + let mods_dir = base_path.join("mods"); + fs::create_dir_all(&mods_dir)?; + let dest = mods_dir.join(file_name); + std::fs::write(&dest, &bytes)?; + return Ok(()); + }, + Ok(resp) => { + let status = resp.status(); + let body = resp.text().await.unwrap_or_default(); + match classify_response(status, &body) { + DownloadFailure::Auth(msg) => { + return Err(crate::error::PakkerError::InternalError(format!( + "authentication error while downloading {file_name}: {msg}" + ))); + }, + DownloadFailure::Retryable(msg) => { + if attempt == attempts { + return Err(crate::error::PakkerError::InternalError(format!( + "retryable download error (attempts exhausted) for \ + {file_name}: {msg}" + ))); + } + tokio::time::sleep(std::time::Duration::from_millis( + 250u64.saturating_mul(attempt as u64), + )) + .await; + }, + DownloadFailure::Fatal(msg) => { + return Err(crate::error::PakkerError::InternalError(format!( + "download failed for {file_name}: {msg}" + ))); + }, + } + }, + Err(err) => { + match classify_reqwest_error(&err) { + DownloadFailure::Retryable(msg) => { + if attempt == attempts { + return Err(crate::error::PakkerError::InternalError(format!( + "retryable download error (attempts exhausted) for \ + {file_name}: {msg}" + ))); + } + tokio::time::sleep(std::time::Duration::from_millis( + 250u64.saturating_mul(attempt as u64), + )) + .await; + }, + DownloadFailure::Fatal(msg) | DownloadFailure::Auth(msg) => { + return 
Err(crate::error::PakkerError::InternalError(format!( + "download error for {file_name}: {msg}" + ))); + }, + } + }, + } + } + + Err(crate::error::PakkerError::InternalError(format!( + "download failed for {file_name} (unknown error)" + ))) +} + +// Rule: Copy overrides +pub struct CopyOverridesRule; + +impl Rule for CopyOverridesRule { + fn matches(&self, _context: &RuleContext) -> bool { + true + } + + fn effects(&self) -> Vec> { + vec![Box::new(CopyOverridesEffect)] + } +} + +pub struct CopyOverridesEffect; + +#[async_trait] +impl Effect for CopyOverridesEffect { + fn name(&self) -> &'static str { + "Copying override files" + } + + async fn execute(&self, context: &RuleContext) -> Result<()> { + // Use profile-specific overrides if available, otherwise use global config + let overrides = if let Some(profile_config) = &context.profile_config { + profile_config.get_overrides(&context.config.overrides) + } else { + &context.config.overrides + }; + + for override_path in overrides { + let source = context.base_path.join(override_path); + if !source.exists() { + continue; + } + + let dest = context.export_path.join(override_path); + copy_recursive(&source, &dest)?; + } + + Ok(()) + } +} + +// Rule: Copy server overrides +pub struct CopyServerOverridesRule; + +impl Rule for CopyServerOverridesRule { + fn matches(&self, context: &RuleContext) -> bool { + context.config.server_overrides.is_some() + } + + fn effects(&self) -> Vec> { + vec![Box::new(CopyServerOverridesEffect)] + } +} + +pub struct CopyServerOverridesEffect; + +#[async_trait] +impl Effect for CopyServerOverridesEffect { + fn name(&self) -> &'static str { + "Copying server override files" + } + + async fn execute(&self, context: &RuleContext) -> Result<()> { + // Use profile-specific server overrides if available, otherwise use global + // config + let server_overrides = if let Some(profile_config) = &context.profile_config + { + profile_config + 
.get_server_overrides(context.config.server_overrides.as_ref()) + } else { + context.config.server_overrides.as_deref() + }; + + if let Some(overrides) = server_overrides { + for override_path in overrides { + let source = context.base_path.join(override_path); + if !source.exists() { + continue; + } + + let dest = context.export_path.join(override_path); + copy_recursive(&source, &dest)?; + } + } + + Ok(()) + } +} + +// Rule: Filter client-only projects +pub struct FilterClientOnlyRule; + +impl Rule for FilterClientOnlyRule { + fn matches(&self, _context: &RuleContext) -> bool { + true + } + + fn effects(&self) -> Vec> { + vec![Box::new(FilterClientOnlyEffect)] + } +} + +pub struct FilterClientOnlyEffect; + +#[async_trait] +impl Effect for FilterClientOnlyEffect { + fn name(&self) -> &'static str { + "Filtering client-only mods" + } + + async fn execute(&self, context: &RuleContext) -> Result<()> { + // Check if we should include client-only mods (profile config can override) + let include_client_only = context + .profile_config + .as_ref() + .and_then(|pc| pc.include_client_only) + .unwrap_or(false); + + if include_client_only { + // Don't filter anything + return Ok(()); + } + + let mods_dir = context.export_path.join("mods"); + + for project in &context.lockfile.projects { + if project.side == ProjectSide::Client + && let Some(file) = project.files.first() + { + let file_path = mods_dir.join(&file.file_name); + if file_path.exists() { + fs::remove_file(file_path)?; + } + } + } + + Ok(()) + } +} + +// Rule: Filter non-redistributable +pub struct FilterNonRedistributableRule; + +impl Rule for FilterNonRedistributableRule { + fn matches(&self, _context: &RuleContext) -> bool { + true + } + + fn effects(&self) -> Vec> { + vec![Box::new(FilterNonRedistributableEffect)] + } +} + +pub struct FilterNonRedistributableEffect; + +#[async_trait] +impl Effect for FilterNonRedistributableEffect { + fn name(&self) -> &'static str { + "Filtering non-redistributable mods" + } + 
+ async fn execute(&self, context: &RuleContext) -> Result<()> { + // Check if we should include non-redistributable mods (profile config can + // override) + let include_non_redistributable = context + .profile_config + .as_ref() + .and_then(|pc| pc.include_non_redistributable) + .unwrap_or(false); + + if include_non_redistributable { + // Don't filter anything + return Ok(()); + } + + let mods_dir = context.export_path.join("mods"); + + for project in &context.lockfile.projects { + if !project.redistributable + && let Some(file) = project.files.first() + { + let file_path = mods_dir.join(&file.file_name); + if file_path.exists() { + fs::remove_file(file_path)?; + log::info!("Filtered non-redistributable: {}", file.file_name); + } + } + } + + Ok(()) + } +} + +// Rule: Generate manifest +pub struct GenerateManifestRule { + platform: String, +} + +impl GenerateManifestRule { + pub fn curseforge() -> Self { + Self { + platform: "curseforge".to_string(), + } + } + + pub fn modrinth() -> Self { + Self { + platform: "modrinth".to_string(), + } + } +} + +impl Rule for GenerateManifestRule { + fn matches(&self, _context: &RuleContext) -> bool { + true + } + + fn effects(&self) -> Vec> { + vec![Box::new(GenerateManifestEffect { + platform: self.platform.clone(), + })] + } +} + +pub struct GenerateManifestEffect { + platform: String, +} + +#[async_trait] +impl Effect for GenerateManifestEffect { + fn name(&self) -> &'static str { + "Generating manifest file" + } + + async fn execute(&self, context: &RuleContext) -> Result<()> { + let (manifest, filename) = if self.platform == "curseforge" { + (generate_curseforge_manifest(context)?, "manifest.json") + } else if self.platform == "modrinth" { + (generate_modrinth_manifest(context)?, "modrinth.index.json") + } else { + return Ok(()); + }; + + let manifest_path = context.export_path.join(filename); + fs::write(manifest_path, manifest)?; + + Ok(()) + } +} + +fn generate_curseforge_manifest(context: &RuleContext) -> Result { + 
use serde_json::json; + + let files: Vec<_> = context + .lockfile + .projects + .iter() + .filter(|p| p.export) + .filter_map(|p| { + p.get_platform_id("curseforge").and_then(|id| { + p.files.first().map(|f| { + json!({ + "projectID": id.parse::().unwrap_or(0), + "fileID": f.id.parse::().unwrap_or(0), + "required": true + }) + }) + }) + }) + .collect(); + + let manifest = json!({ + "minecraft": { + "version": context.lockfile.mc_versions.first().unwrap_or(&"1.20.1".to_string()), + "modLoaders": context.lockfile.loaders.iter().map(|(name, version)| { + json!({ + "id": format!("{}-{}", name, version), + "primary": true + }) + }).collect::>() + }, + "manifestType": "minecraftModpack", + "manifestVersion": 1, + "name": context.config.name, + "version": context.config.version, + "author": context.config.author.clone().unwrap_or_default(), + "files": files, + "overrides": "overrides" + }); + + Ok(serde_json::to_string_pretty(&manifest)?) +} + +fn generate_modrinth_manifest(context: &RuleContext) -> Result { + use serde_json::json; + + let files: Vec<_> = context + .lockfile + .projects + .iter() + .filter(|p| p.export) + .filter_map(|p| { + p.get_platform_id("modrinth").and_then(|_id| { + p.files.first().map(|f| { + let mut env = serde_json::Map::new(); + match p.side { + crate::model::ProjectSide::Client => { + env.insert("client".to_string(), json!("required")); + env.insert("server".to_string(), json!("unsupported")); + }, + crate::model::ProjectSide::Server => { + env.insert("client".to_string(), json!("unsupported")); + env.insert("server".to_string(), json!("required")); + }, + crate::model::ProjectSide::Both => { + env.insert("client".to_string(), json!("required")); + env.insert("server".to_string(), json!("required")); + }, + } + + json!({ + "path": format!("mods/{}", f.file_name), + "hashes": f.hashes, + "env": env, + "downloads": [f.url.clone()], + "fileSize": f.size + }) + }) + }) + }) + .collect(); + + // Build dependencies dynamically based on loaders 
present + let mut dependencies = serde_json::Map::new(); + dependencies.insert( + "minecraft".to_string(), + json!( + context + .lockfile + .mc_versions + .first() + .unwrap_or(&"1.20.1".to_string()) + ), + ); + + for (loader_name, loader_version) in &context.lockfile.loaders { + let dep_key = format!("{loader_name}-loader"); + dependencies.insert(dep_key, json!(loader_version)); + } + + let manifest = json!({ + "formatVersion": 1, + "game": "minecraft", + "versionId": context.config.version, + "name": context.config.name, + "summary": context.config.description.clone().unwrap_or_default(), + "files": files, + "dependencies": dependencies + }); + + Ok(serde_json::to_string_pretty(&manifest)?) +} + +fn copy_recursive( + source: &std::path::Path, + dest: &std::path::Path, +) -> Result<()> { + if source.is_file() { + if let Some(parent) = dest.parent() { + fs::create_dir_all(parent)?; + } + fs::copy(source, dest)?; + } else if source.is_dir() { + fs::create_dir_all(dest)?; + for entry in fs::read_dir(source)? 
{ + let entry = entry?; + let target = dest.join(entry.file_name()); + copy_recursive(&entry.path(), &target)?; + } + } + + Ok(()) +} + +// Rule: Filter projects by platform +pub struct FilterByPlatformRule; + +impl Rule for FilterByPlatformRule { + fn matches(&self, context: &RuleContext) -> bool { + // Only match if profile config specifies a platform filter + context + .profile_config + .as_ref() + .and_then(|pc| pc.filter_platform.as_ref()) + .is_some() + } + + fn effects(&self) -> Vec> { + vec![Box::new(FilterByPlatformEffect)] + } +} + +pub struct FilterByPlatformEffect; + +#[async_trait] +impl Effect for FilterByPlatformEffect { + fn name(&self) -> &'static str { + "Filtering projects by platform availability" + } + + async fn execute(&self, context: &RuleContext) -> Result<()> { + if let Some(profile_config) = &context.profile_config + && let Some(platform) = &profile_config.filter_platform + { + let mods_dir = context.export_path.join("mods"); + + for project in &context.lockfile.projects { + // Check if project is available on the target platform + let has_platform = project.get_platform_id(platform).is_some(); + + if !has_platform { + // Remove the file if it was copied + if let Some(file) = project.files.first() { + let file_path = mods_dir.join(&file.file_name); + if file_path.exists() { + fs::remove_file(file_path)?; + log::info!( + "Filtered {} (not available on {})", + file.file_name, + platform + ); + } + } + } + } + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + use crate::{export::ProfileConfig, model::LockFile}; + + fn create_test_context(profile_config: Option) -> RuleContext { + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + RuleContext { + lockfile: LockFile { + target: None, + projects: vec![], + mc_versions: vec!["1.20.1".to_string()], + loaders, + lockfile_version: 1, + }, + config: Config { + name: "Test Pack".to_string(), + 
version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: Some(vec!["server-overrides".to_string()]), + client_overrides: Some(vec!["client-overrides".to_string()]), + paths: HashMap::new(), + projects: None, + export_profiles: None, + }, + profile_config, + export_path: PathBuf::from("/tmp/export"), + base_path: PathBuf::from("/tmp/base"), + ui: None, + } + } + + #[test] + fn test_filter_by_platform_rule_matches_with_platform_filter() { + let profile_config = ProfileConfig { + filter_platform: Some("modrinth".to_string()), + ..Default::default() + }; + let context = create_test_context(Some(profile_config)); + let rule = FilterByPlatformRule; + + assert!(rule.matches(&context)); + } + + #[test] + fn test_filter_by_platform_rule_no_match_without_platform_filter() { + let context = create_test_context(None); + let rule = FilterByPlatformRule; + + assert!(!rule.matches(&context)); + } + + #[test] + fn test_filter_by_platform_rule_no_match_with_empty_profile_config() { + let profile_config = ProfileConfig::default(); + let context = create_test_context(Some(profile_config)); + let rule = FilterByPlatformRule; + + assert!(!rule.matches(&context)); + } + + #[test] + fn test_copy_overrides_uses_profile_config() { + let profile_config = ProfileConfig { + overrides: Some(vec!["custom-overrides".to_string()]), + ..Default::default() + }; + let context = create_test_context(Some(profile_config)); + + assert!(context.profile_config.is_some()); + assert_eq!( + context + .profile_config + .as_ref() + .unwrap() + .overrides + .as_ref() + .unwrap(), + &vec!["custom-overrides".to_string()] + ); + } + + #[test] + fn test_filter_non_redistributable_respects_profile_config() { + let profile_config = ProfileConfig { + include_non_redistributable: Some(true), + ..Default::default() + }; + let context = create_test_context(Some(profile_config)); + + assert_eq!( + context + .profile_config + .as_ref() + .unwrap() + 
.include_non_redistributable, + Some(true) + ); + } + + #[test] + fn test_filter_client_only_respects_profile_config() { + let profile_config = ProfileConfig { + include_client_only: Some(true), + ..Default::default() + }; + let context = create_test_context(Some(profile_config)); + + assert_eq!( + context.profile_config.as_ref().unwrap().include_client_only, + Some(true) + ); + } + + #[test] + fn test_server_overrides_uses_profile_config() { + let profile_config = ProfileConfig { + server_overrides: Some(vec!["custom-server-overrides".to_string()]), + ..Default::default() + }; + let context = create_test_context(Some(profile_config)); + + let server_overrides = context + .profile_config + .as_ref() + .unwrap() + .get_server_overrides(context.config.server_overrides.as_ref()); + + assert!(server_overrides.is_some()); + assert_eq!(server_overrides.unwrap(), &["custom-server-overrides"]); + } + + #[test] + fn test_profile_config_fallback_to_global() { + let context = create_test_context(None); + + assert!(context.profile_config.is_none()); + assert_eq!(context.config.overrides, vec!["overrides"]); + } +} diff --git a/src/fetch.rs b/src/fetch.rs new file mode 100644 index 0000000..214b4eb --- /dev/null +++ b/src/fetch.rs @@ -0,0 +1,260 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use indicatif::{ProgressBar, ProgressStyle}; +use reqwest::Client; + +use crate::{ + error::{PakkerError, Result}, + model::{Config, LockFile, Project, ProjectFile}, + utils::verify_hash, +}; + +pub struct Fetcher { + client: Client, + base_path: PathBuf, +} + +pub struct FileFetcher { + client: Client, + base_path: PathBuf, +} + +impl Fetcher { + pub fn new>(base_path: P) -> Self { + Self { + client: Client::new(), + base_path: base_path.as_ref().to_path_buf(), + } + } + + pub async fn fetch_all( + &self, + lockfile: &LockFile, + config: &Config, + ) -> Result<()> { + let fetcher = FileFetcher { + client: self.client.clone(), + base_path: self.base_path.clone(), + }; + 
fetcher.fetch_all(lockfile, config).await + } + + pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> { + self.fetch_all(lockfile, config).await + } +} + +impl FileFetcher { + /// Fetch all project files according to lockfile + pub async fn fetch_all( + &self, + lockfile: &LockFile, + config: &Config, + ) -> Result<()> { + let exportable_projects: Vec<_> = + lockfile.projects.iter().filter(|p| p.export).collect(); + + let total = exportable_projects.len(); + let spinner = ProgressBar::new(total as u64); + spinner.set_style( + ProgressStyle::default_spinner() + .template("{spinner:.green} {msg}") + .unwrap(), + ); + + for (idx, project) in exportable_projects.iter().enumerate() { + let name = project + .name + .values() + .next() + .map_or("unknown", std::string::String::as_str); + + spinner.set_message(format!("Fetching {} ({}/{})", name, idx + 1, total)); + self.fetch_project(project, lockfile, config).await?; + } + + spinner.finish_with_message("All projects fetched"); + + // Sync overrides + self.sync_overrides(config)?; + + Ok(()) + } + + /// Fetch files for a single project + pub async fn fetch_project( + &self, + project: &Project, + lockfile: &LockFile, + config: &Config, + ) -> Result<()> { + // Select the best file for this project + let file = self.select_best_file(project, lockfile)?; + + // Determine target path + let target_path = self.get_target_path(project, file, config); + + // Check if file already exists and is valid + if target_path.exists() + && let Some((algo, expected_hash)) = file.hashes.iter().next() + && verify_hash(&target_path, algo, expected_hash)? + { + log::info!("File already exists and is valid: {}", file.file_name); + return Ok(()); + } + + // Download file + log::info!("Downloading: {}", file.file_name); + self.download_file(&file.url, &target_path).await?; + + // Verify hash + if let Some((algo, expected_hash)) = file.hashes.iter().next() + && !verify_hash(&target_path, algo, expected_hash)? 
+ { + fs::remove_file(&target_path)?; + return Err(PakkerError::HashMismatch { + file: file.file_name.clone(), + expected: expected_hash.clone(), + actual: "mismatch".to_string(), + }); + } + + log::info!("Successfully downloaded: {}", file.file_name); + Ok(()) + } + + /// Select the best file for a project based on constraints + fn select_best_file<'a>( + &self, + project: &'a Project, + lockfile: &LockFile, + ) -> Result<&'a ProjectFile> { + let compatible_files: Vec<&ProjectFile> = project + .files + .iter() + .filter(|f| { + f.is_compatible(&lockfile.mc_versions, &lockfile.get_loader_names()) + }) + .collect(); + + if compatible_files.is_empty() { + return Err(PakkerError::FileNotFound(format!( + "No compatible files for project: {:?}", + project.name.values().next() + ))); + } + + // Prefer release over beta over alpha + let best = compatible_files + .iter() + .max_by_key(|f| { + let type_priority = match f.release_type { + crate::model::ReleaseType::Release => 3, + crate::model::ReleaseType::Beta => 2, + crate::model::ReleaseType::Alpha => 1, + }; + (type_priority, &f.date_published) + }) + .unwrap(); + + Ok(best) + } + + /// Get target path for a project file + fn get_target_path( + &self, + project: &Project, + file: &ProjectFile, + config: &Config, + ) -> PathBuf { + let mut path = self.base_path.clone(); + + // Check for custom path in config + if let Some(custom_path) = config.paths.get(&project.r#type.to_string()) { + path.push(custom_path); + } else { + // Default path based on project type + path.push(self.get_default_path(&project.r#type)); + } + + // Add subpath if specified + if let Some(subpath) = &project.subpath { + path.push(subpath); + } + + path.push(&file.file_name); + path + } + + /// Get default path for project type + const fn get_default_path( + &self, + project_type: &crate::model::ProjectType, + ) -> &str { + match project_type { + crate::model::ProjectType::Mod => "mods", + crate::model::ProjectType::ResourcePack => "resourcepacks", + 
crate::model::ProjectType::DataPack => "datapacks", + crate::model::ProjectType::Shader => "shaderpacks", + crate::model::ProjectType::World => "saves", + } + } + + /// Download a file from URL to target path + async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> { + // Create parent directory + if let Some(parent) = target_path.parent() { + fs::create_dir_all(parent)?; + } + + // Download file + let response = self.client.get(url).send().await?; + + if !response.status().is_success() { + return Err(PakkerError::DownloadFailed(url.to_string())); + } + + let bytes = response.bytes().await?; + + // Write to temporary file first (atomic write) + let temp_path = target_path.with_extension("tmp"); + fs::write(&temp_path, bytes)?; + fs::rename(temp_path, target_path)?; + + Ok(()) + } + + /// Sync override directories + fn sync_overrides(&self, config: &Config) -> Result<()> { + for override_path in &config.overrides { + let source = self.base_path.join(override_path); + if !source.exists() { + continue; + } + + // Copy override files to target locations + self.copy_recursive(&source, &self.base_path)?; + } + + Ok(()) + } + + /// Copy directory recursively + fn copy_recursive(&self, source: &Path, dest: &Path) -> Result<()> { + if source.is_file() { + fs::copy(source, dest)?; + } else if source.is_dir() { + fs::create_dir_all(dest)?; + for entry in fs::read_dir(source)? 
{ + let entry = entry?; + let target = dest.join(entry.file_name()); + self.copy_recursive(&entry.path(), &target)?; + } + } + + Ok(()) + } +} diff --git a/src/git/mod.rs b/src/git/mod.rs new file mode 100644 index 0000000..f2b93a9 --- /dev/null +++ b/src/git/mod.rs @@ -0,0 +1,589 @@ +use std::path::Path; + +use git2::{ + Cred, + FetchOptions, + Oid, + RemoteCallbacks, + Repository, + ResetType, + build::RepoBuilder, +}; + +use crate::error::{PakkerError, Result}; + +/// Check if a directory is a Git repository +pub fn is_git_repository>(path: P) -> bool { + Repository::open(path).is_ok() +} + +/// Get the URL of a remote +pub fn get_remote_url>( + path: P, + remote_name: &str, +) -> Result { + let repo = Repository::open(path)?; + let remote = repo.find_remote(remote_name).map_err(|e| { + PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}")) + })?; + + remote + .url() + .ok_or_else(|| { + PakkerError::GitError("Remote URL is not valid UTF-8".to_string()) + }) + .map(std::string::ToString::to_string) +} + +pub fn get_current_commit_sha>( + path: P, + ref_name: Option<&str>, +) -> Result { + let repo = Repository::open(path)?; + + let commit = if let Some(ref_name) = ref_name { + let obj = repo.revparse_single(ref_name)?; + obj.peel_to_commit()? + } else { + let head = repo.head()?; + head.peel_to_commit()? 
+ }; + + Ok(commit.id().to_string()) +} + +/// Get the commit SHA for a specific ref (alias for compatibility) +pub fn get_commit_sha>( + path: P, + ref_name: &str, +) -> Result { + get_current_commit_sha(path, Some(ref_name)) +} + +/// Clone a Git repository +pub fn clone_repository>( + url: &str, + target_path: P, + ref_name: &str, + progress_callback: Option< + Box) + 'static>, + >, +) -> Result { + let target_path = target_path.as_ref(); + + // Check if target directory exists and is not empty + if target_path.exists() { + let is_empty = target_path.read_dir()?.next().is_none(); + if !is_empty { + return Err(PakkerError::GitError(format!( + "Target directory is not empty: {}", + target_path.display() + ))); + } + } + + let mut callbacks = RemoteCallbacks::new(); + + // Setup SSH key authentication + callbacks.credentials(|_url, username_from_url, _allowed_types| { + let username = username_from_url.unwrap_or("git"); + Cred::ssh_key_from_agent(username) + }); + + // Setup progress callback if provided + if let Some(mut progress_fn) = progress_callback { + callbacks.transfer_progress(move |stats| { + progress_fn( + stats.received_objects(), + stats.total_objects(), + Some(stats.received_bytes()), + ); + true + }); + } + + let mut fetch_options = FetchOptions::new(); + fetch_options.remote_callbacks(callbacks); + + let mut builder = RepoBuilder::new(); + builder.fetch_options(fetch_options); + + // Perform the clone. Avoid forcing a branch at clone time because some + // local repositories (or bare repos) may not expose the exact remote + // tracking refs that libgit2 expects. We'll attempt to set the desired + // ref after cloning when possible. + let repo = builder.clone(url, target_path).map_err(|e| { + PakkerError::GitError(format!("Failed to clone repository '{url}': {e}")) + })?; + + // If a branch/ref name was requested, try to make HEAD point to it. + // Prefer local branch refs (refs/heads/*), then tags, then raw rev-parse. 
+ let branch_ref = format!("refs/heads/{ref_name}"); + if repo.find_reference(&branch_ref).is_ok() { + repo.set_head(&branch_ref).map_err(|e| { + PakkerError::GitError(format!( + "Cloned repository but failed to set HEAD to {branch_ref}: {e}" + )) + })?; + } else if let Ok(obj) = repo.revparse_single(ref_name) { + // Create a detached HEAD pointing to the commit/tag + let commit = obj.peel_to_commit().map_err(|e| { + PakkerError::GitError(format!( + "Resolved ref '{ref_name}' but could not peel to commit: {e}" + )) + })?; + repo.set_head_detached(commit.id()).map_err(|e| { + PakkerError::GitError(format!( + "Cloned repository but failed to set detached HEAD to {ref_name}: {e}" + )) + })?; + } + + Ok(repo) +} + +/// Fetch updates from a remote +pub fn fetch_updates>( + path: P, + remote_name: &str, + ref_name: &str, + progress_callback: Option< + Box) + 'static>, + >, +) -> Result<()> { + let repo = Repository::open(path)?; + let mut remote = repo.find_remote(remote_name).map_err(|e| { + PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}")) + })?; + + let mut callbacks = RemoteCallbacks::new(); + + // Setup SSH key authentication + callbacks.credentials(|_url, username_from_url, _allowed_types| { + let username = username_from_url.unwrap_or("git"); + Cred::ssh_key_from_agent(username) + }); + + // Setup progress callback if provided + if let Some(mut progress_fn) = progress_callback { + callbacks.transfer_progress(move |stats| { + progress_fn( + stats.received_objects(), + stats.total_objects(), + Some(stats.received_bytes()), + ); + true + }); + } + + let mut fetch_options = FetchOptions::new(); + fetch_options.remote_callbacks(callbacks); + + remote + .fetch(&[ref_name], Some(&mut fetch_options), None) + .map_err(|e| { + PakkerError::GitError(format!("Failed to fetch updates: {e}")) + })?; + + Ok(()) +} + +/// Hard reset to a specific ref (like git reset --hard) +pub fn reset_to_ref>( + path: P, + remote_name: &str, + ref_name: &str, +) -> 
Result<()> { + let repo = Repository::open(path)?; + + // Construct the full ref path (e.g., "origin/main") + let full_ref = format!("{remote_name}/{ref_name}"); + + let obj = repo.revparse_single(&full_ref).map_err(|e| { + PakkerError::GitError(format!("Failed to find ref '{full_ref}': {e}")) + })?; + + let commit = obj.peel_to_commit().map_err(|e| { + PakkerError::GitError(format!("Failed to resolve ref to commit: {e}")) + })?; + + repo + .reset(commit.as_object(), ResetType::Hard, None) + .map_err(|e| { + PakkerError::GitError(format!("Failed to reset to ref: {e}")) + })?; + + Ok(()) +} + +/// Determine the ref type (branch, tag, or commit) +pub fn resolve_ref_type>( + path: P, + ref_name: &str, +) -> Result { + let repo = Repository::open(path)?; + + // Check if it's a branch + if repo.find_branch(ref_name, git2::BranchType::Local).is_ok() + || repo.find_branch(ref_name, git2::BranchType::Remote).is_ok() + { + return Ok(crate::model::fork::RefType::Branch); + } + + // Check if it's a tag + let tag_ref = format!("refs/tags/{ref_name}"); + if repo.find_reference(&tag_ref).is_ok() { + return Ok(crate::model::fork::RefType::Tag); + } + + // Try to resolve as commit SHA + if repo.revparse_single(ref_name).is_ok() { + return Ok(crate::model::fork::RefType::Commit); + } + + Err(PakkerError::GitError(format!( + "Could not resolve ref '{ref_name}' as branch, tag, or commit" + ))) +} + +/// Get the primary remote URL for a repository at path. Prefer 'origin', +/// otherwise first remote with a URL. 
+pub fn get_primary_remote_url>(path: P) -> Result { + let repo = Repository::open(path)?; + if let Ok(remote) = repo.find_remote("origin") + && let Some(url) = remote.url() + { + return Ok(url.to_string()); + } + + // Fallback: first remote with a URL + if let Ok(remotes) = repo.remotes() { + for name in remotes.iter().flatten() { + if let Ok(remote) = repo.find_remote(name) + && let Some(url) = remote.url() + { + return Ok(url.to_string()); + } + } + } + + Err(PakkerError::GitError( + "No remote with a valid URL found on repository".to_string(), + )) +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum VcsType { + Git, + Jujutsu, + None, +} + +/// Detect the VCS type for a given path +pub fn detect_vcs_type>(path: P) -> VcsType { + let path = path.as_ref(); + + // Check for jujutsu first (higher priority) + if let Ok(output) = std::process::Command::new("jj") + .args(["root"]) + .current_dir(path) + .output() + { + if output.status.success() { + return VcsType::Jujutsu; + } + } + + // Check for git + if let Ok(output) = std::process::Command::new("git") + .args(["rev-parse", "--show-toplevel"]) + .current_dir(path) + .output() + { + if output.status.success() { + return VcsType::Git; + } + } + + VcsType::None +} + +/// Check whether the repository has uncommitted changes (working tree or index) +pub fn repo_has_uncommitted_changes>(path: P) -> Result { + let vcs_type = detect_vcs_type(&path); + + match vcs_type { + VcsType::Git => { + let repo = Repository::open(path)?; + let statuses = repo.statuses(None)?; + for entry in statuses.iter() { + let s = entry.status(); + // Consider any change in index or working tree as uncommitted + if !(s.is_empty()) { + return Ok(true); + } + } + Ok(false) + }, + VcsType::Jujutsu => { + // Use jj status to check for changes - look for "The working copy has no + // changes" + let output = std::process::Command::new("jj") + .args(["status"]) + .current_dir(path) + .output() + .map_err(|e| { + 
PakkerError::GitError(format!("Failed to run jj status: {}", e)) + })?; + + let output_str = String::from_utf8_lossy(&output.stdout); + // Check if the output indicates no changes + Ok(!output_str.contains("The working copy has no changes")) + }, + VcsType::None => Ok(false), + } +} + +/// Attempt a lightweight fetch of a single ref from the named remote into the +/// repository at path +pub fn fetch_remote_light>( + path: P, + remote_name: &str, + ref_name: &str, +) -> Result<()> { + let repo = Repository::open(path)?; + let mut remote = repo.find_remote(remote_name).map_err(|e| { + PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}")) + })?; + + let mut callbacks = RemoteCallbacks::new(); + callbacks.credentials(|_url, username_from_url, _allowed_types| { + let username = username_from_url.unwrap_or("git"); + Cred::ssh_key_from_agent(username) + }); + + let mut fetch_options = FetchOptions::new(); + fetch_options.remote_callbacks(callbacks); + + // Build a refspec that attempts to fetch the branch into the remote-tracking + // namespace + let fetch_refspec = if ref_name.starts_with("refs/") { + ref_name.to_string() + } else { + format!("refs/heads/{ref_name}:refs/remotes/{remote_name}/{ref_name}") + }; + + remote + .fetch(&[&fetch_refspec], Some(&mut fetch_options), None) + .map_err(|e| { + PakkerError::GitError(format!("Failed lightweight fetch: {e}")) + })?; + + Ok(()) +} + +/// Resolve a ref name to an Oid (commit) +pub fn get_ref_oid>(path: P, ref_name: &str) -> Result { + let repo = Repository::open(path)?; + let obj = repo.revparse_single(ref_name).map_err(|e| { + PakkerError::GitError(format!("Failed to resolve ref '{ref_name}': {e}")) + })?; + let commit = obj.peel_to_commit().map_err(|e| { + PakkerError::GitError(format!( + "Failed to peel ref '{ref_name}' to commit: {e}" + )) + })?; + Ok(commit.id()) +} + +/// Count commits reachable from `oid` in `repo` +fn count_commits(repo: &Repository, oid: Oid) -> Result { + let mut revwalk = 
repo.revwalk().map_err(|e| { + PakkerError::GitError(format!( + "Failed to create revwalk for counting commits: {e}" + )) + })?; + revwalk.push(oid).map_err(|e| { + PakkerError::GitError(format!( + "Failed to start revwalk from oid {oid}: {e}" + )) + })?; + let mut count = 0usize; + for _ in revwalk { + count += 1; + } + Ok(count) +} + +/// Compute how many commits `local_ref` is ahead/behind `remote_ref` +pub fn ahead_behind>( + path: P, + local_ref: &str, + remote_ref: &str, +) -> Result<(usize, usize)> { + let repo = Repository::open(&path)?; + + // Try to resolve local OID + let local_oid = match get_ref_oid(&path, local_ref) { + Ok(oid) => oid, + Err(e) => { + return Err(PakkerError::GitError(format!( + "Local ref not found '{local_ref}': {e}" + ))); + }, + }; + + // Try to resolve remote OID. If remote ref is missing, consider remote empty + // and count all commits in local as "ahead". + if let Ok(remote_oid) = get_ref_oid(&path, remote_ref) { + let (ahead, behind) = repo + .graph_ahead_behind(local_oid, remote_oid) + .map_err(|e| { + PakkerError::GitError(format!("Failed to compute ahead/behind: {e}")) + })?; + Ok((ahead, behind)) + } else { + // Remote ref missing — count commits reachable from local + let ahead_count = count_commits(&repo, local_oid)?; + Ok((ahead_count, 0)) + } +} + +/// Set the URL for a remote in the repository +pub fn set_remote_url>( + path: P, + remote_name: &str, + url: &str, +) -> Result<()> { + let repo = Repository::open(path)?; + repo.remote_set_url(remote_name, url).map_err(|e| { + PakkerError::GitError(format!("Failed to set remote URL: {e}")) + })?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use std::{fs::File, io::Write}; + + use git2::{Repository, Signature}; + use tempfile::tempdir; + + use super::*; + + fn init_bare_repo(path: &std::path::Path) -> Repository { + Repository::init_bare(path).expect("init bare") + } + + fn init_repo_with_commit( + path: &std::path::Path, + file_name: &str, + content: &str, + branch: &str, + 
) -> Repository { + let repo = Repository::init(path).expect("init repo"); + let sig = Signature::now("Test", "test@example.com").unwrap(); + let mut index = repo.index().unwrap(); + let file_path = path.join(file_name); + let mut f = File::create(&file_path).unwrap(); + writeln!(f, "{}", content).unwrap(); + drop(f); + index.add_path(std::path::Path::new(file_name)).unwrap(); + let tree_id = index.write_tree().unwrap(); + // limit the scope of tree to avoid borrow while moving repo + { + let tree = repo.find_tree(tree_id).unwrap(); + let _commit_id = repo + .commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[]) + .unwrap(); + } + // Create branch pointing at HEAD and set HEAD + let head_oid = repo.refname_to_id("HEAD").unwrap(); + repo + .branch(branch, &repo.find_commit(head_oid).unwrap(), true) + .unwrap(); + repo.set_head(&format!("refs/heads/{}", branch)).unwrap(); + repo + } + + #[test] + fn test_is_git_repository_and_remote_url() { + let tmp = tempdir().unwrap(); + let repo_path = tmp.path().join("repo"); + let _repo = init_repo_with_commit(&repo_path, "a.txt", "hello", "master"); + assert!(is_git_repository(&repo_path)); + } + + #[test] + fn test_fetch_remote_light_and_ahead_behind() { + let tmp = tempdir().unwrap(); + let bare_path = tmp.path().join("bare.git"); + let _bare = init_bare_repo(&bare_path); + + let work_path = tmp.path().join("work"); + let repo = init_repo_with_commit(&work_path, "a.txt", "hello", "master"); + + // Add bare remote and push + repo.remote("origin", bare_path.to_str().unwrap()).unwrap(); + let mut remote = repo.find_remote("origin").unwrap(); + remote.connect(git2::Direction::Push).unwrap(); + remote + .push(&["refs/heads/master:refs/heads/master"], None) + .unwrap(); + + // Ensure bare HEAD points to master + let bare_repo = Repository::open(&bare_path).unwrap(); + bare_repo.set_head("refs/heads/master").unwrap(); + + // Now test fetch_remote_light against the work repo (fetch from origin into + // work should succeed) + 
assert!(fetch_remote_light(&work_path, "origin", "master").is_ok()); + + // Test ahead_behind with remote tracking ref + let (ahead, behind) = ahead_behind( + &work_path, + "refs/heads/master", + "refs/remotes/origin/master", + ) + .unwrap(); + assert_eq!(ahead, 0); + assert_eq!(behind, 0); + } + + #[test] + fn test_clone_repository_and_origin_rewrite_integration() { + let tmp = tempdir().unwrap(); + let bare_path = tmp.path().join("upstream.git"); + let _bare = init_bare_repo(&bare_path); + + let work_path = tmp.path().join("workrepo"); + let repo = init_repo_with_commit(&work_path, "b.txt", "hello2", "master"); + + // Add remote upstream and push + repo.remote("origin", bare_path.to_str().unwrap()).unwrap(); + let mut remote = repo.find_remote("origin").unwrap(); + remote.connect(git2::Direction::Push).unwrap(); + remote + .push(&["refs/heads/master:refs/heads/master"], None) + .unwrap(); + let bare_repo = Repository::open(&bare_path).unwrap(); + bare_repo.set_head("refs/heads/master").unwrap(); + + // Now clone from the local path into a new dir + let clone_target = tmp.path().join("clone_target"); + let _cloned = clone_repository( + bare_path.to_str().unwrap(), + &clone_target, + "master", + None, + ) + .expect("clone"); + + // After cloning from a local path, simulate rewriting origin to the + // upstream network URL + set_remote_url(&clone_target, "origin", "https://example.com/upstream.git") + .unwrap(); + let url = get_remote_url(&clone_target, "origin").unwrap(); + assert_eq!(url, "https://example.com/upstream.git"); + } +} diff --git a/src/ipc.rs b/src/ipc.rs new file mode 100644 index 0000000..2039ec9 --- /dev/null +++ b/src/ipc.rs @@ -0,0 +1,1326 @@ +//! IPC coordination for concurrent Pakker operations. +//! +//! Uses tmpfs for inter-process coordination with cryptographic hashing +//! of modpack content (like Nix store paths) to identify unique modpacks. +//! +//! IPC path: `/run/user//pakker//ops.json` +//! +//! 
The hash is derived from the modpack's `parentLockHash` in pakku.json, +//! ensuring the same modpack in different directories/sessions shares IPC. + +use std::{ + fs::{self, File, OpenOptions}, + io::Write, + os::unix::{fs::PermissionsExt, io::AsRawFd}, + path::PathBuf, + time::{Duration, SystemTime}, +}; + +use libc::{LOCK_EX, LOCK_UN, flock}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum IpcError { + #[error("operation already in progress: {0}")] + OperationInProgress(String), + + #[error("invalid ops file format: {0}")] + InvalidFormat(String), + + #[error("operation not found: {0}")] + OperationNotFound(String), + + #[error("failed to create IPC directory: {0}")] + IpcDirCreationFailed(String), + + #[error("timeout waiting for operation: {0}")] + Timeout(String), + + #[error("failed to read pakku.json for modpack hash: {0}")] + PakkuJsonReadFailed(String), +} + +impl From for IpcError { + fn from(e: std::io::Error) -> Self { + Self::InvalidFormat(e.to_string()) + } +} + +impl From for IpcError { + fn from(e: serde_json::Error) -> Self { + Self::InvalidFormat(e.to_string()) + } +} + +/// Represents an ongoing operation tracked in IPC +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct OngoingOperation { + pub id: String, + pub r#type: OperationType, + pub pid: u32, + pub started_at: u64, + pub status: OperationStatus, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum OperationType { + Fetch, + Export, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum OperationStatus { + Running, + Completed, + Failed, +} + +/// Guard that releases an advisory lock when dropped +struct FileLock { + file: File, +} + +impl Drop for FileLock { + fn drop(&mut self) { + let _ = unsafe { flock(self.file.as_raw_fd(), LOCK_UN) }; + } +} + +/// IPC coordination for concurrent Pakker operations 
/// IPC coordinator for concurrent Pakker operations on one modpack.
///
/// Cloneable handle; all state lives in the tmpfs-backed `ops.json`,
/// so clones coordinate through the same file.
#[derive(Clone)]
pub struct IpcCoordinator {
  // Path to the shared ops.json inside the per-modpack IPC directory.
  ops_file: PathBuf,
}

impl IpcCoordinator {
  /// Get the base IPC directory in tmpfs.
  ///
  /// Uses `$XDG_RUNTIME_DIR/pakker` when available (per-user tmpfs),
  /// otherwise falls back to the world-shared `/tmp/pakker`.
  fn get_ipc_base_dir() -> PathBuf {
    if let Ok(runtime) = std::env::var("XDG_RUNTIME_DIR") {
      PathBuf::from(runtime).join("pakker")
    } else {
      PathBuf::from("/tmp/pakker")
    }
  }

  /// Extract the modpack hash from pakku.json's `parentLockHash` field.
  /// This is the authoritative content hash for the modpack (Nix-style).
  ///
  /// # Errors
  ///
  /// Returns `IpcError::PakkuJsonReadFailed` when `pakku.json` is missing,
  /// unreadable, not valid JSON, lacks `pakku.parentLockHash`, or the value
  /// is not a 64-character hex (SHA256) string.
  fn get_modpack_hash(working_dir: &PathBuf) -> Result<String, IpcError> {
    let pakku_path = working_dir.join("pakku.json");

    if !pakku_path.exists() {
      return Err(IpcError::PakkuJsonReadFailed(
        "pakku.json not found in working directory".to_string(),
      ));
    }

    let content = fs::read_to_string(&pakku_path)
      .map_err(|e| IpcError::PakkuJsonReadFailed(e.to_string()))?;

    // Parse pakku.json and extract the nested pakku.parentLockHash value.
    let pakku: serde_json::Value = serde_json::from_str(&content)
      .map_err(|e| IpcError::PakkuJsonReadFailed(e.to_string()))?;

    let hash = pakku
      .get("pakku")
      .and_then(|p| p.get("parentLockHash"))
      .and_then(|v| v.as_str())
      .ok_or_else(|| {
        IpcError::PakkuJsonReadFailed(
          "parentLockHash not found in pakku.json".to_string(),
        )
      })?
      .to_string();

    // Validate it's a valid hex string (SHA256 = 64 hex chars).
    if hash.len() != 64 || !hash.chars().all(|c| c.is_ascii_hexdigit()) {
      return Err(IpcError::PakkuJsonReadFailed(
        "parentLockHash is not a valid SHA256 hash".to_string(),
      ));
    }

    Ok(hash)
  }

  /// Create a new IPC coordinator for the given modpack directory.
  /// Uses parentLockHash from pakku.json to identify the modpack.
+ pub fn new(working_dir: &PathBuf) -> Result { + let modpack_hash = Self::get_modpack_hash(working_dir)?; + let ipc_base = Self::get_ipc_base_dir(); + let ipc_dir = ipc_base.join(&modpack_hash); + + // Create IPC directory with restricted permissions + if let Err(e) = fs::create_dir_all(&ipc_dir) + && !ipc_dir.exists() + { + return Err(IpcError::IpcDirCreationFailed(e.to_string())); + } + + if ipc_dir.exists() { + // Set permissions to 700 (owner only) + if let Ok(metadata) = fs::metadata(&ipc_dir) + && metadata.permissions().mode() != 0o700 + { + let mut perms = metadata.permissions(); + perms.set_mode(0o700); + let _ = fs::set_permissions(&ipc_dir, perms); + } + } + + let ops_file = ipc_dir.join("ops.json"); + + Ok(Self { ops_file }) + } + + /// Create a new IPC coordinator for testing with guaranteed unique isolation. + /// Uses the absolute path of the working directory to ensure no collisions. + /// Acquire an exclusive advisory lock on the ops file for atomic operations. + /// Returns a guard that releases the lock on drop. 
+ fn lock_ops_file(&self) -> Result { + log::debug!("Acquiring file lock on {:?}", self.ops_file); + + // Open or create the ops file with read/write access + let file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&self.ops_file) + .map_err(|e| IpcError::InvalidFormat(e.to_string()))?; + + // Set permissions to 600 + let mut perms = fs::metadata(&self.ops_file)?.permissions(); + perms.set_mode(0o600); + fs::set_permissions(&self.ops_file, perms)?; + + // Acquire exclusive lock using flock + unsafe { + if flock(file.as_raw_fd(), LOCK_EX) != 0 { + log::warn!("Failed to acquire file lock on {:?}", self.ops_file); + return Err(IpcError::InvalidFormat( + "failed to acquire file lock".to_string(), + )); + } + } + + log::debug!("File lock acquired on {:?}", self.ops_file); + + // Return a guard that releases the lock on drop + Ok(FileLock { file }) + } + + /// Load all ongoing operations from IPC + pub fn load_operations(&self) -> Result, IpcError> { + if !self.ops_file.exists() { + return Ok(Vec::new()); + } + + let content = fs::read_to_string(&self.ops_file) + .map_err(|e| IpcError::InvalidFormat(e.to_string()))?; + + // Handle empty file case + if content.trim().is_empty() { + return Ok(Vec::new()); + } + + serde_json::from_str(&content) + .map_err(|e| IpcError::InvalidFormat(e.to_string())) + } + + /// Save operations to IPC + fn save_operations( + &self, + operations: &[OngoingOperation], + ) -> Result<(), IpcError> { + let content = serde_json::to_string_pretty(operations) + .map_err(|e| IpcError::InvalidFormat(e.to_string()))?; + + let mut file = File::create(&self.ops_file) + .map_err(|e| IpcError::InvalidFormat(e.to_string()))?; + file + .write_all(content.as_bytes()) + .map_err(|e| IpcError::InvalidFormat(e.to_string()))?; + + // Set file permissions to 600 + let mut perms = fs::metadata(&self.ops_file)?.permissions(); + perms.set_mode(0o600); + fs::set_permissions(&self.ops_file, perms)?; + + Ok(()) + } + + /// Register a new 
operation, returns error if one of the same type is already + /// running Uses advisory locking to prevent TOCTOU race conditions between + /// processes + pub fn register_operation( + &self, + operation_type: OperationType, + ) -> Result { + log::debug!("Registering {operation_type:?} operation"); + + // Acquire exclusive lock before load-check-save sequence + let _lock = self.lock_ops_file()?; + + let mut operations = self.load_operations()?; + log::debug!("Loaded {} existing operations", operations.len()); + + // Clean up stale operations (older than 10 minutes) + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or(Duration::ZERO) + .as_secs(); + + let stale_count = operations + .iter() + .filter(|op| { + op.status == OperationStatus::Running + && now.saturating_sub(op.started_at) > 600 + }) + .count(); + + operations.retain(|op| { + if op.status == OperationStatus::Running + && now.saturating_sub(op.started_at) > 600 + { + false // Remove stale operations + } else { + true + } + }); + + if stale_count > 0 { + log::info!("Cleaned up {stale_count} stale operations"); + } + + // Check for conflicting operations + let conflicting: Vec<_> = operations + .iter() + .filter(|op| { + op.r#type == operation_type && op.status == OperationStatus::Running + }) + .collect(); + + if !conflicting.is_empty() { + log::debug!("Found {} conflicting operations", conflicting.len()); + return Err(IpcError::OperationInProgress(format!( + "{} operation already in progress (PID: {})", + operation_type.as_str(), + conflicting[0].pid + ))); + } + + log::debug!("No conflicts found, registering new operation"); + + // Generate operation ID with nanosecond timestamp and PID for uniqueness + // Using nanoseconds instead of milliseconds to prevent ID collisions + let now_ns = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or(Duration::ZERO) + .as_nanos(); + let rand_suffix = rand::random::(); + let id = format!( + "{}-{}-{:x}-{}", + 
operation_type.as_str(), + now_ns, + rand_suffix, + std::process::id() + ); + + log::debug!("Generated operation ID: {id}"); + + let op_type_str = operation_type.as_str(); + + // Register new operation + let new_op = OngoingOperation { + id: id.clone(), + r#type: operation_type, + pid: std::process::id(), + started_at: now, + status: OperationStatus::Running, + }; + + operations.push(new_op); + self.save_operations(&operations)?; + + log::info!("Registered {op_type_str} operation with ID: {id}"); + + Ok(id) + } + + /// Mark an operation as completed + pub fn complete_operation(&self, operation_id: &str) -> Result<(), IpcError> { + log::debug!("Completing operation: {operation_id}"); + + // Acquire exclusive lock for atomic read-modify-write + let _lock = self.lock_ops_file()?; + + let mut operations = self.load_operations()?; + + let mut found = false; + for op in &mut operations { + if op.id == operation_id { + op.status = OperationStatus::Completed; + found = true; + log::info!("Marked operation {operation_id} as completed"); + break; + } + } + + if !found { + log::warn!("Operation not found: {operation_id}"); + return Err(IpcError::OperationNotFound(operation_id.to_string())); + } + + self.save_operations(&operations)?; + Ok(()) + } + + /// Wait for any conflicting operations to complete + pub async fn wait_for_conflicts( + &self, + operation_type: OperationType, + timeout: Duration, + ) -> Result<(), IpcError> { + let start = SystemTime::now(); + + loop { + let operations = self.load_operations()?; + let conflicts: Vec<_> = operations + .iter() + .filter(|op| { + op.r#type == operation_type && op.status == OperationStatus::Running + }) + .collect(); + + if conflicts.is_empty() { + return Ok(()); + } + + if start.elapsed().unwrap_or(Duration::ZERO) > timeout { + return Err(IpcError::Timeout(format!( + "timeout waiting for {} operation(s) to complete", + conflicts.len() + ))); + } + + tokio::time::sleep(Duration::from_millis(500)).await; + } + } + + /// Check if 
there are any running operations of the given type + pub fn has_running_operation(&self, operation_type: OperationType) -> bool { + let operations = self.load_operations().unwrap_or_default(); + operations.iter().any(|op| { + op.r#type == operation_type && op.status == OperationStatus::Running + }) + } + + /// Get list of running operations of a specific type + pub fn get_running_operations( + &self, + operation_type: OperationType, + ) -> Vec { + let operations = self.load_operations().unwrap_or_default(); + operations + .into_iter() + .filter(|op| { + op.r#type == operation_type && op.status == OperationStatus::Running + }) + .collect() + } +} + +impl OperationType { + pub const fn as_str(&self) -> &'static str { + match self { + Self::Fetch => "fetch", + Self::Export => "export", + } + } +} + +/// RAII guard for registering an operation +pub struct OperationGuard { + coordinator: IpcCoordinator, + operation_id: String, +} + +impl OperationGuard { + pub const fn new(coordinator: IpcCoordinator, operation_id: String) -> Self { + Self { + coordinator, + operation_id, + } + } +} + +impl Drop for OperationGuard { + fn drop(&mut self) { + // On drop, mark the operation as completed + // Note: We ignore errors here since we might be panicking + let _ = self.coordinator.complete_operation(&self.operation_id); + } +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + + fn create_test_modpack(files: &[(&str, &str)]) -> TempDir { + // Generate a unique parentLockHash for each test to ensure test isolation + let unique_hash = format!("{:064x}", rand::random::()); + + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + for (path, content) in files { + let full_path = temp_dir.path().join(path); + if let Some(parent) = full_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + fs::write(&full_path, content).unwrap(); + } + + // Write pakku.json with unique parentLockHash (nested 
under "pakku" key) + let pakku_content = + format!(r#"{{"pakku": {{"parentLockHash": "{}"}}}}"#, unique_hash); + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + temp_dir + } + + fn create_test_modpack_with_hash( + files: &[(&str, &str)], + hash: &str, + ) -> TempDir { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + for (path, content) in files { + let full_path = temp_dir.path().join(path); + if let Some(parent) = full_path.parent() { + fs::create_dir_all(parent).unwrap(); + } + fs::write(&full_path, content).unwrap(); + } + + // Write pakku.json with specified parentLockHash (nested under "pakku" key) + let pakku_content = + format!(r#"{{"pakku": {{"parentLockHash": "{}"}}}}"#, hash); + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + temp_dir + } + #[tokio::test] + async fn test_operation_type_as_str() { + assert_eq!(OperationType::Fetch.as_str(), "fetch"); + assert_eq!(OperationType::Export.as_str(), "export"); + } + + #[test] + fn test_get_modpack_hash_valid() { + // Use create_test_modpack_with_hash for specific hash values + let temp_dir = create_test_modpack_with_hash( + &[("mod.jar", "content")], + "cfe85e0e7e7aa0922d30d8faad071e3a4126cb78b5f0f792f191e90a295aa2c7", + ); + + let hash = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()).unwrap(); + assert_eq!( + hash, + "cfe85e0e7e7aa0922d30d8faad071e3a4126cb78b5f0f792f191e90a295aa2c7" + ); + } + + #[test] + fn test_get_modpack_hash_missing_pakku_json() { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!(matches!(result, Err(IpcError::PakkuJsonReadFailed(_)))); + } + + #[test] + fn test_get_modpack_hash_missing_parent_lock_hash() { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + 
fs::write(temp_dir.path().join("pakku.json"), r#"{"other": "field"}"#) + .unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!(matches!(result, Err(IpcError::PakkuJsonReadFailed(_)))); + } + + #[test] + fn test_get_modpack_hash_invalid_hash() { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + fs::write( + temp_dir.path().join("pakku.json"), + r#"{"parentLockHash": "not-a-sha256"}"#, + ) + .unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!(matches!(result, Err(IpcError::PakkuJsonReadFailed(_)))); + } + + #[test] + fn test_same_modpack_different_dirs_same_ipc() { + // Two different directories with SAME parentLockHash should use same IPC + // Use valid 64-character SHA256 hash + let shared_hash = + "abc123def456789012345678901234567890abcd123456789012345678901234"; + let temp_dir1 = create_test_modpack_with_hash( + &[("mods/mod1.jar", "content1")], + shared_hash, + ); + let temp_dir2 = create_test_modpack_with_hash( + &[("config/settings.toml", "content2")], + shared_hash, + ); + + let coord1 = IpcCoordinator::new(&temp_dir1.path().to_path_buf()).unwrap(); + let coord2 = IpcCoordinator::new(&temp_dir2.path().to_path_buf()).unwrap(); + + assert_eq!( + coord1.ops_file, coord2.ops_file, + "Same modpack hash should use same ops file" + ); + } + + #[test] + fn test_different_modpacks_different_ipc() { + // Two different directories with DIFFERENT parentLockHash should use + // different IPC Use create_test_modpack which auto-generates unique + // hashes + let temp_dir1 = create_test_modpack(&[("mod1.jar", "content")]); + let temp_dir2 = create_test_modpack(&[("mod1.jar", "content")]); + + let coord1 = IpcCoordinator::new(&temp_dir1.path().to_path_buf()).unwrap(); + let coord2 = IpcCoordinator::new(&temp_dir2.path().to_path_buf()).unwrap(); + + assert_ne!( + coord1.ops_file, coord2.ops_file, + "Different modpack 
hashes should use different ops files" + ); + } + + #[test] + fn test_ipc_coordinator_new_creates_dir() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Check that the parent directory (ipc_dir) exists + assert!(coordinator.ops_file.parent().unwrap().exists()); + } + + #[test] + fn test_load_operations_empty() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let operations = coordinator.load_operations().unwrap(); + assert!(operations.is_empty()); + } + + #[test] + fn test_register_operation() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + assert!(!id.is_empty()); + assert!(id.contains("fetch")); + assert!(id.contains(&std::process::id().to_string())); + } + + #[test] + fn test_register_multiple_operations_different_types() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let fetch_id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + let export_id = coordinator + .register_operation(OperationType::Export) + .unwrap(); + + assert_ne!(fetch_id, export_id); + + let operations = coordinator.load_operations().unwrap(); + assert_eq!(operations.len(), 2); + } + + #[test] + fn test_register_conflicting_operation_same_type() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let _id1 = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + let result = coordinator.register_operation(OperationType::Fetch); + + assert!(matches!(result, 
Err(IpcError::OperationInProgress(_)))); + } + + #[test] + fn test_complete_operation() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + coordinator.complete_operation(&id).unwrap(); + + let operations = coordinator.load_operations().unwrap(); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0].status, OperationStatus::Completed); + } + + #[test] + fn test_complete_nonexistent_operation() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let result = coordinator.complete_operation("nonexistent-id"); + assert!(matches!(result, Err(IpcError::OperationNotFound(_)))); + } + + #[test] + fn test_has_running_operation() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + assert!(!coordinator.has_running_operation(OperationType::Fetch)); + + let _id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + assert!(coordinator.has_running_operation(OperationType::Fetch)); + assert!(!coordinator.has_running_operation(OperationType::Export)); + + coordinator.complete_operation(&_id).unwrap(); + assert!(!coordinator.has_running_operation(OperationType::Fetch)); + } + + #[test] + fn test_get_running_operations() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let operations = coordinator.get_running_operations(OperationType::Fetch); + assert!(operations.is_empty()); + + let id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + let operations = coordinator.get_running_operations(OperationType::Fetch); + assert_eq!(operations.len(), 1); + 
assert_eq!(operations[0].id, id); + } + + #[tokio::test] + async fn test_wait_for_conflicts_no_conflicts() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let result = coordinator + .wait_for_conflicts(OperationType::Fetch, Duration::from_secs(1)) + .await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_wait_for_conflicts_with_completion() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Register an operation + let id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + + // Complete it after a short delay + let coordinator_clone = coordinator.clone(); + tokio::spawn(async move { + tokio::time::sleep(Duration::from_millis(200)).await; + let _ = coordinator_clone.complete_operation(&id); + }); + + // Wait should succeed once operation completes + let result = coordinator + .wait_for_conflicts(OperationType::Fetch, Duration::from_secs(5)) + .await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_wait_for_conflicts_timeout() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Register a long-running operation (we won't complete it) + let _id = coordinator + .register_operation(OperationType::Export) + .unwrap(); + + // Wait should timeout + let result = coordinator + .wait_for_conflicts(OperationType::Export, Duration::from_millis(500)) + .await; + assert!(matches!(result, Err(IpcError::Timeout(_)))); + } + + #[test] + fn test_operation_guard_completes_on_drop() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + { + let 
_guard = OperationGuard::new(coordinator.clone(), id.clone()); + assert!(coordinator.has_running_operation(OperationType::Fetch)); + } // guard drops here + + // After guard drops, operation should be completed + let operations = coordinator.load_operations().unwrap(); + assert!( + operations + .iter() + .any(|op| op.id == id && op.status == OperationStatus::Completed) + ); + } + + #[test] + fn test_operation_guard_manual_complete() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id = coordinator + .register_operation(OperationType::Export) + .unwrap(); + let guard = OperationGuard::new(coordinator.clone(), id.clone()); + + // Manual complete + coordinator.complete_operation(&id).unwrap(); + drop(guard); + + // Operation should still be completed (not marked twice) + let operations = coordinator.load_operations().unwrap(); + assert_eq!(operations.iter().filter(|op| op.id == id).count(), 1); + } + + #[test] + fn test_stale_operation_cleanup() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Manually add a stale operation (started 15 minutes ago) + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_secs(); + + let stale_op = OngoingOperation { + id: "stale-fetch-123".to_string(), + r#type: OperationType::Fetch, + pid: 99999, + started_at: now - 900, // 15 minutes ago + status: OperationStatus::Running, + }; + + let mut operations = coordinator.load_operations().unwrap(); + operations.push(stale_op); + coordinator.save_operations(&operations).unwrap(); + + // Registering a new fetch should clean up the stale one + let new_id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + assert!(new_id.contains("fetch")); + + let operations = coordinator.load_operations().unwrap(); + 
assert!(!operations.iter().any(|op| op.id == "stale-fetch-123")); + } + + #[test] + fn test_multiple_operations_same_process() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id1 = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + coordinator.complete_operation(&id1).unwrap(); + + // Sleep for 1ms to ensure different timestamp + std::thread::sleep(std::time::Duration::from_millis(2)); + + let id2 = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + assert_ne!(id1, id2, "IDs should differ when timestamps differ"); + + let operations = coordinator.load_operations().unwrap(); + // Both completed and new running operation + assert_eq!(operations.len(), 2); + } + + #[test] + fn test_ipc_base_dir_fallback() { + // Test that XDG_RUNTIME_DIR fallback works + // This is tricky to test because we can't easily unset env vars, + // but we can verify the function returns a valid path + let base_dir = IpcCoordinator::get_ipc_base_dir(); + assert!(base_dir.is_absolute()); + } + + #[test] + fn test_operation_id_format() { + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let id = coordinator + .register_operation(OperationType::Export) + .unwrap(); + let parts: Vec<&str> = id.split('-').collect(); + + // Format is: type-nanoseconds-random-pid (4 parts) + assert_eq!(parts.len(), 4); + assert_eq!(parts[0], "export"); + assert!(parts[1].parse::().is_ok(), "nanoseconds should be u64"); + // random suffix is formatted as hex + assert!( + parts[2].starts_with("0x") || u64::from_str_radix(parts[2], 16).is_ok(), + "random should be hex" + ); + assert_eq!(parts[3], std::process::id().to_string()); + } + + #[test] + fn test_different_temp_dirs_same_content_hash() { + // This is the key test: two temp dirs with SAME pakku.json parentLockHash + 
// should produce the SAME IPC directory + let temp_dir1 = tempfile::Builder::new() + .prefix("pakker-ipc-test1-") + .tempdir() + .unwrap(); + let temp_dir2 = tempfile::Builder::new() + .prefix("pakker-ipc-test2-") + .tempdir() + .unwrap(); + + // Both have same parentLockHash (nested under "pakku" key) + let pakku_content = r#"{"pakku": {"parentLockHash": "cfe85e0e7e7aa0922d30d8faad071e3a4126cb78b5f0f792f191e90a295aa2c7"}}"#; + fs::write(temp_dir1.path().join("pakku.json"), pakku_content).unwrap(); + fs::write(temp_dir2.path().join("pakku.json"), pakku_content).unwrap(); + + // Different files in each + fs::write(temp_dir1.path().join("file1.txt"), "content1").unwrap(); + fs::write(temp_dir2.path().join("file2.txt"), "content2").unwrap(); + + let coord1 = IpcCoordinator::new(&temp_dir1.path().to_path_buf()).unwrap(); + let coord2 = IpcCoordinator::new(&temp_dir2.path().to_path_buf()).unwrap(); + + // Both should point to SAME ops file despite different paths + assert_eq!(coord1.ops_file, coord2.ops_file); + } + + #[test] + fn test_corrupted_ops_json_trailing_bracket() { + // Test handling of corrupted ops.json with trailing characters + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Register an operation to create ops.json + let _id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + + // Manually corrupt the ops.json by appending extra bracket + let ops_content = fs::read_to_string(&coordinator.ops_file).unwrap(); + fs::write(&coordinator.ops_file, format!("{}]", ops_content)).unwrap(); + + // Loading should fail with InvalidFormat error + let result = coordinator.load_operations(); + assert!(matches!(result, Err(IpcError::InvalidFormat(_)))); + } + + #[test] + fn test_corrupted_ops_json_invalid_json() { + // Test handling of completely invalid JSON + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + 
IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Write invalid JSON to ops.json + fs::write(&coordinator.ops_file, "not valid json {[}").unwrap(); + + let result = coordinator.load_operations(); + assert!(matches!(result, Err(IpcError::InvalidFormat(_)))); + } + + #[test] + fn test_corrupted_ops_json_missing_fields() { + // Test handling of JSON with missing required fields + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Write JSON with missing fields + fs::write(&coordinator.ops_file, r#"[{"id": "test"}]"#).unwrap(); + + let result = coordinator.load_operations(); + assert!(matches!(result, Err(IpcError::InvalidFormat(_)))); + } + + #[test] + fn test_empty_ops_file() { + // Test handling of empty ops.json file + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Create empty ops.json + fs::write(&coordinator.ops_file, "").unwrap(); + + let operations = coordinator.load_operations().unwrap(); + assert!(operations.is_empty(), "Empty file should return empty vec"); + } + + #[test] + fn test_whitespace_only_ops_file() { + // Test handling of whitespace-only ops.json + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + fs::write(&coordinator.ops_file, " \n\t \n ").unwrap(); + + let operations = coordinator.load_operations().unwrap(); + assert!( + operations.is_empty(), + "Whitespace-only file should return empty vec" + ); + } + + #[test] + fn test_nested_pakku_json_structure() { + // Test correct parsing of nested pakku.json structure + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + let pakku_content = r#"{ + "pakku": { + "parentLockHash": 
"abc123def456789012345678901234567890abcd123456789012345678901234", + "other_field": "value" + }, + "another_field": "ignored" + }"#; + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + let hash = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()).unwrap(); + assert_eq!( + hash, + "abc123def456789012345678901234567890abcd123456789012345678901234" + ); + } + + #[test] + fn test_old_pakku_json_format_rejected() { + // Test that old (non-nested) format is rejected + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + // Old format: parentLockHash at root level + let old_format = r#"{ + "parentLockHash": "abc123def456789012345678901234567890abcd123456789012345678901234" + }"#; + fs::write(temp_dir.path().join("pakku.json"), old_format).unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!( + matches!(result, Err(IpcError::PakkuJsonReadFailed(_))), + "Old format should be rejected" + ); + } + + #[test] + fn test_hash_validation_too_short() { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + let pakku_content = r#"{"pakku": {"parentLockHash": "tooshort"}}"#; + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!(matches!(result, Err(IpcError::PakkuJsonReadFailed(_)))); + } + + #[test] + fn test_hash_validation_non_hex() { + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + // 64 chars but not all hex + let pakku_content = r#"{"pakku": {"parentLockHash": "gggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg"}}"#; + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + let result = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()); + assert!(matches!(result, 
Err(IpcError::PakkuJsonReadFailed(_)))); + } + + #[test] + fn test_hash_validation_uppercase_hex() { + // Uppercase hex should be accepted + let temp_dir = tempfile::Builder::new() + .prefix("pakker-ipc-test-") + .tempdir() + .unwrap(); + + let pakku_content = r#"{"pakku": {"parentLockHash": "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789"}}"#; + fs::write(temp_dir.path().join("pakku.json"), pakku_content).unwrap(); + + let hash = + IpcCoordinator::get_modpack_hash(&temp_dir.path().to_path_buf()).unwrap(); + assert_eq!( + hash, + "ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789" + ); + } + + #[test] + fn test_concurrent_registration_race_condition() { + // Test that file locking prevents race conditions + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // First registration should succeed + let id1 = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + + // Second registration of same type should fail (conflict) + let result = coordinator.register_operation(OperationType::Fetch); + assert!(matches!(result, Err(IpcError::OperationInProgress(_)))); + + // After completing first, second should succeed + coordinator.complete_operation(&id1).unwrap(); + let id2 = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + assert!(id2.contains("fetch")); + } + + #[test] + fn test_file_permissions_ipc_dir() { + // Test that IPC directory has correct permissions (700 - owner only) + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let ipc_dir = coordinator.ops_file.parent().unwrap(); + let metadata = fs::metadata(ipc_dir).unwrap(); + let permissions = metadata.permissions(); + assert_eq!(permissions.mode() & 0o777, 0o700); + } + + #[test] + fn test_file_permissions_ops_file() { + // Test that ops.json has correct 
permissions (600 - owner read/write only) + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Register operation to create ops.json + let _id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + + let metadata = fs::metadata(&coordinator.ops_file).unwrap(); + let permissions = metadata.permissions(); + assert_eq!(permissions.mode() & 0o777, 0o600); + } + + #[test] + fn test_operations_persistence_across_coordinators() { + // Test that operations persist when creating new coordinator instances + // Use unique hash per test run to avoid conflicts from previous runs + let unique_hash = format!( + "{:064x}", + rand::random::() + ^ (std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_nanos() as u128) + ); + let temp_dir = create_test_modpack_with_hash( + &[("test.txt", "test")], + &unique_hash[..64], + ); + + let coord1 = IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + let id = coord1.register_operation(OperationType::Fetch).unwrap(); + + // Create new coordinator instance + let coord2 = IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + let operations = coord2.load_operations().unwrap(); + + assert_eq!(operations.len(), 1); + assert_eq!(operations[0].id, id); + assert_eq!(operations[0].r#type, OperationType::Fetch); + + // Cleanup: complete the operation so it doesn't interfere with other tests + coord2.complete_operation(&id).unwrap(); + } + + #[test] + fn test_stale_cleanup_preserves_completed() { + // Test that stale cleanup only removes running operations, not completed + // ones + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_secs(); + + // Add old completed operation + let old_completed = 
OngoingOperation { + id: "old-completed-123".to_string(), + r#type: OperationType::Fetch, + pid: 88888, + started_at: now - 1000, // 16+ minutes ago + status: OperationStatus::Completed, + }; + + // Add old running operation (stale) + let old_running = OngoingOperation { + id: "old-running-456".to_string(), + r#type: OperationType::Export, + pid: 99999, + started_at: now - 1000, // 16+ minutes ago + status: OperationStatus::Running, + }; + + let operations = vec![old_completed, old_running]; + coordinator.save_operations(&operations).unwrap(); + + // Register new operation triggers cleanup + let _new_id = coordinator + .register_operation(OperationType::Fetch) + .unwrap(); + + let operations = coordinator.load_operations().unwrap(); + + // Old completed should still be there + assert!(operations.iter().any(|op| op.id == "old-completed-123")); + + // Old running should be removed (stale) + assert!(!operations.iter().any(|op| op.id == "old-running-456")); + } + + #[tokio::test] + async fn test_wait_for_conflicts_multiple_types() { + // Test that wait_for_conflicts only waits for matching operation types + let temp_dir = create_test_modpack(&[("test.txt", "test")]); + let coordinator = + IpcCoordinator::new(&temp_dir.path().to_path_buf()).unwrap(); + + // Register Export operation (different type) + let _export_id = coordinator + .register_operation(OperationType::Export) + .unwrap(); + + // Wait for Fetch should succeed immediately (no conflicts) + let result = coordinator + .wait_for_conflicts(OperationType::Fetch, Duration::from_secs(1)) + .await; + assert!(result.is_ok()); + } + + #[test] + fn test_operation_serialization_roundtrip() { + // Test that OngoingOperation can be serialized and deserialized correctly + let op = OngoingOperation { + id: "test-op-123".to_string(), + r#type: OperationType::Fetch, + pid: 12345, + started_at: 1234567890, + status: OperationStatus::Running, + }; + + let json = serde_json::to_string(&op).unwrap(); + let deserialized: 
OngoingOperation = serde_json::from_str(&json).unwrap();

    assert_eq!(deserialized.id, op.id);
    assert_eq!(deserialized.r#type, op.r#type);
    assert_eq!(deserialized.pid, op.pid);
    assert_eq!(deserialized.started_at, op.started_at);
    assert_eq!(deserialized.status, op.status);
  }
}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..eee865b
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,176 @@
mod cli;
mod error;
mod export;
mod fetch;
mod git;
mod ipc;
mod model;
mod platform;
mod rate_limiter;
mod resolver;
mod ui_utils;
mod utils;

use std::path::PathBuf;

use clap::Parser;
use cli::{Cli, Commands};
use error::PakkerError;

use crate::rate_limiter::RateLimiter;

/// CLI entry point: parses arguments, configures logging from the
/// `-v` count, then dispatches to the matching subcommand handler.
#[tokio::main]
async fn main() -> Result<(), PakkerError> {
  let cli = Cli::parse();

  // Initialize logging based on verbosity level
  let log_level = match cli.verbose {
    0 => "warn",  // Default: only warnings and errors
    1 => "info",  // -v: info level
    2 => "debug", // -vv: debug level
    _ => "trace", // -vvv+: trace level (most verbose)
  };

  env_logger::Builder::from_env(
    env_logger::Env::default().default_filter_or(log_level),
  )
  .format_timestamp(None)
  .format_module_path(false)
  .init();

  // All commands operate on lock/config files in the current directory.
  let working_dir = PathBuf::from(".");
  let lockfile_path = working_dir.join("pakker-lock.json");
  let config_path = working_dir.join("pakker.json");

  // NOTE(review): constructed but not passed to any command here —
  // presumably wired up elsewhere or pending integration; confirm.
  let _rate_limiter = std::sync::Arc::new(RateLimiter::new(None));

  match cli.command {
    Commands::Init(args) => {
      cli::commands::init::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Import(args) => {
      cli::commands::import::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Add(args) => {
      cli::commands::add::execute(args, &lockfile_path, &config_path).await
    },
    Commands::AddPrj(args) => {
      cli::commands::add_prj::execute(
        args.curseforge.clone(),
        args.modrinth.clone(),
        args.github.clone(),
        args.project_type,
        args.side,
        args.strategy,
        args.redistributable,
        args.subpath.clone(),
        args.aliases.clone(),
        args.export,
        args.no_deps,
        args.yes,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    Commands::Rm(args) => {
      cli::commands::rm::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Update(args) => {
      cli::commands::update::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Ls(args) => cli::commands::ls::execute(args, &lockfile_path),
    Commands::Set(args) => {
      cli::commands::set::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Link(args) => cli::commands::link::execute(args, &lockfile_path),
    Commands::Unlink(args) => {
      cli::commands::unlink::execute(args, &lockfile_path)
    },
    Commands::Diff(args) => cli::commands::diff::execute(args, &lockfile_path),
    Commands::Fetch(args) => {
      cli::commands::fetch::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Sync(args) => {
      cli::commands::sync::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Export(args) => {
      cli::commands::export::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Remote(args) => cli::commands::remote::execute(args).await,
    Commands::RemoteUpdate(args) => {
      cli::commands::remote_update::execute(args).await
    },
    Commands::Status(args) => {
      cli::commands::status::execute(
        args.parallel,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    Commands::Inspect(args) => {
      cli::commands::inspect::execute(
        args.projects,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    Commands::Credentials(args) => {
      match &args.subcommand {
        Some(cli::CredentialsSubcommand::Set(set_args)) => {
          cli::commands::credentials_set::execute(
            set_args.cf_api_key.clone(),
            set_args.modrinth_token.clone(),
            set_args.gh_access_token.clone(),
          )
        },
        None => {
          cli::commands::credentials::execute(
            args.delete,
            args.delete_file,
            args.delete_keyring,
          )
        },
      }
    },
    Commands::Cfg(args) => {
      match &args.subcommand {
        Some(cli::CfgSubcommand::Prj(prj_args)) => {
          cli::commands::cfg_prj::execute(
            &config_path,
            &lockfile_path,
            prj_args.project.clone(),
            prj_args.r#type.as_deref(),
            prj_args.side.as_deref(),
            prj_args.update_strategy.as_deref(),
            prj_args.redistributable,
            prj_args.subpath.clone(),
            prj_args.add_alias.clone(),
            prj_args.remove_alias.clone(),
            prj_args.export,
          )
        },
        None => {
          cli::commands::cfg::execute(
            &config_path,
            args.name.clone(),
            args.version.clone(),
            args.description.clone(),
            args.author.clone(),
            args.mods_path.clone(),
            args.resource_packs_path.clone(),
            args.data_packs_path.clone(),
            args.worlds_path.clone(),
            args.shaders_path.clone(),
          )
        },
      }
    },
    Commands::Fork(args) => {
      cli::commands::fork::execute(&args)?;
      Ok(())
    },
  }
}
diff --git a/src/model.rs b/src/model.rs
new file mode 100644
index 0000000..f64f5c8
--- /dev/null
+++ b/src/model.rs
@@ -0,0 +1,23 @@
pub mod config;
pub mod credentials;
pub mod enums;
pub mod fork;
pub mod lockfile;
pub mod r#override;
pub mod project;

pub use config::Config;
pub use credentials::{
  PakkerCredentialsFile,
  ResolvedCredentials,
  set_keyring_secret,
};
pub use enums::{
  ProjectSide,
  ProjectType,
  ReleaseType,
  Target,
  UpdateStrategy,
};
pub use lockfile::LockFile;
pub use project::{Project, ProjectFile};
diff --git a/src/model/config.rs b/src/model/config.rs
new file mode 100644
index 0000000..d052695
--- /dev/null
+++ b/src/model/config.rs
@@ -0,0 +1,383 @@
use std::{collections::HashMap, path::Path};

use serde::{Deserialize, Serialize};

use super::enums::{ProjectSide, ProjectType, UpdateStrategy};
use crate::error::{PakkerError, Result};

const CONFIG_NAME: &str = "pakker.json";

// Pakker config wrapper - supports both Pakker (direct) and Pakku (wrapped)
// formats
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ConfigWrapper {
  Pakker(Config),
  Pakku { pakku: PakkerWrappedConfig },
}
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PakkerWrappedConfig { + pub parent: Option, + #[serde(default)] + pub parent_lock_hash: String, + #[serde(default)] + pub patches: Vec, + #[serde(default)] + pub projects: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ParentConfig { + pub id: String, + pub r#ref: String, + pub ref_type: String, + pub remote_name: String, + #[serde(rename = "type")] + pub type_: String, + pub version: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Config { + pub name: String, + pub version: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub author: Option, + #[serde(default)] + pub overrides: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub server_overrides: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub client_overrides: Option>, + #[serde(default)] + pub paths: HashMap, + #[serde(default)] + pub projects: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub export_profiles: Option>, +} + +impl Default for Config { + fn default() -> Self { + Self { + name: String::new(), + version: String::new(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: Some(HashMap::new()), + export_profiles: None, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ProjectConfig { + #[serde(rename = "type", skip_serializing_if = "Option::is_none")] + pub r#type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub side: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub update_strategy: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub redistributable: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub subpath: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub aliases: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub export: Option, +} + +impl Config { + pub fn load>(path: P) -> Result { + let path = path.as_ref().join(CONFIG_NAME); + let content = + std::fs::read_to_string(&path).map_err(PakkerError::IoError)?; + + // Try to parse as ConfigWrapper (supports both Pakker and Pakku formats) + match serde_json::from_str::(&content) { + Ok(ConfigWrapper::Pakker(config)) => { + config.validate()?; + Ok(config) + }, + Ok(ConfigWrapper::Pakku { pakku }) => { + // Convert Pakku format to Pakker format + // Pakku format doesn't have name/version, use parent repo info as + // fallback + let name = pakku + .parent + .as_ref() + .map(|p| { + // Extract repo name from URL + p.id + .split('/') + .next_back() + .unwrap_or(&p.id) + .trim_end_matches(".git") + .to_string() + }) + .unwrap_or_else(|| "unknown".to_string()); + + let version = pakku + .parent + .as_ref() + .map_or_else(|| "unknown".to_string(), |p| p.version.clone()); + + Ok(Self { + name, + version, + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: Some(pakku.projects), + export_profiles: None, + }) + }, + Err(e) => Err(PakkerError::InvalidConfigFile(e.to_string())), + } + } + + pub fn save>(&self, path: P) -> Result<()> { + self.validate()?; + + let path = path.as_ref().join(CONFIG_NAME); + + // Write to temporary file first (atomic write) + let temp_path = path.with_extension("tmp"); + let content = serde_json::to_string_pretty(self) + .map_err(PakkerError::SerializationError)?; + + std::fs::write(&temp_path, content)?; + std::fs::rename(temp_path, path)?; + + Ok(()) + } + + pub fn validate(&self) -> Result<()> { + if self.name.is_empty() { + return 
Err(PakkerError::InvalidConfigFile( + "Config name cannot be empty".to_string(), + )); + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_config_new() { + let config = Config { + name: "test-pack".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + assert_eq!(config.name, "test-pack"); + assert_eq!(config.version, "1.0.0"); + assert_eq!(config.overrides, vec!["overrides"]); + assert!(config.projects.is_none()); + } + + #[test] + fn test_config_serialization() { + let mut config = Config { + name: "test-pack".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + config.description = Some("A test modpack".to_string()); + config.author = Some("Test Author".to_string()); + + let json = serde_json::to_string(&config).unwrap(); + let deserialized: Config = serde_json::from_str(&json).unwrap(); + + assert_eq!(deserialized.name, config.name); + assert_eq!(deserialized.version, config.version); + assert_eq!(deserialized.description, config.description); + assert_eq!(deserialized.author, config.author); + } + + #[test] + fn test_config_save_and_load() { + let temp_dir = TempDir::new().unwrap(); + let mut config = Config { + name: "test-pack".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + config.description = Some("Test description".to_string()); + + config.save(temp_dir.path()).unwrap(); + + let loaded = 
Config::load(temp_dir.path()).unwrap(); + assert_eq!(loaded.name, config.name); + assert_eq!(loaded.version, config.version); + assert_eq!(loaded.description, config.description); + } + + #[test] + fn test_config_compatibility_with_pakku() { + // Test basic config loading with projects + let config = Config { + name: "test-modpack".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + + assert_eq!(config.name, "test-modpack"); + assert_eq!(config.version, "1.0.0"); + assert!(config.projects.is_none()); + } + + #[test] + fn test_config_wrapped_format() { + let mut projects = HashMap::new(); + projects.insert("sodium".to_string(), ProjectConfig { + r#type: Some(ProjectType::Mod), + side: Some(ProjectSide::Client), + update_strategy: None, + redistributable: None, + subpath: None, + aliases: None, + export: None, + }); + + let wrapped = PakkerWrappedConfig { + parent: None, + parent_lock_hash: String::new(), + patches: vec![], + projects, + }; + + let json = serde_json::to_string(&wrapped).unwrap(); + assert!(json.contains("\"projects\"")); + + let deserialized: PakkerWrappedConfig = + serde_json::from_str(&json).unwrap(); + assert_eq!(deserialized.projects.len(), 1); + } + + #[test] + fn test_config_wrapped_format_old() { + use crate::model::fork::{LocalConfig, LocalProjectConfig}; + + let mut projects = HashMap::new(); + projects.insert("sodium".to_string(), LocalProjectConfig { + version: None, + r#type: Some(ProjectType::Mod), + side: Some(ProjectSide::Client), + update_strategy: None, + redistributable: None, + subpath: None, + aliases: None, + export: None, + }); + + let wrapped_inner = LocalConfig { + parent: None, + projects, + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + + // Just verify we can create the struct + 
assert_eq!(wrapped_inner.projects.len(), 1); + } + + #[test] + fn test_config_validate() { + let config = Config { + name: "test".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec!["overrides".to_string()], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + assert!(config.validate().is_ok()); + + let invalid = Config { + name: "".to_string(), + version: "1.0.0".to_string(), + description: None, + author: None, + overrides: vec![], + server_overrides: None, + client_overrides: None, + paths: HashMap::new(), + projects: None, + export_profiles: None, + }; + assert!(invalid.validate().is_err()); + } +} + +impl Config { + pub fn get_project_config(&self, identifier: &str) -> Option<&ProjectConfig> { + self.projects.as_ref()?.get(identifier) + } + + pub fn set_project_config( + &mut self, + identifier: String, + config: ProjectConfig, + ) { + if self.projects.is_none() { + self.projects = Some(HashMap::new()); + } + if let Some(ref mut projects) = self.projects { + projects.insert(identifier, config); + } + } +} diff --git a/src/model/credentials.rs b/src/model/credentials.rs new file mode 100644 index 0000000..f577769 --- /dev/null +++ b/src/model/credentials.rs @@ -0,0 +1,290 @@ +use std::{fs, path::PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::error::{PakkerError, Result}; + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct PakkerCredentialsFile { + #[serde(skip_serializing_if = "Option::is_none")] + pub curseforge_api_key: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub modrinth_token: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub github_access_token: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct PakkerCompatCredentialsFile { + #[serde(skip_serializing_if = "Option::is_none")] + pub curseforge_api_key: Option, + + 
#[serde(skip_serializing_if = "Option::is_none")] + pub github_access_token: Option, +} + +fn home_dir() -> Result { + let home = std::env::var("HOME") + .or_else(|_| std::env::var("USERPROFILE")) + .map_err(|_| { + PakkerError::InternalError( + "Could not determine home directory".to_string(), + ) + })?; + + Ok(PathBuf::from(home)) +} + +impl PakkerCredentialsFile { + /// Pakker-owned credentials path: ~/.config/pakker/credentials.json + pub fn get_path() -> Result { + Ok( + home_dir()? + .join(".config") + .join("pakker") + .join("credentials.json"), + ) + } + + pub fn load() -> Result { + let path = Self::get_path()?; + if !path.exists() { + return Ok(Self::default()); + } + + let content = fs::read_to_string(&path).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to read Pakker credentials file: {e}" + )) + })?; + + serde_json::from_str(&content).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to parse Pakker credentials file: {e}" + )) + }) + } + + pub fn save(&self) -> Result<()> { + let path = Self::get_path()?; + + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to create pakker config directory: {e}" + )) + })?; + } + + let content = serde_json::to_string_pretty(self).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to serialize credentials: {e}" + )) + })?; + + let temp_path = path.with_extension("tmp"); + fs::write(&temp_path, content).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to write credentials file: {e}" + )) + })?; + + fs::rename(&temp_path, &path).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to save credentials file: {e}" + )) + })?; + + Ok(()) + } + + pub fn delete() -> Result<()> { + let path = Self::get_path()?; + if path.exists() { + fs::remove_file(&path).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to delete Pakker credentials file: {e}" + )) + })?; + } + Ok(()) + } +} + 
+impl PakkerCompatCredentialsFile { + /// Pakku credentials path: ~/.pakku/credentials + /// Read-only: Pakker must never delete or modify this file. + pub fn get_path() -> Result { + Ok(home_dir()?.join(".pakku").join("credentials")) + } + + pub fn load() -> Result { + let path = Self::get_path()?; + if !path.exists() { + return Ok(Self::default()); + } + + let content = fs::read_to_string(&path).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to read Pakku credentials file: {e}" + )) + })?; + + serde_json::from_str(&content).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to parse Pakku credentials file: {e}" + )) + }) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CredentialsSource { + Env, + Keyring, + PakkerFile, +} + +#[derive(Debug, Clone, Default)] +pub struct ResolvedCredentials { + curseforge_api_key: Option<(String, CredentialsSource)>, + modrinth_token: Option<(String, CredentialsSource)>, + github_access_token: Option<(String, CredentialsSource)>, +} + +impl ResolvedCredentials { + pub fn load() -> Result { + let pakker_file = PakkerCredentialsFile::load().ok(); + let pakku_file = PakkerCompatCredentialsFile::load().ok(); + + Ok(Self { + curseforge_api_key: resolve_secret( + "PAKKER_CURSEFORGE_API_KEY", + "curseforge_api_key", + pakker_file + .as_ref() + .and_then(|f| f.curseforge_api_key.clone()), + pakku_file + .as_ref() + .and_then(|f| f.curseforge_api_key.clone()), + )?, + modrinth_token: resolve_secret( + "PAKKER_MODRINTH_TOKEN", + "modrinth_token", + pakker_file.as_ref().and_then(|f| f.modrinth_token.clone()), + None, + )?, + github_access_token: resolve_secret( + "PAKKER_GITHUB_TOKEN", + "github_access_token", + pakker_file + .as_ref() + .and_then(|f| f.github_access_token.clone()), + pakku_file + .as_ref() + .and_then(|f| f.github_access_token.clone()), + )?, + }) + } + + pub fn curseforge_api_key(&self) -> Option<&str> { + self.curseforge_api_key.as_ref().map(|(v, _)| v.as_str()) + } + + pub fn 
modrinth_token(&self) -> Option<&str> { + self.modrinth_token.as_ref().map(|(v, _)| v.as_str()) + } + + pub fn github_access_token(&self) -> Option<&str> { + self.github_access_token.as_ref().map(|(v, _)| v.as_str()) + } + + pub fn curseforge_source(&self) -> Option { + self.curseforge_api_key.as_ref().map(|(_, s)| *s) + } + + pub fn modrinth_source(&self) -> Option { + self.modrinth_token.as_ref().map(|(_, s)| *s) + } + + pub fn github_source(&self) -> Option { + self.github_access_token.as_ref().map(|(_, s)| *s) + } + + pub fn delete_keyring() -> Result<()> { + delete_keyring_secret("curseforge_api_key")?; + delete_keyring_secret("modrinth_token")?; + delete_keyring_secret("github_access_token")?; + Ok(()) + } +} + +fn resolve_secret( + env_key: &str, + keyring_entry: &str, + pakker_file_value: Option, + pakku_file_value: Option, +) -> Result> { + if let Ok(v) = std::env::var(env_key) + && !v.trim().is_empty() + { + return Ok(Some((v.trim().to_string(), CredentialsSource::Env))); + } + + if let Ok(v) = get_keyring_secret(keyring_entry) + && !v.trim().is_empty() + { + return Ok(Some((v.trim().to_string(), CredentialsSource::Keyring))); + } + + if let Some(v) = pakker_file_value.filter(|v| !v.trim().is_empty()) { + return Ok(Some((v, CredentialsSource::PakkerFile))); + } + + Ok( + pakku_file_value + .filter(|v| !v.trim().is_empty()) + .map(|v| (v, CredentialsSource::PakkerFile)), + ) +} + +fn get_keyring_secret( + entry: &str, +) -> std::result::Result { + let e = keyring::Entry::new("pakker", entry)?; + e.get_password() +} + +pub fn set_keyring_secret(entry: &str, value: &str) -> Result<()> { + let e = keyring::Entry::new("pakker", entry).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to access keyring entry {entry}: {e}" + )) + })?; + + e.set_password(value).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to store secret in keyring entry {entry}: {e}" + )) + }) +} + +fn delete_keyring_secret(entry: &str) -> Result<()> { + let e = 
keyring::Entry::new("pakker", entry).map_err(|e| { + PakkerError::InternalError(format!( + "Failed to access keyring entry {entry}: {e}" + )) + })?; + + match e.delete_credential() { + Ok(()) => Ok(()), + Err(keyring::Error::NoEntry) => Ok(()), + Err(e) => { + Err(PakkerError::InternalError(format!( + "Failed to delete keyring entry {entry}: {e}" + ))) + }, + } +} diff --git a/src/model/enums.rs b/src/model/enums.rs new file mode 100644 index 0000000..2544c97 --- /dev/null +++ b/src/model/enums.rs @@ -0,0 +1,156 @@ +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "UPPERCASE")] +pub enum ProjectType { + #[serde(rename = "MOD")] + Mod, + #[serde(rename = "RESOURCE_PACK")] + ResourcePack, + #[serde(rename = "DATA_PACK")] + DataPack, + #[serde(rename = "SHADER")] + Shader, + #[serde(rename = "WORLD")] + World, +} + +impl fmt::Display for ProjectType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Mod => write!(f, "mod"), + Self::ResourcePack => write!(f, "resource-pack"), + Self::DataPack => write!(f, "data-pack"), + Self::Shader => write!(f, "shader"), + Self::World => write!(f, "world"), + } + } +} + +impl FromStr for ProjectType { + type Err = String; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "mod" => Ok(Self::Mod), + "resource-pack" | "resourcepack" => Ok(Self::ResourcePack), + "data-pack" | "datapack" => Ok(Self::DataPack), + "shader" => Ok(Self::Shader), + "world" => Ok(Self::World), + _ => Err(format!("Invalid project type: {s}")), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum ProjectSide { + #[serde(rename = "CLIENT")] + Client, + #[serde(rename = "SERVER")] + Server, + #[serde(rename = "BOTH")] + Both, +} + +impl FromStr for ProjectSide { + type Err = String; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() 
{ + "client" => Ok(Self::Client), + "server" => Ok(Self::Server), + "both" => Ok(Self::Both), + _ => Err(format!("Invalid project side: {s}")), + } + } +} + +impl std::fmt::Display for ProjectSide { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Client => write!(f, "CLIENT"), + Self::Server => write!(f, "SERVER"), + Self::Both => write!(f, "BOTH"), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum UpdateStrategy { + #[serde(rename = "LATEST")] + Latest, + #[serde(rename = "NONE")] + None, +} + +impl FromStr for UpdateStrategy { + type Err = String; + fn from_str(s: &str) -> Result { + match s.to_uppercase().as_str() { + "LATEST" => Ok(Self::Latest), + "NONE" => Ok(Self::None), + _ => Err(format!("Invalid update strategy: {s}")), + } + } +} + +impl std::fmt::Display for UpdateStrategy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Latest => write!(f, "LATEST"), + Self::None => write!(f, "NONE"), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ReleaseType { + Release, + Beta, + Alpha, +} + +impl FromStr for ReleaseType { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "release" => Ok(Self::Release), + "beta" => Ok(Self::Beta), + "alpha" => Ok(Self::Alpha), + _ => Err(format!("Invalid release type: {s}")), + } + } +} + +impl fmt::Display for ReleaseType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Release => write!(f, "release"), + Self::Beta => write!(f, "beta"), + Self::Alpha => write!(f, "alpha"), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum Target { + CurseForge, + Modrinth, + Multiplatform, +} + +impl std::str::FromStr for Target { + type Err = String; + + fn from_str(s: &str) 
-> Result { + match s.to_lowercase().as_str() { + "curseforge" => Ok(Self::CurseForge), + "modrinth" => Ok(Self::Modrinth), + "multiplatform" => Ok(Self::Multiplatform), + _ => Err(format!("Invalid target: {s}")), + } + } +} diff --git a/src/model/fork.rs b/src/model/fork.rs new file mode 100644 index 0000000..716ab55 --- /dev/null +++ b/src/model/fork.rs @@ -0,0 +1,480 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; +use sha2::{Sha256, digest::Digest}; + +use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy}; + +/// Fork integrity verification data +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ForkIntegrity { + /// SHA256 hash of the parent lockfile content + pub lockfile_hash: String, + /// Git commit SHA of the parent + pub commit_sha: String, + /// Hash of the parent config (pakku.json) + pub config_hash: String, + /// Timestamp of verification + pub verified_at: u64, +} + +impl ForkIntegrity { + pub fn new( + lockfile_hash: String, + commit_sha: String, + config_hash: String, + ) -> Self { + use std::time::{SystemTime, UNIX_EPOCH}; + + let verified_at = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_secs(); + + Self { + lockfile_hash, + commit_sha, + config_hash, + verified_at, + } + } +} + +/// Compute SHA256 hash of content +pub fn hash_content(content: &str) -> String { + let mut hasher = Sha256::new(); + hasher.update(content.as_bytes()); + format!("{:x}", hasher.finalize()) +} + +/// Reference type for Git operations +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum RefType { + Branch, + Tag, + Commit, +} + +impl std::str::FromStr for RefType { + type Err = String; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "branch" => Ok(Self::Branch), + "tag" => Ok(Self::Tag), + "commit" => Ok(Self::Commit), + _ => Err(format!("Invalid ref type: 
{s}")), + } + } +} + +impl std::fmt::Display for RefType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Branch => write!(f, "branch"), + Self::Tag => write!(f, "tag"), + Self::Commit => write!(f, "commit"), + } + } +} + +/// Parent configuration for fork management +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ParentConfig { + #[serde(rename = "type")] + pub type_: String, // Always "git" for now + pub id: String, // Git URL + #[serde(skip_serializing_if = "Option::is_none")] + pub version: Option, // Current commit SHA + #[serde(rename = "ref")] + pub ref_: String, // Branch/tag/commit name + pub ref_type: RefType, + #[serde(default = "default_remote_name")] + pub remote_name: String, +} + +fn default_remote_name() -> String { + "origin".to_string() +} + +/// Local project configuration for overrides +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LocalProjectConfig { + #[serde(skip_serializing_if = "Option::is_none")] + pub version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub r#type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub side: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub update_strategy: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub redistributable: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub subpath: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub aliases: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub export: Option, +} + +/// Local configuration stored in pakku.json under "pakku" section +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct LocalConfig { + #[serde(skip_serializing_if = "Option::is_none")] + pub parent: Option, + #[serde(default)] + pub projects: HashMap, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub parent_lock_hash: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub parent_config_hash: Option, + #[serde(default)] + pub patches: Vec, +} + +impl LocalConfig { + pub const fn has_parent(&self) -> bool { + self.parent.is_some() + } + + /// Load `LocalConfig` from pakker.json's "pakku" section (with fallback to + /// pakku.json) + pub fn load( + dir: &std::path::Path, + ) -> Result { + use std::fs; + + use crate::error::PakkerError; + + // Try pakker.json first, then fall back to pakku.json + let config_path = if dir.join("pakker.json").exists() { + dir.join("pakker.json") + } else { + dir.join("pakku.json") + }; + + if !config_path.exists() { + return Ok(Self::default()); + } + + let content = fs::read_to_string(&config_path).map_err(|e| { + PakkerError::InvalidConfigFile(format!( + "Failed to read {}: {}", + config_path.display(), + e + )) + })?; + + let json_value: serde_json::Value = serde_json::from_str(&content) + .map_err(|e| { + PakkerError::InvalidConfigFile(format!( + "Failed to parse pakku.json: {e}" + )) + })?; + + // Extract "pakku" section if it exists + if let Some(pakku_section) = json_value.get("pakku") { + let local_config: Self = serde_json::from_value(pakku_section.clone()) + .map_err(|e| { + PakkerError::InvalidConfigFile(format!( + "Failed to parse pakku section: {e}" + )) + })?; + Ok(local_config) + } else { + Ok(Self::default()) + } + } + + /// Save `LocalConfig` to pakku.json's "pakku" section + pub fn save( + &self, + dir: &std::path::Path, + ) -> Result<(), crate::error::PakkerError> { + use std::fs; + + use crate::error::PakkerError; + + let config_path = dir.join("pakker.json"); + + // Read existing pakku.json + let mut json_value: serde_json::Value = if config_path.exists() { + let content = fs::read_to_string(&config_path).map_err(|e| { + PakkerError::InvalidConfigFile(format!( + "Failed to read {}: {}", + config_path.display(), + e + )) + })?; + serde_json::from_str(&content).map_err(|e| { + 
PakkerError::InvalidConfigFile(format!( + "Failed to parse pakku.json: {e}" + )) + })? + } else { + serde_json::json!({}) + }; + + // Update or create "pakku" section + let local_config_json = + serde_json::to_value(self).map_err(PakkerError::SerializationError)?; + + json_value["pakku"] = local_config_json; + + // Write back to file + let content = serde_json::to_string_pretty(&json_value) + .map_err(PakkerError::SerializationError)?; + + fs::write(&config_path, content).map_err(|e| { + PakkerError::IoError(std::io::Error::other(format!( + "Failed to write {}: {}", + config_path.display(), + e + ))) + })?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ref_type_serde_serialization() { + // Test that RefType serializes to uppercase screaming snake case + let branch = RefType::Branch; + let tag = RefType::Tag; + let commit = RefType::Commit; + + assert_eq!(serde_json::to_string(&branch).unwrap(), "\"BRANCH\""); + assert_eq!(serde_json::to_string(&tag).unwrap(), "\"TAG\""); + assert_eq!(serde_json::to_string(&commit).unwrap(), "\"COMMIT\""); + } + + #[test] + fn test_ref_type_clap_value_enum() { + // Test that clap ValueEnum derives work correctly + let values: Vec = + vec![RefType::Branch, RefType::Tag, RefType::Commit]; + assert_eq!(values.len(), 3); + } + + #[test] + fn test_parent_config_new() { + let config = ParentConfig { + type_: "git".to_string(), + id: "https://github.com/example/repo".to_string(), + version: None, + ref_: "main".to_string(), + ref_type: RefType::Branch, + remote_name: "upstream".to_string(), + }; + + assert_eq!(config.type_, "git"); + assert_eq!(config.id, "https://github.com/example/repo"); + assert_eq!(config.version, None); + assert_eq!(config.ref_, "main"); + assert_eq!(config.ref_type, RefType::Branch); + assert_eq!(config.remote_name, "upstream"); + } + + #[test] + fn test_parent_config_default_remote() { + let config = ParentConfig { + type_: "git".to_string(), + id: 
"https://github.com/example/repo".to_string(), + version: None, + ref_: "main".to_string(), + ref_type: RefType::Branch, + remote_name: "origin".to_string(), + }; + assert_eq!(config.remote_name, "origin"); + } + + #[test] + fn test_parent_config_serde_roundtrip() { + let mut original = ParentConfig { + type_: "git".to_string(), + id: "https://github.com/example/repo.git".to_string(), + version: None, + ref_: "v1.0.0".to_string(), + ref_type: RefType::Tag, + remote_name: "origin".to_string(), + }; + original.version = Some("abc123def456".to_string()); + + let json = serde_json::to_string(&original).unwrap(); + let deserialized: ParentConfig = serde_json::from_str(&json).unwrap(); + + assert_eq!(deserialized.type_, original.type_); + assert_eq!(deserialized.id, original.id); + assert_eq!(deserialized.version, original.version); + assert_eq!(deserialized.ref_, original.ref_); + assert_eq!(deserialized.ref_type, original.ref_type); + assert_eq!(deserialized.remote_name, original.remote_name); + } + + #[test] + fn test_local_project_config_default() { + let config = LocalProjectConfig { + version: None, + r#type: None, + side: None, + update_strategy: None, + redistributable: None, + subpath: None, + aliases: None, + export: None, + }; + assert_eq!(config.version, None); + assert_eq!(config.side, None); + assert_eq!(config.update_strategy, None); + assert_eq!(config.redistributable, None); + } + + #[test] + fn test_local_config_default() { + let config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + assert!(config.parent.is_none()); + assert!(config.projects.is_empty()); + assert!(config.patches.is_empty()); + } + + #[test] + fn test_local_config_has_parent_false() { + let config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + assert!(!config.has_parent()); + } + + #[test] + fn 
test_local_config_has_parent_true() { + let mut config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + config.parent = Some(ParentConfig { + type_: "git".to_string(), + id: "https://github.com/example/repo".to_string(), + version: None, + ref_: "main".to_string(), + ref_type: RefType::Branch, + remote_name: "origin".to_string(), + }); + assert!(config.has_parent()); + } + + #[test] + fn test_local_config_projects_insertion() { + let mut config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + config + .projects + .insert("sodium".to_string(), LocalProjectConfig { + version: Some("0.5.0".to_string()), + r#type: None, + side: Some(ProjectSide::Both), + update_strategy: Some(UpdateStrategy::Latest), + redistributable: Some(true), + subpath: None, + aliases: None, + export: None, + }); + + assert_eq!(config.projects.len(), 1); + let project = config.projects.get("sodium").unwrap(); + assert_eq!(project.version, Some("0.5.0".to_string())); + assert_eq!(project.side, Some(ProjectSide::Both)); + } + + #[test] + fn test_local_config_patches() { + let mut config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + config.patches.push("custom.patch".to_string()); + config.patches.push("bugfix.patch".to_string()); + + assert_eq!(config.patches.len(), 2); + assert_eq!(config.patches[0], "custom.patch"); + } + + #[test] + fn test_local_config_serde_roundtrip() { + let mut config = LocalConfig { + parent: None, + projects: HashMap::new(), + parent_lock_hash: None, + parent_config_hash: None, + patches: vec![], + }; + config.parent = Some(ParentConfig { + type_: "git".to_string(), + id: "https://github.com/example/repo.git".to_string(), + version: None, + ref_: "develop".to_string(), + ref_type: RefType::Branch, + 
remote_name: "origin".to_string(), + }); + config.parent.as_mut().unwrap().version = Some("def456".to_string()); + config + .projects + .insert("test-mod".to_string(), LocalProjectConfig { + version: Some("1.0.0".to_string()), + r#type: None, + side: Some(ProjectSide::Client), + update_strategy: None, + redistributable: Some(false), + subpath: None, + aliases: None, + export: None, + }); + config.patches.push("test.patch".to_string()); + config.parent_lock_hash = Some("hash123".to_string()); + + let json = serde_json::to_string(&config).unwrap(); + let deserialized: LocalConfig = serde_json::from_str(&json).unwrap(); + + assert!(deserialized.parent.is_some()); + let parent = deserialized.parent.unwrap(); + assert_eq!(parent.id, "https://github.com/example/repo.git"); + assert_eq!(parent.ref_, "develop"); + assert_eq!(parent.ref_type, RefType::Branch); + assert_eq!(parent.version, Some("def456".to_string())); + + assert_eq!(deserialized.projects.len(), 1); + assert!(deserialized.projects.contains_key("test-mod")); + assert_eq!(deserialized.patches.len(), 1); + assert_eq!(deserialized.parent_lock_hash, Some("hash123".to_string())); + } +} diff --git a/src/model/lockfile.rs b/src/model/lockfile.rs new file mode 100644 index 0000000..9f8a945 --- /dev/null +++ b/src/model/lockfile.rs @@ -0,0 +1,622 @@ +use std::{collections::HashMap, path::Path}; + +use serde::{Deserialize, Serialize}; + +use super::{enums::Target, project::Project}; +use crate::error::{PakkerError, Result}; + +#[cfg(test)] +mod tests { + use tempfile::TempDir; + + use super::*; + use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy}; + + fn create_test_project(pakku_id: &str, slug: &str) -> Project { + use std::collections::HashSet; + let mut name_map = HashMap::new(); + name_map.insert("modrinth".to_string(), slug.to_string()); + + let mut id_map = HashMap::new(); + id_map.insert("modrinth".to_string(), pakku_id.to_string()); + + Project { + pakku_id: Some(pakku_id.to_string()), + 
pakku_links: HashSet::new(), + r#type: ProjectType::Mod, + side: ProjectSide::Both, + slug: name_map.clone(), + name: name_map.clone(), + id: id_map, + update_strategy: UpdateStrategy::Latest, + redistributable: true, + subpath: None, + aliases: HashSet::new(), + export: true, + files: vec![], + } + } + + #[test] + fn test_lockfile_new() { + let target = Target::Modrinth; + let mc_versions = vec!["1.20.1".to_string()]; + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let lockfile = LockFile { + target: Some(target), + mc_versions: mc_versions.clone(), + loaders: loaders.clone(), + projects: Vec::new(), + lockfile_version: 1, + }; + + assert_eq!(lockfile.target, Some(target)); + assert_eq!(lockfile.mc_versions, mc_versions); + assert_eq!(lockfile.loaders, loaders); + assert_eq!(lockfile.projects.len(), 0); + assert_eq!(lockfile.lockfile_version, 1); + } + + #[test] + fn test_lockfile_serialization() { + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("test-id", "test-slug")); + + let found = lockfile.find_project("test-id"); + assert!(found.is_some()); + assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string())); + + let not_found = lockfile.find_project("nonexistent"); + assert!(not_found.is_none()); + } + + #[test] + fn test_lockfile_find_project_by_platform_id() { + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("platform-123", "test-slug")); + + let found = + 
lockfile.find_project_by_platform_id("modrinth", "platform-123"); + assert!(found.is_some()); + assert_eq!( + found.unwrap().id.get("modrinth"), + Some(&"platform-123".to_string()) + ); + } + + #[test] + fn test_lockfile_get_loader_names() { + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + loaders.insert("forge".to_string(), "47.1.0".to_string()); + + let lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + let mut loader_names = lockfile.get_loader_names(); + loader_names.sort(); + + assert_eq!(loader_names, vec!["fabric", "forge"]); + } + + #[test] + fn test_lockfile_save_and_load() { + let temp_dir = TempDir::new().unwrap(); + let lockfile_path = temp_dir.path(); + + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("test-mod", "test-slug")); + + lockfile.save(lockfile_path).unwrap(); + + let loaded = LockFile::load(lockfile_path).unwrap(); + + assert_eq!(loaded.target, lockfile.target); + assert_eq!(loaded.mc_versions, lockfile.mc_versions); + assert_eq!(loaded.projects.len(), 1); + } + + #[test] + fn test_lockfile_compatibility_with_pakku() { + // Test that we can parse a Pakku-generated lockfile + let pakku_json = r#"{ + "target": "modrinth", + "mc_versions": ["1.20.1"], + "loaders": {"fabric": "0.15.0"}, + "projects": [ + { + "pakku_id": "fabric-api", + "type": "MOD", + "side": "BOTH", + "slug": { + "modrinth": "fabric-api" + }, + "name": { + "modrinth": "Fabric API" + }, + "id": { + "modrinth": "P7dR8mSH" + }, + "updateStrategy": "LATEST", + "redistributable": true, + "files": [], + "pakku_links": [] + } + ], + "lockfile_version": 1 + 
}"#; + + let lockfile: LockFile = serde_json::from_str(pakku_json).unwrap(); + assert_eq!(lockfile.target, Some(Target::Modrinth)); + assert_eq!(lockfile.mc_versions, vec!["1.20.1"]); + assert_eq!(lockfile.projects.len(), 1); + } + + #[test] + fn test_lockfile_validation_invalid_version() { + // Test that lockfile with wrong version fails validation + let temp_dir = TempDir::new().unwrap(); + let lockfile_path = temp_dir.path().join("pakku-lock.json"); + + let invalid_json = r#"{ + "target": "modrinth", + "mc_versions": ["1.20.1"], + "loaders": {"fabric": "0.15.0"}, + "projects": [], + "lockfile_version": 999 + }"#; + + std::fs::write(&lockfile_path, invalid_json).unwrap(); + + let result = LockFile::load(temp_dir.path()); + assert!(result.is_err()); + } + + #[test] + fn test_lockfile_validation_duplicate_pakku_ids() { + // Test that lockfile with duplicate pakku_ids fails validation + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("duplicate-id", "slug1")); + lockfile.add_project(create_test_project("duplicate-id", "slug2")); + + let result = lockfile.validate(); + assert!(result.is_err()); + } + + #[test] + fn test_lockfile_atomic_write() { + // Test that save uses atomic write (temp file + rename) + let temp_dir = TempDir::new().unwrap(); + + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.save(temp_dir.path()).unwrap(); + + // Temp file should not exist after save + let temp_path = temp_dir.path().join("pakku-lock.tmp"); + assert!(!temp_path.exists()); + + // Actual file 
should exist + let lockfile_path = temp_dir.path().join("pakku-lock.json"); + assert!(lockfile_path.exists()); + } + + #[test] + fn test_lockfile_sort_projects() { + // Test that projects are sorted alphabetically by name + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("zebra-mod", "zebra")); + lockfile.add_project(create_test_project("alpha-mod", "alpha")); + lockfile.add_project(create_test_project("middle-mod", "middle")); + + lockfile.sort_projects(); + + assert_eq!(lockfile.projects[0].pakku_id, Some("alpha-mod".to_string())); + assert_eq!( + lockfile.projects[1].pakku_id, + Some("middle-mod".to_string()) + ); + assert_eq!(lockfile.projects[2].pakku_id, Some("zebra-mod".to_string())); + } + + #[test] + fn test_lockfile_find_project_mut() { + // Test mutable project lookup + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("test-id", "test-slug")); + + // Modify through mutable reference + if let Some(project) = lockfile.find_project_mut("test-id") { + project.redistributable = false; + } + + let found = lockfile.get_project("test-id").unwrap(); + assert_eq!(found.redistributable, false); + } + + #[test] + fn test_lockfile_multiple_loaders() { + // Test lockfile with multiple loaders + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + loaders.insert("forge".to_string(), "47.1.0".to_string()); + loaders.insert("quilt".to_string(), "0.20.0".to_string()); + + let lockfile = LockFile { + 
target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + let loader_names = lockfile.get_loader_names(); + assert_eq!(loader_names.len(), 3); + assert!(loader_names.contains(&"fabric".to_string())); + assert!(loader_names.contains(&"forge".to_string())); + assert!(loader_names.contains(&"quilt".to_string())); + } + + #[test] + fn test_lockfile_multiple_mc_versions() { + // Test lockfile with multiple Minecraft versions + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mc_versions = vec![ + "1.20.1".to_string(), + "1.20.2".to_string(), + "1.20.4".to_string(), + ]; + + let lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: mc_versions.clone(), + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + assert_eq!(lockfile.mc_versions, mc_versions); + } + + #[test] + fn test_lockfile_roundtrip_preserves_data() { + // Test that save/load roundtrip preserves all data + let temp_dir = TempDir::new().unwrap(); + + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + loaders.insert("forge".to_string(), "47.1.0".to_string()); + + let mc_versions = vec!["1.20.1".to_string(), "1.20.4".to_string()]; + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: mc_versions.clone(), + loaders: loaders.clone(), + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.add_project(create_test_project("mod1", "slug1")); + lockfile.add_project(create_test_project("mod2", "slug2")); + + lockfile.save(temp_dir.path()).unwrap(); + let loaded = LockFile::load(temp_dir.path()).unwrap(); + + assert_eq!(loaded.target, Some(Target::Modrinth)); + assert_eq!(loaded.mc_versions, mc_versions); + assert_eq!(loaded.loaders, loaders); + assert_eq!(loaded.projects.len(), 2); + assert_eq!(loaded.lockfile_version, 1); + } + + #[test] + fn 
test_lockfile_remove_nonexistent_project() { + // Test removing a project that doesn't exist + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let mut lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + let result = lockfile.remove_project("nonexistent-id"); + assert!(result.is_none()); + } + + #[test] + fn test_lockfile_empty_projects_list() { + // Test lockfile with no projects + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + assert_eq!(lockfile.projects.len(), 0); + assert!(lockfile.validate().is_ok()); + } + + #[test] + fn test_lockfile_pretty_json_format() { + // Test that saved JSON is pretty-printed + let temp_dir = TempDir::new().unwrap(); + + let mut loaders = HashMap::new(); + loaders.insert("fabric".to_string(), "0.15.0".to_string()); + + let lockfile = LockFile { + target: Some(Target::Modrinth), + mc_versions: vec!["1.20.1".to_string()], + loaders, + projects: Vec::new(), + lockfile_version: 1, + }; + + lockfile.save(temp_dir.path()).unwrap(); + + let content = + std::fs::read_to_string(temp_dir.path().join("pakku-lock.json")).unwrap(); + + // Pretty-printed JSON should have newlines and indentation + assert!(content.contains('\n')); + assert!(content.contains(" ")); // Indentation + } + + #[test] + fn test_lockfile_missing_file() { + // Test loading from non-existent directory + let temp_dir = TempDir::new().unwrap(); + let nonexistent = temp_dir.path().join("nonexistent"); + + let result = LockFile::load(&nonexistent); + assert!(result.is_err()); + } + + #[test] + fn test_lockfile_corrupted_json() { + // Test loading corrupted JSON + let temp_dir = 
TempDir::new().unwrap();
    let lockfile_path = temp_dir.path().join("pakku-lock.json");

    std::fs::write(&lockfile_path, "not valid json {[}").unwrap();

    let result = LockFile::load(temp_dir.path());
    assert!(result.is_err());
  }
}

/// Version of the on-disk lockfile format this build understands.
const LOCKFILE_VERSION: u32 = 1;
/// File name of the lockfile inside a pack directory.
const LOCKFILE_NAME: &str = "pakku-lock.json";
/// Scratch file used while saving; renamed over `LOCKFILE_NAME` on
/// success so readers never observe a half-written lockfile.
const LOCKFILE_TMP_NAME: &str = "pakku-lock.tmp";

/// In-memory representation of `pakku-lock.json`.
///
/// NOTE(review): the generic parameters below were stripped from this
/// chunk by extraction; the type arguments are reconstructed from the
/// tests in this file — confirm against the original source.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockFile {
  /// Primary distribution platform, if one has been chosen.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub target:           Option<Target>,
  /// Accepted Minecraft versions (validation requires at least one).
  pub mc_versions:      Vec<String>,
  /// Loader name -> loader version (validation requires at least one).
  pub loaders:          HashMap<String, String>,
  /// All resolved projects, kept sorted by display name.
  pub projects:         Vec<Project>,
  /// Format version; must equal `LOCKFILE_VERSION`.
  pub lockfile_version: u32,
}

impl LockFile {
  /// Loads and validates the lockfile found in directory `path`.
  pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
    Self::load_with_validation(path, true)
  }

  /// Loads the lockfile from directory `path`, optionally skipping
  /// [`Self::validate`]. Projects are re-sorted after loading.
  ///
  /// # Errors
  /// Fails when the file is missing or unreadable, is not valid JSON,
  /// or (when `validate` is set) violates a lockfile invariant.
  pub fn load_with_validation<P: AsRef<Path>>(
    path: P,
    validate: bool,
  ) -> Result<Self> {
    let path = path.as_ref().join(LOCKFILE_NAME);
    let content =
      std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;

    let mut lockfile: Self = serde_json::from_str(&content)
      .map_err(|e| PakkerError::InvalidLockFile(e.to_string()))?;

    if validate {
      lockfile.validate()?;
    }
    lockfile.sort_projects();

    Ok(lockfile)
  }

  /// Validates, then atomically writes the lockfile into directory
  /// `path`.
  pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
    self.validate()?;
    self.write_atomic(path.as_ref())
  }

  /// Writes the lockfile without running [`Self::validate`] first.
  pub fn save_without_validation<P: AsRef<Path>>(
    &self,
    path: P,
  ) -> Result<()> {
    self.write_atomic(path.as_ref())
  }

  /// Serializes to pretty JSON and writes via temp file + rename.
  ///
  /// Fix: `save` previously used a plain `std::fs::write`, so a crash
  /// mid-write could leave a truncated `pakku-lock.json`. The atomic
  /// write (claimed by `test_lockfile_atomic_write` above) is now real:
  /// the content lands in `pakku-lock.tmp` first and is renamed over
  /// the destination, which on the same filesystem replaces it in one
  /// step.
  fn write_atomic(&self, dir: &Path) -> Result<()> {
    let content = serde_json::to_string_pretty(self)
      .map_err(PakkerError::SerializationError)?;
    let tmp_path = dir.join(LOCKFILE_TMP_NAME);
    std::fs::write(&tmp_path, content).map_err(PakkerError::IoError)?;
    std::fs::rename(&tmp_path, dir.join(LOCKFILE_NAME)).map_err(|e| {
      // Best effort: do not leave the scratch file behind on failure.
      let _ = std::fs::remove_file(&tmp_path);
      PakkerError::IoError(e)
    })
  }

  /// Checks the structural invariants of the lockfile.
  ///
  /// # Errors
  /// Fails when the format version is unsupported, no Minecraft version
  /// or loader is present, or two projects share a pakku ID.
  pub fn validate(&self) -> Result<()> {
    if self.lockfile_version != LOCKFILE_VERSION {
      return Err(PakkerError::InvalidLockFile(format!(
        "Unsupported lockfile version: {}",
        self.lockfile_version
      )));
    }

    if self.mc_versions.is_empty() {
      return Err(PakkerError::InvalidLockFile(
        "At least one Minecraft version is required".to_string(),
      ));
    }

    if self.loaders.is_empty() {
      return Err(PakkerError::InvalidLockFile(
        "At least one loader is required".to_string(),
      ));
    }

    // Every pakku ID must be unique across the project list.
    let mut seen_ids = std::collections::HashSet::new();
    for project in &self.projects {
      if let Some(ref pakku_id) = project.pakku_id
        && !seen_ids.insert(pakku_id)
      {
        return Err(PakkerError::InvalidLockFile(format!(
          "Duplicate pakku ID: {pakku_id}"
        )));
      }
    }

    Ok(())
  }

  /// Sorts projects case-insensitively by display name.
  pub fn sort_projects(&mut self) {
    self.projects.sort_by(|a, b| {
      a.get_name()
        .to_lowercase()
        .cmp(&b.get_name().to_lowercase())
    });
  }

  /// Appends a project and keeps the list sorted.
  ///
  /// Fix: this previously sorted case-SENSITIVELY via
  /// `sort_by_key(Project::get_name)`, which disagreed with
  /// `sort_projects` and left the order dependent on which method ran
  /// last; both now share the same case-insensitive ordering.
  pub fn add_project(&mut self, project: Project) {
    self.projects.push(project);
    self.sort_projects();
  }

  /// Keyed lookup by pakku ID (alias of [`Self::find_project`]).
  pub fn get_project(&self, pakku_id: &str) -> Option<&Project> {
    self.find_project(pakku_id)
  }

  /// Returns the configured loader names (unordered).
  pub fn get_loader_names(&self) -> Vec<String> {
    self.loaders.keys().cloned().collect()
  }

  /// Removes and returns the project with the given pakku ID, if any.
  pub fn remove_project(&mut self, pakku_id: &str) -> Option<Project> {
    self
      .projects
      .iter()
      .position(|p| p.pakku_id.as_deref() == Some(pakku_id))
      .map(|pos| self.projects.remove(pos))
  }

  /// Finds a project by pakku ID.
  pub fn find_project(&self, pakku_id: &str) -> Option<&Project> {
    self
      .projects
      .iter()
      .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
  }

  /// Mutable variant of [`Self::find_project`].
  pub fn find_project_mut(&mut self, pakku_id: &str) -> Option<&mut Project> {
    self
      .projects
      .iter_mut()
      .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
  }

  /// Finds a project by its ID on a specific platform.
  pub fn find_project_by_platform_id(
    &self,
    platform: &str,
    id: &str,
  ) -> Option<&Project> {
    self
      .projects
      .iter()
      .find(|p| p.id.get(platform).is_some_and(|pid| pid == id))
  }
}
diff --git a/src/model/override.rs b/src/model/override.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/model/override.rs @@ -0,0 +1 @@ + diff --git a/src/model/project.rs
b/src/model/project.rs new file mode 100644 index 0000000..1e2f464 --- /dev/null +++ b/src/model/project.rs @@ -0,0 +1,439 @@ +use std::collections::{HashMap, HashSet}; + +use serde::{Deserialize, Serialize}; + +use super::enums::{ProjectSide, ProjectType, ReleaseType, UpdateStrategy}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Project { + #[serde(skip_serializing_if = "Option::is_none")] + pub pakku_id: Option, + #[serde(skip_serializing_if = "HashSet::is_empty", default)] + pub pakku_links: HashSet, + #[serde(rename = "type")] + pub r#type: ProjectType, + #[serde(default = "default_side")] + pub side: ProjectSide, + pub slug: HashMap, + pub name: HashMap, + pub id: HashMap, + #[serde( + default = "default_update_strategy", + skip_serializing_if = "is_default_update_strategy" + )] + pub update_strategy: UpdateStrategy, + #[serde( + default = "default_redistributable", + skip_serializing_if = "is_default_redistributable" + )] + pub redistributable: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub subpath: Option, + #[serde(default, skip_serializing_if = "HashSet::is_empty")] + pub aliases: HashSet, + #[serde( + default = "default_export", + skip_serializing_if = "is_default_export" + )] + pub export: bool, + pub files: Vec, +} + +const fn default_export() -> bool { + true +} + +const fn default_side() -> ProjectSide { + ProjectSide::Both +} + +const fn default_update_strategy() -> UpdateStrategy { + UpdateStrategy::Latest +} + +const fn default_redistributable() -> bool { + true +} + +const fn is_default_update_strategy(strategy: &UpdateStrategy) -> bool { + matches!(strategy, UpdateStrategy::Latest) +} + +const fn is_default_redistributable(redistributable: &bool) -> bool { + *redistributable +} + +const fn is_default_export(export: &bool) -> bool { + *export +} + +impl Project { + pub fn new(pakku_id: String, typ: ProjectType, side: ProjectSide) -> Self { + Self { + pakku_id: Some(pakku_id), + pakku_links: HashSet::new(), + 
r#type: typ, + side, + slug: HashMap::new(), + name: HashMap::new(), + id: HashMap::new(), + update_strategy: UpdateStrategy::Latest, + redistributable: true, + subpath: None, + aliases: HashSet::new(), + export: true, + files: Vec::new(), + } + } + + pub fn get_platform_id(&self, platform: &str) -> Option<&String> { + self.id.get(platform) + } + + pub fn get_name(&self) -> String { + self.name.values().next().cloned().unwrap_or_else(|| { + self + .pakku_id + .clone() + .unwrap_or_else(|| "unknown".to_string()) + }) + } + + pub fn matches_input(&self, input: &str) -> bool { + // Check pakku_id + if let Some(ref pakku_id) = self.pakku_id + && pakku_id == input + { + return true; + } + + // Check slugs + if self.slug.values().any(|s| s == input) { + return true; + } + + // Check names (case-insensitive) + if self.name.values().any(|n| n.eq_ignore_ascii_case(input)) { + return true; + } + + // Check IDs + if self.id.values().any(|i| i == input) { + return true; + } + + // Check aliases + if self.aliases.contains(input) { + return true; + } + + false + } + + pub fn add_platform( + &mut self, + platform: String, + id: String, + slug: String, + name: String, + ) { + self.id.insert(platform.clone(), id); + self.slug.insert(platform.clone(), slug); + self.name.insert(platform, name); + } + + pub fn merge(&mut self, other: Self) { + // Merge platform identifiers + for (platform, id) in other.id { + self.id.entry(platform.clone()).or_insert(id); + } + for (platform, slug) in other.slug { + self.slug.entry(platform.clone()).or_insert(slug); + } + for (platform, name) in other.name { + self.name.entry(platform).or_insert(name); + } + + // Merge pakku links + self.pakku_links.extend(other.pakku_links); + + // Merge files + for file in other.files { + if !self.files.iter().any(|f| f.id == file.id) { + self.files.push(file); + } + } + + // Merge aliases + self.aliases.extend(other.aliases); + } + + pub fn select_file( + &mut self, + mc_versions: &[String], + loaders: &[String], + 
) -> crate::error::Result<()> { + // Filter compatible files + let compatible_files: Vec<_> = self + .files + .iter() + .filter(|f| f.is_compatible(mc_versions, loaders)) + .collect(); + + if compatible_files.is_empty() { + return Err(crate::error::PakkerError::FileSelectionError(format!( + "No compatible files found for {}", + self.get_name() + ))); + } + + // Sort by release type (release > beta > alpha) and date + let mut sorted_files = compatible_files.clone(); + sorted_files.sort_by(|a, b| { + use super::enums::ReleaseType; + let type_order = |rt: &ReleaseType| { + match rt { + ReleaseType::Release => 0, + ReleaseType::Beta => 1, + ReleaseType::Alpha => 2, + } + }; + + type_order(&a.release_type) + .cmp(&type_order(&b.release_type)) + .then_with(|| b.date_published.cmp(&a.date_published)) + }); + + // Keep only the best file + if let Some(best_file) = sorted_files.first() { + self.files = vec![(*best_file).clone()]; + } + + Ok(()) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProjectFile { + #[serde(rename = "type")] + pub file_type: String, + pub file_name: String, + pub mc_versions: Vec, + #[serde(default)] + pub loaders: Vec, + pub release_type: ReleaseType, + pub url: String, + pub id: String, + pub parent_id: String, + pub hashes: HashMap, + pub required_dependencies: Vec, + pub size: u64, + pub date_published: String, +} + +impl ProjectFile { + pub fn is_compatible( + &self, + mc_versions: &[String], + loaders: &[String], + ) -> bool { + const VALID_LOADERS: &[&str] = + &["minecraft", "iris", "optifine", "datapack"]; + + let mc_compatible = + self.mc_versions.iter().any(|v| mc_versions.contains(v)); + + // Accept files with empty loaders, OR loaders matching request, OR valid + // special loaders + let loader_compatible = self.loaders.is_empty() + || self.loaders.iter().any(|l| loaders.contains(l)) + || self + .loaders + .iter() + .any(|l| VALID_LOADERS.contains(&l.as_str())); + + mc_compatible && loader_compatible + } +} + 
+#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_project_new() { + let project = + Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both); + assert_eq!(project.pakku_id, Some("test-id".to_string())); + assert_eq!(project.r#type, ProjectType::Mod); + assert_eq!(project.side, ProjectSide::Both); + assert!(project.pakku_links.is_empty()); + assert!(project.files.is_empty()); + } + + #[test] + fn test_project_serialization() { + let mut project = + Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both); + project + .slug + .insert("modrinth".to_string(), "test-slug".to_string()); + project + .name + .insert("modrinth".to_string(), "Test Mod".to_string()); + project + .id + .insert("modrinth".to_string(), "abc123".to_string()); + + let json = serde_json::to_string(&project).unwrap(); + let deserialized: Project = serde_json::from_str(&json).unwrap(); + + assert_eq!(deserialized.pakku_id, project.pakku_id); + assert_eq!(deserialized.r#type, project.r#type); + assert_eq!(deserialized.side, project.side); + assert_eq!( + deserialized.slug.get("modrinth"), + Some(&"test-slug".to_string()) + ); + } + + #[test] + fn test_project_file_is_compatible_with_empty_loaders() { + let file = ProjectFile { + file_type: "mod".to_string(), + file_name: "test.jar".to_string(), + mc_versions: vec!["1.20.1".to_string()], + loaders: vec![], // Empty loaders should be accepted + release_type: ReleaseType::Release, + url: "https://example.com/test.jar".to_string(), + id: "file123".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-01T00:00:00Z".to_string(), + }; + + let lockfile_mc = vec!["1.20.1".to_string()]; + let lockfile_loaders = vec!["fabric".to_string()]; + + assert!(file.is_compatible(&lockfile_mc, &lockfile_loaders)); + } + + #[test] + fn test_project_file_is_compatible_with_matching_loaders() { + let file = ProjectFile { + file_type: 
"mod".to_string(), + file_name: "test.jar".to_string(), + mc_versions: vec!["1.20.1".to_string()], + loaders: vec!["fabric".to_string()], + release_type: ReleaseType::Release, + url: "https://example.com/test.jar".to_string(), + id: "file123".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-01T00:00:00Z".to_string(), + }; + + let lockfile_mc = vec!["1.20.1".to_string()]; + let lockfile_loaders = vec!["fabric".to_string()]; + + assert!(file.is_compatible(&lockfile_mc, &lockfile_loaders)); + } + + #[test] + fn test_project_file_is_compatible_with_valid_loaders() { + for loader in ["minecraft", "iris", "optifine", "datapack"] { + let file = ProjectFile { + file_type: "mod".to_string(), + file_name: "test.jar".to_string(), + mc_versions: vec!["1.20.1".to_string()], + loaders: vec![loader.to_string()], + release_type: ReleaseType::Release, + url: "https://example.com/test.jar".to_string(), + id: "file123".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-01T00:00:00Z".to_string(), + }; + + let lockfile_mc = vec!["1.20.1".to_string()]; + let lockfile_loaders = vec!["fabric".to_string()]; + + assert!( + file.is_compatible(&lockfile_mc, &lockfile_loaders), + "Failed for valid loader: {}", + loader + ); + } + } + + #[test] + fn test_project_file_incompatible() { + let file = ProjectFile { + file_type: "mod".to_string(), + file_name: "test.jar".to_string(), + mc_versions: vec!["1.19.4".to_string()], + loaders: vec!["forge".to_string()], + release_type: ReleaseType::Release, + url: "https://example.com/test.jar".to_string(), + id: "file123".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-01T00:00:00Z".to_string(), + }; + + let lockfile_mc = vec!["1.20.1".to_string()]; + let 
lockfile_loaders = vec!["fabric".to_string()]; + + assert!(!file.is_compatible(&lockfile_mc, &lockfile_loaders)); + } + + #[test] + fn test_project_select_file() { + let mut project = + Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both); + + project.files.push(ProjectFile { + file_type: "mod".to_string(), + file_name: "alpha.jar".to_string(), + mc_versions: vec!["1.20.1".to_string()], + loaders: vec!["fabric".to_string()], + release_type: ReleaseType::Alpha, + url: "https://example.com/alpha.jar".to_string(), + id: "file1".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-03T00:00:00Z".to_string(), + }); + + project.files.push(ProjectFile { + file_type: "mod".to_string(), + file_name: "release.jar".to_string(), + mc_versions: vec!["1.20.1".to_string()], + loaders: vec!["fabric".to_string()], + release_type: ReleaseType::Release, + url: "https://example.com/release.jar".to_string(), + id: "file2".to_string(), + parent_id: "mod123".to_string(), + hashes: HashMap::new(), + required_dependencies: vec![], + size: 1024, + date_published: "2024-01-01T00:00:00Z".to_string(), + }); + + let lockfile_mc = vec!["1.20.1".to_string()]; + let lockfile_loaders = vec!["fabric".to_string()]; + + let result = project.select_file(&lockfile_mc, &lockfile_loaders); + assert!(result.is_ok()); + } +} diff --git a/src/platform.rs b/src/platform.rs new file mode 100644 index 0000000..af4a9a0 --- /dev/null +++ b/src/platform.rs @@ -0,0 +1,102 @@ +mod curseforge; +mod github; +mod modrinth; +mod traits; + +use std::sync::Arc; + +pub use curseforge::CurseForgePlatform; +pub use github::GitHubPlatform; +pub use modrinth::ModrinthPlatform; +use once_cell::sync::Lazy; +pub use traits::PlatformClient; + +use crate::{error::Result, rate_limiter::RateLimiter}; + +static RATE_LIMITER: Lazy> = + Lazy::new(|| Arc::new(RateLimiter::new(None))); + +pub fn create_platform( + platform: 
&str, + api_key: Option, +) -> Result> { + let client = create_client(platform, api_key)?; + let platform_name = platform.to_string(); + Ok(Box::new(RateLimitedPlatform { + platform: client, + rate_limiter: RATE_LIMITER.clone(), + platform_name, + })) +} + +fn create_client( + platform: &str, + api_key: Option, +) -> Result> { + match platform { + "modrinth" => Ok(Box::new(ModrinthPlatform::new())), + "curseforge" => Ok(Box::new(CurseForgePlatform::new(api_key))), + "github" => Ok(Box::new(GitHubPlatform::new(api_key))), + _ => { + Err(crate::error::PakkerError::ConfigError(format!( + "Unknown platform: {platform}" + ))) + }, + } +} + +struct RateLimitedPlatform { + platform: Box, + rate_limiter: Arc, + platform_name: String, +} + +#[async_trait::async_trait] +impl PlatformClient for RateLimitedPlatform { + async fn request_project( + &self, + identifier: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result { + self.rate_limiter.wait_for(&self.platform_name).await; + self + .platform + .request_project(identifier, mc_versions, loaders) + .await + } + + async fn request_project_files( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result> { + self.rate_limiter.wait_for(&self.platform_name).await; + self + .platform + .request_project_files(project_id, mc_versions, loaders) + .await + } + + async fn request_project_with_files( + &self, + identifier: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result { + self.rate_limiter.wait_for(&self.platform_name).await; + self + .platform + .request_project_with_files(identifier, mc_versions, loaders) + .await + } + + async fn lookup_by_hash( + &self, + hash: &str, + ) -> Result> { + self.rate_limiter.wait_for(&self.platform_name).await; + self.platform.lookup_by_hash(hash).await + } +} diff --git a/src/platform/curseforge.rs b/src/platform/curseforge.rs new file mode 100644 index 0000000..5419501 --- /dev/null +++ b/src/platform/curseforge.rs @@ -0,0 +1,383 @@ 
+use std::collections::HashMap; + +use async_trait::async_trait; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +use super::traits::PlatformClient; +use crate::{ + error::{PakkerError, Result}, + model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType}, + utils::generate_pakku_id, +}; + +const CURSEFORGE_API_BASE: &str = "https://api.curseforge.com/v1"; +const LOADER_VERSION_TYPE_ID: i32 = 68441; + +pub struct CurseForgePlatform { + client: Client, + api_key: Option, +} + +impl CurseForgePlatform { + pub fn new(api_key: Option) -> Self { + Self { + client: Client::new(), + api_key, + } + } + + fn get_headers(&self) -> Result { + let mut headers = reqwest::header::HeaderMap::new(); + + if let Some(api_key) = &self.api_key { + headers.insert( + "x-api-key", + reqwest::header::HeaderValue::from_str(api_key).map_err(|_| { + PakkerError::ConfigError("Invalid API key".to_string()) + })?, + ); + } else { + return Err(PakkerError::ConfigError( + "CurseForge API key required".to_string(), + )); + } + + Ok(headers) + } + + const fn map_class_id(class_id: u32) -> ProjectType { + match class_id { + 6 => ProjectType::Mod, + 12 => ProjectType::ResourcePack, + 6945 => ProjectType::DataPack, + 6552 => ProjectType::Shader, + 17 => ProjectType::World, + _ => ProjectType::Mod, + } + } + + const fn map_release_type(release_type: u32) -> ReleaseType { + match release_type { + 1 => ReleaseType::Release, + 2 => ReleaseType::Beta, + 3 => ReleaseType::Alpha, + _ => ReleaseType::Release, + } + } + + fn convert_project(&self, cf_project: CurseForgeProject) -> Project { + let pakku_id = generate_pakku_id(); + let project_type = Self::map_class_id(cf_project.class_id.unwrap_or(6)); + + let mut project = Project::new(pakku_id, project_type, ProjectSide::Both); + + project.add_platform( + "curseforge".to_string(), + cf_project.id.to_string(), + cf_project.slug.clone(), + cf_project.name, + ); + + project.redistributable = false; + project + } + + fn convert_file( + 
&self, + cf_file: CurseForgeFile, + project_id: &str, + ) -> ProjectFile { + let mut hashes = HashMap::new(); + + for hash in cf_file.hashes { + hashes.insert(hash.algo.to_lowercase(), hash.value.clone()); + } + + let mc_versions: Vec = cf_file.game_versions.clone(); + + // Extract loaders from sortableGameVersions with LOADER_VERSION_TYPE_ID + let loaders: Vec = cf_file + .sortable_game_versions + .iter() + .filter(|v| v.game_version_type_id == Some(LOADER_VERSION_TYPE_ID)) + .map(|v| v.game_version_name.to_lowercase()) + .collect(); + + ProjectFile { + file_type: "mod".to_string(), + file_name: cf_file.file_name.clone(), + mc_versions, + loaders, + release_type: Self::map_release_type(cf_file.release_type.unwrap_or(1)), + url: cf_file.download_url.clone().unwrap_or_else(|| { + format!( + "https://edge.forgecdn.net/files/{}/{}/{}", + cf_file.id / 1000, + cf_file.id % 1000, + cf_file.file_name + ) + }), + id: cf_file.id.to_string(), + parent_id: project_id.to_string(), + hashes, + required_dependencies: cf_file + .dependencies + .iter() + .filter(|d| d.relation_type == 3) + .map(|d| d.mod_id.to_string()) + .collect(), + size: cf_file.file_length, + date_published: cf_file.file_date.clone(), + } + } + + async fn search_project_by_slug( + &self, + slug: &str, + ) -> Result { + let url = + format!("{CURSEFORGE_API_BASE}/mods/search?gameId=432&slug={slug}"); + + let response = self + .client + .get(&url) + .headers(self.get_headers()?) 
+ .send() + .await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(slug.to_string())); + } + + let result: CurseForgeSearchResponse = response.json().await?; + + result + .data + .into_iter() + .find(|p| p.slug == slug) + .ok_or_else(|| PakkerError::ProjectNotFound(slug.to_string())) + } +} + +#[async_trait] +impl PlatformClient for CurseForgePlatform { + async fn request_project( + &self, + identifier: &str, + _mc_versions: &[String], + _loaders: &[String], + ) -> Result { + if let Ok(mod_id) = identifier.parse::() { + let url = format!("{CURSEFORGE_API_BASE}/mods/{mod_id}"); + + let response = self + .client + .get(&url) + .headers(self.get_headers()?) + .send() + .await?; + + if response.status().is_success() { + let result: CurseForgeProjectResponse = response.json().await?; + return Ok(self.convert_project(result.data)); + } + } + + let cf_project = self.search_project_by_slug(identifier).await?; + Ok(self.convert_project(cf_project)) + } + + async fn request_project_files( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result> { + let mut url = format!("{CURSEFORGE_API_BASE}/mods/{project_id}/files"); + + // Add query parameters for server-side filtering (Pakku-compatible) + let mut query_params = Vec::new(); + + // Add gameVersionTypeId for each MC version (requires lookup) + if !mc_versions.is_empty() { + // Fetch game version type IDs + // Add MC version gameVersionTypeId = 73250 for Minecraft versions + for mc_version in mc_versions { + query_params.push(("gameVersion", mc_version.clone())); + } + query_params.push(("gameVersionTypeId", "73250".to_string())); + } + + // Add mod loader types + if !loaders.is_empty() { + let loader_str = loaders.join(","); + query_params.push(("modLoaderTypes", loader_str)); + } + + if !query_params.is_empty() { + let query_string = query_params + .iter() + .map(|(k, v)| format!("{k}={v}")) + .collect::>() + .join("&"); + url = 
format!("{url}?{query_string}"); + } + + let response = self + .client + .get(&url) + .headers(self.get_headers()?) + .send() + .await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(project_id.to_string())); + } + + let result: CurseForgeFilesResponse = response.json().await?; + + let files: Vec = result + .data + .into_iter() + .map(|f| self.convert_file(f, project_id)) + .collect(); + + Ok(files) + } + + async fn request_project_with_files( + &self, + identifier: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result { + let mut project = self + .request_project(identifier, mc_versions, loaders) + .await?; + let project_id = project + .get_platform_id("curseforge") + .ok_or_else(|| { + PakkerError::InternalError("Missing curseforge ID".to_string()) + })? + .clone(); + + let files = self + .request_project_files(&project_id, mc_versions, loaders) + .await?; + project.files = files; + + Ok(project) + } + + async fn lookup_by_hash(&self, hash: &str) -> Result> { + // CurseForge uses Murmur2 hash for file fingerprints + let fingerprint = hash + .parse::() + .map_err(|_| PakkerError::InvalidHash(hash.to_string()))?; + + let url = format!("{CURSEFORGE_API_BASE}/fingerprints"); + let response = self + .client + .post(&url) + .headers(self.get_headers()?) + .json(&serde_json::json!({ + "fingerprints": [fingerprint] + })) + .send() + .await?; + + if !response.status().is_success() { + return Ok(None); + } + + let response_data: serde_json::Value = response.json().await?; + + if let Some(matches) = response_data["data"]["exactMatches"].as_array() + && let Some(first_match) = matches.first() + && let Some(file) = first_match["file"].as_object() + { + let mod_id = file["modId"] + .as_u64() + .ok_or_else(|| { + PakkerError::InvalidResponse("Missing modId".to_string()) + })? 
+ .to_string(); + + return self + .request_project_with_files(&mod_id, &[], &[]) + .await + .map(Some); + } + + Ok(None) + } +} + +// CurseForge API models +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeProject { + id: u32, + name: String, + slug: String, + #[serde(rename = "classId")] + class_id: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeGameVersion { + #[serde(rename = "gameVersionName")] + game_version_name: String, + #[serde(rename = "gameVersionTypeId")] + game_version_type_id: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeHash { + algo: String, + value: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeDependency { + #[serde(rename = "modId")] + mod_id: u32, + #[serde(rename = "relationType")] + relation_type: u32, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeProjectResponse { + data: CurseForgeProject, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeFile { + id: u32, + #[serde(rename = "fileName")] + file_name: String, + #[serde(rename = "downloadUrl")] + download_url: Option, + #[serde(rename = "gameVersions")] + game_versions: Vec, + #[serde(rename = "sortableGameVersions")] + sortable_game_versions: Vec, + #[serde(rename = "releaseType")] + release_type: Option, + #[serde(rename = "fileLength")] + file_length: u64, + #[serde(rename = "fileDate")] + file_date: String, + hashes: Vec, + dependencies: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeFilesResponse { + data: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct CurseForgeSearchResponse { + data: Vec, +} diff --git a/src/platform/github.rs b/src/platform/github.rs new file mode 100644 index 0000000..cfc4f65 --- /dev/null +++ b/src/platform/github.rs @@ -0,0 +1,580 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use regex::Regex; +use reqwest::Client; +use 
serde::{Deserialize, Serialize}; + +use super::traits::PlatformClient; +use crate::{ + error::{PakkerError, Result}, + model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType}, + utils::generate_pakku_id, +}; + +const GITHUB_API_BASE: &str = "https://api.github.com"; + +pub struct GitHubPlatform { + client: Client, + token: Option, +} + +impl GitHubPlatform { + pub fn new(token: Option) -> Self { + Self { + client: Client::new(), + token, + } + } + + fn get_headers(&self) -> Result { + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert( + reqwest::header::USER_AGENT, + reqwest::header::HeaderValue::from_static("Pakker"), + ); + + if let Some(token) = &self.token { + headers.insert( + reqwest::header::AUTHORIZATION, + reqwest::header::HeaderValue::from_str(&format!("Bearer {token}")) + .map_err(|_| { + PakkerError::ConfigError("Invalid GitHub token".to_string()) + })?, + ); + } + + Ok(headers) + } + + fn parse_repo_identifier(identifier: &str) -> Result<(String, String)> { + // Expected formats: + // - "owner/repo" + // - "github:owner/repo" + // - "https://github.com/owner/repo" + + let identifier = identifier + .trim_start_matches("github:") + .trim_start_matches("https://github.com/") + .trim_start_matches("http://github.com/") + .trim_end_matches(".git"); + + let parts: Vec<&str> = identifier.split('/').collect(); + if parts.len() >= 2 { + Ok((parts[0].to_string(), parts[1].to_string())) + } else { + Err(PakkerError::InvalidInput(format!( + "Invalid GitHub repository identifier: {identifier}" + ))) + } + } + + fn convert_release( + &self, + owner: &str, + repo: &str, + release: GitHubRelease, + ) -> Project { + let pakku_id = generate_pakku_id(); + let mut project = + Project::new(pakku_id, ProjectType::Mod, ProjectSide::Both); + + let repo_full = format!("{owner}/{repo}"); + project.add_platform( + "github".to_string(), + repo_full.clone(), + repo_full, + release.name.unwrap_or_else(|| repo.to_string()), + ); + + project + } +} + 
+// Helper functions for extracting metadata from GitHub releases +fn extract_mc_versions(tag: &str, asset_name: &str) -> Vec { + let re = Regex::new(r"(?:^|[^\d.])(\d+\.\d+(?:\.\d+)?)(?:[^\d]|$)").unwrap(); + let mut versions = Vec::new(); + + log::debug!("Extracting MC versions from tag='{tag}', asset='{asset_name}'"); + + for text in &[tag, asset_name] { + for cap in re.captures_iter(text) { + if let Some(version) = cap.get(1) { + let v = version.as_str().to_string(); + if !versions.contains(&v) { + log::debug!(" Found MC version: {v}"); + versions.push(v); + } + } + } + } + + log::debug!("Extracted MC versions: {versions:?}"); + versions +} + +fn extract_loaders(tag: &str, asset_name: &str) -> Vec { + let mut loaders = Vec::new(); + let text = format!("{} {}", tag.to_lowercase(), asset_name.to_lowercase()); + + log::debug!("Extracting loaders from: '{text}'"); + + if text.contains("fabric") { + log::debug!(" Found loader: fabric"); + loaders.push("fabric".to_string()); + } + if text.contains("forge") && !text.contains("neoforge") { + log::debug!(" Found loader: forge"); + loaders.push("forge".to_string()); + } + if text.contains("neoforge") { + log::debug!(" Found loader: neoforge"); + loaders.push("neoforge".to_string()); + } + if text.contains("quilt") { + log::debug!(" Found loader: quilt"); + loaders.push("quilt".to_string()); + } + + log::debug!("Extracted loaders: {loaders:?}"); + loaders +} + +fn detect_project_type(asset_name: &str, repo_name: &str) -> ProjectType { + let name_lower = asset_name.to_lowercase(); + let repo_lower = repo_name.to_lowercase(); + + // Check for resourcepack indicators + if name_lower.contains("resourcepack") + || name_lower.contains("resource-pack") + || name_lower.contains("texture") + || repo_lower.contains("resourcepack") + || repo_lower.contains("texture") + { + return ProjectType::ResourcePack; + } + + // Check for datapack indicators + if name_lower.contains("datapack") + || name_lower.contains("data-pack") + || 
repo_lower.contains("datapack") + { + return ProjectType::DataPack; + } + + // Check for shader indicators + if name_lower.contains("shader") || repo_lower.contains("shader") { + return ProjectType::Shader; + } + + // Check for world/save indicators + if name_lower.contains("world") + || name_lower.contains("save") + || repo_lower.contains("world") + { + return ProjectType::World; + } + + // Default to mod for .jar files + ProjectType::Mod +} + +impl GitHubPlatform { + fn convert_asset( + &self, + asset: GitHubAsset, + release: &GitHubRelease, + repo_id: &str, + repo_name: &str, + ) -> ProjectFile { + let hashes = HashMap::new(); + + // Extract MC versions and loaders from tag and asset name + let mc_versions = extract_mc_versions(&release.tag_name, &asset.name); + let loaders = extract_loaders(&release.tag_name, &asset.name); + + // Detect project type from asset name and repo + let file_type = match detect_project_type(&asset.name, repo_name) { + ProjectType::Mod => "mod", + ProjectType::ResourcePack => "resourcepack", + ProjectType::DataPack => "datapack", + ProjectType::Shader => "shader", + ProjectType::World => "world", + }; + + ProjectFile { + file_type: file_type.to_string(), + file_name: asset.name.clone(), + mc_versions, + loaders, + release_type: if release.prerelease { + ReleaseType::Beta + } else { + ReleaseType::Release + }, + url: asset.browser_download_url.clone(), + id: asset.id.to_string(), + parent_id: repo_id.to_string(), + hashes, + required_dependencies: vec![], + size: asset.size, + date_published: release.published_at.clone().unwrap_or_default(), + } + } + + async fn get_latest_release( + &self, + owner: &str, + repo: &str, + ) -> Result { + let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases/latest"); + + let response = self + .client + .get(&url) + .headers(self.get_headers()?) 
+ .send() + .await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}"))); + } + + let release: GitHubRelease = response.json().await?; + Ok(release) + } + + async fn get_all_releases( + &self, + owner: &str, + repo: &str, + ) -> Result> { + let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases"); + + let response = self + .client + .get(&url) + .headers(self.get_headers()?) + .send() + .await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}"))); + } + + let releases: Vec = response.json().await?; + Ok(releases) + } +} + +#[async_trait] +impl PlatformClient for GitHubPlatform { + async fn request_project( + &self, + identifier: &str, + _mc_versions: &[String], + _loaders: &[String], + ) -> Result { + let (owner, repo) = Self::parse_repo_identifier(identifier)?; + let release = self.get_latest_release(&owner, &repo).await?; + Ok(self.convert_release(&owner, &repo, release)) + } + + async fn request_project_files( + &self, + project_id: &str, + _mc_versions: &[String], + _loaders: &[String], + ) -> Result> { + let (owner, repo) = Self::parse_repo_identifier(project_id)?; + let releases = self.get_all_releases(&owner, &repo).await?; + + let mut files = Vec::new(); + + for release in releases { + for asset in &release.assets { + // Filter for .jar files (mods) or .zip files (modpacks) + if asset.name.ends_with(".jar") || asset.name.ends_with(".zip") { + let file = + self.convert_asset(asset.clone(), &release, project_id, &repo); + files.push(file); + } + } + } + + Ok(files) + } + + async fn request_project_with_files( + &self, + identifier: &str, + _mc_versions: &[String], + _loaders: &[String], + ) -> Result { + let mut project = self + .request_project(identifier, _mc_versions, _loaders) + .await?; + + let project_id = project + .get_platform_id("github") + .ok_or_else(|| { + PakkerError::InternalError("Missing github ID".to_string()) + 
})? + .clone(); + + let files = self + .request_project_files(&project_id, _mc_versions, _loaders) + .await?; + + project.files = files; + + Ok(project) + } + + async fn lookup_by_hash(&self, hash: &str) -> Result> { + log::debug!("GitHub lookup_by_hash: searching for hash={hash}"); + + // GitHub Code Search API: search for files containing the hash + // Note: This is rate-limited (10 req/min without auth, 30 req/min with + // auth) + let url = format!("{GITHUB_API_BASE}/search/code?q={hash}+in:file"); + log::debug!("GitHub search URL: {url}"); + + let response = match self + .client + .get(&url) + .headers(self.get_headers()?) + .send() + .await + { + Ok(resp) => { + log::debug!("GitHub search response status: {}", resp.status()); + resp + }, + Err(e) => { + log::warn!("GitHub hash lookup failed: {e}"); + return Ok(None); + }, + }; + + // Handle rate limiting gracefully + if response.status().as_u16() == 403 { + log::warn!("GitHub API rate limit exceeded for hash lookup"); + return Ok(None); + } + + if !response.status().is_success() { + log::debug!( + "GitHub search returned non-success status: {}", + response.status() + ); + return Ok(None); + } + + let search_result: GitHubCodeSearchResult = match response.json().await { + Ok(result) => result, + Err(e) => { + log::warn!("Failed to parse GitHub search result: {e}"); + return Ok(None); + }, + }; + + log::debug!("GitHub search found {} items", search_result.items.len()); + + // If we found matches, try to extract repo info from first result + if let Some(item) = search_result.items.first() { + let repo_full = item.repository.full_name.clone(); + log::info!("GitHub hash lookup found match in repo: {repo_full}"); + + // Try to get the latest release for this repo + match self.request_project(&repo_full, &[], &[]).await { + Ok(project) => { + log::info!("GitHub hash lookup succeeded for {repo_full}"); + Ok(Some(project)) + }, + Err(e) => { + log::warn!("Failed to fetch project for {repo_full}: {e}"); + Ok(None) + }, 
+ } + } else { + log::debug!("GitHub hash lookup found no matches"); + Ok(None) + } + } +} + +// GitHub API models +#[derive(Debug, Clone, Deserialize, Serialize)] +struct GitHubRelease { + id: u64, + tag_name: String, + name: Option, + prerelease: bool, + published_at: Option, + assets: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct GitHubAsset { + id: u64, + name: String, + browser_download_url: String, + size: u64, +} + +#[derive(Debug, Deserialize)] +struct GitHubCodeSearchResult { + items: Vec, +} + +#[derive(Debug, Deserialize)] +struct GitHubCodeSearchItem { + repository: GitHubRepository, +} + +#[derive(Debug, Deserialize)] +struct GitHubRepository { + full_name: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_repo_identifier() { + let cases = vec![ + ("owner/repo", ("owner", "repo")), + ("github:owner/repo", ("owner", "repo")), + ("https://github.com/owner/repo", ("owner", "repo")), + ("https://github.com/owner/repo.git", ("owner", "repo")), + ]; + + for (input, (expected_owner, expected_repo)) in cases { + let (owner, repo) = GitHubPlatform::parse_repo_identifier(input).unwrap(); + assert_eq!(owner, expected_owner); + assert_eq!(repo, expected_repo); + } + } + + #[test] + fn test_parse_repo_identifier_invalid() { + let result = GitHubPlatform::parse_repo_identifier("invalid"); + assert!(result.is_err()); + } + + #[test] + fn test_extract_mc_versions() { + let cases = vec![ + ("1.20.4-forge-1.0.0", "", vec!["1.20.4", "1.0.0"]), + ("fabric-1.21-1.0.0", "", vec!["1.21"]), + ("mc1.20.4", "", vec!["1.20.4"]), + ("1.20.1-1.20.2", "", vec!["1.20.1"]), + ("mymod-1.0.0", "", vec!["1.0.0"]), + ("mc1.20.4-v1.0.0", "", vec!["1.20.4", "1.0.0"]), + ("v1.0.0", "mymod-1.20.4.jar", vec!["1.0.0", "1.20.4"]), + ("1.20.1-47.1.0", "", vec!["1.20.1"]), + ("v0.5.1+1.20.1", "", vec!["0.5.1"]), + ("1.20.4-1.0.0+fabric", "", vec!["1.20.4"]), + ("mc1.19.2-v2.1.3", "", vec!["1.19.2", "2.1.3"]), + ("1.20-Snapshot", "", 
vec!["1.20"]), + ("v3.0.0-beta.2+mc1.20.4", "", vec!["3.0.0", "1.20.4"]), + ("1.16.5-1.0", "", vec!["1.16.5"]), + ("forge-1.20.1-47.2.0", "", vec!["1.20.1"]), + ("1.20.2-neoforge-20.2.59", "", vec!["1.20.2", "20.2.59"]), + ("release-1.20.1", "", vec!["1.20.1"]), + ("1.19.4_v2.5.0", "", vec!["1.19.4", "2.5.0"]), + ("MC1.18.2-v1.0.0", "", vec!["1.18.2", "1.0.0"]), + ("1.20.1-forge-v1.2.3", "", vec!["1.20.1", "1.2.3"]), + ("Minecraft_1.19.2-v0.8.1", "", vec!["1.19.2", "0.8.1"]), + ("build-1.20.4-2.1.0", "", vec!["1.20.4"]), + ("1.20.x-1.5.0", "", vec!["1.20", "1.5.0"]), + ("1.12.2-14.23.5.2859", "", vec!["1.12.2"]), + ]; + + for (tag, asset, expected) in cases { + let result = extract_mc_versions(tag, asset); + assert_eq!( + result, expected, + "Failed for tag: {}, asset: {}", + tag, asset + ); + } + } + + #[test] + fn test_extract_loaders() { + let cases = vec![ + ("1.20.4-forge-1.0.0", "", vec!["forge"]), + ("fabric-1.21-1.0.0", "", vec!["fabric"]), + ("1.20.1-neoforge", "", vec!["neoforge"]), + ("quilt-1.20.4", "", vec!["quilt"]), + ("mymod-1.0.0", "", vec![]), + ("1.20.4-forge-fabric", "", vec!["fabric", "forge"]), /* Alphabetical + * order */ + ("v1.0.0", "mymod-fabric-1.20.4.jar", vec!["fabric"]), + // Real-world patterns + ("1.20.1-forge-47.1.0", "", vec!["forge"]), + ("fabric-api-0.92.0+1.20.4", "", vec!["fabric"]), + ("1.19.2-neoforge-20.2.59", "", vec!["neoforge"]), + ("quilt-loader-0.23.0", "", vec!["quilt"]), + ("1.20.4-Fabric-1.0.0", "", vec!["fabric"]), // Capitalized + ("forge-1.20.1", "", vec!["forge"]), + ("v1.0.0-fabric", "", vec!["fabric"]), + ("1.18.2-forge+fabric", "", vec!["fabric", "forge"]), // Both loaders + ("NeoForge-1.20.2", "", vec!["neoforge"]), /* Capitalized + * NeoForge */ + ("1.12.2-forge-14.23.5.2859", "", vec!["forge"]), // Old format + ]; + + for (tag, asset, expected) in cases { + let result = extract_loaders(tag, asset); + assert_eq!( + result, expected, + "Failed for tag: {}, asset: {}", + tag, asset + ); + } + } + + #[test] + 
fn test_detect_project_type() { + let cases = vec![ + ("mymod.jar", "mymod", crate::model::ProjectType::Mod), + ( + "texture-pack.zip", + "texture", + crate::model::ProjectType::ResourcePack, + ), + ( + "resourcepack.zip", + "resources", + crate::model::ProjectType::ResourcePack, + ), + ( + "datapack.zip", + "data-stuff", + crate::model::ProjectType::DataPack, + ), + ( + "shader.zip", + "awesome-shaders", + crate::model::ProjectType::Shader, + ), + ("world.zip", "my-world", crate::model::ProjectType::World), + ("save.zip", "survival", crate::model::ProjectType::World), + ("unknown.zip", "stuff", crate::model::ProjectType::Mod), + ]; + + for (filename, repo_name, expected) in cases { + let result = detect_project_type(filename, repo_name); + assert_eq!( + result, expected, + "Failed for filename: {}, repo: {}", + filename, repo_name + ); + } + } +} diff --git a/src/platform/modrinth.rs b/src/platform/modrinth.rs new file mode 100644 index 0000000..34b3790 --- /dev/null +++ b/src/platform/modrinth.rs @@ -0,0 +1,282 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +use super::traits::PlatformClient; +use crate::{ + error::{PakkerError, Result}, + model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType}, + utils::generate_pakku_id, +}; + +const MODRINTH_API_BASE: &str = "https://api.modrinth.com/v2"; + +pub struct ModrinthPlatform { + client: Client, +} + +impl ModrinthPlatform { + pub fn new() -> Self { + Self { + client: Client::new(), + } + } + + fn map_project_type(type_str: &str) -> ProjectType { + match type_str { + "mod" => ProjectType::Mod, + "resourcepack" => ProjectType::ResourcePack, + "datapack" => ProjectType::DataPack, + "shader" => ProjectType::Shader, + _ => ProjectType::Mod, + } + } + + const fn map_side(client: bool, server: bool) -> ProjectSide { + match (client, server) { + (true, true) => ProjectSide::Both, + (true, false) => ProjectSide::Client, + (false, 
true) => ProjectSide::Server, + _ => ProjectSide::Both, + } + } + + fn map_release_type(type_str: &str) -> ReleaseType { + match type_str { + "release" => ReleaseType::Release, + "beta" => ReleaseType::Beta, + "alpha" => ReleaseType::Alpha, + _ => ReleaseType::Release, + } + } + + fn convert_project(&self, mr_project: ModrinthProject) -> Project { + let pakku_id = generate_pakku_id(); + let mut project = Project::new( + pakku_id, + Self::map_project_type(&mr_project.project_type), + Self::map_side( + mr_project.client_side != "unsupported", + mr_project.server_side != "unsupported", + ), + ); + + project.add_platform( + "modrinth".to_string(), + mr_project.id.clone(), + mr_project.slug.clone(), + mr_project.title, + ); + + project + } + + fn convert_version( + &self, + mr_version: ModrinthVersion, + project_id: &str, + ) -> ProjectFile { + let mut hashes = HashMap::new(); + + // Get primary file + let primary_file = mr_version + .files + .iter() + .find(|f| f.primary) + .or_else(|| mr_version.files.first()) + .expect("Version must have at least one file"); + + for (algo, hash) in &primary_file.hashes { + hashes.insert(algo.clone(), hash.clone()); + } + + ProjectFile { + file_type: "mod".to_string(), + file_name: primary_file.filename.clone(), + mc_versions: mr_version.game_versions.clone(), + loaders: mr_version.loaders.clone(), + release_type: Self::map_release_type(&mr_version.version_type), + url: primary_file.url.clone(), + id: mr_version.id.clone(), + parent_id: project_id.to_string(), + hashes, + required_dependencies: mr_version + .dependencies + .iter() + .filter(|d| d.dependency_type == "required") + .filter_map(|d| d.project_id.clone()) + .collect(), + size: primary_file.size, + date_published: mr_version.date_published.clone(), + } + } +} + +#[async_trait] +impl PlatformClient for ModrinthPlatform { + async fn request_project( + &self, + identifier: &str, + _mc_versions: &[String], + _loaders: &[String], + ) -> Result { + let url = 
format!("{MODRINTH_API_BASE}/project/{identifier}"); + + let response = self.client.get(&url).send().await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(identifier.to_string())); + } + + let mr_project: ModrinthProject = response.json().await?; + Ok(self.convert_project(mr_project)) + } + + async fn request_project_files( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result> { + let mut url = format!("{MODRINTH_API_BASE}/project/{project_id}/version"); + + // Add query parameters + let mut params = vec![]; + if !mc_versions.is_empty() { + params.push(format!( + "game_versions=[{}]", + mc_versions + .iter() + .map(|v| format!("\"{v}\"")) + .collect::>() + .join(",") + )); + } + if !loaders.is_empty() { + params.push(format!( + "loaders=[{}]", + loaders + .iter() + .map(|l| format!("\"{l}\"")) + .collect::>() + .join(",") + )); + } + + if !params.is_empty() { + url.push('?'); + url.push_str(¶ms.join("&")); + } + + let response = self.client.get(&url).send().await?; + + if !response.status().is_success() { + return Err(PakkerError::ProjectNotFound(project_id.to_string())); + } + + let mr_versions: Vec = response.json().await?; + + Ok( + mr_versions + .into_iter() + .map(|v| self.convert_version(v, project_id)) + .collect(), + ) + } + + async fn request_project_with_files( + &self, + identifier: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result { + let mut project = self + .request_project(identifier, mc_versions, loaders) + .await?; + let project_id = project + .get_platform_id("modrinth") + .ok_or_else(|| { + PakkerError::InternalError("Missing modrinth ID".to_string()) + })? 
+ .clone(); + + let files = self + .request_project_files(&project_id, mc_versions, loaders) + .await?; + project.files = files; + + Ok(project) + } + + async fn lookup_by_hash(&self, hash: &str) -> Result> { + // Modrinth uses SHA-1 hash for file lookups + let url = format!("{MODRINTH_API_BASE}/version_file/{hash}"); + + let response = self.client.get(&url).send().await?; + + if response.status().as_u16() == 404 { + return Ok(None); + } + + if !response.status().is_success() { + return Err(PakkerError::PlatformApiError(format!( + "Modrinth API error: {}", + response.status() + ))); + } + + let version_data: serde_json::Value = response.json().await?; + + let project_id = version_data["project_id"].as_str().ok_or_else(|| { + PakkerError::InvalidResponse("Missing project_id".to_string()) + })?; + + self + .request_project_with_files(project_id, &[], &[]) + .await + .map(Some) + } +} + +// Modrinth API models +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ModrinthProject { + id: String, + slug: String, + title: String, + #[serde(rename = "project_type")] + project_type: String, + client_side: String, + server_side: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ModrinthVersion { + id: String, + project_id: String, + name: String, + version_number: String, + game_versions: Vec, + version_type: String, + loaders: Vec, + date_published: String, + files: Vec, + dependencies: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ModrinthFile { + hashes: HashMap, + url: String, + filename: String, + primary: bool, + size: u64, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +struct ModrinthDependency { + project_id: Option, + dependency_type: String, +} diff --git a/src/platform/traits.rs b/src/platform/traits.rs new file mode 100644 index 0000000..598a72a --- /dev/null +++ b/src/platform/traits.rs @@ -0,0 +1,32 @@ +use async_trait::async_trait; + +use crate::{error::Result, model::Project}; + +#[async_trait] +pub trait 
PlatformClient: Send + Sync { + /// Request a single project by identifier + async fn request_project( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result; + + /// Request files for a project + async fn request_project_files( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result>; + + /// Request a project with its files + async fn request_project_with_files( + &self, + project_id: &str, + mc_versions: &[String], + loaders: &[String], + ) -> Result; + + async fn lookup_by_hash(&self, hash: &str) -> Result>; +} diff --git a/src/rate_limiter.rs b/src/rate_limiter.rs new file mode 100644 index 0000000..dc5b97e --- /dev/null +++ b/src/rate_limiter.rs @@ -0,0 +1,104 @@ +use std::{ + collections::HashMap, + sync::Arc, + time::{Duration, Instant}, +}; + +use tokio::sync::Mutex; + +use crate::error::Result; + +#[derive(Clone)] +pub struct RateLimiter { + inner: Arc>, +} + +struct RateLimiterInner { + requests: HashMap>, + config: RateLimitConfig, +} + +#[derive(Clone, Debug)] +pub struct RateLimitConfig { + pub modrinth_requests_per_min: u32, + pub modrinth_burst: u32, + pub curseforge_requests_per_min: u32, + pub curseforge_burst: u32, + pub github_requests_per_min: u32, + pub github_burst: u32, + pub default_requests_per_min: u32, + pub default_burst: u32, +} + +impl Default for RateLimitConfig { + fn default() -> Self { + Self { + modrinth_requests_per_min: 100, + modrinth_burst: 10, + curseforge_requests_per_min: 60, + curseforge_burst: 5, + github_requests_per_min: 50, + github_burst: 5, + default_requests_per_min: 30, + default_burst: 3, + } + } +} + +impl RateLimiter { + pub fn new(config: Option) -> Self { + Self { + inner: Arc::new(Mutex::new(RateLimiterInner { + requests: HashMap::new(), + config: config.unwrap_or_default(), + })), + } + } + + pub async fn acquire(&self, platform: &str) -> Result<()> { + let config = { + let inner = self.inner.lock().await; + inner.config.clone() + }; + + 
let (rate, burst) = match platform.to_lowercase().as_str() { + "modrinth" => (config.modrinth_requests_per_min, config.modrinth_burst), + "curseforge" => { + (config.curseforge_requests_per_min, config.curseforge_burst) + }, + "github" => (config.github_requests_per_min, config.github_burst), + _ => (config.default_requests_per_min, config.default_burst), + }; + + let interval = Duration::from_secs(60) / rate.max(1); + + let mut inner = self.inner.lock().await; + let now = Instant::now(); + let platform_requests = + inner.requests.entry(platform.to_string()).or_default(); + + platform_requests + .retain(|t| now.duration_since(*t) < Duration::from_secs(60)); + + if platform_requests.len() >= burst as usize { + if let Some(oldest) = platform_requests.first() { + let wait_time = interval.saturating_sub(now.duration_since(*oldest)); + if wait_time > Duration::ZERO { + drop(inner); + tokio::time::sleep(wait_time).await; + } + } + } + + let mut inner = self.inner.lock().await; + let platform_requests = + inner.requests.entry(platform.to_string()).or_default(); + platform_requests.push(Instant::now()); + + Ok(()) + } + + pub async fn wait_for(&self, platform: &str) { + let _ = self.acquire(platform).await; + } +} diff --git a/src/resolver.rs b/src/resolver.rs new file mode 100644 index 0000000..906d1b5 --- /dev/null +++ b/src/resolver.rs @@ -0,0 +1,150 @@ +use std::collections::{HashMap, HashSet}; + +use crate::{ + error::{PakkerError, Result}, + model::{LockFile, Project}, + platform::PlatformClient, +}; + +pub struct DependencyResolver { + visited: HashSet, + path: Vec, +} + +impl DependencyResolver { + pub fn new() -> Self { + Self { + visited: HashSet::new(), + path: Vec::new(), + } + } + + pub fn resolve<'a>( + &'a mut self, + project: &'a mut Project, + lockfile: &'a mut LockFile, + platforms: &'a HashMap>, + ) -> std::pin::Pin< + Box>> + 'a>, + > { + Box::pin(async move { + let mut resolved = Vec::new(); + + if let Some(ref pakku_id) = project.pakku_id { + if 
lockfile.get_project(pakku_id).is_some() { + log::debug!("Project already in lockfile: {}", project.get_name()); + return Ok(resolved); + } + if self.path.contains(pakku_id) { + let cycle_path = self.path.join(" -> "); + return Err(PakkerError::CircularDependency(format!( + "{cycle_path} -> {pakku_id}" + ))); + } + self.path.push(pakku_id.clone()); + } else { + return Ok(resolved); + } + + let mut dependencies_set: HashSet = HashSet::new(); + for file in &project.files { + for dep_id in &file.required_dependencies { + dependencies_set.insert(dep_id.clone()); + } + } + let dependencies: Vec = dependencies_set.into_iter().collect(); + + for dep_id in dependencies { + let existing_pakku_id = lockfile + .find_project_by_platform_id("modrinth", &dep_id) + .or_else(|| { + lockfile.find_project_by_platform_id("curseforge", &dep_id) + }) + .or_else(|| lockfile.find_project_by_platform_id("github", &dep_id)) + .map(|p| p.pakku_id.clone()); + + if let Some(Some(existing_id)) = existing_pakku_id { + if let Some(ref my_id) = project.pakku_id { + project.pakku_links.insert(existing_id.clone()); + if let Some(existing_mut) = lockfile.find_project_mut(&existing_id) + { + existing_mut.pakku_links.insert(my_id.clone()); + } + } + continue; + } + + let mut dep_project = + self.fetch_dependency(&dep_id, lockfile, platforms).await?; + + if let (Some(dep_id), Some(my_id)) = + (&dep_project.pakku_id, &project.pakku_id) + { + project.pakku_links.insert(dep_id.clone()); + dep_project.pakku_links.insert(my_id.clone()); + } + + let mut sub_deps = + self.resolve(&mut dep_project, lockfile, platforms).await?; + + resolved.push(dep_project); + resolved.append(&mut sub_deps); + } + + if let Some(ref pakku_id) = project.pakku_id { + self.visited.insert(pakku_id.clone()); + } + self.path.pop(); + + Ok(resolved) + }) + } + + async fn fetch_dependency( + &self, + dep_id: &str, + lockfile: &LockFile, + platforms: &HashMap>, + ) -> Result { + let mut projects = Vec::new(); + + for (platform_name, 
client) in platforms { + match client + .request_project_with_files( + dep_id, + &lockfile.mc_versions, + &lockfile.get_loader_names(), + ) + .await + { + Ok(project) => { + log::info!("Found dependency {dep_id} on {platform_name}"); + projects.push(project); + }, + Err(e) => { + log::debug!("Could not find {dep_id} on {platform_name}: {e}"); + }, + } + } + + if projects.is_empty() { + return Err(PakkerError::ProjectNotFound(dep_id.to_string())); + } + + if projects.len() == 1 { + Ok(projects.into_iter().next().unwrap()) + } else { + let mut merged = projects.remove(0); + for project in projects { + merged.merge(project); + } + Ok(merged) + } + } +} + +impl Default for DependencyResolver { + fn default() -> Self { + Self::new() + } +} diff --git a/src/ui_utils.rs b/src/ui_utils.rs new file mode 100644 index 0000000..095e4d9 --- /dev/null +++ b/src/ui_utils.rs @@ -0,0 +1,77 @@ +// UI utility functions for terminal formatting and interactive prompts + +use std::io; + +use dialoguer::{Confirm, MultiSelect, Select, theme::ColorfulTheme}; + +/// Creates a terminal hyperlink using OSC 8 escape sequence +/// Format: \x1b]8;;\x1b\\\x1b]8;;\x1b\\ +pub fn hyperlink(url: &str, text: &str) -> String { + format!("\x1b]8;;{url}\x1b\\{text}\x1b]8;;\x1b\\") +} + +/// Prompts user with a yes/no question +/// Returns true for yes, false for no +pub fn prompt_yes_no(question: &str, default: bool) -> io::Result { + Confirm::with_theme(&ColorfulTheme::default()) + .with_prompt(question) + .default(default) + .interact() + .map_err(io::Error::other) +} + +/// Prompts user to select from a list of options +/// Returns the index of the selected option +#[allow(dead_code)] +pub fn prompt_select(question: &str, options: &[&str]) -> io::Result { + Select::with_theme(&ColorfulTheme::default()) + .with_prompt(question) + .items(options) + .default(0) + .interact() + .map_err(io::Error::other) +} + +/// Prompts user to select multiple items from a list +/// Returns the indices of the selected 
options +#[allow(dead_code)] +pub fn prompt_multi_select( + question: &str, + options: &[&str], +) -> io::Result> { + MultiSelect::with_theme(&ColorfulTheme::default()) + .with_prompt(question) + .items(options) + .interact() + .map_err(io::Error::other) +} + +/// Creates a formatted project URL for Modrinth +#[allow(dead_code)] +pub fn modrinth_project_url(slug: &str) -> String { + format!("https://modrinth.com/mod/{slug}") +} + +/// Creates a formatted project URL for `CurseForge` +#[allow(dead_code)] +pub fn curseforge_project_url(project_id: &str) -> String { + format!("https://www.curseforge.com/minecraft/mc-mods/{project_id}") +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_hyperlink() { + let result = hyperlink("https://example.com", "Example"); + assert!(result.contains("https://example.com")); + assert!(result.contains("Example")); + } + + #[test] + fn test_modrinth_url() { + let url = modrinth_project_url("sodium"); + assert_eq!(url, "https://modrinth.com/mod/sodium"); + } +} diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..582c559 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,6 @@ +pub mod hash; +pub mod id; +pub mod prompt; + +pub use hash::verify_hash; +pub use id::generate_pakku_id; diff --git a/src/utils/hash.rs b/src/utils/hash.rs new file mode 100644 index 0000000..88abdde --- /dev/null +++ b/src/utils/hash.rs @@ -0,0 +1,231 @@ +use std::{ + fs::File, + io::{BufReader, Read}, + path::Path, +}; + +use md5::{Digest as Md5Digest, Md5}; +use sha1::Sha1; +use sha2::{Sha256, Sha512}; + +use crate::error::{PakkerError, Result}; + +/// Compute Murmur2 hash (32-bit) for `CurseForge` fingerprinting +#[allow(dead_code)] +pub fn compute_murmur2_hash(data: &[u8]) -> u32 { + murmur2_hash(data, 1) +} + +/// Murmur2 hash implementation +#[allow(dead_code)] +fn murmur2_hash(data: &[u8], seed: u32) -> u32 { + const M: u32 = 0x5BD1E995; + const R: i32 = 24; + + let mut h: u32 = seed ^ (data.len() as u32); + let mut 
chunks = data.chunks_exact(4); + + for chunk in chunks.by_ref() { + let mut k = u32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]); + k = k.wrapping_mul(M); + k ^= k >> R; + k = k.wrapping_mul(M); + + h = h.wrapping_mul(M); + h ^= k; + } + + let remainder = chunks.remainder(); + match remainder.len() { + 3 => { + h ^= u32::from(remainder[2]) << 16; + h ^= u32::from(remainder[1]) << 8; + h ^= u32::from(remainder[0]); + h = h.wrapping_mul(M); + }, + 2 => { + h ^= u32::from(remainder[1]) << 8; + h ^= u32::from(remainder[0]); + h = h.wrapping_mul(M); + }, + 1 => { + h ^= u32::from(remainder[0]); + h = h.wrapping_mul(M); + }, + _ => {}, + } + + h ^= h >> 13; + h = h.wrapping_mul(M); + h ^= h >> 15; + + h +} + +/// Compute SHA1 hash of a file +pub fn compute_sha1>(path: P) -> Result { + let file = File::open(path)?; + let mut reader = BufReader::new(file); + let mut hasher = Sha1::new(); + let mut buffer = [0; 8192]; + + loop { + let n = reader.read(&mut buffer)?; + if n == 0 { + break; + } + hasher.update(&buffer[..n]); + } + + Ok(format!("{:x}", hasher.finalize())) +} + +/// Compute SHA256 hash of a file +pub fn compute_sha256>(path: P) -> Result { + let file = File::open(path)?; + let mut reader = BufReader::new(file); + let mut hasher = Sha256::new(); + let mut buffer = [0; 8192]; + + loop { + let n = reader.read(&mut buffer)?; + if n == 0 { + break; + } + hasher.update(&buffer[..n]); + } + + Ok(format!("{:x}", hasher.finalize())) +} + +/// Compute SHA256 hash of byte data +pub fn compute_sha256_bytes(data: &[u8]) -> String { + let mut hasher = Sha256::new(); + hasher.update(data); + format!("{:x}", hasher.finalize()) +} + +/// Compute SHA512 hash of a file +pub fn compute_sha512>(path: P) -> Result { + let file = File::open(path)?; + let mut reader = BufReader::new(file); + let mut hasher = Sha512::new(); + let mut buffer = [0; 8192]; + + loop { + let n = reader.read(&mut buffer)?; + if n == 0 { + break; + } + hasher.update(&buffer[..n]); + } + + 
Ok(format!("{:x}", hasher.finalize())) +} + +/// Compute MD5 hash of a file +pub fn compute_md5>(path: P) -> Result { + let file = File::open(path)?; + let mut reader = BufReader::new(file); + let mut hasher = Md5::new(); + let mut buffer = [0; 8192]; + + loop { + let n = reader.read(&mut buffer)?; + if n == 0 { + break; + } + hasher.update(&buffer[..n]); + } + + Ok(format!("{:x}", hasher.finalize())) +} + +/// Verify a file's hash against expected value +pub fn verify_hash>( + path: P, + algorithm: &str, + expected: &str, +) -> Result { + let path = path.as_ref(); + let actual = match algorithm { + "sha1" => compute_sha1(path)?, + "sha256" => compute_sha256(path)?, + "sha512" => compute_sha512(path)?, + "md5" => compute_md5(path)?, + _ => { + return Err(PakkerError::InternalError(format!( + "Unknown hash algorithm: {algorithm}" + ))); + }, + }; + + Ok(actual.eq_ignore_ascii_case(expected)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_murmur2_hash_deterministic() { + let data = b"hello world"; + let hash1 = compute_murmur2_hash(data); + let hash2 = compute_murmur2_hash(data); + assert_eq!(hash1, hash2, "Murmur2 hash must be deterministic"); + } + + #[test] + fn test_murmur2_hash_empty() { + let data = b""; + let hash = compute_murmur2_hash(data); + assert_ne!(hash, 0, "Empty data should produce a non-zero hash"); + } + + #[test] + fn test_murmur2_hash_different_inputs() { + let hash1 = compute_murmur2_hash(b"hello"); + let hash2 = compute_murmur2_hash(b"world"); + assert_ne!( + hash1, hash2, + "Different inputs should produce different hashes" + ); + } + + #[test] + fn test_sha256_bytes_deterministic() { + let data = b"test data"; + let hash1 = compute_sha256_bytes(data); + let hash2 = compute_sha256_bytes(data); + assert_eq!(hash1, hash2, "SHA256 must be deterministic"); + } + + #[test] + fn test_sha256_bytes_format() { + let data = b"hello"; + let hash = compute_sha256_bytes(data); + assert_eq!(hash.len(), 64, "SHA256 hex should be 64 
characters"); + assert!( + hash.chars().all(|c| c.is_ascii_hexdigit()), + "SHA256 should only contain hex digits" + ); + } + + #[test] + fn test_sha256_bytes_empty() { + let hash = compute_sha256_bytes(b""); + assert_eq!( + hash, + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + ); + } + + #[test] + fn test_sha256_bytes_known_value() { + // SHA256 of "hello" in hex + let expected = + "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"; + let hash = compute_sha256_bytes(b"hello"); + assert_eq!(hash, expected); + } +} diff --git a/src/utils/id.rs b/src/utils/id.rs new file mode 100644 index 0000000..062f0dc --- /dev/null +++ b/src/utils/id.rs @@ -0,0 +1,35 @@ +use rand::Rng; + +const CHARSET: &[u8] = + b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; +const ID_LENGTH: usize = 16; + +/// Generate a random 16-character alphanumeric pakku ID +pub fn generate_pakku_id() -> String { + let mut rng = rand::rng(); + (0..ID_LENGTH) + .map(|_| { + let idx = rng.random_range(0..CHARSET.len()); + CHARSET[idx] as char + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_generate_pakku_id() { + let id = generate_pakku_id(); + assert_eq!(id.len(), ID_LENGTH); + assert!(id.chars().all(|c| c.is_alphanumeric())); + } + + #[test] + fn test_unique_ids() { + let id1 = generate_pakku_id(); + let id2 = generate_pakku_id(); + assert_ne!(id1, id2); + } +} diff --git a/src/utils/prompt.rs b/src/utils/prompt.rs new file mode 100644 index 0000000..aa6c1dc --- /dev/null +++ b/src/utils/prompt.rs @@ -0,0 +1,56 @@ +use std::io::{self, Write}; + +use crate::error::Result; + +#[allow(dead_code)] +pub fn prompt_user(message: &str) -> Result { + print!("{message}"); + io::stdout().flush()?; + + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + + Ok(input.trim().to_string()) +} + +#[allow(dead_code)] +pub fn prompt_select(message: &str, options: &[String]) -> Result { + println!("{message}"); + 
for (i, option) in options.iter().enumerate() { + println!(" {}. {}", i + 1, option); + } + + loop { + print!("Select (1-{}): ", options.len()); + io::stdout().flush()?; + + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + + if let Ok(choice) = input.trim().parse::() + && choice > 0 + && choice <= options.len() + { + return Ok(choice - 1); + } + + println!("Invalid selection. Please try again."); + } +} + +#[allow(dead_code)] +pub fn prompt_confirm(message: &str) -> Result { + print!("{message} (y/n): "); + io::stdout().flush()?; + + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + + let answer = input.trim().to_lowercase(); + Ok(answer == "y" || answer == "yes") +} + +#[allow(dead_code)] +pub fn confirm(message: &str) -> Result { + prompt_confirm(message) +} diff --git a/tests/common/mod.rs b/tests/common/mod.rs new file mode 100644 index 0000000..b8f64cb --- /dev/null +++ b/tests/common/mod.rs @@ -0,0 +1,65 @@ +use std::{env, error::Error, fs, path::PathBuf}; + +use git2::{Repository, Signature}; + +pub fn pakker_bin_path() -> PathBuf { + let manifest = env!("CARGO_MANIFEST_DIR"); + PathBuf::from(manifest).join("target/debug/pakker") +} + +pub fn init_bare_repo(path: &PathBuf) -> Result { + Repository::init_bare(path) +} + +pub fn init_repo_with_commit( + path: &PathBuf, + file: &str, + content: &str, +) -> Result> { + let repo = Repository::init(path)?; + let sig = Signature::now("Test User", "test@example.com")?; + + let workdir = repo.workdir().ok_or("no workdir")?; + let file_path = workdir.join(file); + fs::write(&file_path, content)?; + + let mut index = repo.index()?; + index.add_path(std::path::Path::new(file))?; + index.write()?; + let tree_oid = index.write_tree()?; + let tree = repo.find_tree(tree_oid)?; + + // initial commit + let commit_oid = + repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?; + // create/ensure master branch points to this commit and set HEAD + 
repo.reference("refs/heads/master", commit_oid, true, "create master")?; + repo.set_head("refs/heads/master")?; + // drop tree to avoid holding a borrow of `repo` when returning it + drop(tree); + Ok(repo) +} + +pub fn push_to_remote( + repo: &Repository, + remote_name: &str, + remote_url: &str, +) -> Result<(), git2::Error> { + // Try to create the remote; if it already exists, find it instead + let mut remote = match repo.remote(remote_name, remote_url) { + Ok(r) => r, + Err(_) => repo.find_remote(remote_name)?, + }; + // Push current HEAD to refs/heads/master on remote + remote.push(&["HEAD:refs/heads/master"], None)?; + + // If remote is a local filesystem path, ensure its HEAD points to master + if remote_url.starts_with('/') + && let Ok(bare) = Repository::open(remote_url) + { + // Set bare repo HEAD to refs/heads/master + let _ = bare.set_head("refs/heads/master"); + } + + Ok(()) +} diff --git a/tests/fork_from_path.rs b/tests/fork_from_path.rs new file mode 100644 index 0000000..7411c2b --- /dev/null +++ b/tests/fork_from_path.rs @@ -0,0 +1,196 @@ +use std::{error::Error, fs, process::Command}; + +use git2::{Repository, Signature}; +use tempfile::TempDir; + +// shared test helpers +mod common; +use common::{ + init_bare_repo, + init_repo_with_commit, + pakker_bin_path, + push_to_remote, +}; + +#[test] +fn happy_path_from_path() -> Result<(), Box> { + let tmp = TempDir::new()?; + let tmp_path = tmp.path().to_path_buf(); + + let upstream = tmp_path.join("upstream.git"); + init_bare_repo(&upstream)?; + + let work = tmp_path.join("work_repo"); + fs::create_dir_all(&work)?; + let work_repo = init_repo_with_commit(&work, "README.md", "hello")?; + push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?; + + // Clone upstream to local path + let local = tmp_path.join("local_clone"); + Repository::clone(upstream.to_str().unwrap(), &local)?; + + // Now create a new project dir where pakker will be initialized + let project = tmp_path.join("project_dir"); + 
fs::create_dir_all(&project)?; + + let pakker = pakker_bin_path(); + let status = Command::new(pakker) + .args([ + "fork", + "init", + "--from-path", + local.to_str().unwrap(), + "--ref-name", + "master", + ]) + .current_dir(&project) + .status()?; + assert!(status.success()); + + let parent = project.join(".pakku").join("parent"); + assert!(parent.exists()); + Ok(()) +} + +#[test] +fn fails_with_uncommitted_changes() -> Result<(), Box> { + let tmp = TempDir::new()?; + let tmp_path = tmp.path().to_path_buf(); + + let upstream = tmp_path.join("upstream2.git"); + init_bare_repo(&upstream)?; + + let work = tmp_path.join("work2"); + fs::create_dir_all(&work)?; + let work_repo = init_repo_with_commit(&work, "file.txt", "a")?; + work_repo.remote("origin", upstream.to_str().unwrap())?; + push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?; + + let local = tmp_path.join("local2"); + Repository::clone(upstream.to_str().unwrap(), &local)?; + + // Make an uncommitted change + fs::write(local.join("UNCOMMITTED.md"), "oops")?; + + let project = tmp_path.join("project2"); + fs::create_dir_all(&project)?; + let pakker = pakker_bin_path(); + let output = Command::new(pakker) + .args([ + "fork", + "init", + "--from-path", + local.to_str().unwrap(), + "--ref-name", + "master", + ]) + .current_dir(&project) + .output()?; + + assert!(!output.status.success()); + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + stderr.to_lowercase().contains("uncommitted") + || stderr.to_lowercase().contains("dirty") + ); + Ok(()) +} + +#[test] +fn fails_when_local_ahead() -> Result<(), Box> { + let tmp = TempDir::new()?; + let tmp_path = tmp.path().to_path_buf(); + + let upstream = tmp_path.join("upstream3.git"); + init_bare_repo(&upstream)?; + + let work = tmp_path.join("work3"); + fs::create_dir_all(&work)?; + let work_repo = init_repo_with_commit(&work, "f.txt", "1")?; + work_repo.remote("origin", upstream.to_str().unwrap())?; + push_to_remote(&work_repo, "origin", 
/// A failing pre-init fetch (unreachable origin URL) must only produce a
/// warning: `fork init` still succeeds and records the parent.
#[test]
fn warns_on_fetch_failure_and_proceeds() -> Result<(), Box<dyn Error>> {
  let tmp = TempDir::new()?;
  let root = tmp.path().to_path_buf();

  let upstream = root.join("upstream4.git");
  init_bare_repo(&upstream)?;

  let work = root.join("work4");
  fs::create_dir_all(&work)?;
  let work_repo = init_repo_with_commit(&work, "a.txt", "1")?;
  work_repo.remote("origin", upstream.to_str().unwrap())?;
  push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?;

  let local = root.join("local4");
  Repository::clone(upstream.to_str().unwrap(), &local)?;

  // Break origin's URL so any fetch from it fails.
  Repository::open(&local)?
    .remote_set_url("origin", "git@invalid:nonexistent/repo.git")?;

  let project = root.join("project4");
  fs::create_dir_all(&project)?;
  let status = Command::new(pakker_bin_path())
    .args([
      "fork",
      "init",
      "--from-path",
      local.to_str().unwrap(),
      "--ref-name",
      "master",
    ])
    .current_dir(&project)
    .status()?;

  assert!(status.success());
  assert!(project.join(".pakku").join("parent").exists());
  Ok(())
}