initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I6a6a69644236ae18e7b46856fb6d6d6c998f8467
raf 2025-10-05 21:12:25 +03:00
commit c07b295f71
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
29 changed files with 6780 additions and 0 deletions

2
.envrc Normal file

@@ -0,0 +1,2 @@
use flake

5
.gitignore vendored Normal file

@@ -0,0 +1,5 @@
/target
/result*
# For reference
nix-output-monitor

28
.rustfmt.toml Normal file

@@ -0,0 +1,28 @@
condense_wildcard_suffixes = true
doc_comment_code_block_width = 80
edition = "2024" # Keep in sync with Cargo.toml.
enum_discrim_align_threshold = 60
force_explicit_abi = false
force_multiline_blocks = true
format_code_in_doc_comments = true
format_macro_matchers = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Upper"
imports_granularity = "Crate"
imports_layout = "HorizontalVertical"
inline_attribute_width = 60
match_block_trailing_comma = true
max_width = 80
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
overflow_delimited_expr = true
struct_field_align_threshold = 60
tab_spaces = 2
unstable_features = true
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true

1229
Cargo.lock generated Normal file

File diff suppressed because it is too large

36
Cargo.toml Normal file

@@ -0,0 +1,36 @@
[workspace]
members = [ "cognos", "rom" ]
resolver = "3"
[workspace.package]
version = "0.1.0"
edition = "2024"
authors = ["NotAShelf <raf@notashelf.dev>"]
description = "Pretty build graphs for Nix builds"
license = "MPL-2.0"
repository = "https://github.com/notashelf/rom"
homepage = "https://github.com/notashelf/rom"
rust-version = "1.85"
readme = true
[workspace.dependencies]
anyhow = "1.0.100"
clap = { version = "4.5.48", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
serde_repr = "0.1.20"
crossterm = "0.29.0"
ratatui = "0.29.0"
indexmap = { version = "2.11.4", features = ["serde"] }
csv = "1.3.1"
thiserror = "2.0.17"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
eyre = "0.6.12"
[profile.release]
opt-level = 3
lto = true
codegen-units = 1
strip = true

328
LICENSE Normal file

@@ -0,0 +1,328 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form,
and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the terms
of a Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material,
in a separate file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently,
any and all of the rights conveyed by this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the
GNU Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this License.
For legal entities, “You” includes any entity that controls,
is controlled by, or is under common control with You. For purposes of
this definition, “control” means (a) the power, direct or indirect,
to cause the direction or management of such entity, whether by contract
or otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications,
or as part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell,
offer for sale, have made, import, and otherwise transfer either
its Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor
first distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted
under this License. No additional rights or licenses will be implied
from the distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted
by a Contributor:
a. for any code that a Contributor has removed from
Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its
Contributor Version); or
c. under Patent Claims infringed by Covered Software in the
absence of its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License
(if permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing,
or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the
licenses granted in Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including
any Modifications that You create or to which You contribute, must be
under the terms of this License. You must inform recipients that the
Source Code Form of the Covered Software is governed by the terms
of this License, and how they can obtain a copy of this License.
You may not attempt to alter or restrict the recipients' rights
in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more than
the cost of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipient's rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of
Covered Software with a work governed by one or more Secondary Licenses,
and the Covered Software is not Incompatible With Secondary Licenses,
this License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the
Covered Software under the terms of either this License or such
Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of
Covered Software. However, You may do so only on Your own behalf,
and not on behalf of any Contributor. You must make it absolutely clear
that any such warranty, support, indemnity, or liability obligation is
offered by You alone, and You hereby agree to indemnify every Contributor
for any liability incurred by such Contributor as a result of warranty,
support, indemnity or liability terms You offer. You may include
additional disclaimers of warranty and limitations of liability
specific to any jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute,
judicial order, or regulation then You must: (a) comply with the terms of
this License to the maximum extent possible; and (b) describe the limitations
and the code they affect. Such description must be placed in a text file
included with all distributions of the Covered Software under this License.
Except to the extent prohibited by statute or regulation, such description
must be sufficiently detailed for a recipient of ordinary skill
to be able to understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means,
this is the first time You have received notice of non-compliance with
this License from such Contributor, and You become compliant prior to
30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted
to You by any and all Contributors for the Covered Software under
Section 2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire risk
as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You
(not any Contributor) assume the cost of any necessary servicing, repair,
or correction. This disclaimer of warranty constitutes an essential part of
this License. No use of any Covered Software is authorized under this
License except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort
(including negligence), contract, or otherwise, shall any Contributor, or
anyone who distributes Covered Software as permitted above, be liable to
You for any direct, indirect, special, incidental, or consequential damages
of any character including, without limitation, damages for lost profits,
loss of goodwill, work stoppage, computer failure or malfunction, or any and
all other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable,
such provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in
Section 10.3, no one other than the license steward has the right to
modify or publish new versions of this License. Each version will be
given a distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published
by the license steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is
Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is
Incompatible With Secondary Licenses under the terms of this version of
the License, the notice described in Exhibit B of this
License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the terms of the
Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed
with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file,
then You may include the notice in a location (such as a LICENSE file in a
relevant directory) where a recipient would be likely to
look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible With Secondary Licenses”,
as defined by the Mozilla Public License, v. 2.0.

86
cognos/Cargo.lock generated Normal file

@@ -0,0 +1,86 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "nous"
version = "0.1.0"
dependencies = [
"serde",
"serde_repr",
]
[[package]]
name = "proc-macro2"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
dependencies = [
"proc-macro2",
]
[[package]]
name = "serde"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
]
[[package]]
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "syn"
version = "2.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"

15
cognos/Cargo.toml Normal file

@@ -0,0 +1,15 @@
[package]
name = "cognos"
description = "Minimalistic parser for Nix's ATerm .drv and internal-json log formats"
version.workspace = true
edition.workspace = true
authors.workspace = true
rust-version.workspace = true
[lib]
path = "src/lib.rs"
[dependencies]
serde.workspace = true
serde_repr.workspace = true
serde_json.workspace = true

414
cognos/src/aterm.rs Normal file

@@ -0,0 +1,414 @@
//! `ATerm` and Nix .drv file parser
//!
//! Parses Nix .drv files in `ATerm` format to extract dependency information.
use std::{fs, path::Path};
/// Parsed derivation information from a .drv file
#[derive(Debug, Clone)]
pub struct ParsedDerivation {
pub outputs: Vec<(String, String)>,
pub input_drvs: Vec<(String, Vec<String>)>,
pub input_srcs: Vec<String>,
pub platform: String,
pub builder: String,
pub args: Vec<String>,
pub env: Vec<(String, String)>,
}
/// Parse a .drv file and extract its dependency information
pub fn parse_drv_file<P: AsRef<Path>>(
path: P,
) -> Result<ParsedDerivation, String> {
let content = fs::read_to_string(path)
.map_err(|e| format!("Failed to read file: {e}"))?;
parse_drv_content(&content)
}
/// Parse the content of a .drv file
pub fn parse_drv_content(content: &str) -> Result<ParsedDerivation, String> {
let content = content.trim();
if !content.starts_with("Derive(") {
return Err(
"Invalid derivation format: must start with 'Derive('".to_string(),
);
}
let inner = content
.strip_prefix("Derive(")
.and_then(|s| s.strip_suffix(")"))
.ok_or("Invalid derivation format: missing closing parenthesis")?;
// XXX: The derivation has this structure:
// Derive(outputs, inputDrvs, inputSrcs, platform, builder, args, env)
let parts = parse_top_level_list(inner)?;
if parts.len() < 7 {
return Err(format!(
"Invalid derivation format: expected 7 parts, got {}",
parts.len()
));
}
let outputs = parse_outputs(&parts[0])?;
let input_drvs = parse_input_drvs(&parts[1])?;
let input_srcs = parse_string_list(&parts[2])?;
let platform = parse_string(&parts[3])?;
let builder = parse_string(&parts[4])?;
let args = parse_string_list(&parts[5])?;
let env = parse_env(&parts[6])?;
Ok(ParsedDerivation {
outputs,
input_drvs,
input_srcs,
platform,
builder,
args,
env,
})
}
/// Parse the top-level comma-separated list, respecting nested brackets
fn parse_top_level_list(s: &str) -> Result<Vec<String>, String> {
let mut parts = Vec::new();
let mut current = String::new();
let mut depth = 0;
let mut in_string = false;
let mut escape = false;
for ch in s.chars() {
if escape {
current.push(ch);
escape = false;
continue;
}
match ch {
'\\' if in_string => {
escape = true;
current.push(ch);
},
'"' => {
in_string = !in_string;
current.push(ch);
},
'[' | '(' if !in_string => {
depth += 1;
current.push(ch);
},
']' | ')' if !in_string => {
depth -= 1;
current.push(ch);
},
',' if depth == 0 && !in_string => {
parts.push(current.trim().to_string());
current.clear();
},
_ => {
current.push(ch);
},
}
}
if !current.trim().is_empty() {
parts.push(current.trim().to_string());
}
Ok(parts)
}
/// Parse outputs: [("out","/nix/store/...","",""),...]
fn parse_outputs(s: &str) -> Result<Vec<(String, String)>, String> {
let s = s.trim();
if s == "[]" {
return Ok(Vec::new());
}
let inner = s
.strip_prefix('[')
.and_then(|s| s.strip_suffix(']'))
.ok_or("Invalid outputs format")?;
let tuples = parse_top_level_list(inner)?;
let mut outputs = Vec::new();
for tuple in tuples {
let tuple = tuple.trim();
let inner = tuple
.strip_prefix('(')
.and_then(|s| s.strip_suffix(')'))
.ok_or("Invalid output tuple format")?;
let parts = parse_top_level_list(inner)?;
if parts.len() >= 2 {
let name = parse_string(&parts[0])?;
let path = parse_string(&parts[1])?;
outputs.push((name, path));
}
}
Ok(outputs)
}
/// Parse input derivations: [("/nix/store/foo.drv",["out"]),...]
fn parse_input_drvs(s: &str) -> Result<Vec<(String, Vec<String>)>, String> {
let s = s.trim();
if s == "[]" {
return Ok(Vec::new());
}
let inner = s
.strip_prefix('[')
.and_then(|s| s.strip_suffix(']'))
.ok_or("Invalid input drvs format")?;
let tuples = parse_top_level_list(inner)?;
let mut input_drvs = Vec::new();
for tuple in tuples {
let tuple = tuple.trim();
let inner = tuple
.strip_prefix('(')
.and_then(|s| s.strip_suffix(')'))
.ok_or("Invalid input drv tuple format")?;
let parts = parse_top_level_list(inner)?;
if parts.len() >= 2 {
let drv_path = parse_string(&parts[0])?;
let outputs = parse_string_list(&parts[1])?;
input_drvs.push((drv_path, outputs));
}
}
Ok(input_drvs)
}
/// Parse environment variables: [("name","value"),...]
fn parse_env(s: &str) -> Result<Vec<(String, String)>, String> {
let s = s.trim();
if s == "[]" {
return Ok(Vec::new());
}
let inner = s
.strip_prefix('[')
.and_then(|s| s.strip_suffix(']'))
.ok_or("Invalid env format")?;
let tuples = parse_top_level_list(inner)?;
let mut env = Vec::new();
for tuple in tuples {
let tuple = tuple.trim();
let inner = tuple
.strip_prefix('(')
.and_then(|s| s.strip_suffix(')'))
.ok_or("Invalid env tuple format")?;
let parts = parse_top_level_list(inner)?;
if parts.len() >= 2 {
let name = parse_string(&parts[0])?;
let value = parse_string(&parts[1])?;
env.push((name, value));
}
}
Ok(env)
}
/// Parse a list of strings: ["foo","bar",...]
fn parse_string_list(s: &str) -> Result<Vec<String>, String> {
let s = s.trim();
if s == "[]" {
return Ok(Vec::new());
}
let inner = s
.strip_prefix('[')
.and_then(|s| s.strip_suffix(']'))
.ok_or("Invalid string list format")?;
let items = parse_top_level_list(inner)?;
items.into_iter().map(|item| parse_string(&item)).collect()
}
/// Parse a quoted string: "foo" -> foo
fn parse_string(s: &str) -> Result<String, String> {
let s = s.trim();
let inner = s
.strip_prefix('"')
.and_then(|s| s.strip_suffix('"'))
.ok_or_else(|| format!("Invalid string format: {s}"))?;
// Unescape the string
Ok(unescape_string(inner))
}
/// Unescape a string (handle \n, \t, \\, \", etc.)
fn unescape_string(s: &str) -> String {
let mut result = String::new();
let mut chars = s.chars();
while let Some(ch) = chars.next() {
if ch == '\\' {
match chars.next() {
Some('n') => result.push('\n'),
Some('t') => result.push('\t'),
Some('r') => result.push('\r'),
Some('\\') => result.push('\\'),
Some('"') => result.push('"'),
Some(c) => {
result.push('\\');
result.push(c);
},
None => result.push('\\'),
}
} else {
result.push(ch);
}
}
result
}
/// Extract all input derivation paths from a .drv file
pub fn get_input_derivations<P: AsRef<Path>>(
path: P,
) -> Result<Vec<String>, String> {
let parsed = parse_drv_file(path)?;
Ok(
parsed
.input_drvs
.into_iter()
.map(|(path, _)| path)
.collect(),
)
}
/// Extract pname from environment variables
#[must_use]
pub fn extract_pname(env: &[(String, String)]) -> Option<String> {
env
.iter()
.find(|(k, _)| k == "pname")
.map(|(_, v)| v.clone())
}
/// Extract version from environment variables
#[must_use]
pub fn extract_version(env: &[(String, String)]) -> Option<String> {
env
.iter()
.find(|(k, _)| k == "version")
.map(|(_, v)| v.clone())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_string() {
assert_eq!(parse_string(r#""hello""#).unwrap(), "hello");
assert_eq!(parse_string(r#""hello world""#).unwrap(), "hello world");
assert_eq!(parse_string(r#""hello\nworld""#).unwrap(), "hello\nworld");
}
#[test]
fn test_parse_string_list() {
let list = r#"["foo","bar","baz"]"#;
let result = parse_string_list(list).unwrap();
assert_eq!(result, vec!["foo", "bar", "baz"]);
let empty = "[]";
let result = parse_string_list(empty).unwrap();
assert_eq!(result, Vec::<String>::new());
}
#[test]
fn test_parse_outputs() {
let outputs = r#"[("out","/nix/store/abc-foo","","")]"#;
let result = parse_outputs(outputs).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].0, "out");
assert_eq!(result[0].1, "/nix/store/abc-foo");
}
#[test]
fn test_parse_input_drvs() {
let input = r#"[("/nix/store/abc-foo.drv",["out"]),("/nix/store/def-bar.drv",["out","dev"])]"#;
let result = parse_input_drvs(input).unwrap();
assert_eq!(result.len(), 2);
assert_eq!(result[0].0, "/nix/store/abc-foo.drv");
assert_eq!(result[0].1, vec!["out"]);
assert_eq!(result[1].0, "/nix/store/def-bar.drv");
assert_eq!(result[1].1, vec!["out", "dev"]);
}
#[test]
fn test_parse_minimal_drv() {
let drv = r#"Derive([("out","/nix/store/output","","")],[],[],"x86_64-linux","/bin/sh",[],[("name","value")])"#;
let result = parse_drv_content(drv).unwrap();
assert_eq!(result.outputs.len(), 1);
assert_eq!(result.outputs[0].0, "out");
assert_eq!(result.platform, "x86_64-linux");
assert_eq!(result.builder, "/bin/sh");
}
#[test]
fn test_parse_with_dependencies() {
let drv = r#"Derive([("out","/nix/store/abc-foo","","")],[("/nix/store/dep1.drv",["out"]),("/nix/store/dep2.drv",["out","dev"])],[],"x86_64-linux","/bin/sh",[],[("name","foo")])"#;
let result = parse_drv_content(drv).unwrap();
assert_eq!(result.input_drvs.len(), 2);
assert_eq!(result.input_drvs[0].0, "/nix/store/dep1.drv");
assert_eq!(result.input_drvs[0].1, vec!["out"]);
assert_eq!(result.input_drvs[1].0, "/nix/store/dep2.drv");
assert_eq!(result.input_drvs[1].1, vec!["out", "dev"]);
}
#[test]
fn test_parse_real_world_hello_drv() {
// Stripped down version of a real hello.drv
let drv = r#"Derive([("out","/nix/store/b1ayn0ln6n8bm2spz441csqc2ss66az3-hello-2.12.2","","")],[("/nix/store/1s1ir3vhwq86x0c7ikhhp3c9cin4095k-hello-2.12.2.tar.gz.drv",["out"]),("/nix/store/bjsb6wdjykafnkixq156qdvmxhsm2bai-bash-5.3p3.drv",["out"]),("/nix/store/lzvy25g887aypn07ah8igv72z7b9jb88-version-check-hook.drv",["out"]),("/nix/store/p76r0cwlf6k97ibprrpfd8xw0r8wc3nx-stdenv-linux.drv",["out"])],["/nix/store/l622p70vy8k5sh7y5wizi5f2mic6ynpg-source-stdenv.sh","/nix/store/shkw4qm9qcw5sc5n1k5jznc83ny02r39-default-builder.sh"],"x86_64-linux","/nix/store/q7sqwn7i6w2b67adw0bmix29pxg85x3w-bash-5.3p3/bin/bash",["-e","/nix/store/l622p70vy8k5sh7y5wizi5f2mic6ynpg-source-stdenv.sh"],[("name","hello-2.12.2"),("pname","hello"),("version","2.12.2"),("system","x86_64-linux")])"#;
let result = parse_drv_content(drv).unwrap();
// Verify outputs
assert_eq!(result.outputs.len(), 1);
assert_eq!(result.outputs[0].0, "out");
assert!(result.outputs[0].1.contains("hello-2.12.2"));
// Verify input derivations
assert_eq!(result.input_drvs.len(), 4);
assert!(result.input_drvs[0].0.contains("hello-2.12.2.tar.gz.drv"));
assert!(result.input_drvs[1].0.contains("bash-5.3p3.drv"));
assert!(result.input_drvs[2].0.contains("version-check-hook.drv"));
assert!(result.input_drvs[3].0.contains("stdenv-linux.drv"));
// Verify all inputs have "out" output
for (_, outputs) in &result.input_drvs {
assert_eq!(outputs, &vec!["out"]);
}
// Verify platform
assert_eq!(result.platform, "x86_64-linux");
// Verify builder
assert!(result.builder.contains("bash"));
// Verify environment
assert_eq!(extract_pname(&result.env), Some("hello".to_string()));
assert_eq!(extract_version(&result.env), Some("2.12.2".to_string()));
}
#[test]
fn test_get_input_derivations() {
let drv = r#"Derive([("out","/nix/store/out","","")],[("/nix/store/dep.drv",["out"])],[],"x86_64-linux","/bin/sh",[],[("pname","hello"),("version","1.0")])"#;
let result = parse_drv_content(drv).unwrap();
assert_eq!(result.input_drvs.len(), 1);
assert_eq!(result.input_drvs[0].0, "/nix/store/dep.drv");
assert_eq!(extract_pname(&result.env).unwrap(), "hello");
assert_eq!(extract_version(&result.env).unwrap(), "1.0");
}
}

67
cognos/src/internal_json.rs Normal file

@@ -0,0 +1,67 @@
use serde::Deserialize;
use serde_repr::Deserialize_repr;
#[derive(Deserialize_repr, Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum Activities {
Unknown = 0,
CopyPath = 100,
FileTransfer = 101,
Realise = 102,
CopyPaths = 103,
Builds = 104,
Build = 105,
OptimiseStore = 106,
VerifyPath = 107,
Substitute = 108,
QueryPathInfo = 109,
PostBuildHook = 110,
BuildWaiting = 111,
FetchTree = 112,
}
#[derive(
Deserialize_repr, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord,
)]
#[repr(u8)]
pub enum Verbosity {
Error = 0,
Warning = 1,
Notice = 2,
Info = 3,
Talkative = 4,
Chatty = 5,
Debug = 6,
Vomit = 7,
}
pub type Id = u64;
#[derive(Deserialize, Debug, Clone)]
#[serde(tag = "action")]
pub enum Actions {
#[serde(rename = "start")]
Start {
id: Id,
level: Verbosity,
#[serde(default)]
parent: Id,
text: String,
#[serde(rename = "type")]
activity: Activities,
#[serde(default)]
fields: Vec<serde_json::Value>,
},
#[serde(rename = "stop")]
Stop { id: Id },
#[serde(rename = "msg")]
Message { level: Verbosity, msg: String },
#[serde(rename = "result")]
Result {
#[serde(default)]
fields: Vec<serde_json::Value>,
id: Id,
#[serde(rename = "type")]
activity: Activities,
},
}

12
cognos/src/lib.rs Normal file

@@ -0,0 +1,12 @@
pub mod aterm;
mod internal_json;
mod state;
pub use aterm::{
ParsedDerivation,
extract_pname,
extract_version,
parse_drv_file,
};
pub use internal_json::{Actions, Activities, Id, Verbosity};
pub use state::{BuildInfo, BuildStatus, Derivation, Host, State};

73
cognos/src/state.rs Normal file

@@ -0,0 +1,73 @@
use std::{collections::HashMap, path::PathBuf};
use crate::internal_json::Actions;
pub type Id = u64;
pub enum StorePath {
Downloading,
Uploading,
Downloaded,
Uploaded,
}
pub enum BuildStatus {
Planned,
Running,
Complete,
Failed,
}
pub enum Progress {
JustStarted,
InputReceived,
Finished,
}
pub enum OutputName {
Out,
Doc,
Dev,
Bin,
Info,
Lib,
Man,
Dist,
Other(String),
}
pub enum Host {
Local,
Host(String),
}
pub struct Derivation {
store_path: PathBuf,
}
pub struct BuildInfo {
start: f64,
host: Host,
estimate: Option<u64>,
activity_id: Id,
state: BuildStatus,
}
pub enum DependencyState {
Planned,
Running,
Completed,
}
pub struct Dependencies {
deps: HashMap<Id, BuildInfo>,
}
// #[derive(Default)]
pub struct State {
progress: Progress,
}
impl State {
pub fn imbibe(&mut self, update: Actions) {}
}

109
docs/FORMATS.md Normal file

@@ -0,0 +1,109 @@
# ROM Display Formats
Unlike NOM's fixed design, ROM features several display and legend formats,
giving you the freedom to mix and match different component styles in the
build graph.
## Display Formats
ROM supports three display formats controlled by the `--format` flag:
1. Tree Format (Default)
2. Plain Format
3. Dashboard Format
### 1. Tree Format (Default)
The tree format shows a hierarchical dependency graph with build progress.
**Usage:**
```bash
rom --format tree build nixpkgs#hello
# or simply (tree is default)
rom build nixpkgs#hello
```
### Examples
**Tree Format**:
```plaintext
┏━ Dependency Graph:
┃ ⏵ hello-2.12.2 (buildPhase) ⏱ 5s
┣━━━ Builds
┗━ ∑ ⏵ 1 │ ✔ 0 │ ✗ 0 │ ⏸ 4 │ ⏱ 5s
```
**Plain Format**:
```plaintext
━ ⏱ ⏸ 4 planned ↓ 2 downloading ↑ 1 uploading 5.7s
↓ breakpad-2024.02.16 1.2 MB/5.0 MB (24%)
↓ spirv-tools-1.4.321.0 0 B
↑ gcc-13.2.0 250 KB
⏵ hello-2.12.2 5s
```
**Dashboard Format**:
```plaintext
BUILD GRAPH: hello-2.12.2
────────────────────────────────────────────
Host │ localhost
Status │ ⏵ building
Duration │ 8.1s
────────────────────────────────────────────
Summary │ jobs=1 ok=1 failed=0 total=8.1s
```
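Any of these outputs can be requested explicitly through the `--format` flag;
for example, a quick sketch for the plain format:
```bash
# Render the plain (non-tree) output instead of the default tree
rom --format plain build nixpkgs#hello
```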
## Legend Styles
Legend styles, selected with the `--legend` flag, control how the build
statistics are displayed at the bottom of the screen. At the moment they only
affect the **tree format**.
1. Table Style
2. Compact Style
3. Verbose Style
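A legend style is chosen with the `--legend` flag; for example, a sketch using
the compact style:
```bash
# Show the compact legend under the tree format
rom --legend compact build nixpkgs#hello
```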
### Examples
**Table**:
```plaintext
┏━ Dependency Graph:
┃ ⏵ hello-2.12.2 (buildPhase) ⏱ 5s
┣━━━ Builds
┗━ ∑ ⏵ 1 │ ✔ 0 │ ✗ 0 │ ⏸ 4 │ ⏱ 5s
```
**Compact**:
```plaintext
┏━ Dependency Graph:
┃ ⏵ hello-2.12.2 (buildPhase) ⏱ 5s
┗━ ⏵ 1 │ ✔ 0 │ ✗ 0 │ ⏸ 4 │ ⏱ 5s
```
**Verbose**:
```plaintext
┏━ Dependency Graph:
┃ ⏵ hello-2.12.2 (buildPhase) ⏱ 5s
┣━━━ Build Summary:
┗━ ⏵ 1 running │ ✔ 0 completed │ ✗ 0 failed │ ⏸ 4 planned │ ⏱ 5s
```
## Icon Legend
All formats use consistent icons:
| Icon | Meaning | Color |
| ---- | ----------------- | ------ |
| ⏵ | Building/Running | Yellow |
| ✔ | Completed/Success | Green |
| ✗ | Failed/Error | Red |
| ⏸ | Planned/Waiting | Grey |
| ⏱ | Time/Duration | Grey |
| ↓ | Downloading | Blue |
| ↑ | Uploading | Green |

101
docs/README.md Normal file

@@ -0,0 +1,101 @@
# ROM
A Nix build output monitor for visualizing your Nix builds, with Rust
characteristics and DX.
This project is heavily a work in progress. The parser is mostly complete, and
is located in [`./cognos`](./cognos).
> [!NOTE]
> This project is not yet stable. While it cannot harm your project, bugs
> should be expected due to its in-development status. If you end up using ROM,
> please make sure to report any bugs :)
## Usage
> [!WARNING]
> The CLI of ROM is not yet stable and may be subject to change.
> Please consult the `--help` output before reporting a bug.
```terminal
$ rom -h
Rust Output Monitor - A Nix build output monitor
Usage: rom [OPTIONS] [COMMAND]
Commands:
build Run nix build with monitoring
shell Run nix shell with monitoring
develop Run nix develop with monitoring
help Print this message or the help of the given subcommand(s)
Options:
--json Parse JSON output from nix --log-format=internal-json
--silent Minimal output
--format <FORMAT> Output format: tree, plain [default: tree]
--legend <LEGEND> Legend display style: compact, table, verbose [default: table]
-h, --help Print help
-V, --version Print version
```
ROM is primarily designed to wrap the Nix installation on your system. As such,
the _recommended_ interface is `rom build`, `rom shell` and `rom develop` in
place of their Nix counterparts. ROM's CLI is similar to that of NOM, the
Haskell utility after which ROM is designed. To build a package, say
`pkgs.hello`, you can run:
```terminal
$ rom build nixpkgs#hello
┏━ Dependency Graph:
┃ ⏵ hello-2.12.2 (configurePhase) ⏱ 2s
┣━━━ Builds
┗━ ∑ ⏵ 1 │ ✔ 0 │ ✗ 0 │ ⏸ 4 │ ⏱ 2s
```
and the dependency tree will appear below.
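ROM can also consume Nix's structured log directly on stdin via the `--json`
flag. A minimal sketch, assuming the build is invoked with
`--log-format internal-json` (which Nix emits on stderr):
```bash
# Pipe Nix's internal-json log into ROM's monitoring mode
nix build nixpkgs#hello -v --log-format internal-json 2>&1 | rom --json
```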
### Argument Passthrough
At times, especially when calling ROM as a standalone executable, you might
need to pass additional flags to the Nix command being invoked. ROM allows for
this by accepting `--` as a delimiter and passing any arguments that come after
it directly to Nix. For example:
```terminal
$ rom develop nixpkgs#hello -- --substituters ""
fetching git input 'git+file:///home/notashelf/Dev/notashelf/rom'
┗━ ⏵ 0 │ ✔ 2 │ ✗ 0 │ ⏸ 0 │ ⏱ 1s
notashelf@enyo ~/Dev/notashelf/rom [git:(9e83f57...) *]
i $ hello
Hello, world!
```
## FAQ
**Q**: If "NOM" is nix-output-monitor, what does "ROM" stand for?
**A**: It doesn't stand for anything; I named it _rom_ because it sounds like
_rum_. I like rum. However, you may choose to name it "rusty output monitor" or
"raf's output monitor" at your convenience. I don't know, be creative.
## Attributions
This project is clearly inspired by the famous
<https://github.com/maralorn/nix-output-monitor>. I am a huge fan of NOM's
design, but I am a little disappointed by its lack of configurability. This is a
more flexible replacement that makes both my life and displaying build graphs
easier.
The ATerm and internal-json log parser was inspired by, and mostly copied from,
<https://git.atagen.co/atagen/nous>, with consolidation, a cleaner repo layout,
and a better separation of concerns. ROM builds on the ideas previously explored
by nous, and provides a subcrate under [`./cognos`](./cognos) for easy parsing.
Thank you, Atagen, for letting me play with the idea.
## License
This project is made available under the Mozilla Public License (MPL), version
2.0. See [LICENSE](LICENSE) for more details on the exact conditions. An online
copy is available [here](https://www.mozilla.org/en-US/MPL/2.0/).

27
flake.lock generated Normal file

@@ -0,0 +1,27 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1759381078,
"narHash": "sha256-gTrEEp5gEspIcCOx9PD8kMaF1iEmfBcTbO0Jag2QhQs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "7df7ff7d8e00218376575f0acdcc5d66741351ee",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

24
flake.nix Normal file

@@ -0,0 +1,24 @@
{
description = "Rust Project Template";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
outputs = {
self,
nixpkgs,
}: let
systems = ["x86_64-linux" "aarch64-linux"];
forEachSystem = nixpkgs.lib.genAttrs systems;
pkgsForEach = nixpkgs.legacyPackages;
in {
packages = forEachSystem (system: {
default = pkgsForEach.${system}.callPackage ./nix/package.nix {};
});
devShells = forEachSystem (system: {
default = pkgsForEach.${system}.callPackage ./nix/shell.nix {};
});
hydraJobs = self.packages;
};
}

30
nix/package.nix Normal file

@@ -0,0 +1,30 @@
{
lib,
rustPlatform,
}:
rustPlatform.buildRustPackage (finalAttrs: {
pname = "rom";
version = "0.1.0";
src = let
fs = lib.fileset;
s = ../.;
in
fs.toSource {
root = s;
fileset = fs.unions [
(fs.fileFilter (file: builtins.any file.hasExt ["rs"]) s + /src)
(s + /Cargo.lock)
(s + /Cargo.toml)
];
};
cargoLock.lockFile = "${finalAttrs.src}/Cargo.lock";
useFetchCargoVendor = true;
enableParallelBuilding = true;
meta = {
description = "Experimental nftables ruleset formatter and prettier";
maintainers = with lib.maintainers; [NotAShelf];
};
})

26
nix/shell.nix Normal file

@@ -0,0 +1,26 @@
{
mkShell,
rustc,
cargo,
rustfmt,
clippy,
taplo,
rust-analyzer-unwrapped,
rustPlatform,
}:
mkShell {
name = "rust";
packages = [
rustc
cargo
(rustfmt.override {asNightly = true;})
clippy
taplo
rust-analyzer-unwrapped
];
RUST_SRC_PATH = "${rustPlatform.rustLibSrc}";
}

28
rom/Cargo.toml Normal file

@@ -0,0 +1,28 @@
[package]
name = "rom"
description.workspace = true
version.workspace = true
edition.workspace = true
authors.workspace = true
rust-version.workspace = true
[lib]
path = "src/lib.rs"
[dependencies]
cognos = {path = "../cognos"}
anyhow.workspace = true
clap.workspace = true
serde.workspace = true
serde_json.workspace = true
crossterm = "0.29"
ratatui = "0.29"
indexmap.workspace = true
csv.workspace = true
thiserror.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
eyre.workspace = true
[dev-dependencies]
tempfile = "3.23.0"

748
rom/src/cli.rs Normal file

@@ -0,0 +1,748 @@
//! CLI interface for ROM
use std::{
io,
path::PathBuf,
process::{Command, Stdio},
};
use clap::Parser;
#[derive(Debug, Parser)]
#[command(name = "rom", version, about = "ROM - A Nix build output monitor")]
pub struct Cli {
#[command(subcommand)]
pub command: Option<Commands>,
/// Parse JSON output from nix --log-format=internal-json
#[arg(long, global = true)]
pub json: bool,
/// Minimal output
#[arg(long, global = true)]
pub silent: bool,
/// Output format: tree, plain
#[arg(long, global = true, default_value = "tree")]
pub format: String,
/// Legend display style: compact, table, verbose
#[arg(long, global = true, default_value = "table")]
pub legend: String,
/// Summary display style: concise, table, full
#[arg(long, global = true, default_value = "concise")]
pub summary: String,
}
#[derive(Debug, clap::Subcommand)]
pub enum Commands {
/// Run nix build with monitoring
Build {
/// Package/flake to build and arguments to pass to Nix
#[arg(trailing_var_arg = true, allow_hyphen_values = true)]
args: Vec<String>,
},
/// Run nix shell with monitoring
Shell {
/// Package/flake and arguments to pass to Nix
#[arg(trailing_var_arg = true, allow_hyphen_values = true)]
args: Vec<String>,
},
/// Run nix develop with monitoring
Develop {
/// Package/flake and arguments to pass to Nix
#[arg(trailing_var_arg = true, allow_hyphen_values = true)]
args: Vec<String>,
},
}
/// Run the CLI application
pub fn run() -> eyre::Result<()> {
let cli = Cli::parse();
// Check if we're being called as a symlink (rom-build, rom-shell)
let program_name = std::env::args()
.next()
.and_then(|path| {
PathBuf::from(&path)
.file_name()
.and_then(|n| n.to_str())
.map(std::string::ToString::to_string)
})
.unwrap_or_else(|| "rom".to_string());
match (&program_name[..], cli.command) {
// rom-build symlink
("rom-build", _) => {
let args: Vec<String> = std::env::args().skip(1).collect();
let (package_and_rom_args, nix_args) = parse_args_with_separator(&args);
run_nix_build_wrapper(
package_and_rom_args,
nix_args,
cli.silent,
cli.format.clone(),
cli.legend.clone(),
cli.summary.clone(),
)?;
Ok(())
},
// rom-shell symlink
("rom-shell", _) => {
let args: Vec<String> = std::env::args().skip(1).collect();
let (package_and_rom_args, nix_args) = parse_args_with_separator(&args);
run_nix_shell_wrapper(
package_and_rom_args,
nix_args,
cli.silent,
cli.format.clone(),
cli.legend.clone(),
cli.summary.clone(),
)?;
Ok(())
},
// rom build command
(_, Some(Commands::Build { args })) => {
// If no args provided and --json is set, use piping mode from stdin
if args.is_empty() && cli.json {
let config = crate::types::Config {
piping: false,
silent: cli.silent,
input_mode: crate::types::InputMode::Json,
show_timers: true,
width: None,
format: crate::types::DisplayFormat::from_str(&cli.format),
legend_style: cli.legend.clone(),
summary_style: cli.summary.clone(),
};
let stdin = io::stdin();
let stdout = io::stdout();
return Ok(crate::monitor_stream(config, stdin.lock(), stdout.lock())?);
}
let (package_and_rom_args, nix_args) = parse_args_with_separator(&args);
if package_and_rom_args.is_empty() {
eyre::bail!(
"No package or flake specified for nix build\nUsage: rom build \
<package> [-- <nix-flags>]\nExample: rom build nixpkgs#hello -- \
--rebuild"
);
}
run_nix_build_wrapper(
package_and_rom_args,
nix_args,
cli.silent,
cli.format.clone(),
cli.legend.clone(),
cli.summary.clone(),
)?;
Ok(())
},
// rom shell command
(_, Some(Commands::Shell { args })) => {
// If no args provided and --json is set, use piping mode from stdin
if args.is_empty() && cli.json {
let config = crate::types::Config {
piping: false,
silent: cli.silent,
input_mode: crate::types::InputMode::Json,
show_timers: true,
width: None,
format: crate::types::DisplayFormat::from_str(&cli.format),
legend_style: cli.legend.clone(),
summary_style: cli.summary.clone(),
};
let stdin = io::stdin();
let stdout = io::stdout();
return Ok(crate::monitor_stream(config, stdin.lock(), stdout.lock())?);
}
let (package_and_rom_args, nix_args) = parse_args_with_separator(&args);
if package_and_rom_args.is_empty() {
eyre::bail!(
"No package or flake specified for nix shell\nUsage: rom shell \
<package> [-- <nix-flags>]\nExample: rom shell nixpkgs#python3 -- \
--pure"
);
}
run_nix_shell_wrapper(
package_and_rom_args,
nix_args,
cli.silent,
cli.format.clone(),
cli.legend.clone(),
cli.summary.clone(),
)?;
Ok(())
},
// rom develop command
(_, Some(Commands::Develop { args })) => {
// If no args provided and --json is set, use piping mode from stdin
if args.is_empty() && cli.json {
let config = crate::types::Config {
piping: false,
silent: cli.silent,
input_mode: crate::types::InputMode::Json,
show_timers: true,
width: None,
format: crate::types::DisplayFormat::from_str(&cli.format),
legend_style: cli.legend.clone(),
summary_style: cli.summary.clone(),
};
let stdin = io::stdin();
let stdout = io::stdout();
return Ok(crate::monitor_stream(config, stdin.lock(), stdout.lock())?);
}
let (package_and_rom_args, nix_args) = parse_args_with_separator(&args);
if package_and_rom_args.is_empty() {
eyre::bail!(
"No package or flake specified for nix develop\nUsage: rom develop \
<package> [-- <nix-flags>]\nExample: rom develop nixpkgs#hello -- \
--impure"
);
}
run_nix_develop_wrapper(
package_and_rom_args,
nix_args,
cli.silent,
cli.format.clone(),
cli.legend.clone(),
cli.summary.clone(),
)?;
Ok(())
},
// Direct piping mode, read from stdin
(_, None) => {
let input_mode = if cli.json {
crate::types::InputMode::Json
} else {
crate::types::InputMode::Human
};
let config = crate::types::Config {
piping: false,
silent: cli.silent,
input_mode,
show_timers: true,
width: None,
format: crate::types::DisplayFormat::from_str(&cli.format),
legend_style: cli.legend.clone(),
summary_style: cli.summary.clone(),
};
let stdin = io::stdin();
let stdout = io::stdout();
Ok(crate::monitor_stream(config, stdin.lock(), stdout.lock())?)
},
}
}
/// Parse arguments, separating those before and after `--`
/// Returns (`args_before_separator`, `args_after_separator`)
///
/// Everything before `--` is for the package name and rom arguments.
/// Everything after `--` goes directly to nix.
pub fn parse_args_with_separator(
args: &[String],
) -> (Vec<String>, Vec<String>) {
if let Some(pos) = args.iter().position(|arg| arg == "--") {
// Arguments before -- are package/rom args
let before = args[..pos].to_vec();
// Arguments after -- go to nix
let after = args[pos + 1..].to_vec();
(before, after)
} else {
// No separator found - all args are package/rom args for backward
// compatibility
(args.to_vec(), Vec::new())
}
}
/// Run nix build with monitoring
fn run_nix_build_wrapper(
package_and_rom_args: Vec<String>,
user_nix_args: Vec<String>,
silent: bool,
format: String,
legend_style: String,
summary_style: String,
) -> eyre::Result<()> {
// Validate that at least one package/flake is specified
if package_and_rom_args.is_empty() {
eyre::bail!(
"No package or flake specified for nix build\nUsage: rom build \
<package> [-- <nix-flags>]\nExample: rom build nixpkgs#hello -- \
--rebuild"
);
}
let mut nix_args = vec![
"build".to_string(),
"-v".to_string(),
"--log-format".to_string(),
"internal-json".to_string(),
];
// Add package/flake argument(s)
nix_args.extend(package_and_rom_args);
// Add user-provided nix flags (after --)
nix_args.extend(user_nix_args);
let exit_code = run_monitored_command(
"nix",
nix_args,
silent,
format,
legend_style,
summary_style,
)?;
if exit_code != 0 {
std::process::exit(exit_code);
}
Ok(())
}
/// Run nix shell with monitoring
fn run_nix_shell_wrapper(
package_and_rom_args: Vec<String>,
user_nix_args: Vec<String>,
silent: bool,
format: String,
legend_style: String,
summary_style: String,
) -> eyre::Result<()> {
// Validate that at least one package/flake is specified
if package_and_rom_args.is_empty() {
eyre::bail!(
"No package or flake specified for nix shell\nUsage: rom shell \
<package> [-- <nix-flags>]\nExample: rom shell nixpkgs#python3 -- \
--pure"
);
}
// For nix shell, we need to run it twice:
// 1. First with --command exit to monitor the build
// 2. Then normally to actually enter the shell
let mut monitor_args = vec![
"shell".to_string(),
"-v".to_string(),
"--log-format".to_string(),
"internal-json".to_string(),
];
// Replace or append --command with exit
let args_without_command = replace_command_with_exit(&package_and_rom_args);
monitor_args.extend(args_without_command);
// Add user-provided nix flags
monitor_args.extend(user_nix_args.clone());
// Run first pass with monitoring
let exit_code = run_monitored_command(
"nix",
monitor_args,
silent,
format,
legend_style,
summary_style,
)?;
if exit_code != 0 {
std::process::exit(exit_code);
}
// If monitoring succeeded and not silent, run the actual shell command
if !silent {
let mut shell_args = vec!["shell".to_string()];
shell_args.extend(package_and_rom_args);
shell_args.extend(user_nix_args);
let status = Command::new("nix")
.args(&shell_args)
.status()
.map_err(crate::error::RomError::Io)?;
std::process::exit(status.code().unwrap_or(1));
}
Ok(())
}
/// Run nix develop with monitoring
fn run_nix_develop_wrapper(
package_and_rom_args: Vec<String>,
user_nix_args: Vec<String>,
silent: bool,
format: String,
legend_style: String,
summary_style: String,
) -> eyre::Result<()> {
// Unlike build/shell, no validation here: `nix develop` without arguments
// is valid, as it falls back to the current directory's flake.
// Similar to shell - run twice
let mut monitor_args = vec![
"develop".to_string(),
"-v".to_string(),
"--log-format".to_string(),
"internal-json".to_string(),
"--command".to_string(),
"true".to_string(),
];
monitor_args.extend(package_and_rom_args.clone());
// Add user-provided nix flags
monitor_args.extend(user_nix_args.clone());
// Run first pass with monitoring
let exit_code = run_monitored_command(
"nix",
monitor_args,
silent,
format,
legend_style,
summary_style,
)?;
if exit_code != 0 {
std::process::exit(exit_code);
}
// Run the actual develop command
if !silent {
let mut develop_args = vec!["develop".to_string()];
develop_args.extend(package_and_rom_args);
develop_args.extend(user_nix_args);
let status = Command::new("nix")
.args(&develop_args)
.status()
.map_err(crate::error::RomError::Io)?;
std::process::exit(status.code().unwrap_or(1));
}
Ok(())
}
/// Run a nix command with output monitoring
fn run_monitored_command(
command: &str,
args: Vec<String>,
silent: bool,
format_str: String,
legend_style_str: String,
summary_style_str: String,
) -> eyre::Result<i32> {
use std::{
io::{BufRead, BufReader},
sync::{Arc, Mutex},
thread,
time::Duration,
};
let mut child = Command::new(command)
.args(&args)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.map_err(crate::error::RomError::Io)?;
let stderr = child.stderr.take().expect("Failed to capture stderr");
let stdout = child.stdout.take().expect("Failed to capture stdout");
// Create shared state
let state = Arc::new(Mutex::new(crate::state::State::new()));
let state_clone = state.clone();
let render_state = state;
// Track whether we're done processing
let processing_done = Arc::new(Mutex::new(false));
let processing_done_clone = processing_done.clone();
// Track start time for initial timer
let start_time = Arc::new(Mutex::new(crate::state::current_time()));
let start_time_clone = start_time.clone();
// Spawn thread to read and parse stderr (where nix outputs logs)
let stderr_thread = thread::spawn(move || {
use tracing::debug;
let reader = BufReader::new(stderr);
let mut json_count = 0;
let mut non_json_count = 0;
for line in reader.lines().map_while(Result::ok) {
// Try to parse as JSON message
if let Some(json_line) = line.strip_prefix("@nix ") {
json_count += 1;
if let Ok(action) = serde_json::from_str::<cognos::Actions>(json_line) {
debug!("Parsed JSON message #{}: {:?}", json_count, action);
// Print messages immediately to stdout
if let cognos::Actions::Message { msg, .. } = &action {
println!("{}", msg);
}
let mut state = state_clone.lock().unwrap();
let derivation_count_before = state.derivation_infos.len();
crate::update::process_message(&mut state, action);
crate::update::maintain_state(
&mut state,
crate::state::current_time(),
);
let derivation_count_after = state.derivation_infos.len();
if derivation_count_after != derivation_count_before {
debug!(
"Derivation count changed: {} -> {}",
derivation_count_before, derivation_count_after
);
}
} else {
debug!("Failed to parse JSON: {}", json_line);
}
} else {
// Non-JSON lines, pass through
non_json_count += 1;
println!("{}", line);
}
}
debug!(
"Stderr thread finished: {} JSON messages, {} non-JSON lines",
json_count, non_json_count
);
*processing_done_clone.lock().unwrap() = true;
});
// Read stdout (final nix output)
let stdout_lines = Arc::new(Mutex::new(Vec::new()));
let stdout_lines_clone = stdout_lines.clone();
let stdout_thread = thread::spawn(move || {
let reader = BufReader::new(stdout);
for line in reader.lines().map_while(Result::ok) {
stdout_lines_clone.lock().unwrap().push(line);
}
});
// Render loop - this is what displays the build graph
let render_thread = thread::spawn(move || {
use crate::display::{Display, DisplayConfig};
let legend_style = match legend_style_str.to_lowercase().as_str() {
"compact" => crate::display::LegendStyle::Compact,
"verbose" => crate::display::LegendStyle::Verbose,
_ => crate::display::LegendStyle::Table,
};
let format = crate::types::DisplayFormat::from_str(&format_str);
let summary_style = match summary_style_str.to_lowercase().as_str() {
"table" => crate::display::SummaryStyle::Table,
"full" => crate::display::SummaryStyle::Full,
_ => crate::display::SummaryStyle::Concise,
};
let display_config = DisplayConfig {
show_timers: !silent,
max_tree_depth: 10,
max_visible_lines: 100,
use_color: !silent,
format,
legend_style,
summary_style,
};
let mut display = Display::new(io::stderr(), display_config).unwrap();
let mut last_timer_display: Option<String> = None;
// Render loop
loop {
thread::sleep(Duration::from_millis(100));
let done = *processing_done.lock().unwrap();
let state = render_state.lock().unwrap();
let has_activity = !state.derivation_infos.is_empty()
|| !state.full_summary.running_builds.is_empty()
|| !state.full_summary.planned_builds.is_empty();
if !silent {
if has_activity
|| state.progress_state != crate::state::ProgressState::JustStarted
{
// Clear any previous timer display
if last_timer_display.is_some() {
display.clear_previous().ok();
last_timer_display = None;
}
let _ = display.render(&state, &[]);
} else {
// Show initial timer while waiting for activity
let start = *start_time_clone.lock().unwrap();
let elapsed = crate::state::current_time() - start;
let timer_text =
format!("{}", crate::display::format_duration(elapsed));
// Only update if changed (to avoid flicker)
if last_timer_display.as_ref() != Some(&timer_text) {
display.clear_previous().ok();
eprintln!("{}", timer_text);
last_timer_display = Some(timer_text);
}
}
}
if done {
break;
}
}
// Give it a moment for final state updates
thread::sleep(Duration::from_millis(50));
// Final render
if !silent {
let mut state = render_state.lock().unwrap();
crate::update::finish_state(&mut state);
let _ = display.render_final(&state);
}
});
// Wait for process to complete
let status = child.wait().map_err(crate::error::RomError::Io)?;
// Wait for threads to finish
let _ = stderr_thread.join();
let _ = stdout_thread.join();
let _ = render_thread.join();
// Print captured stdout (nix's final output)
let stdout_lines = stdout_lines.lock().unwrap();
for line in stdout_lines.iter() {
use std::io::Write;
let _ = writeln!(std::io::stdout(), "{line}");
}
Ok(status.code().unwrap_or(1))
}
/// Replace `--command`/`-c` arguments with `sh -c exit` for the
/// monitoring pass.
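///
/// For example (illustrative), `["nixpkgs#hello", "--command", "bash"]`
/// becomes `["nixpkgs#hello", "--command", "sh", "-c", "exit"]`.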
fn replace_command_with_exit(args: &[String]) -> Vec<String> {
let mut result = Vec::new();
let mut skip_next = false;
for arg in args {
if skip_next {
skip_next = false;
continue;
}
if arg == "--command" || arg == "-c" {
// Skip this and the next argument
skip_next = true;
continue;
}
result.push(arg.clone());
}
// Add our exit command
result.push("--command".to_string());
result.push("sh".to_string());
result.push("-c".to_string());
result.push("exit".to_string());
result
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_replace_command_with_exit() {
let args = vec![
"nixpkgs#hello".to_string(),
"--command".to_string(),
"bash".to_string(),
];
let result = replace_command_with_exit(&args);
assert_eq!(result[0], "nixpkgs#hello");
assert!(result.contains(&"--command".to_string()));
assert!(result.contains(&"exit".to_string()));
assert!(!result.contains(&"bash".to_string()));
}
#[test]
fn test_replace_command_short_form() {
let args = vec![
"nixpkgs#hello".to_string(),
"-c".to_string(),
"echo test".to_string(),
];
let result = replace_command_with_exit(&args);
assert_eq!(result[0], "nixpkgs#hello");
assert!(result.contains(&"exit".to_string()));
assert!(!result.contains(&"echo test".to_string()));
}
#[test]
fn test_parse_args_with_separator() {
// Test with separator
let args = vec![
"nixpkgs#hello".to_string(),
"--".to_string(),
"--help".to_string(),
];
let (before, after) = parse_args_with_separator(&args);
assert_eq!(before, vec!["nixpkgs#hello".to_string()]);
assert_eq!(after, vec!["--help".to_string()]);
// Test without separator (backward compatibility)
let args = vec!["nixpkgs#hello".to_string(), "--help".to_string()];
let (before, after) = parse_args_with_separator(&args);
assert_eq!(before, vec![
"nixpkgs#hello".to_string(),
"--help".to_string()
]);
assert_eq!(after, Vec::<String>::new());
// Test with multiple nix args after separator
let args = vec![
"nixpkgs#hello".to_string(),
"--".to_string(),
"--option".to_string(),
"foo".to_string(),
"bar".to_string(),
];
let (before, after) = parse_args_with_separator(&args);
assert_eq!(before, vec!["nixpkgs#hello".to_string()]);
assert_eq!(after, vec![
"--option".to_string(),
"foo".to_string(),
"bar".to_string()
]);
// Test with only separator (edge case)
let args = vec!["--".to_string(), "--help".to_string()];
let (before, after) = parse_args_with_separator(&args);
assert_eq!(before, Vec::<String>::new());
assert_eq!(after, vec!["--help".to_string()]);
}
}

1090
rom/src/display.rs Normal file

File diff suppressed because it is too large Load diff

71
rom/src/error.rs Normal file
View file

@ -0,0 +1,71 @@
//! Error types for ROM
use std::io;
use thiserror::Error;
/// Result type for ROM operations
pub type Result<T> = std::result::Result<T, RomError>;
/// Main error type for ROM
#[derive(Debug, Error)]
pub enum RomError {
/// IO error
#[error("IO error: {0}")]
Io(#[from] io::Error),
/// JSON parsing error
#[error("JSON parsing error: {0}")]
Json(#[from] serde_json::Error),
/// Build failed
#[error("Build failed")]
BuildFailed,
/// Process execution error
#[error("Process error: {0}")]
Process(String),
/// Configuration error
#[error("Configuration error: {0}")]
Config(String),
/// Parse error
#[error("Parse error: {0}")]
Parse(String),
/// Terminal error
#[error("Terminal error: {0}")]
Terminal(String),
/// Other error
#[error("{0}")]
Other(String),
}
impl RomError {
/// Create a process error
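///
/// A minimal doctest sketch of the resulting display text:
///
/// ```
/// use rom::RomError;
///
/// let err = RomError::process("nix exited unexpectedly");
/// assert_eq!(err.to_string(), "Process error: nix exited unexpectedly");
/// ```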
pub fn process<S: Into<String>>(msg: S) -> Self {
Self::Process(msg.into())
}
/// Create a config error
pub fn config<S: Into<String>>(msg: S) -> Self {
Self::Config(msg.into())
}
/// Create a parse error
pub fn parse<S: Into<String>>(msg: S) -> Self {
Self::Parse(msg.into())
}
/// Create a terminal error
pub fn terminal<S: Into<String>>(msg: S) -> Self {
Self::Terminal(msg.into())
}
/// Create an other error
pub fn other<S: Into<String>>(msg: S) -> Self {
Self::Other(msg.into())
}
}

50
rom/src/lib.rs Normal file
View file

@ -0,0 +1,50 @@
//! ROM - Rust Output Monitor
pub mod cli;
pub mod display;
pub mod error;
pub mod monitor;
pub mod state;
pub mod types;
pub mod update;
use std::io::{BufRead, Write};
pub use cli::{Cli, Commands};
pub use error::{Result, RomError};
pub use monitor::Monitor;
pub use types::{Config, InputMode};
/// Monitor a stream of nix output and display enhanced progress information.
///
/// # Arguments
///
/// * `config` - Configuration for the monitor
/// * `reader` - Input stream containing nix output
/// * `writer` - Output stream for enhanced display
///
/// # Errors
///
/// Returns an error if monitoring fails due to I/O issues or parsing errors.
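///
/// # Examples
///
/// A minimal sketch that feeds nix output from stdin and renders the
/// enhanced display to stderr:
///
/// ```no_run
/// use std::io;
///
/// use rom::{monitor_stream, Config};
///
/// let stdin = io::stdin();
/// monitor_stream(Config::default(), stdin.lock(), io::stderr()).unwrap();
/// ```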
pub fn monitor_stream<R, W>(config: Config, reader: R, writer: W) -> Result<()>
where
R: BufRead,
W: Write,
{
let mut monitor = Monitor::new(config, writer)?;
monitor.process_stream(reader)
}
/// Run the CLI application with the provided arguments.
///
/// This is the main entry point for the CLI application.
pub fn run() -> eyre::Result<()> {
cli::run()
}
/// Create a new monitor instance with the given configuration.
pub fn create_monitor<W: Write>(
config: Config,
writer: W,
) -> Result<Monitor<W>> {
Monitor::new(config, writer)
}

14
rom/src/main.rs Normal file
View file

@ -0,0 +1,14 @@
fn main() -> eyre::Result<()> {
// Initialize tracing
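// Verbosity defaults to `rom=warn`; it can be raised at runtime via the
// standard `RUST_LOG` environment variable (e.g. `RUST_LOG=rom=debug`).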
tracing_subscriber::fmt()
.with_env_filter(
tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("rom=warn")),
)
.with_target(false)
.with_writer(std::io::stderr)
.init();
// Run the CLI
rom::run()
}

299
rom/src/monitor.rs Normal file
View file

@ -0,0 +1,299 @@
//! Monitor module for orchestrating state updates and display rendering
use std::{
io::{BufRead, Write},
time::Duration,
};
use crate::{
display::{Display, DisplayConfig},
error::{Result, RomError},
state::State,
types::{Config, InputMode},
update,
};
/// Main monitor that processes nix output and displays progress
pub struct Monitor<W: Write> {
state: State,
display: Display<W>,
config: Config,
}
impl<W: Write> Monitor<W> {
/// Create a new monitor
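///
/// A minimal sketch that renders into an in-memory buffer (mirrors the
/// unit test below):
///
/// ```no_run
/// use rom::{Config, Monitor};
///
/// let monitor = Monitor::new(Config::default(), Vec::<u8>::new());
/// assert!(monitor.is_ok());
/// ```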
pub fn new(config: Config, writer: W) -> Result<Self> {
let legend_style = match config.legend_style.to_lowercase().as_str() {
"compact" => crate::display::LegendStyle::Compact,
"verbose" => crate::display::LegendStyle::Verbose,
_ => crate::display::LegendStyle::Table,
};
let summary_style = match config.summary_style.to_lowercase().as_str() {
"table" => crate::display::SummaryStyle::Table,
"full" => crate::display::SummaryStyle::Full,
_ => crate::display::SummaryStyle::Concise,
};
let display_config = DisplayConfig {
show_timers: config.show_timers,
max_tree_depth: 10,
max_visible_lines: 100,
use_color: !config.piping,
format: config.format.clone(),
legend_style,
summary_style,
};
let display = Display::new(writer, display_config)?;
let state = State::new();
Ok(Self {
state,
display,
config,
})
}
/// Process a stream of input
pub fn process_stream<R: BufRead>(&mut self, reader: R) -> Result<()> {
let mut last_render = std::time::Instant::now();
let render_interval = Duration::from_millis(100);
for line in reader.lines() {
let line = line.map_err(RomError::Io)?;
// Process the line
self.process_line(&line)?;
// Render periodically
if last_render.elapsed() >= render_interval {
if !self.config.silent {
self.display.render(&self.state, &[])?;
}
last_render = std::time::Instant::now();
}
}
// Mark as finished and do final render
crate::update::finish_state(&mut self.state);
if !self.config.silent {
self.display.render_final(&self.state)?;
}
// Return error code if there were failures
if self.state.has_errors() {
return Err(RomError::BuildFailed);
}
Ok(())
}
/// Process a single line of input
fn process_line(&mut self, line: &str) -> Result<bool> {
// Auto-detect format: lines starting with "@nix " are JSON
if line.starts_with("@nix ") {
self.process_json_line(line)
} else {
match self.config.input_mode {
InputMode::Json => self.process_json_line(line),
InputMode::Human => self.process_human_line(line),
}
}
}
/// Process a JSON-formatted line
fn process_json_line(&mut self, line: &str) -> Result<bool> {
// Nix JSON lines are prefixed with "@nix "
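// e.g. `@nix {"action":"stop","id":1}` (illustrative)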
if let Some(json_str) = line.strip_prefix("@nix ") {
match serde_json::from_str::<cognos::Actions>(json_str) {
Ok(action) => {
// Handle message passthrough - print directly to stdout
if let cognos::Actions::Message { msg, .. } = &action {
println!("{}", msg);
}
let changed = update::process_message(&mut self.state, action);
Ok(changed)
},
Err(e) => {
// Log parsing errors but don't fail
tracing::debug!("Failed to parse JSON message: {}", e);
Ok(false)
},
}
} else {
// Non-JSON lines in JSON mode are passed through
println!("{}", line);
Ok(false)
}
}
/// Process a human-readable line
fn process_human_line(&mut self, line: &str) -> Result<bool> {
// Parse human-readable nix output
// This is a simplified version - the full implementation would need
// comprehensive parsing of nix's output format
let line = line.trim();
// Skip empty lines
if line.is_empty() {
return Ok(false);
}
// Try to detect build-related messages
if line.starts_with("building") || line.contains("building '") {
if let Some(drv_path) = extract_path_from_message(line) {
if let Some(drv) = crate::state::Derivation::parse(&drv_path) {
let drv_id = self.state.get_or_create_derivation_id(drv);
let now = crate::state::current_time();
let build_info = crate::state::BuildInfo {
start: now,
host: crate::state::Host::Localhost,
estimate: None,
activity_id: None,
};
self.state.update_build_status(
drv_id,
crate::state::BuildStatus::Building(build_info),
);
return Ok(true);
}
}
}
// Detect downloads
if line.starts_with("downloading") || line.contains("downloading '") {
if let Some(path_str) = extract_path_from_message(line) {
if let Some(path) = crate::state::StorePath::parse(&path_str) {
let path_id = self.state.get_or_create_store_path_id(path);
let now = crate::state::current_time();
let transfer = crate::state::TransferInfo {
start: now,
host: crate::state::Host::Localhost,
activity_id: 0, // No activity ID in human mode
bytes_transferred: 0,
total_bytes: None,
};
if let Some(path_info) = self.state.get_store_path_info_mut(path_id) {
path_info
.states
.insert(crate::state::StorePathState::Downloading(
transfer.clone(),
));
}
self
.state
.full_summary
.running_downloads
.insert(path_id, transfer);
return Ok(true);
}
}
}
// Detect errors
if line.starts_with("error:") || line.contains("error:") {
self.state.nix_errors.push(line.to_string());
return Ok(true);
}
// Detect build completions
if line.starts_with("built") || line.contains("built '") {
if let Some(drv_path) = extract_path_from_message(line) {
if let Some(drv) = crate::state::Derivation::parse(&drv_path) {
if let Some(&drv_id) = self.state.derivation_ids.get(&drv) {
if let Some(info) = self.state.get_derivation_info(drv_id) {
if let crate::state::BuildStatus::Building(build_info) =
&info.build_status
{
let now = crate::state::current_time();
self.state.update_build_status(
drv_id,
crate::state::BuildStatus::Built {
info: build_info.clone(),
end: now,
},
);
return Ok(true);
}
}
}
}
}
}
Ok(false)
}
/// Get a reference to the current state
pub const fn state(&self) -> &State {
&self.state
}
/// Get a mutable reference to the current state
pub const fn state_mut(&mut self) -> &mut State {
&mut self.state
}
}
/// Extract a path from a message line
fn extract_path_from_message(line: &str) -> Option<String> {
// Look for quoted paths
if let Some(start) = line.find('\'') {
if let Some(end) = line[start + 1..].find('\'') {
return Some(line[start + 1..start + 1 + end].to_string());
}
}
// Look for unquoted store paths
for word in line.split_whitespace() {
if word.starts_with("/nix/store/") {
return Some(
word
.trim_matches(|c: char| {
!c.is_ascii_alphanumeric() && c != '/' && c != '-' && c != '.'
})
.to_string(),
);
}
}
None
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_monitor_creation() {
let config = Config::default();
let output = Vec::new();
let monitor = Monitor::new(config, output);
assert!(monitor.is_ok());
}
#[test]
fn test_extract_path_from_message() {
let line = "building '/nix/store/abc123-hello-1.0.drv'";
let path = extract_path_from_message(line);
assert!(path.is_some());
assert!(path.unwrap().contains("hello-1.0.drv"));
}
#[test]
fn test_extract_path_unquoted() {
let line = "building /nix/store/abc123-hello-1.0.drv locally";
let path = extract_path_from_message(line);
assert!(path.is_some());
}
}

779
rom/src/state.rs Normal file
View file

@ -0,0 +1,779 @@
//! State management for ROM
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
time::{Duration, SystemTime},
};
use cognos::Id;
use indexmap::IndexMap;
/// Unique identifier for store paths
pub type StorePathId = usize;
/// Unique identifier for derivations
pub type DerivationId = usize;
/// Unique identifier for activities
pub type ActivityId = Id;
/// Overall progress state
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ProgressState {
JustStarted,
InputReceived,
Finished,
}
/// Build host information
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Host {
Localhost,
Remote(String),
}
impl Host {
#[must_use]
pub const fn is_local(&self) -> bool {
matches!(self, Self::Localhost)
}
#[must_use]
pub fn name(&self) -> &str {
match self {
Self::Localhost => "localhost",
Self::Remote(name) => name,
}
}
}
/// Store path representation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StorePath {
pub path: PathBuf,
pub hash: String,
pub name: String,
}
impl StorePath {
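/// Parse an absolute store path into its hash and name components.
///
/// A minimal doctest sketch (mirrors the unit test below):
///
/// ```
/// use rom::state::StorePath;
///
/// let sp = StorePath::parse("/nix/store/abc123-hello-1.0").unwrap();
/// assert_eq!(sp.hash, "abc123");
/// assert_eq!(sp.name, "hello-1.0");
/// ```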
#[must_use]
pub fn parse(path: &str) -> Option<Self> {
if !path.starts_with("/nix/store/") {
return None;
}
let path_buf = PathBuf::from(path);
let file_name = path_buf.file_name()?.to_str()?;
let parts: Vec<&str> = file_name.splitn(2, '-').collect();
if parts.len() != 2 {
return None;
}
Some(Self {
path: path_buf.clone(),
hash: parts[0].to_string(),
name: parts[1].to_string(),
})
}
}
/// Derivation representation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Derivation {
pub path: PathBuf,
pub name: String,
}
impl Derivation {
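/// Parse a `.drv` path into a derivation with a human-friendly name.
///
/// A minimal doctest sketch (mirrors the unit test below):
///
/// ```
/// use rom::state::Derivation;
///
/// let drv = Derivation::parse("/nix/store/abc123-hello-1.0.drv").unwrap();
/// assert_eq!(drv.name, "hello-1.0");
/// ```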
#[must_use]
pub fn parse(path: &str) -> Option<Self> {
let path_buf = PathBuf::from(path);
let file_name = path_buf.file_name()?.to_str()?;
if !file_name.ends_with(".drv") {
return None;
}
let name = file_name.strip_suffix(".drv")?;
let parts: Vec<&str> = name.splitn(2, '-').collect();
let display_name = if parts.len() == 2 {
parts[1].to_string()
} else {
name.to_string()
};
Some(Self {
path: path_buf,
name: display_name,
})
}
}
/// Output name for derivations
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum OutputName {
Out,
Doc,
Dev,
Bin,
Info,
Lib,
Man,
Dist,
Other(String),
}
impl OutputName {
#[must_use]
pub fn parse(name: &str) -> Self {
match name.to_lowercase().as_str() {
"out" => Self::Out,
"doc" => Self::Doc,
"dev" => Self::Dev,
"bin" => Self::Bin,
"info" => Self::Info,
"lib" => Self::Lib,
"man" => Self::Man,
"dist" => Self::Dist,
_ => Self::Other(name.to_string()),
}
}
}
/// Transfer information (download/upload)
#[derive(Debug, Clone)]
pub struct TransferInfo {
pub start: f64,
pub host: Host,
pub activity_id: ActivityId,
pub bytes_transferred: u64,
pub total_bytes: Option<u64>,
}
/// Completed transfer information
#[derive(Debug, Clone)]
pub struct CompletedTransferInfo {
pub start: f64,
pub end: f64,
pub host: Host,
pub total_bytes: u64,
}
/// Store path state
#[derive(Debug, Clone)]
pub enum StorePathState {
DownloadPlanned,
Downloading(TransferInfo),
Uploading(TransferInfo),
Downloaded(CompletedTransferInfo),
Uploaded(CompletedTransferInfo),
}
/// Store path information
#[derive(Debug, Clone)]
pub struct StorePathInfo {
pub name: StorePath,
pub states: HashSet<StorePathState>,
pub producer: Option<DerivationId>,
pub input_for: HashSet<DerivationId>,
}
impl PartialEq for StorePathState {
fn eq(&self, other: &Self) -> bool {
std::mem::discriminant(self) == std::mem::discriminant(other)
}
}
impl Eq for StorePathState {}
impl std::hash::Hash for StorePathState {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
std::mem::discriminant(self).hash(state);
}
}
/// Build information
#[derive(Debug, Clone)]
pub struct BuildInfo {
pub start: f64,
pub host: Host,
pub estimate: Option<u64>,
pub activity_id: Option<ActivityId>,
}
/// Build failure information
#[derive(Debug, Clone)]
pub struct BuildFail {
pub at: f64,
pub fail_type: FailType,
}
/// Failure type
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FailType {
BuildFailed(i32),
Timeout,
HashMismatch,
DependencyFailed,
Unknown,
}
/// Build status
#[derive(Debug, Clone)]
pub enum BuildStatus {
Unknown,
Planned,
Building(BuildInfo),
Built { info: BuildInfo, end: f64 },
Failed { info: BuildInfo, fail: BuildFail },
}
/// Input derivation for dependency tracking
#[derive(Debug, Clone)]
pub struct InputDerivation {
pub derivation: DerivationId,
pub outputs: HashSet<OutputName>,
}
/// Derivation information
#[derive(Debug, Clone)]
pub struct DerivationInfo {
pub name: Derivation,
pub outputs: HashMap<OutputName, StorePathId>,
pub input_derivations: Vec<InputDerivation>,
pub input_sources: HashSet<StorePathId>,
pub build_status: BuildStatus,
pub dependency_summary: DependencySummary,
pub cached: bool,
pub derivation_parents: HashSet<DerivationId>,
pub pname: Option<String>,
pub platform: Option<String>,
}
/// Dependency summary for tracking build progress
#[derive(Debug, Clone, Default)]
pub struct DependencySummary {
pub planned_builds: HashSet<DerivationId>,
pub running_builds: HashMap<DerivationId, BuildInfo>,
pub completed_builds: HashMap<DerivationId, CompletedBuildInfo>,
pub failed_builds: HashMap<DerivationId, FailedBuildInfo>,
pub planned_downloads: HashSet<StorePathId>,
pub completed_downloads: HashMap<StorePathId, CompletedTransferInfo>,
pub completed_uploads: HashMap<StorePathId, CompletedTransferInfo>,
pub running_downloads: HashMap<StorePathId, TransferInfo>,
pub running_uploads: HashMap<StorePathId, TransferInfo>,
}
impl DependencySummary {
pub fn merge(&mut self, other: &Self) {
self
.planned_builds
.extend(other.planned_builds.iter().copied());
self
.running_builds
.extend(other.running_builds.iter().map(|(k, v)| (*k, v.clone())));
self
.completed_builds
.extend(other.completed_builds.iter().map(|(k, v)| (*k, v.clone())));
self
.failed_builds
.extend(other.failed_builds.iter().map(|(k, v)| (*k, v.clone())));
self
.planned_downloads
.extend(other.planned_downloads.iter().copied());
self.completed_downloads.extend(
other
.completed_downloads
.iter()
.map(|(k, v)| (*k, v.clone())),
);
self
.completed_uploads
.extend(other.completed_uploads.iter().map(|(k, v)| (*k, v.clone())));
self
.running_downloads
.extend(other.running_downloads.iter().map(|(k, v)| (*k, v.clone())));
self
.running_uploads
.extend(other.running_uploads.iter().map(|(k, v)| (*k, v.clone())));
}
pub fn clear_derivation(
&mut self,
id: DerivationId,
old_status: &BuildStatus,
) {
match old_status {
BuildStatus::Unknown => {},
BuildStatus::Planned => {
self.planned_builds.remove(&id);
},
BuildStatus::Building(_) => {
self.running_builds.remove(&id);
},
BuildStatus::Built { .. } => {
self.completed_builds.remove(&id);
},
BuildStatus::Failed { .. } => {
self.failed_builds.remove(&id);
},
}
}
pub fn update_derivation(
&mut self,
id: DerivationId,
new_status: &BuildStatus,
) {
match new_status {
BuildStatus::Unknown => {},
BuildStatus::Planned => {
self.planned_builds.insert(id);
},
BuildStatus::Building(info) => {
self.running_builds.insert(id, info.clone());
},
BuildStatus::Built { info, end } => {
self.completed_builds.insert(id, CompletedBuildInfo {
start: info.start,
end: *end,
host: info.host.clone(),
});
},
BuildStatus::Failed { info, fail } => {
self.failed_builds.insert(id, FailedBuildInfo {
start: info.start,
end: fail.at,
host: info.host.clone(),
fail_type: fail.fail_type.clone(),
});
},
}
}
}
/// Completed build information
#[derive(Debug, Clone)]
pub struct CompletedBuildInfo {
pub start: f64,
pub end: f64,
pub host: Host,
}
/// Failed build information
#[derive(Debug, Clone)]
pub struct FailedBuildInfo {
pub start: f64,
pub end: f64,
pub host: Host,
pub fail_type: FailType,
}
/// Activity status tracking
#[derive(Debug, Clone)]
pub struct ActivityStatus {
pub activity: u8,
pub text: String,
pub parent: Option<ActivityId>,
pub phase: Option<String>,
}
/// Build report for caching
#[derive(Debug, Clone)]
pub struct BuildReport {
pub derivation_name: String,
pub platform: String,
pub duration_secs: f64,
pub completed_at: SystemTime,
pub host: String,
pub success: bool,
}
/// Evaluation information
#[derive(Debug, Clone, Default)]
pub struct EvalInfo {
pub last_file_name: Option<String>,
pub count: usize,
pub at: f64,
}
/// Main state for ROM
#[derive(Debug, Clone)]
pub struct State {
pub derivation_infos: IndexMap<DerivationId, DerivationInfo>,
pub store_path_infos: IndexMap<StorePathId, StorePathInfo>,
pub full_summary: DependencySummary,
pub forest_roots: Vec<DerivationId>,
pub build_reports: HashMap<String, Vec<BuildReport>>,
pub start_time: f64,
pub progress_state: ProgressState,
pub store_path_ids: HashMap<StorePath, StorePathId>,
pub derivation_ids: HashMap<Derivation, DerivationId>,
pub touched_ids: HashSet<DerivationId>,
pub activities: HashMap<ActivityId, ActivityStatus>,
pub nix_errors: Vec<String>,
pub build_logs: Vec<String>,
pub build_platform: Option<String>,
pub evaluation_state: EvalInfo,
next_store_path_id: StorePathId,
next_derivation_id: DerivationId,
}
impl Default for State {
fn default() -> Self {
Self::new()
}
}
impl State {
#[must_use]
pub fn new() -> Self {
Self {
derivation_infos: IndexMap::new(),
store_path_infos: IndexMap::new(),
full_summary: DependencySummary::default(),
forest_roots: Vec::new(),
build_reports: HashMap::new(),
start_time: current_time(),
progress_state: ProgressState::JustStarted,
store_path_ids: HashMap::new(),
derivation_ids: HashMap::new(),
touched_ids: HashSet::new(),
activities: HashMap::new(),
nix_errors: Vec::new(),
build_logs: Vec::new(),
build_platform: None,
evaluation_state: EvalInfo::default(),
next_store_path_id: 0,
next_derivation_id: 0,
}
}
#[must_use]
pub fn with_platform(platform: Option<String>) -> Self {
let mut state = Self::new();
state.build_platform = platform;
state
}
pub fn get_or_create_store_path_id(
&mut self,
path: StorePath,
) -> StorePathId {
if let Some(&id) = self.store_path_ids.get(&path) {
return id;
}
let id = self.next_store_path_id;
self.next_store_path_id += 1;
self.store_path_infos.insert(id, StorePathInfo {
name: path.clone(),
states: HashSet::new(),
producer: None,
input_for: HashSet::new(),
});
self.store_path_ids.insert(path, id);
id
}
pub fn get_or_create_derivation_id(
&mut self,
drv: Derivation,
) -> DerivationId {
if let Some(&id) = self.derivation_ids.get(&drv) {
return id;
}
let id = self.next_derivation_id;
self.next_derivation_id += 1;
self.derivation_infos.insert(id, DerivationInfo {
name: drv.clone(),
outputs: HashMap::new(),
input_derivations: Vec::new(),
input_sources: HashSet::new(),
build_status: BuildStatus::Unknown,
dependency_summary: DependencySummary::default(),
cached: false,
derivation_parents: HashSet::new(),
pname: None,
platform: None,
});
self.derivation_ids.insert(drv, id);
id
}
/// Populate derivation dependencies by parsing its .drv file
pub fn populate_derivation_dependencies(&mut self, drv_id: DerivationId) {
use cognos::aterm;
use tracing::debug;
// Check if we've already parsed this derivation's dependencies
// to avoid infinite recursion in circular dependency graphs
let already_parsed = {
if let Some(info) = self.get_derivation_info(drv_id) {
!info.input_derivations.is_empty()
} else {
false
}
};
if already_parsed {
debug!("Skipping already-parsed derivation {}", drv_id);
return;
}
let drv_path = {
let info = match self.get_derivation_info(drv_id) {
Some(i) => i,
None => return,
};
// Path already includes .drv extension from Derivation::parse
info.name.path.display().to_string()
};
debug!("Attempting to parse .drv file: {}", drv_path);
let parsed = match aterm::parse_drv_file(&drv_path) {
Ok(p) => {
debug!(
"Successfully parsed .drv file: {} with {} input derivations",
drv_path,
p.input_drvs.len()
);
p
},
Err(e) => {
debug!("Failed to parse .drv file {}: {}", drv_path, e);
return;
},
};
// Extract metadata
if let Some(pname) = aterm::extract_pname(&parsed.env) {
if let Some(info) = self.get_derivation_info_mut(drv_id) {
info.pname = Some(pname);
}
}
if let Some(info) = self.get_derivation_info_mut(drv_id) {
info.platform = Some(parsed.platform);
}
// Check if parent derivation is actively building
let parent_is_building = {
if let Some(parent_info) = self.get_derivation_info(drv_id) {
matches!(parent_info.build_status, BuildStatus::Building(_))
} else {
false
}
};
// Process input derivations
for (input_drv_path, outputs) in parsed.input_drvs {
if let Some(input_drv) = Derivation::parse(&input_drv_path) {
let input_drv_id = self.get_or_create_derivation_id(input_drv);
// Mark dependencies as Planned if the parent is Building and the
// input is Unknown. This ensures we only count real dependencies
// that will be built.
if parent_is_building {
if let Some(input_info) = self.get_derivation_info(input_drv_id) {
if matches!(input_info.build_status, BuildStatus::Unknown) {
debug!(
"Marking input derivation {} as Planned (parent {} is \
Building)",
input_drv_id, drv_id
);
self.update_build_status(input_drv_id, BuildStatus::Planned);
} else {
debug!(
"Input derivation {} current status: {:?}",
input_drv_id, input_info.build_status
);
}
}
}
// Create output set
let mut output_set = HashSet::new();
for output in outputs {
output_set.insert(parse_output_name(&output));
}
// Add to parent's input derivations
if let Some(parent_info) = self.get_derivation_info_mut(drv_id) {
let input = InputDerivation {
derivation: input_drv_id,
outputs: output_set,
};
if parent_info
.input_derivations
.iter()
.any(|d| d.derivation == input_drv_id)
{
debug!(
"Input derivation {} already in parent {}",
input_drv_id, drv_id
);
} else {
parent_info.input_derivations.push(input);
debug!(
"Added input derivation {} to {} (parent now has {} inputs)",
input_drv_id,
drv_id,
parent_info.input_derivations.len()
);
}
} else {
debug!(
"Parent derivation {} not found when trying to add input {}",
drv_id, input_drv_id
);
}
// Mark child as having this parent
if let Some(child_info) = self.get_derivation_info_mut(input_drv_id) {
child_info.derivation_parents.insert(drv_id);
}
// Remove from forest roots if it has a parent
self.forest_roots.retain(|&id| id != input_drv_id);
// Recursively populate child dependencies
self.populate_derivation_dependencies(input_drv_id);
}
}
}
#[must_use]
pub fn get_derivation_info(
&self,
id: DerivationId,
) -> Option<&DerivationInfo> {
self.derivation_infos.get(&id)
}
pub fn get_derivation_info_mut(
&mut self,
id: DerivationId,
) -> Option<&mut DerivationInfo> {
self.derivation_infos.get_mut(&id)
}
#[must_use]
pub fn get_store_path_info(&self, id: StorePathId) -> Option<&StorePathInfo> {
self.store_path_infos.get(&id)
}
pub fn get_store_path_info_mut(
&mut self,
id: StorePathId,
) -> Option<&mut StorePathInfo> {
self.store_path_infos.get_mut(&id)
}
pub fn update_build_status(
&mut self,
id: DerivationId,
new_status: BuildStatus,
) {
if let Some(info) = self.derivation_infos.get_mut(&id) {
let old_status =
std::mem::replace(&mut info.build_status, new_status.clone());
self.full_summary.clear_derivation(id, &old_status);
self.full_summary.update_derivation(id, &new_status);
self.touched_ids.insert(id);
}
}
#[must_use]
pub fn has_errors(&self) -> bool {
!self.nix_errors.is_empty() || !self.full_summary.failed_builds.is_empty()
}
#[must_use]
pub fn total_builds(&self) -> usize {
self.full_summary.planned_builds.len()
+ self.full_summary.running_builds.len()
+ self.full_summary.completed_builds.len()
+ self.full_summary.failed_builds.len()
}
#[must_use]
pub fn running_builds_for_host(
&self,
host: &Host,
) -> Vec<(DerivationId, &BuildInfo)> {
self
.full_summary
.running_builds
.iter()
.filter(|(_, info)| &info.host == host)
.map(|(id, info)| (*id, info))
.collect()
}
}
#[must_use]
pub fn current_time() -> f64 {
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or(Duration::ZERO)
.as_secs_f64()
}
fn parse_output_name(name: &str) -> OutputName {
match name {
"out" => OutputName::Out,
"doc" => OutputName::Doc,
"dev" => OutputName::Dev,
"bin" => OutputName::Bin,
"info" => OutputName::Info,
"lib" => OutputName::Lib,
"man" => OutputName::Man,
"dist" => OutputName::Dist,
_ => OutputName::Other(name.to_string()),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_store_path_parse() {
let path = "/nix/store/abc123-hello-1.0";
let sp = StorePath::parse(path).unwrap();
assert_eq!(sp.hash, "abc123");
assert_eq!(sp.name, "hello-1.0");
}
#[test]
fn test_derivation_parse() {
let path = "/nix/store/abc123-hello-1.0.drv";
let drv = Derivation::parse(path).unwrap();
assert_eq!(drv.name, "hello-1.0");
}
#[test]
fn test_state_creation() {
let state = State::new();
assert_eq!(state.progress_state, ProgressState::JustStarted);
assert_eq!(state.total_builds(), 0);
}
#[test]
fn test_get_or_create_ids() {
let mut state = State::new();
let path = StorePath::parse("/nix/store/abc123-hello-1.0").unwrap();
let id1 = state.get_or_create_store_path_id(path.clone());
let id2 = state.get_or_create_store_path_id(path);
assert_eq!(id1, id2);
}
}

111
rom/src/types.rs Normal file
View file

@ -0,0 +1,111 @@
//! Core types for ROM
/// Display format for output
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DisplayFormat {
/// Show dependency tree graph
Tree,
/// Plain text output
Plain,
/// Dashboard summary view
Dashboard,
}
/// Summary display style
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SummaryStyle {
/// Concise single-line summary
Concise,
/// Table with host breakdown
Table,
/// Full detailed summary
Full,
}
impl SummaryStyle {
pub fn from_str(s: &str) -> Self {
match s.to_lowercase().as_str() {
"concise" => Self::Concise,
"table" => Self::Table,
"full" => Self::Full,
_ => Self::Concise,
}
}
}
impl DisplayFormat {
pub fn from_str(s: &str) -> Self {
match s.to_lowercase().as_str() {
"tree" => Self::Tree,
"plain" => Self::Plain,
"dashboard" => Self::Dashboard,
_ => Self::Tree,
}
}
}
/// Configuration for the monitor
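///
/// A minimal sketch of overriding a few fields on top of the defaults:
///
/// ```
/// use rom::types::{Config, DisplayFormat, InputMode};
///
/// let config = Config {
///   input_mode: InputMode::Json,
///   format: DisplayFormat::Dashboard,
///   ..Config::default()
/// };
/// assert!(config.show_timers);
/// ```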
#[derive(Debug, Clone)]
pub struct Config {
/// Whether we're piping output through
pub piping: bool,
/// Silent mode - minimal output
pub silent: bool,
/// Input parsing mode
pub input_mode: InputMode,
/// Show completion times
pub show_timers: bool,
/// Terminal width override
pub width: Option<usize>,
/// Display format
pub format: DisplayFormat,
/// Legend display style
pub legend_style: String,
/// Summary display style
pub summary_style: String,
}
impl Default for Config {
fn default() -> Self {
Self {
piping: false,
silent: false,
input_mode: InputMode::Human,
show_timers: true,
width: None,
format: DisplayFormat::Tree,
legend_style: "table".to_string(),
summary_style: "concise".to_string(),
}
}
}
/// Input parsing mode
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InputMode {
/// Parse JSON output from nix --log-format=internal-json
Json,
/// Parse human-readable nix output
Human,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_default() {
let config = Config::default();
assert!(!config.piping);
assert!(!config.silent);
assert_eq!(config.input_mode, InputMode::Human);
assert!(config.show_timers);
assert_eq!(config.format, DisplayFormat::Tree);
}
#[test]
fn test_input_mode_comparison() {
assert_eq!(InputMode::Json, InputMode::Json);
assert_ne!(InputMode::Json, InputMode::Human);
}
}

903
rom/src/update.rs Normal file
View file

@ -0,0 +1,903 @@
//! State update logic for processing nix messages
use cognos::{Actions, Activities, Id, Verbosity};
use tracing::{debug, trace};
use crate::state::{
ActivityStatus,
BuildFail,
BuildInfo,
BuildStatus,
CompletedBuildInfo,
CompletedTransferInfo,
Derivation,
DerivationId,
FailType,
FailedBuildInfo,
Host,
InputDerivation,
OutputName,
ProgressState,
State,
StorePath,
StorePathId,
StorePathState,
TransferInfo,
current_time,
};
/// Process a nix JSON message and update state
pub fn process_message(state: &mut State, action: Actions) -> bool {
let now = current_time();
let mut changed = false;
// Mark that we've received input
if state.progress_state == ProgressState::JustStarted {
state.progress_state = ProgressState::InputReceived;
changed = true;
}
trace!("Processing action: {:?}", action);
match action {
Actions::Start {
id,
level,
parent,
text,
activity,
fields,
} => {
changed |=
handle_start(state, id, level, parent, text, activity, fields, now);
},
Actions::Stop { id } => {
changed |= handle_stop(state, id, now);
},
Actions::Message { level, msg } => {
changed |= handle_message(state, level, msg);
},
Actions::Result {
id,
activity,
fields,
} => {
changed |= handle_result(state, id, activity as u8, fields, now);
},
}
changed
}
fn handle_start(
state: &mut State,
id: Id,
_level: Verbosity,
parent: Id,
text: String,
activity: Activities,
fields: Vec<serde_json::Value>,
now: f64,
) -> bool {
// Store activity status
let parent_id = if parent == 0 { None } else { Some(parent) };
let activity_u8 = activity as u8;
state.activities.insert(id, ActivityStatus {
activity: activity_u8,
text: text.clone(),
parent: parent_id,
phase: None,
});
let changed = match activity_u8 {
104 | 105 => handle_build_start(state, id, parent_id, &text, &fields, now), /* Builds | Build */
108 => handle_substitute_start(state, id, &text, &fields, now), /* Substitute */
101 => handle_transfer_start(state, id, &text, &fields, now, false), /* FileTransfer */
100 | 103 => handle_transfer_start(state, id, &text, &fields, now, true), /* CopyPath | CopyPaths */
_ => false,
};
// Track parent-child relationships for dependency tree
if changed
&& (activity_u8 == 104 || activity_u8 == 105)
&& parent_id.is_some()
{
let parent_act_id = parent_id.unwrap();
// Find parent and child derivation IDs
let parent_drv_id = find_derivation_by_activity(state, parent_act_id);
let child_drv_id = find_derivation_by_activity(state, id);
if let Some(parent_drv_id) = parent_drv_id {
if let Some(child_drv_id) = child_drv_id {
debug!(
"Establishing parent-child relationship: parent={}, child={}",
parent_drv_id, child_drv_id
);
// Add child as a dependency of parent
if let Some(parent_info) = state.get_derivation_info_mut(parent_drv_id)
{
let input = InputDerivation {
derivation: child_drv_id,
outputs: std::collections::HashSet::new(),
};
if !parent_info
.input_derivations
.iter()
.any(|d| d.derivation == child_drv_id)
{
parent_info.input_derivations.push(input);
debug!("Added child to parent's input_derivations");
}
}
// Mark child as having a parent
if let Some(child_info) = state.get_derivation_info_mut(child_drv_id) {
child_info.derivation_parents.insert(parent_drv_id);
}
// Remove child from forest roots since it has a parent
state.forest_roots.retain(|&id| id != child_drv_id);
}
}
}
changed
}
fn handle_stop(state: &mut State, id: Id, now: f64) -> bool {
let activity = state.activities.get(&id).cloned();
if let Some(activity_status) = activity {
state.activities.remove(&id);
match activity_status.activity {
104 | 105 => handle_build_stop(state, id, now), // Builds | Build
108 => handle_substitute_stop(state, id, now), // Substitute
101 | 100 | 103 => handle_transfer_stop(state, id, now), /* FileTransfer, CopyPath, CopyPaths */
_ => false,
}
} else {
false
}
}
fn handle_message(state: &mut State, level: Verbosity, msg: String) -> bool {
// Store all build logs for display
state.build_logs.push(msg.clone());
// Extract phase from log messages like "Running phase: configurePhase"
if let Some(phase_start) = msg.find("Running phase: ") {
let phase_name = &msg[phase_start + 15..]; // Skip "Running phase: "
let phase = phase_name.trim().to_string();
// Find the active build and update its phase
for activity in state.activities.values_mut() {
if activity.activity == 105 {
// Build activity
activity.phase = Some(phase.clone());
}
}
}
match level {
Verbosity::Error => {
// Track errors
if msg.contains("error:") || msg.contains("failed") {
state.nix_errors.push(msg.clone());
// Try to extract which build failed
if let Some(drv_path) = extract_derivation_from_error(&msg) {
if let Some(drv) = Derivation::parse(&drv_path) {
let drv_id = state.get_or_create_derivation_id(drv);
// Get build info first
let build_info_opt =
state.get_derivation_info(drv_id).and_then(|info| {
if let BuildStatus::Building(build_info) = &info.build_status {
Some(build_info.clone())
} else {
None
}
});
if let Some(build_info) = build_info_opt {
let fail = BuildFail {
at: current_time(),
fail_type: parse_fail_type(&msg),
};
state.update_build_status(drv_id, BuildStatus::Failed {
info: build_info,
fail,
});
}
}
}
return true;
}
false
},
Verbosity::Info | Verbosity::Notice => {
// Track info messages for evaluation progress
if msg.contains("evaluating") || msg.contains("copying") {
// Update evaluation state
if let Some(file_name) = extract_file_name(&msg) {
state.evaluation_state.last_file_name = Some(file_name);
state.evaluation_state.count += 1;
state.evaluation_state.at = current_time();
}
}
true // return true since we stored the log
},
_ => {
true // return true since we stored the log
},
}
}
fn handle_result(
state: &mut State,
id: Id,
activity: u8,
fields: Vec<serde_json::Value>,
_now: f64,
) -> bool {
match activity {
101 | 108 => {
// FileTransfer or Substitute
// Fields contain progress information
// XXX: Format: [bytes_transferred, total_bytes]
if fields.len() >= 2 {
update_transfer_progress(state, id, &fields);
}
false
},
104 => {
// Builds activity type - contains phase information
if !fields.is_empty() {
if let Some(phase_str) = fields[0].as_str() {
// Update the activity's phase field
if let Some(activity) = state.activities.get_mut(&id) {
activity.phase = Some(phase_str.to_string());
return true;
}
}
}
false
},
105 => {
// Build completed, fields contain output path
complete_build(state, id)
},
_ => false,
}
}
fn handle_build_start(
state: &mut State,
id: Id,
parent_id: Option<Id>,
text: &str,
fields: &[serde_json::Value],
now: f64,
) -> bool {
debug!(
"handle_build_start: id={}, text={}, fields={:?}",
id, text, fields
);
// First try to get derivation path from fields
let drv_path = if fields.is_empty() {
extract_derivation_path(text)
} else {
fields[0].as_str().map(std::string::ToString::to_string)
};
if let Some(drv_path) = drv_path {
debug!("Extracted derivation path: {}", drv_path);
if let Some(drv) = Derivation::parse(&drv_path) {
let drv_id = state.get_or_create_derivation_id(drv);
let host = extract_host(text);
let build_info = BuildInfo {
start: now,
host,
estimate: None,
activity_id: Some(id),
};
debug!("Setting derivation {} to Building status", drv_id);
state.update_build_status(drv_id, BuildStatus::Building(build_info));
debug!(
"After update_build_status, state has {} derivations",
state.derivation_infos.len()
);
// Parse .drv file to populate dependency tree
state.populate_derivation_dependencies(drv_id);
debug!(
"After populate_derivation_dependencies, state has {} derivations",
state.derivation_infos.len()
);
// Mark as a forest root only if there is no parent
if parent_id.is_none() && !state.forest_roots.contains(&drv_id) {
state.forest_roots.push(drv_id);
}
// The activity -> derivation mapping is carried by `activity_id` above;
// the build phase will be extracted from log messages
return true;
}
debug!("Failed to parse derivation from path: {}", drv_path);
} else {
debug!(
"No derivation path found - creating placeholder for activity {}",
id
);
// For shell/develop commands, nix doesn't report specific derivation paths
// Create a placeholder derivation to track that builds are happening
use std::path::PathBuf;
let placeholder_name = format!("building-{}", id);
let placeholder_path = format!("/nix/store/placeholder-{}.drv", id);
let placeholder_drv = Derivation {
path: PathBuf::from(placeholder_path),
name: placeholder_name,
};
let drv_id = state.get_or_create_derivation_id(placeholder_drv);
let host = extract_host(text);
let build_info = BuildInfo {
start: now,
host,
estimate: None,
activity_id: Some(id),
};
debug!(
"Setting placeholder derivation {} to Building status",
drv_id
);
state.update_build_status(drv_id, BuildStatus::Building(build_info));
// Mark as forest root if no parent
if parent_id.is_none() && !state.forest_roots.contains(&drv_id) {
state.forest_roots.push(drv_id);
}
return true;
}
false
}
fn handle_build_stop(state: &mut State, id: Id, _now: f64) -> bool {
// Find the derivation associated with this activity
for (drv_id, info) in &state.derivation_infos {
match &info.build_status {
BuildStatus::Building(build_info)
if build_info.activity_id == Some(id) =>
{
// Build was stopped but not marked as completed; it may have been
// cancelled
debug!("Build stopped for derivation {}", drv_id);
return false;
},
_ => {},
}
}
false
}
fn handle_substitute_start(
state: &mut State,
id: Id,
text: &str,
fields: &[serde_json::Value],
now: f64,
) -> bool {
// Extract store path
let path_str = if fields.is_empty() {
extract_store_path(text)
} else {
fields[0].as_str().map(std::string::ToString::to_string)
};
if let Some(path_str) = path_str {
if let Some(path) = StorePath::parse(&path_str) {
let path_id = state.get_or_create_store_path_id(path);
let host = extract_host(text);
let transfer = TransferInfo {
start: now,
host,
activity_id: id,
bytes_transferred: 0,
total_bytes: None,
};
if let Some(path_info) = state.get_store_path_info_mut(path_id) {
path_info
.states
.insert(StorePathState::Downloading(transfer.clone()));
}
state
.full_summary
.running_downloads
.insert(path_id, transfer);
return true;
}
}
false
}
fn handle_substitute_stop(state: &mut State, id: Id, now: f64) -> bool {
// Find the store path associated with this activity
for (path_id, transfer_info) in &state.full_summary.running_downloads.clone()
{
if transfer_info.activity_id == id {
state.full_summary.running_downloads.remove(path_id);
let completed = CompletedTransferInfo {
start: transfer_info.start,
end: now,
host: transfer_info.host.clone(),
total_bytes: transfer_info.bytes_transferred,
};
state
.full_summary
.completed_downloads
.insert(*path_id, completed);
if let Some(path_info) = state.get_store_path_info_mut(*path_id) {
path_info
.states
.remove(&StorePathState::Downloading(transfer_info.clone()));
path_info.states.insert(StorePathState::Downloaded(
CompletedTransferInfo {
start: transfer_info.start,
end: now,
host: transfer_info.host.clone(),
total_bytes: transfer_info.bytes_transferred,
},
));
}
return true;
}
}
false
}
fn handle_transfer_start(
state: &mut State,
id: Id,
text: &str,
fields: &[serde_json::Value],
now: f64,
is_copy: bool,
) -> bool {
let path_str = if fields.is_empty() {
extract_store_path(text)
} else {
fields[0].as_str().map(std::string::ToString::to_string)
};
if let Some(path_str) = path_str {
if let Some(path) = StorePath::parse(&path_str) {
let path_id = state.get_or_create_store_path_id(path);
let host = extract_host(text);
let transfer = TransferInfo {
start: now,
host,
activity_id: id,
bytes_transferred: 0,
total_bytes: None,
};
if is_copy {
state.full_summary.running_uploads.insert(path_id, transfer);
} else {
state
.full_summary
.running_downloads
.insert(path_id, transfer);
}
return true;
}
}
false
}
fn handle_transfer_stop(state: &mut State, id: Id, now: f64) -> bool {
// Check downloads
for (path_id, transfer_info) in &state.full_summary.running_downloads.clone()
{
if transfer_info.activity_id == id {
state.full_summary.running_downloads.remove(path_id);
let completed = CompletedTransferInfo {
start: transfer_info.start,
end: now,
host: transfer_info.host.clone(),
total_bytes: transfer_info.bytes_transferred,
};
state
.full_summary
.completed_downloads
.insert(*path_id, completed);
return true;
}
}
// Check uploads
for (path_id, transfer_info) in &state.full_summary.running_uploads.clone() {
if transfer_info.activity_id == id {
state.full_summary.running_uploads.remove(path_id);
let completed = CompletedTransferInfo {
start: transfer_info.start,
end: now,
host: transfer_info.host.clone(),
total_bytes: transfer_info.bytes_transferred,
};
state
.full_summary
.completed_uploads
.insert(*path_id, completed);
return true;
}
}
false
}
fn update_transfer_progress(
state: &mut State,
id: Id,
fields: &[serde_json::Value],
) {
if fields.len() < 2 {
return;
}
let bytes_transferred = fields[0].as_u64().unwrap_or(0);
let total_bytes = fields[1].as_u64();
// Update running downloads
for transfer_info in state.full_summary.running_downloads.values_mut() {
if transfer_info.activity_id == id {
transfer_info.bytes_transferred = bytes_transferred;
transfer_info.total_bytes = total_bytes;
return;
}
}
// Update running uploads
for transfer_info in state.full_summary.running_uploads.values_mut() {
if transfer_info.activity_id == id {
transfer_info.bytes_transferred = bytes_transferred;
transfer_info.total_bytes = total_bytes;
return;
}
}
}
fn complete_build(state: &mut State, id: Id) -> bool {
// Find the derivation that just completed
for (drv_id, info) in &state.derivation_infos.clone() {
if let BuildStatus::Building(build_info) = &info.build_status {
if build_info.activity_id == Some(id) {
let end = current_time();
state.update_build_status(*drv_id, BuildStatus::Built {
info: build_info.clone(),
end,
});
return true;
}
}
}
false
}
fn extract_derivation_path(text: &str) -> Option<String> {
// Look for .drv paths in the text
if let Some(start) = text.find("/nix/store/") {
if let Some(end) = text[start..].find(".drv") {
return Some(text[start..start + end + 4].to_string());
}
}
None
}
fn extract_store_path(text: &str) -> Option<String> {
// Look for store paths in the text
if let Some(start) = text.find("/nix/store/") {
// Find the end of the path (space or end of string)
let rest = &text[start..];
let end = rest
.find(|c: char| c.is_whitespace() || c == '\'' || c == '"')
.unwrap_or(rest.len());
return Some(rest[..end].to_string());
}
None
}
fn extract_host(text: &str) -> Host {
if text.contains("on ") {
// Format: "building X on hostname"
if let Some(pos) = text.rfind("on ") {
let rest = &text[pos + 3..];
let hostname = rest
.split_whitespace()
.next()
.unwrap_or("localhost")
.trim_matches(|c| c == '\'' || c == '"')
.to_string();
return Host::Remote(hostname);
}
}
Host::Localhost
}
fn extract_derivation_from_error(msg: &str) -> Option<String> {
extract_derivation_path(msg)
}
fn extract_file_name(msg: &str) -> Option<String> {
// Try to extract file name from evaluation messages
if let Some(start) = msg.find('\'') {
if let Some(end) = msg[start + 1..].find('\'') {
return Some(msg[start + 1..start + 1 + end].to_string());
}
}
None
}
fn parse_fail_type(msg: &str) -> FailType {
if msg.contains("timeout") {
FailType::Timeout
} else if msg.contains("hash mismatch") || msg.contains("hash") {
FailType::HashMismatch
} else if msg.contains("dependency failed") {
FailType::DependencyFailed
} else {
FailType::Unknown
}
}
fn find_derivation_by_activity(
state: &State,
activity_id: Id,
) -> Option<DerivationId> {
// Try to find in running builds first
for (drv_id, build_info) in &state.full_summary.running_builds {
if build_info.activity_id == Some(activity_id) {
return Some(*drv_id);
}
}
// Search through all derivations
for (drv_id, info) in &state.derivation_infos {
match &info.build_status {
BuildStatus::Building(build_info)
if build_info.activity_id == Some(activity_id) =>
{
return Some(*drv_id);
},
BuildStatus::Built { info, .. }
if info.activity_id == Some(activity_id) =>
{
return Some(*drv_id);
},
BuildStatus::Failed { info, .. }
if info.activity_id == Some(activity_id) =>
{
return Some(*drv_id);
},
_ => {},
}
}
None
}
/// Maintain state consistency
pub fn maintain_state(state: &mut State, now: f64) {
// Clear touched IDs - they've been processed
if !state.touched_ids.is_empty() {
state.touched_ids.clear();
}
// Update summaries
update_summaries(state, now);
}
fn update_summaries(state: &mut State, _now: f64) {
use tracing::debug;
// Update build summaries
state.full_summary.planned_builds.clear();
state.full_summary.running_builds.clear();
state.full_summary.completed_builds.clear();
state.full_summary.failed_builds.clear();
debug!(
"update_summaries: processing {} derivations",
state.derivation_infos.len()
);
let mut building_count = 0;
let mut planned_count = 0;
for (drv_id, info) in &state.derivation_infos {
debug!(" derivation {} status: {:?}", drv_id, info.build_status);
match &info.build_status {
BuildStatus::Planned => {
// Only count explicitly planned builds, not unknown ones
state.full_summary.planned_builds.insert(*drv_id);
planned_count += 1;
},
BuildStatus::Unknown => {
// Unknown derivations are cached or already built; don't count them
},
BuildStatus::Building(build_info) => {
debug!(" → Adding {} to running_builds", drv_id);
state
.full_summary
.running_builds
.insert(*drv_id, build_info.clone());
building_count += 1;
},
BuildStatus::Built { info, end } => {
state.full_summary.completed_builds.insert(
*drv_id,
CompletedBuildInfo {
start: info.start,
end: *end,
host: info.host.clone(),
},
);
},
BuildStatus::Failed { info, fail } => {
state
.full_summary
.failed_builds
.insert(*drv_id, FailedBuildInfo {
start: info.start,
end: fail.at,
host: info.host.clone(),
fail_type: fail.fail_type.clone(),
});
},
}
}
debug!(
"update_summaries complete: {} running (counted {}), {} planned (counted \
{}), {} completed, {} failed",
state.full_summary.running_builds.len(),
building_count,
state.full_summary.planned_builds.len(),
planned_count,
state.full_summary.completed_builds.len(),
state.full_summary.failed_builds.len()
);
}
fn complete_build_success(state: &mut State, drv_id: DerivationId, now: f64) {
let build_info = state.get_derivation_info(drv_id).and_then(|info| {
if let BuildStatus::Building(build_info) = &info.build_status {
Some(build_info.clone())
} else {
None
}
});
if let Some(build_info) = build_info {
state.update_build_status(drv_id, BuildStatus::Built {
info: build_info,
end: now,
});
}
}
pub fn finish_state(state: &mut State) {
state.progress_state = ProgressState::Finished;
let building: Vec<DerivationId> = state
.derivation_infos
.iter()
.filter_map(|(drv_id, info)| {
if matches!(info.build_status, BuildStatus::Building(_)) {
Some(*drv_id)
} else {
None
}
})
.collect();
for drv_id in building {
complete_build_success(state, drv_id, current_time());
}
let downloading: Vec<StorePathId> = state
.full_summary
.running_downloads
.keys()
.copied()
.collect();
for path_id in downloading {
if let Some(transfer) =
state.full_summary.running_downloads.remove(&path_id)
{
let completed = CompletedTransferInfo {
start: transfer.start,
end: current_time(),
host: transfer.host,
total_bytes: transfer.total_bytes.unwrap_or(0),
};
state
.full_summary
.completed_downloads
.insert(path_id, completed.clone());
if let Some(path_info) = state.get_store_path_info_mut(path_id) {
path_info.states.clear();
path_info
.states
.insert(StorePathState::Downloaded(completed));
}
}
}
let uploading: Vec<StorePathId> =
state.full_summary.running_uploads.keys().copied().collect();
for path_id in uploading {
if let Some(transfer) = state.full_summary.running_uploads.remove(&path_id)
{
let completed = CompletedTransferInfo {
start: transfer.start,
end: current_time(),
host: transfer.host,
total_bytes: transfer.total_bytes.unwrap_or(0),
};
state
.full_summary
.completed_uploads
.insert(path_id, completed.clone());
if let Some(path_info) = state.get_store_path_info_mut(path_id) {
path_info.states.clear();
path_info.states.insert(StorePathState::Uploaded(completed));
}
}
}
}
/// Parse output name string to `OutputName` enum
fn parse_output_name(s: &str) -> Option<OutputName> {
match s {
"out" => Some(OutputName::Out),
"doc" => Some(OutputName::Doc),
"dev" => Some(OutputName::Dev),
"bin" => Some(OutputName::Bin),
"info" => Some(OutputName::Info),
"lib" => Some(OutputName::Lib),
"man" => Some(OutputName::Man),
"dist" => Some(OutputName::Dist),
other => Some(OutputName::Other(other.to_string())),
}
}

View file

@ -0,0 +1,75 @@
use std::process::Command;
fn run_rom(args: &[&str]) -> (String, String, i32) {
let output = Command::new(env!("CARGO_BIN_EXE_rom"))
.args(args)
.output()
.expect("failed to execute rom binary");
let stdout = String::from_utf8_lossy(&output.stdout).to_string();
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
let status = output.status.code().unwrap_or(-1);
(stdout, stderr, status)
}
#[test]
fn test_parse_args_with_separator_passthrough() {
// This test verifies the splitting logic for passthrough args
let args = ["--", "--rebuild", "--refresh"];
let args: Vec<String> = args.iter().map(|s| s.to_string()).collect();
let (package_and_rom_args, nix_flags) =
rom::cli::parse_args_with_separator(&args);
assert!(
package_and_rom_args.is_empty(),
"package_and_rom_args should be empty"
);
assert_eq!(
nix_flags,
vec!["--rebuild", "--refresh"],
"nix_flags should contain passthrough args"
);
}
#[test]
fn test_missing_expression_errors() {
// No expression, no passthrough args
let (_out, err, status) = run_rom(&["build", "--tree"]);
assert_ne!(status, 0, "should fail with missing expression");
assert!(
err.contains("No package or flake specified for nix build"),
"should print missing expression error, got: {}",
err
);
}
#[test]
fn test_passthrough_args_without_expression_errors() {
// No expression, only passthrough args after --
let (_out, err, status) =
run_rom(&["build", "--tree", "--", "--rebuild", "--refresh"]);
assert_ne!(
status, 0,
"should fail with missing expression even with passthrough args"
);
assert!(
err.contains("No package or flake specified for nix build"),
"should print missing expression error, got: {}",
err
);
}
#[test]
fn test_valid_expression_with_passthrough_args_succeeds() {
// With an expression and passthrough args, it should not error about a
// missing expression. Use a trivial expression that should always exist
// (like nixpkgs#hello)
let (_out, err, status) =
run_rom(&["build", "--tree", "nixpkgs#hello", "--", "--rebuild"]);
// Should not error about missing expression
assert!(
!err.contains("No package or flake specified for nix build"),
"should not print missing expression error, got: {}",
err
);
// Status may be 0 or nonzero depending on nix, but should not be our error
}