nff: add parsing and CST inspection commands

This commit is contained in:
raf 2025-06-02 15:24:18 +03:00
commit 13245c08fe
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
3 changed files with 161 additions and 5 deletions

View file

@ -120,6 +120,20 @@ enum Commands {
#[arg(long, value_delimiter = ',')]
modules: Option<Vec<String>>,
},
/// Parse and display file in CST format for debugging
Parse {
/// nftables config file to parse
#[arg(value_name = "FILE")]
file: String,
/// Show tree structure with indentation
#[arg(long)]
tree: bool,
/// Show detailed node information
#[arg(long)]
verbose: bool,
},
}
fn discover_nftables_files() -> Result<Vec<String>> {
@ -228,7 +242,7 @@ fn process_single_file_format(
// Read file contents
let source =
fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;
// Tokenize
let mut lexer = NftablesLexer::new(&source);
@ -278,7 +292,7 @@ fn process_single_file_format(
}
eprintln!();
}
parsed_ruleset.unwrap_or_else(|| crate::ast::Ruleset::new())
parsed_ruleset.unwrap_or_else(crate::ast::Ruleset::new)
} else {
let mut parser = NftablesParser::new(tokens.clone());
parser
@ -399,7 +413,7 @@ fn process_single_file_lint(
// Read file contents
let source =
fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;
if debug {
// Tokenize for debug output
@ -440,9 +454,9 @@ fn process_single_file_lint(
let diagnostics = if let Some(modules) = &modules {
let module_names: Vec<&str> = modules.iter().map(|s| s.as_str()).collect();
analyzer.analyze_with_modules(&source, &file, &module_names)
analyzer.analyze_with_modules(&source, file, &module_names)
} else {
analyzer.analyze(&source, &file)
analyzer.analyze(&source, file)
};
if json {
@ -688,6 +702,51 @@ fn position_from_range(range: &text_size::TextRange, source: &str) -> (usize, us
(1, 1) // fallback
}
/// Handle the `parse` subcommand: read an nftables config file, tokenize it,
/// build a CST, validate it, and print a textual rendering of the tree.
///
/// * `file`    - path of the nftables config file to parse
/// * `tree`    - when true, render the CST with indented tree structure
/// * `verbose` - when true, include detailed per-node information
/// * `debug`   - when true, dump the raw token stream to stderr first
///
/// Returns `Err` if the file cannot be read or tokenization fails;
/// CST validation problems are reported as warnings only, not errors.
fn process_parse_command(file: String, tree: bool, verbose: bool, debug: bool) -> Result<()> {
let source =
fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
// Tokenize
let mut lexer = NftablesLexer::new(&source);
let tokens = lexer
.tokenize()
.map_err(|e| FormatterError::ParseError(format!("Tokenization failed: {}", e)))?;
// Debug dump goes to stderr so it can be separated from the CST output on stdout.
if debug {
eprintln!("=== TOKENS ===");
for (i, token) in tokens.iter().enumerate() {
eprintln!(
"{:3}: {:?} @ {:?} = '{}'",
i, token.kind, token.range, token.text
);
}
eprintln!();
}
// Build CST
let cst_tree = CstBuilder::build_tree(&tokens);
// Validate CST
// NOTE(review): validation failure is deliberately non-fatal — we still display
// the tree below so the user can inspect what was built. Confirm this is intended.
match CstBuilder::validate_tree(&cst_tree) {
Ok(()) => {
if debug {
eprintln!("CST validation passed");
eprintln!();
}
}
Err(e) => {
eprintln!("Warning: CST validation error: {}", e);
eprintln!();
}
}
// Display CST
// Rendering is delegated to CstBuilder::display_tree; `tree`/`verbose` toggle
// the output style (see the corresponding CLI flags on the Parse subcommand).
let cst_display = CstBuilder::display_tree(&cst_tree, tree, verbose);
println!("{}", cst_display);
Ok(())
}
fn main() -> Result<()> {
let args = Args::parse();
@ -726,6 +785,11 @@ fn main() -> Result<()> {
modules.clone(),
args.debug,
),
Commands::Parse {
file,
tree,
verbose,
} => process_parse_command(file.clone(), *tree, *verbose, args.debug),
};
if let Err(e) = result {