nff: add parsing and CST inspection commands

This commit is contained in:
raf 2025-06-02 15:24:18 +03:00
commit 13245c08fe
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
3 changed files with 161 additions and 5 deletions

View file

@ -94,6 +94,25 @@ nff lint config.nft --style-warnings=false --best-practices=false
nff lint config.nft --debug
```
### Parsing and CST Inspection
```bash
# Parse and display CST structure for debugging
nff parse /etc/nftables.conf
# Show tree structure with indentation
nff parse config.nft --tree
# Show detailed node information
nff parse config.nft --verbose
# Combined tree and verbose output
nff parse config.nft --tree --verbose
# Debug output with tokens and CST validation
nff parse config.nft --debug
```
## Architecture
### Processing Pipeline

View file

@ -381,6 +381,79 @@ impl CstBuilder {
Self::validate_tree(&tree)?;
Ok(tree)
}
/// Render the CST rooted at `node` as a human-readable string for debugging.
///
/// When `tree_format` is true, nesting is shown via indentation and a
/// `├─` branch marker; `verbose` appends raw kind/width details per entry.
pub fn display_tree(node: &GreenNode, tree_format: bool, verbose: bool) -> String {
    let mut rendered = String::new();
    Self::display_node_recursive(node, 0, tree_format, verbose, &mut rendered);
    rendered
}
/// Append one line for `node` to `output`, then recurse into its children.
/// Tokens are printed inline (they have no children of their own).
fn display_node_recursive(
    node: &GreenNode,
    indent_level: usize,
    tree_format: bool,
    verbose: bool,
    output: &mut String,
) {
    let kind = SyntaxKind::from_raw(node.kind());

    // Header line for this node: indented branch marker in tree mode,
    // bare kind otherwise.
    if tree_format {
        output.push_str(&format!("{}├─ {}", " ".repeat(indent_level), kind));
    } else {
        output.push_str(&format!("{}", kind));
    }
    if verbose {
        output.push_str(&format!(
            " (kind: {:?}, width: {:?})",
            node.kind(),
            node.text_len()
        ));
    }
    output.push('\n');

    // Walk children: recurse on nodes, print tokens directly.
    for child in node.children() {
        match child {
            NodeOrToken::Node(inner) => {
                Self::display_node_recursive(
                    inner,
                    indent_level + 1,
                    tree_format,
                    verbose,
                    output,
                );
            }
            NodeOrToken::Token(tok) => {
                let tok_kind = SyntaxKind::from_raw(tok.kind());
                if tree_format {
                    output.push_str(&format!(
                        "{}├─ {}",
                        " ".repeat(indent_level + 1),
                        tok_kind
                    ));
                } else {
                    output.push_str(&format!("{}", tok_kind));
                }
                if verbose {
                    output.push_str(&format!(" (width: {:?})", tok.text_len()));
                }
                output.push('\n');
            }
        }
    }
}
}
/// Internal tree builder that constructs CST according to nftables grammar

View file

@ -120,6 +120,20 @@ enum Commands {
#[arg(long, value_delimiter = ',')]
modules: Option<Vec<String>>,
},
/// Parse and display file in CST format for debugging
Parse {
/// nftables config file to parse
#[arg(value_name = "FILE")]
file: String,
/// Show tree structure with indentation
#[arg(long)]
tree: bool,
/// Show detailed node information
#[arg(long)]
verbose: bool,
},
}
fn discover_nftables_files() -> Result<Vec<String>> {
@ -228,7 +242,7 @@ fn process_single_file_format(
// Read file contents
let source =
fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;
// Tokenize
let mut lexer = NftablesLexer::new(&source);
@ -278,7 +292,7 @@ fn process_single_file_format(
}
eprintln!();
}
parsed_ruleset.unwrap_or_else(|| crate::ast::Ruleset::new())
parsed_ruleset.unwrap_or_else(crate::ast::Ruleset::new)
} else {
let mut parser = NftablesParser::new(tokens.clone());
parser
@ -399,7 +413,7 @@ fn process_single_file_lint(
// Read file contents
let source =
fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;
if debug {
// Tokenize for debug output
@ -440,9 +454,9 @@ fn process_single_file_lint(
let diagnostics = if let Some(modules) = &modules {
let module_names: Vec<&str> = modules.iter().map(|s| s.as_str()).collect();
analyzer.analyze_with_modules(&source, &file, &module_names)
analyzer.analyze_with_modules(&source, file, &module_names)
} else {
analyzer.analyze(&source, &file)
analyzer.analyze(&source, file)
};
if json {
@ -688,6 +702,51 @@ fn position_from_range(range: &text_size::TextRange, source: &str) -> (usize, us
(1, 1) // fallback
}
/// Handle the `parse` subcommand: tokenize `file`, build its CST,
/// validate it, and print the rendered tree to stdout.
///
/// `tree` enables indented tree output, `verbose` adds per-node detail,
/// and `debug` additionally dumps the token stream and validation
/// status to stderr.
fn process_parse_command(file: String, tree: bool, verbose: bool, debug: bool) -> Result<()> {
    let source =
        fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;

    // Lex the source into a token stream; lexing failure is fatal.
    let mut lexer = NftablesLexer::new(&source);
    let tokens = lexer
        .tokenize()
        .map_err(|e| FormatterError::ParseError(format!("Tokenization failed: {}", e)))?;

    if debug {
        eprintln!("=== TOKENS ===");
        for (idx, tok) in tokens.iter().enumerate() {
            eprintln!(
                "{:3}: {:?} @ {:?} = '{}'",
                idx, tok.kind, tok.range, tok.text
            );
        }
        eprintln!();
    }

    // Build the concrete syntax tree from the tokens.
    let cst_tree = CstBuilder::build_tree(&tokens);

    // A validation failure is non-fatal: warn and still display the tree.
    if let Err(e) = CstBuilder::validate_tree(&cst_tree) {
        eprintln!("Warning: CST validation error: {}", e);
        eprintln!();
    } else if debug {
        eprintln!("CST validation passed");
        eprintln!();
    }

    println!("{}", CstBuilder::display_tree(&cst_tree, tree, verbose));
    Ok(())
}
fn main() -> Result<()> {
let args = Args::parse();
@ -726,6 +785,11 @@ fn main() -> Result<()> {
modules.clone(),
args.debug,
),
Commands::Parse {
file,
tree,
verbose,
} => process_parse_command(file.clone(), *tree, *verbose, args.debug),
};
if let Err(e) = result {