// nff/src/main.rs

mod ast;
mod cst;
mod diagnostic;
mod lexer;
mod parser;
mod syntax;
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use glob::glob;
use std::fs;
use std::io::{self, Write};
use std::path::Path;
use thiserror::Error;
use crate::cst::CstBuilder;
use crate::diagnostic::{DiagnosticAnalyzer, DiagnosticConfig};
use crate::lexer::{NftablesLexer, Token, TokenKind};
use crate::parser::Parser as NftablesParser;
use crate::syntax::{FormatConfig, IndentStyle, NftablesFormatter};
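
/// Errors produced while formatting or syntax-checking a single file.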
#[derive(Error, Debug)]
enum FormatterError {
    #[error("File not found: {0}")]
    FileNotFound(String),
    #[error("Invalid file: {0}")]
    InvalidFile(String),
    #[error("Parse error: {0}")]
    ParseError(String),
    #[error("Syntax error at line {line}, column {column}: {message}")]
    SyntaxError {
        line: usize,
        column: usize,
        message: String,
        suggestion: Option<String>,
    },
    #[error("IO error: {0}")]
    Io(#[from] io::Error),
}
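
/// Errors surfaced by the `lint` subcommand.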
#[derive(Error, Debug)]
enum LintError {
    #[error("Lint errors found in {file_count} file(s)")]
    DiagnosticErrors { file_count: usize },
    #[error("File discovery error: {0}")]
    FileDiscovery(#[from] anyhow::Error),
}
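
/// Top-level command-line interface.
///
/// Example invocations (using only the flags defined below):
///
/// ```text
/// nff format --indent spaces --spaces 4 /etc/nftables.conf
/// nff lint --json --modules lexical,syntax firewall.nft
/// nff parse --tree --verbose firewall.nft
/// ```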
#[derive(Parser, Debug, Clone)]
#[command(
    name = "nff",
    version = "0.1.0",
    about = "A high-quality nftables formatter and linter",
    long_about = "nff (nftables formatter) is a tool for formatting and linting nftables configuration files with proper indentation and structure."
)]
struct Args {
    #[command(subcommand)]
    command: Commands,
    /// Show debug information (tokens, AST, etc.)
    #[arg(long, global = true)]
    debug: bool,
}

#[derive(Subcommand, Debug, Clone)]
enum Commands {
    /// Format nftables configuration files
    Format {
        /// nftables config file (e.g. /etc/nftables.conf). If not provided, formats all .nft files in the current directory
        #[arg(value_name = "FILE")]
        file: Option<String>,
        /// Type of indentation
        #[arg(short, long, default_value = "tabs", value_parser = clap::value_parser!(IndentStyle))]
        indent: IndentStyle,
        /// Print formatted output to stdout instead of modifying files in place
        #[arg(long)]
        stdout: bool,
        /// Optimize output by removing excessive empty lines
        #[arg(long)]
        optimize: bool,
        /// Number of spaces per indentation level (only used with --indent=spaces)
        #[arg(long, default_value = "2", value_name = "N")]
        spaces: usize,
        /// Check syntax only, don't format
        #[arg(long)]
        check: bool,
    },
    /// Lint nftables configuration files and show diagnostics
    Lint {
        /// nftables config file (e.g. /etc/nftables.conf). If not provided, lints all .nft files in the current directory
        #[arg(value_name = "FILE")]
        file: Option<String>,
        /// Output diagnostics in JSON format (useful for tooling integration)
        #[arg(long)]
        json: bool,
        /// Include style warnings in diagnostics
        #[arg(long, action = clap::ArgAction::Set, default_value = "true")]
        style_warnings: bool,
        /// Include best practice recommendations in diagnostics
        #[arg(long, action = clap::ArgAction::Set, default_value = "true")]
        best_practices: bool,
        /// Include performance hints in diagnostics
        #[arg(long, action = clap::ArgAction::Set, default_value = "true")]
        performance_hints: bool,
        /// Include security warnings in diagnostics
        #[arg(long, action = clap::ArgAction::Set, default_value = "true")]
        security_warnings: bool,
        /// Diagnostic modules to run (comma-separated: lexical,syntax,style,semantic)
        #[arg(long, value_delimiter = ',')]
        modules: Option<Vec<String>>,
    },
    /// Parse and display a file in CST format for debugging
    Parse {
        /// nftables config file to parse
        #[arg(value_name = "FILE")]
        file: String,
        /// Show tree structure with indentation
        #[arg(long)]
        tree: bool,
        /// Show detailed node information
        #[arg(long)]
        verbose: bool,
    },
}
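
/// Discover nftables configuration files when no explicit path is given.
///
/// Searches the current directory for `*.nft`/`*.nftables` files plus the
/// conventional `/etc` locations, then sorts and deduplicates the results so
/// files are processed in a deterministic order.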
fn discover_nftables_files() -> Result<Vec<String>> {
    let mut files = Vec::new();

    // Common nftables file patterns
    let patterns = [
        "*.nft",
        "*.nftables",
        "/etc/nftables.conf",
        "/etc/nftables/*.nft",
    ];

    for pattern in &patterns {
        match glob(pattern) {
            Ok(paths) => {
                for entry in paths {
                    match entry {
                        Ok(path) => {
                            if path.is_file() {
                                if let Some(path_str) = path.to_str() {
                                    files.push(path_str.to_string());
                                }
                            }
                        }
                        Err(e) => eprintln!("Warning: Error reading path: {}", e),
                    }
                }
            }
            Err(e) => {
                // Only warn for non-current directory patterns
                if !pattern.starts_with("*.") {
                    eprintln!("Warning: Failed to search pattern {}: {}", pattern, e);
                }
            }
        }
    }

    if files.is_empty() {
        return Err(anyhow::anyhow!(
            "No nftables files found. Please specify a file explicitly or ensure .nft/.nftables files exist in the current directory."
        ));
    }

    // Remove duplicates and sort
    files.sort();
    files.dedup();
    Ok(files)
}
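
/// Format (or check) every requested file, falling back to discovery when no
/// file is given. With multiple files, per-file failures are reported and
/// processing continues; with a single file, the first error aborts.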
fn process_format_command(
    file: Option<String>,
    indent: IndentStyle,
    stdout: bool,
    optimize: bool,
    spaces: usize,
    check: bool,
    debug: bool,
) -> Result<()> {
    let files = match file {
        Some(f) => vec![f],
        None => discover_nftables_files()?,
    };
    let is_multiple_files = files.len() > 1;

    for file_path in files {
        if let Err(e) = process_single_file_format(
            &file_path,
            indent,
            stdout,
            optimize,
            spaces,
            check,
            debug,
            is_multiple_files,
        ) {
            eprintln!("Error processing {}: {}", file_path, e);
            if !is_multiple_files {
                return Err(e);
            }
        }
    }
    Ok(())
}
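
/// Run the full pipeline for one file: read, tokenize, parse, then either
/// report a successful syntax check (`--check`), print the formatted output
/// to stdout, or rewrite the file in place.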
fn process_single_file_format(
    file: &str,
    indent: IndentStyle,
    stdout: bool,
    optimize: bool,
    spaces: usize,
    check: bool,
    debug: bool,
    is_multiple_files: bool,
) -> Result<()> {
    let path = Path::new(&file);
    if !path.exists() {
        return Err(FormatterError::FileNotFound(file.to_string()).into());
    }
    if !path.is_file() {
        return Err(FormatterError::InvalidFile("Not a regular file".to_string()).into());
    }

    // Read file contents
    let source =
        fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;

    // Tokenize
    let mut lexer = NftablesLexer::new(&source);
    let tokens = if debug {
        // Use error-recovery tokenization for debug mode
        lexer.tokenize_with_errors()
    } else {
        lexer
            .tokenize()
            .map_err(|e| FormatterError::ParseError(e.to_string()))?
    };

    if debug {
        eprintln!("=== TOKENS ===");
        for (i, token) in tokens.iter().enumerate() {
            eprintln!(
                "{:3}: {:?} @ {:?} = '{}'",
                i, token.kind, token.range, token.text
            );
        }
        eprintln!();

        // Build and validate CST
        eprintln!("=== CST ===");
        let cst_tree = CstBuilder::build_tree(&tokens);
        match CstBuilder::validate_tree(&cst_tree) {
            Ok(()) => eprintln!("CST validation passed"),
            Err(e) => eprintln!("CST validation error: {}", e),
        }

        // Also test parse_to_cst
        match CstBuilder::parse_to_cst(&tokens) {
            Ok(_) => eprintln!("CST parsing successful"),
            Err(e) => eprintln!("CST parsing error: {}", e),
        }
        eprintln!();
    }

    // Parse
    let ruleset = if debug {
        // Use error-recovery parsing for debug mode
        let (parsed_ruleset, errors) = NftablesParser::parse_with_errors(&source);
        if !errors.is_empty() {
            eprintln!("=== PARSE ERRORS ===");
            for error in &errors {
                eprintln!("Parse error: {}", error);
            }
            eprintln!();
        }
        parsed_ruleset.unwrap_or_else(crate::ast::Ruleset::new)
    } else {
        let mut parser = NftablesParser::new(tokens.clone());
        parser
            .parse()
            .map_err(|e| convert_parse_error_to_formatter_error(&e, &source, &tokens))?
    };

    if debug {
        eprintln!("=== AST ===");
        eprintln!("{:#?}", ruleset);
        eprintln!();
    }

    if check {
        println!("Syntax check passed for: {}", file);
        return Ok(());
    }

    // Format
    let config = FormatConfig {
        indent_style: indent,
        spaces_per_level: spaces,
        optimize,
        max_empty_lines: if optimize { 1 } else { 2 },
    };
    let formatter = NftablesFormatter::new(config);
    let formatted_output = formatter.format_ruleset(&ruleset);

    // Write output
    if stdout {
        if is_multiple_files {
            println!("=== {} ===", file);
        }
        io::stdout()
            .write_all(formatted_output.as_bytes())
            .with_context(|| "Failed to write to stdout")?;
    } else {
        // Format in place
        fs::write(file, &formatted_output)
            .with_context(|| format!("Failed to write formatted content back to: {}", file))?;
        if is_multiple_files || debug {
            println!("Formatted: {}", file);
        }
    }
    Ok(())
}
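
/// Lint every requested file, counting per-file failures so the final exit
/// status reflects how many files had errors.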
fn process_lint_command(
    file: Option<String>,
    json: bool,
    style_warnings: bool,
    best_practices: bool,
    performance_hints: bool,
    security_warnings: bool,
    modules: Option<Vec<String>>,
    debug: bool,
) -> Result<()> {
    let files = match file {
        Some(f) => vec![f],
        None => discover_nftables_files()?,
    };
    let is_multiple_files = files.len() > 1;
    let mut error_file_count = 0;

    for file_path in files {
        if let Err(e) = process_single_file_lint(
            &file_path,
            json,
            style_warnings,
            best_practices,
            performance_hints,
            security_warnings,
            modules.as_ref(),
            debug,
            is_multiple_files,
        ) {
            eprintln!("Error processing {}: {}", file_path, e);
            error_file_count += 1;
            if !is_multiple_files {
                return Err(e);
            }
        }
    }

    if error_file_count > 0 {
        return Err(LintError::DiagnosticErrors {
            file_count: error_file_count,
        }
        .into());
    }
    Ok(())
}
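
/// Lint one file: run the configured diagnostic analyzer over the source and
/// emit the results as JSON or human-readable text. Returns an error when the
/// diagnostics contain errors so callers can set a failing exit status.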
fn process_single_file_lint(
    file: &str,
    json: bool,
    style_warnings: bool,
    best_practices: bool,
    performance_hints: bool,
    security_warnings: bool,
    modules: Option<&Vec<String>>,
    debug: bool,
    is_multiple_files: bool,
) -> Result<()> {
    let path = Path::new(&file);
    if !path.exists() {
        return Err(FormatterError::FileNotFound(file.to_string()).into());
    }
    if !path.is_file() {
        return Err(FormatterError::InvalidFile("Not a regular file".to_string()).into());
    }

    // Read file contents
    let source =
        fs::read_to_string(file).with_context(|| format!("Failed to read file: {}", file))?;

    if debug {
        // Tokenize for debug output
        let mut lexer = NftablesLexer::new(&source);
        let tokens = lexer.tokenize_with_errors();
        eprintln!("=== TOKENS ===");
        for (i, token) in tokens.iter().enumerate() {
            eprintln!(
                "{:3}: {:?} @ {:?} = '{}'",
                i, token.kind, token.range, token.text
            );
        }
        eprintln!();

        // Build and validate CST
        eprintln!("=== CST ===");
        let cst_tree = CstBuilder::build_tree(&tokens);
        match CstBuilder::validate_tree(&cst_tree) {
            Ok(()) => eprintln!("CST validation passed"),
            Err(e) => eprintln!("CST validation error: {}", e),
        }
        eprintln!();
    }

    // Run diagnostics
    let diagnostic_config = DiagnosticConfig {
        enable_style_warnings: style_warnings,
        enable_best_practices: best_practices,
        enable_performance_hints: performance_hints,
        enable_security_warnings: security_warnings,
        max_line_length: 120,
        max_empty_lines: 2,
        preferred_indent: None, // Don't enforce indent style in lint mode
    };
    let analyzer = DiagnosticAnalyzer::new(diagnostic_config);
    let diagnostics = if let Some(modules) = &modules {
        let module_names: Vec<&str> = modules.iter().map(|s| s.as_str()).collect();
        analyzer.analyze_with_modules(&source, file, &module_names)
    } else {
        analyzer.analyze(&source, file)
    };

    if json {
        // Output JSON format for tooling integration
        match diagnostics.to_json() {
            Ok(json) => println!("{}", json),
            Err(e) => {
                // Even JSON serialization errors should be reported as JSON when
                // --json is used; escape the message so the output stays valid.
                let escaped = e.to_string().replace('\\', "\\\\").replace('"', "\\\"");
                let error_json = format!(r#"{{"error": "JSON serialization failed: {}"}}"#, escaped);
                println!("{}", error_json);
            }
        }
    } else {
        // Output human-readable format
        if is_multiple_files {
            println!("=== {} ===", file);
        }
        println!("{}", diagnostics.to_human_readable());
    }

    // Return an error if the diagnostics contain errors
    if diagnostics.has_errors() {
        return Err(anyhow::anyhow!("Diagnostics found errors in file"));
    }
    Ok(())
}

/// Convert parser errors to formatter errors with proper location information
fn convert_parse_error_to_formatter_error(
    error: &crate::parser::ParseError,
    source: &str,
    tokens: &[Token],
) -> FormatterError {
    use crate::parser::ParseError;

    match error {
        ParseError::UnexpectedToken {
            line,
            column,
            expected,
            found,
        } => FormatterError::SyntaxError {
            line: *line,
            column: *column,
            message: format!("Expected {}, found '{}'", expected, found),
            suggestion: None,
        },
        ParseError::MissingToken { expected } => {
            let (line, column) = if let Some(last_token) = tokens.last() {
                position_from_range(&last_token.range, source)
            } else {
                (1, 1)
            };
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Missing token: expected {}", expected),
                suggestion: None,
            }
        }
        ParseError::InvalidExpression { message } => {
            let (line, column) = find_current_parse_position(tokens, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Invalid expression: {}", message),
                suggestion: None,
            }
        }
        ParseError::InvalidStatement { message } => {
            let (line, column) = find_current_parse_position(tokens, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Invalid statement: {}", message),
                suggestion: None,
            }
        }
        ParseError::SemanticError { message } => {
            let (line, column) = find_current_parse_position(tokens, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Semantic error: {}", message),
                suggestion: None,
            }
        }
        ParseError::LexError(lex_error) => {
            // Convert lexical errors to formatter errors with location
            convert_lex_error_to_formatter_error(lex_error, source)
        }
        ParseError::AnyhowError(anyhow_error) => {
            // For anyhow errors, try to extract location from error message and context
            let error_msg = anyhow_error.to_string();
            let (line, column) = find_error_location_from_context(&error_msg, tokens, source);
            let suggestion = generate_suggestion_for_error(&error_msg);
            FormatterError::SyntaxError {
                line,
                column,
                message: error_msg,
                suggestion,
            }
        }
    }
}

/// Find the current parsing position from the token stream
fn find_current_parse_position(tokens: &[Token], source: &str) -> (usize, usize) {
    // Look for the last token that is not a newline or comment
    for token in tokens.iter().rev() {
        match token.kind {
            TokenKind::Newline | TokenKind::CommentLine(_) => continue,
            _ => return position_from_range(&token.range, source),
        }
    }
    (1, 1) // fallback
}

/// Convert lexical errors to formatter errors
fn convert_lex_error_to_formatter_error(
    lex_error: &crate::lexer::LexError,
    source: &str,
) -> FormatterError {
    use crate::lexer::LexError;

    match lex_error {
        LexError::InvalidToken { position, text } => {
            let (line, column) = offset_to_line_column(*position, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Invalid token: '{}'", text),
                suggestion: None,
            }
        }
        LexError::UnterminatedString { position } => {
            let (line, column) = offset_to_line_column(*position, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: "Unterminated string literal".to_string(),
                suggestion: Some("Add closing quote".to_string()),
            }
        }
        LexError::InvalidNumber { position, text } => {
            let (line, column) = offset_to_line_column(*position, source);
            FormatterError::SyntaxError {
                line,
                column,
                message: format!("Invalid number: '{}'", text),
                suggestion: Some("Check number format".to_string()),
            }
        }
    }
}

/// Convert byte offset to line/column position
fn offset_to_line_column(offset: usize, source: &str) -> (usize, usize) {
    let mut line = 1;
    let mut column = 1;
    for (i, ch) in source.char_indices() {
        if i >= offset {
            break;
        }
        if ch == '\n' {
            line += 1;
            column = 1;
        } else {
            column += 1;
        }
    }
    (line, column)
}

/// Find error location from context clues in the error message
fn find_error_location_from_context(
    error_msg: &str,
    tokens: &[Token],
    source: &str,
) -> (usize, usize) {
    // Look for context clues in the error message
    if error_msg.contains("Expected string or identifier, got:") {
        // Find the problematic token mentioned in the error
        if let Some(bad_token_text) = extract_token_from_error_message(error_msg) {
            // Find this token in the token stream
            for token in tokens {
                if token.text == bad_token_text {
                    return position_from_range(&token.range, source);
                }
            }
        }
    }

    // Fall back to the last meaningful token
    find_current_parse_position(tokens, source)
}

/// Extract the problematic token from an error message
fn extract_token_from_error_message(error_msg: &str) -> Option<String> {
    // Parse messages like "Expected string or identifier, got: {"
    error_msg
        .split("got: ")
        .nth(1)
        .map(|got_part| got_part.trim().to_string())
}

/// Generate helpful suggestions based on the error message
fn generate_suggestion_for_error(error_msg: &str) -> Option<String> {
    if error_msg.contains("Expected string or identifier") {
        Some(
            "Check if you're missing quotes around a string value or have an unexpected character"
                .to_string(),
        )
    } else if error_msg.contains("Expected") && error_msg.contains("got:") {
        Some("Check syntax and ensure proper nftables structure".to_string())
    } else {
        None
    }
}

/// Convert TextRange to a 1-based line/column position
fn position_from_range(range: &text_size::TextRange, source: &str) -> (usize, usize) {
    let start_offset: usize = range.start().into();
    let mut current_offset = 0;
    // Split on '\n' (rather than `lines()`, which also strips '\r') so byte
    // offsets stay exact even for files with CRLF line endings.
    for (line_idx, line) in source.split('\n').enumerate() {
        let line_end = current_offset + line.len();
        if start_offset <= line_end {
            let column = start_offset - current_offset;
            return (line_idx + 1, column + 1); // 1-based indexing
        }
        current_offset = line_end + 1; // +1 for the '\n'
    }
    (1, 1) // fallback
}
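
/// Parse one file and print its CST, optionally as an indented tree with
/// verbose node details. Validation problems are reported as warnings but do
/// not abort the command.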
fn process_parse_command(file: String, tree: bool, verbose: bool, debug: bool) -> Result<()> {
    let source =
        fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;

    // Tokenize
    let mut lexer = NftablesLexer::new(&source);
    let tokens = lexer
        .tokenize()
        .map_err(|e| FormatterError::ParseError(format!("Tokenization failed: {}", e)))?;

    if debug {
        eprintln!("=== TOKENS ===");
        for (i, token) in tokens.iter().enumerate() {
            eprintln!(
                "{:3}: {:?} @ {:?} = '{}'",
                i, token.kind, token.range, token.text
            );
        }
        eprintln!();
    }

    // Build CST
    let cst_tree = CstBuilder::build_tree(&tokens);

    // Validate CST
    match CstBuilder::validate_tree(&cst_tree) {
        Ok(()) => {
            if debug {
                eprintln!("CST validation passed");
                eprintln!();
            }
        }
        Err(e) => {
            eprintln!("Warning: CST validation error: {}", e);
            eprintln!();
        }
    }

    // Display CST
    let cst_display = CstBuilder::display_tree(&cst_tree, tree, verbose);
    println!("{}", cst_display);
    Ok(())
}
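
/// Entry point: parse the CLI, dispatch to the subcommand handlers, and turn
/// any error into a non-zero exit status (JSON-formatted when `lint --json`
/// was requested).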
fn main() -> Result<()> {
    let args = Args::parse();

    let result = match &args.command {
        Commands::Format {
            file,
            indent,
            stdout,
            optimize,
            spaces,
            check,
        } => process_format_command(
            file.clone(),
            *indent,
            *stdout,
            *optimize,
            *spaces,
            *check,
            args.debug,
        ),
        Commands::Lint {
            file,
            json,
            style_warnings,
            best_practices,
            performance_hints,
            security_warnings,
            modules,
        } => process_lint_command(
            file.clone(),
            *json,
            *style_warnings,
            *best_practices,
            *performance_hints,
            *security_warnings,
            modules.clone(),
            args.debug,
        ),
        Commands::Parse {
            file,
            tree,
            verbose,
        } => process_parse_command(file.clone(), *tree, *verbose, args.debug),
    };

    if let Err(e) = result {
        // Check if we're in lint mode with JSON output for error formatting
        let use_json = match &args.command {
            Commands::Lint { json, .. } => *json,
            _ => false,
        };
        if use_json {
            // Output the error in JSON form when --json was used in lint mode;
            // escape the message so the output stays valid JSON.
            let escaped = e.to_string().replace('\\', "\\\\").replace('"', "\\\"");
            let error_json = format!(r#"{{"error": "{}"}}"#, escaped);
            println!("{}", error_json);
        } else {
            eprintln!("Error: {}", e);
            // Print the error chain
            let mut current = e.source();
            while let Some(cause) = current {
                eprintln!(" Caused by: {}", cause);
                current = cause.source();
            }
        }
        std::process::exit(1);
    }
    Ok(())
}
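
// A minimal sanity-check sketch for the pure helpers above. These tests only
// exercise functions defined in this file and make no assumptions about the
// lexer, parser, or diagnostic modules.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn offset_maps_to_line_and_column() {
        let source = "table inet filter {\n\tchain input {\n}";
        // Offset 0 is line 1, column 1.
        assert_eq!(offset_to_line_column(0, source), (1, 1));
        // The first byte after the first '\n' starts line 2.
        assert_eq!(offset_to_line_column(20, source), (2, 1));
    }

    #[test]
    fn extracts_offending_token_from_message() {
        let msg = "Expected string or identifier, got: {";
        assert_eq!(
            extract_token_from_error_message(msg),
            Some("{".to_string())
        );
        assert_eq!(extract_token_from_error_message("no marker here"), None);
    }
}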