nff: separate format and lint commands
parent 9791296634
commit 6362ade5bd

3 changed files with 376 additions and 135 deletions
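The change replaces the single flag-driven entry point with clap subcommands. Based on the argument definitions in the diff below, typical invocations after this commit look roughly like:

    nff format /etc/nftables.conf --indent spaces --spaces 4
    nff format ruleset.nft --stdout
    nff lint ruleset.nft --json
    nff lint --debug

When no file argument is given, both subcommands fall back to discovering .nft/.nftables files (and /etc/nftables.conf) via the new glob-based discovery code.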
Cargo.lock (generated): 7 changed lines
@@ -176,6 +176,12 @@ dependencies = [
  "byteorder",
 ]
 
+[[package]]
+name = "glob"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
+
 [[package]]
 name = "hashbrown"
 version = "0.15.3"
@@ -273,6 +279,7 @@ dependencies = [
  "anyhow",
  "clap",
  "cstree",
+ "glob",
  "logos",
  "regex",
  "serde",
Cargo.toml: 1 changed line

@@ -15,3 +15,4 @@ text-size = "1.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 regex = "1.11.1"
+glob = "0.3"
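The new multi-file discovery in src/main.rs builds on the glob crate added above. A minimal sketch of the crate API it relies on (glob 0.3; the pattern here is illustrative only):

    use glob::glob;

    fn main() {
        // glob() fails on an invalid pattern; each yielded entry can still fail individually.
        for entry in glob("*.nft").expect("invalid glob pattern") {
            match entry {
                Ok(path) => println!("{}", path.display()),
                Err(e) => eprintln!("warning: {}", e),
            }
        }
    }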
src/main.rs: 503 changed lines
@@ -6,7 +6,8 @@ mod parser;
 mod syntax;
 
 use anyhow::{Context, Result};
-use clap::Parser;
+use clap::{Parser, Subcommand};
+use glob::glob;
 use std::fs;
 use std::io::{self, Write};
 use std::path::Path;
@@ -34,71 +35,176 @@ enum FormatterError {
 #[command(
     name = "nff",
     version = "0.1.0",
-    about = "A high-quality nftables formatter and beautifier",
-    long_about = "nff (nftables formatter) is a tool for formatting and beautifying nftables configuration files with proper indentation and structure."
+    about = "A high-quality nftables formatter and linter",
+    long_about = "nff (nftables formatter) is a tool for formatting and linting nftables configuration files with proper indentation and structure."
 )]
 struct Args {
-    /// nftables config file (e.g: /etc/nftables.conf)
-    #[arg(short, long, value_name = "FILE")]
-    file: String,
-
-    /// Type of indentation
-    #[arg(short, long, default_value = "tabs", value_parser = clap::value_parser!(IndentStyle))]
-    indent: IndentStyle,
-
-    /// Output file (writes to stdout if not specified)
-    #[arg(short, long, value_name = "FILE")]
-    output: Option<String>,
-
-    /// Optimize output by removing excessive empty lines
-    #[arg(long)]
-    optimize: bool,
-
-    /// Number of spaces per indentation level (only used with --indent=spaces)
-    #[arg(long, default_value = "2", value_name = "N")]
-    spaces: usize,
+    #[command(subcommand)]
+    command: Commands,
 
     /// Show debug information (tokens, AST, etc.)
-    #[arg(long)]
+    #[arg(long, global = true)]
     debug: bool,
-
-    /// Check syntax only, don't format
-    #[arg(long)]
-    check: bool,
-
-    /// Run diagnostics and show issues (syntax, style, best practices)
-    #[arg(long)]
-    diagnostics: bool,
-
-    /// Output diagnostics in JSON format (useful for tooling integration)
-    #[arg(long)]
-    json: bool,
-
-    /// Include style warnings in diagnostics
-    #[arg(long, default_value = "true")]
-    style_warnings: bool,
-
-    /// Include best practice recommendations in diagnostics
-    #[arg(long, default_value = "true")]
-    best_practices: bool,
-
-    /// Include performance hints in diagnostics
-    #[arg(long, default_value = "true")]
-    performance_hints: bool,
-
-    /// Include security warnings in diagnostics
-    #[arg(long, default_value = "true")]
-    security_warnings: bool,
-
-    /// Diagnostic modules to run (comma-separated: lexical,syntax,style,semantic)
-    #[arg(long, value_delimiter = ',')]
-    modules: Option<Vec<String>>,
 }
 
-fn process_nftables_config(args: Args) -> Result<()> {
-    let path = Path::new(&args.file);
+#[derive(Subcommand, Debug, Clone)]
+enum Commands {
+    /// Format nftables configuration files
+    Format {
+        /// nftables config file (e.g: /etc/nftables.conf). If not provided, formats all .nft files in current directory
+        #[arg(value_name = "FILE")]
+        file: Option<String>,
+
+        /// Type of indentation
+        #[arg(short, long, default_value = "tabs", value_parser = clap::value_parser!(IndentStyle))]
+        indent: IndentStyle,
+
+        /// Print formatted output to stdout instead of modifying files in place
+        #[arg(long)]
+        stdout: bool,
+
+        /// Optimize output by removing excessive empty lines
+        #[arg(long)]
+        optimize: bool,
+
+        /// Number of spaces per indentation level (only used with --indent=spaces)
+        #[arg(long, default_value = "2", value_name = "N")]
+        spaces: usize,
+
+        /// Check syntax only, don't format
+        #[arg(long)]
+        check: bool,
+    },
+    /// Lint nftables configuration files and show diagnostics
+    Lint {
+        /// nftables config file (e.g: /etc/nftables.conf). If not provided, lints all .nft files in current directory
+        #[arg(value_name = "FILE")]
+        file: Option<String>,
+
+        /// Output diagnostics in JSON format (useful for tooling integration)
+        #[arg(long)]
+        json: bool,
+
+        /// Include style warnings in diagnostics
+        #[arg(long, default_value = "true")]
+        style_warnings: bool,
+
+        /// Include best practice recommendations in diagnostics
+        #[arg(long, default_value = "true")]
+        best_practices: bool,
+
+        /// Include performance hints in diagnostics
+        #[arg(long, default_value = "true")]
+        performance_hints: bool,
+
+        /// Include security warnings in diagnostics
+        #[arg(long, default_value = "true")]
+        security_warnings: bool,
+
+        /// Diagnostic modules to run (comma-separated: lexical,syntax,style,semantic)
+        #[arg(long, value_delimiter = ',')]
+        modules: Option<Vec<String>>,
+    },
+}
+
+fn discover_nftables_files() -> Result<Vec<String>> {
+    let mut files = Vec::new();
+
+    // Common nftables file patterns
+    let patterns = [
+        "*.nft",
+        "*.nftables",
+        "/etc/nftables.conf",
+        "/etc/nftables/*.nft",
+    ];
+
+    for pattern in &patterns {
+        match glob(pattern) {
+            Ok(paths) => {
+                for entry in paths {
+                    match entry {
+                        Ok(path) => {
+                            if path.is_file() {
+                                if let Some(path_str) = path.to_str() {
+                                    files.push(path_str.to_string());
+                                }
+                            }
+                        }
+                        Err(e) => eprintln!("Warning: Error reading path: {}", e),
+                    }
+                }
+            }
+            Err(e) => {
+                // Only warn for non-current directory patterns
+                if !pattern.starts_with("*.") {
+                    eprintln!("Warning: Failed to search pattern {}: {}", pattern, e);
+                }
+            }
+        }
+    }
+
+    if files.is_empty() {
+        return Err(anyhow::anyhow!(
+            "No nftables files found. Please specify a file explicitly or ensure .nft/.nftables files exist in the current directory."
+        ));
+    }
+
+    // Remove duplicates and sort
+    files.sort();
+    files.dedup();
+
+    Ok(files)
+}
+
+fn process_format_command(
+    file: Option<String>,
+    indent: IndentStyle,
+    stdout: bool,
+    optimize: bool,
+    spaces: usize,
+    check: bool,
+    debug: bool,
+) -> Result<()> {
+    let files = match file {
+        Some(f) => vec![f],
+        None => discover_nftables_files()?,
+    };
+
+    let is_multiple_files = files.len() > 1;
+    for file_path in files {
+        if let Err(e) = process_single_file_format(
+            &file_path,
+            indent,
+            stdout,
+            optimize,
+            spaces,
+            check,
+            debug,
+            is_multiple_files,
+        ) {
+            eprintln!("Error processing {}: {}", file_path, e);
+            if !is_multiple_files {
+                return Err(e);
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn process_single_file_format(
+    file: &str,
+    indent: IndentStyle,
+    stdout: bool,
+    optimize: bool,
+    spaces: usize,
+    check: bool,
+    debug: bool,
+    is_multiple_files: bool,
+) -> Result<()> {
+    let path = Path::new(&file);
     if !path.exists() {
-        return Err(FormatterError::FileNotFound(args.file).into());
+        return Err(FormatterError::FileNotFound(file.to_string()).into());
     }
 
     if !path.is_file() {
@@ -106,12 +212,12 @@ fn process_nftables_config(args: Args) -> Result<()> {
     }
 
     // Read file contents
-    let source = fs::read_to_string(&args.file)
-        .with_context(|| format!("Failed to read file: {}", args.file))?;
+    let source =
+        fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
 
     // Tokenize
     let mut lexer = NftablesLexer::new(&source);
-    let tokens = if args.debug {
+    let tokens = if debug {
         // Use error-recovery tokenization for debug mode
         lexer.tokenize_with_errors()
     } else {
@@ -120,7 +226,7 @@ fn process_nftables_config(args: Args) -> Result<()> {
             .map_err(|e| FormatterError::ParseError(e.to_string()))?
     };
 
-    if args.debug {
+    if debug {
         eprintln!("=== TOKENS ===");
         for (i, token) in tokens.iter().enumerate() {
             eprintln!(
@@ -146,60 +252,8 @@ fn process_nftables_config(args: Args) -> Result<()> {
         eprintln!();
     }
 
-    // Run diagnostics if requested (do this early to catch parse errors)
-    if args.diagnostics || args.json {
-        let diagnostic_config = DiagnosticConfig {
-            enable_style_warnings: args.style_warnings,
-            enable_best_practices: args.best_practices,
-            enable_performance_hints: args.performance_hints,
-            enable_security_warnings: args.security_warnings,
-            max_line_length: 120,
-            max_empty_lines: if args.optimize { 1 } else { 2 },
-            preferred_indent: Some(match args.indent {
-                IndentStyle::Tabs => "tabs".to_string(),
-                IndentStyle::Spaces => "spaces".to_string(),
-            }),
-        };
-
-        let analyzer = DiagnosticAnalyzer::new(diagnostic_config);
-
-        let diagnostics = if let Some(modules) = &args.modules {
-            let module_names: Vec<&str> = modules.iter().map(|s| s.as_str()).collect();
-            analyzer.analyze_with_modules(&source, &args.file, &module_names)
-        } else {
-            analyzer.analyze(&source, &args.file)
-        };
-
-        if args.json {
-            // Output JSON format for tooling integration
-            match diagnostics.to_json() {
-                Ok(json) => println!("{}", json),
-                Err(e) => {
-                    if args.json {
-                        // Even JSON serialization errors should be in JSON format when --json is used
-                        let error_json =
-                            format!(r#"{{"error": "JSON serialization failed: {}"}}"#, e);
-                        println!("{}", error_json);
-                    } else {
-                        eprintln!("Error serializing diagnostics to JSON: {}", e);
-                    }
-                }
-            }
-        } else {
-            // Output human-readable format
-            println!("{}", diagnostics.to_human_readable());
-        }
-
-        // Exit with non-zero code if there are errors
-        if diagnostics.has_errors() {
-            std::process::exit(1);
-        }
-
-        return Ok(());
-    }
-
-    // Parse (only if not doing diagnostics)
-    let ruleset = if args.debug {
+    // Parse
+    let ruleset = if debug {
         // Use error-recovery parsing for debug mode
         let (parsed_ruleset, errors) = NftablesParser::parse_with_errors(&source);
         if !errors.is_empty() {
@@ -217,51 +271,230 @@ fn process_nftables_config(args: Args) -> Result<()> {
             .map_err(|e| FormatterError::ParseError(e.to_string()))?
     };
 
-    if args.debug {
+    if debug {
         eprintln!("=== AST ===");
         eprintln!("{:#?}", ruleset);
         eprintln!();
     }
 
-    if args.check {
-        println!("Syntax check passed for: {}", args.file);
+    if check {
+        println!("Syntax check passed for: {}", file);
         return Ok(());
     }
 
     // Format
     let config = FormatConfig {
-        indent_style: args.indent,
-        spaces_per_level: args.spaces,
-        optimize: args.optimize,
-        max_empty_lines: if args.optimize { 1 } else { 2 },
+        indent_style: indent,
+        spaces_per_level: spaces,
+        optimize,
+        max_empty_lines: if optimize { 1 } else { 2 },
     };
 
     let formatter = NftablesFormatter::new(config);
     let formatted_output = formatter.format_ruleset(&ruleset);
 
     // Write output
-    match &args.output {
-        Some(output_file) => {
-            fs::write(output_file, &formatted_output)
-                .with_context(|| format!("Failed to write to output file: {}", output_file))?;
-            println!("Formatted output written to: {}", output_file);
+    if stdout {
+        // Output to stdout
+        if is_multiple_files {
+            println!("=== {} ===", file);
         }
-        None => {
-            io::stdout()
-                .write_all(formatted_output.as_bytes())
-                .with_context(|| "Failed to write to stdout")?;
+        io::stdout()
+            .write_all(formatted_output.as_bytes())
+            .with_context(|| "Failed to write to stdout")?;
+    } else {
+        // Format in place
+        fs::write(file, &formatted_output)
+            .with_context(|| format!("Failed to write formatted content back to: {}", file))?;
+        if is_multiple_files || debug {
+            println!("Formatted: {}", file);
         }
     }
 
     Ok(())
 }
 
+fn process_lint_command(
+    file: Option<String>,
+    json: bool,
+    style_warnings: bool,
+    best_practices: bool,
+    performance_hints: bool,
+    security_warnings: bool,
+    modules: Option<Vec<String>>,
+    debug: bool,
+) -> Result<()> {
+    let files = match file {
+        Some(f) => vec![f],
+        None => discover_nftables_files()?,
+    };
+
+    let is_multiple_files = files.len() > 1;
+    for file_path in files {
+        if let Err(e) = process_single_file_lint(
+            &file_path,
+            json,
+            style_warnings,
+            best_practices,
+            performance_hints,
+            security_warnings,
+            modules.as_ref(),
+            debug,
+            is_multiple_files,
+        ) {
+            eprintln!("Error processing {}: {}", file_path, e);
+            if !is_multiple_files {
+                return Err(e);
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn process_single_file_lint(
+    file: &str,
+    json: bool,
+    style_warnings: bool,
+    best_practices: bool,
+    performance_hints: bool,
+    security_warnings: bool,
+    modules: Option<&Vec<String>>,
+    debug: bool,
+    is_multiple_files: bool,
+) -> Result<()> {
+    let path = Path::new(&file);
+    if !path.exists() {
+        return Err(FormatterError::FileNotFound(file.to_string()).into());
+    }
+
+    if !path.is_file() {
+        return Err(FormatterError::InvalidFile("Not a regular file".to_string()).into());
+    }
+
+    // Read file contents
+    let source =
+        fs::read_to_string(&file).with_context(|| format!("Failed to read file: {}", file))?;
+
+    if debug {
+        // Tokenize for debug output
+        let mut lexer = NftablesLexer::new(&source);
+        let tokens = lexer.tokenize_with_errors();
+
+        eprintln!("=== TOKENS ===");
+        for (i, token) in tokens.iter().enumerate() {
+            eprintln!(
+                "{:3}: {:?} @ {:?} = '{}'",
+                i, token.kind, token.range, token.text
+            );
+        }
+        eprintln!();
+
+        // Build and validate CST
+        eprintln!("=== CST ===");
+        let cst_tree = CstBuilder::build_tree(&tokens);
+        match CstBuilder::validate_tree(&cst_tree) {
+            Ok(()) => eprintln!("CST validation passed"),
+            Err(e) => eprintln!("CST validation error: {}", e),
+        }
+        eprintln!();
+    }
+
+    // Run diagnostics
+    let diagnostic_config = DiagnosticConfig {
+        enable_style_warnings: style_warnings,
+        enable_best_practices: best_practices,
+        enable_performance_hints: performance_hints,
+        enable_security_warnings: security_warnings,
+        max_line_length: 120,
+        max_empty_lines: 2,
+        preferred_indent: None, // Don't enforce indent style in lint mode
+    };
+
+    let analyzer = DiagnosticAnalyzer::new(diagnostic_config);
+
+    let diagnostics = if let Some(modules) = &modules {
+        let module_names: Vec<&str> = modules.iter().map(|s| s.as_str()).collect();
+        analyzer.analyze_with_modules(&source, &file, &module_names)
+    } else {
+        analyzer.analyze(&source, &file)
+    };
+
+    if json {
+        // Output JSON format for tooling integration
+        match diagnostics.to_json() {
+            Ok(json) => println!("{}", json),
+            Err(e) => {
+                // Even JSON serialization errors should be in JSON format when --json is used
+                let error_json = format!(r#"{{"error": "JSON serialization failed: {}"}}"#, e);
+                println!("{}", error_json);
+            }
+        }
+    } else {
+        // Output human-readable format
+        if is_multiple_files {
+            println!("=== {} ===", file);
+        }
+        println!("{}", diagnostics.to_human_readable());
+    }
+
+    // Exit with non-zero code if there are errors
+    if diagnostics.has_errors() {
+        std::process::exit(1);
+    }
+
+    Ok(())
+}
+
 fn main() -> Result<()> {
     let args = Args::parse();
 
-    if let Err(e) = process_nftables_config(args.clone()) {
-        if args.json {
-            // Output error in JSON format when --json flag is used
+    let result = match &args.command {
+        Commands::Format {
+            file,
+            indent,
+            stdout,
+            optimize,
+            spaces,
+            check,
+        } => process_format_command(
+            file.clone(),
+            *indent,
+            *stdout,
+            *optimize,
+            *spaces,
+            *check,
+            args.debug,
+        ),
+        Commands::Lint {
+            file,
+            json,
+            style_warnings,
+            best_practices,
+            performance_hints,
+            security_warnings,
+            modules,
+        } => process_lint_command(
+            file.clone(),
+            *json,
+            *style_warnings,
+            *best_practices,
+            *performance_hints,
+            *security_warnings,
+            modules.clone(),
+            args.debug,
+        ),
+    };
+
+    if let Err(e) = result {
+        // Check if we're in lint mode with JSON output for error formatting
+        let use_json = match &args.command {
+            Commands::Lint { json, .. } => *json,
+            _ => false,
+        };
+
+        if use_json {
+            // Output error in JSON format when --json flag is used in lint mode
             let error_json = format!(r#"{{"error": "{}"}}"#, e);
             println!("{}", error_json);
         } else {
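Because debug is declared with global = true on Args, it can be combined with either subcommand, and process_single_file_lint exits with status 1 when diagnostics contain errors, which makes the lint subcommand usable as a CI gate. A sketch of an invocation (the file name is illustrative):

    nff lint --debug ruleset.nft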