//! Diagnostic system for nftables configuration files
//!
//! This module provides comprehensive diagnostic capabilities including:
//! - Syntax errors with precise location information
//! - Semantic validation warnings
//! - Style and best practice recommendations
//! - Language Server Protocol (LSP) compatible output
//! - JSON output for tooling integration

use crate::lexer::LexError;
use crate::parser::{ParseError, Parser};
use serde::{Deserialize, Serialize};
use serde_json;
use std::collections::{HashMap, HashSet};
use std::fmt;
use text_size::TextSize;

/// Diagnostic severity levels following LSP specification
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSeverity {
    /// Reports an error that prevents successful processing
    Error = 1,
    /// Reports a warning that should be addressed
    Warning = 2,
    /// Reports information that might be useful
    Information = 3,
    /// Reports a hint for potential improvements
    Hint = 4,
}

impl fmt::Display for DiagnosticSeverity {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            DiagnosticSeverity::Error => write!(f, "error"),
            DiagnosticSeverity::Warning => write!(f, "warning"),
            DiagnosticSeverity::Information => write!(f, "info"),
            DiagnosticSeverity::Hint => write!(f, "hint"),
        }
    }
}

/// Diagnostic codes for categorizing issues
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticCode {
    // Syntax errors
    SyntaxError,
    UnexpectedToken,
    MissingToken,
    UnterminatedString,
    InvalidNumber,
    InvalidToken,

    // Semantic errors
    UnknownTableFamily,
    UnknownChainType,
    UnknownHook,
    InvalidPriority,
    InvalidPolicy,
    DuplicateTableName,
    DuplicateChainName,
    UndefinedVariable,
    InvalidCidrNotation,
    InvalidPortRange,
    InvalidProtocol,

    // Style warnings
    MissingShebang,
    InconsistentIndentation,
    TrailingWhitespace,
    TooManyEmptyLines,
    LongLine,
    PreferredAlternative,

    // Best practices
    ChainWithoutPolicy,
    RuleWithoutAction,
    OverlyPermissiveRule,
    DuplicateRule,
    ConflictingRules,
    UnusedVariable,
    UnusedSet,
    DeprecatedSyntax,
    MissingDocumentation,
    SecurityRisk,

    // Performance
    IneffientRuleOrder,
    LargeSetWithoutTimeout,
    MissingCounters,

    // Indentation and formatting
    MixedIndentation,
    IncorrectIndentationLevel,
    MissingSpaceAfterComma,
    ExtraWhitespace,

    // nftables specific
    ChainMissingHook,
    InvalidTableFamily,
    InvalidChainPriority,
    MissingChainType,
    RedundantRule,
    UnnecessaryJump,
}

impl fmt::Display for DiagnosticCode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let code = match self {
            DiagnosticCode::SyntaxError => "NFT001",
            DiagnosticCode::UnexpectedToken => "NFT002",
            DiagnosticCode::MissingToken => "NFT003",
            DiagnosticCode::UnterminatedString => "NFT004",
            DiagnosticCode::InvalidNumber => "NFT005",
            DiagnosticCode::InvalidToken => "NFT006",
            DiagnosticCode::UnknownTableFamily => "NFT101",
            DiagnosticCode::UnknownChainType => "NFT102",
            DiagnosticCode::UnknownHook => "NFT103",
            DiagnosticCode::InvalidPriority => "NFT104",
            DiagnosticCode::InvalidPolicy => "NFT105",
            DiagnosticCode::DuplicateTableName => "NFT106",
            DiagnosticCode::DuplicateChainName => "NFT107",
            DiagnosticCode::UndefinedVariable => "NFT108",
            DiagnosticCode::InvalidCidrNotation => "NFT109",
            DiagnosticCode::InvalidPortRange => "NFT110",
            DiagnosticCode::InvalidProtocol => "NFT111",
            DiagnosticCode::MissingShebang => "NFT201",
            DiagnosticCode::InconsistentIndentation => "NFT202",
            DiagnosticCode::TrailingWhitespace => "NFT203",
            DiagnosticCode::TooManyEmptyLines => "NFT204",
            DiagnosticCode::LongLine => "NFT205",
            DiagnosticCode::PreferredAlternative => "NFT206",
            DiagnosticCode::ChainWithoutPolicy => "NFT301",
            DiagnosticCode::RuleWithoutAction => "NFT302",
            DiagnosticCode::OverlyPermissiveRule => "NFT303",
            DiagnosticCode::DuplicateRule => "NFT304",
            DiagnosticCode::ConflictingRules => "NFT305",
            DiagnosticCode::UnusedVariable => "NFT306",
            DiagnosticCode::UnusedSet => "NFT307",
            DiagnosticCode::DeprecatedSyntax => "NFT308",
            DiagnosticCode::MissingDocumentation => "NFT309",
            DiagnosticCode::SecurityRisk => "NFT310",
            DiagnosticCode::IneffientRuleOrder => "NFT401",
            DiagnosticCode::LargeSetWithoutTimeout => "NFT402",
            DiagnosticCode::MissingCounters => "NFT403",
            DiagnosticCode::MixedIndentation => "NFT501",
            DiagnosticCode::IncorrectIndentationLevel => "NFT502",
            DiagnosticCode::MissingSpaceAfterComma => "NFT503",
            DiagnosticCode::ExtraWhitespace => "NFT504",
            DiagnosticCode::ChainMissingHook => "NFT601",
            DiagnosticCode::InvalidTableFamily => "NFT602",
            DiagnosticCode::InvalidChainPriority => "NFT603",
            DiagnosticCode::MissingChainType => "NFT604",
            DiagnosticCode::RedundantRule => "NFT605",
            DiagnosticCode::UnnecessaryJump => "NFT606",
        };
        write!(f, "{}", code)
    }
}

/// Position information for diagnostics
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Position {
    pub line: u32,
    pub character: u32,
}

impl Position {
    pub fn new(line: u32, character: u32) -> Self {
        Self { line, character }
    }

    pub fn from_text_size(text_size: TextSize, source: &str) -> Self {
        let mut line = 0;
        let mut character = 0;
        let offset: usize = text_size.into();

        for (i, ch) in source.char_indices() {
            if i >= offset {
                break;
            }
            if ch == '\n' {
                line += 1;
                character = 0;
            } else {
                character += 1;
            }
        }

        Self { line, character }
    }
}

/// Range information for diagnostics
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Range {
    pub start: Position,
    pub end: Position,
}

impl Range {
    pub fn new(start: Position, end: Position) -> Self {
        Self { start, end }
    }

    pub fn single_position(position: Position) -> Self {
        Self {
            start: position.clone(),
            end: position,
        }
    }
}

/// Related information for diagnostics
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DiagnosticRelatedInformation {
    pub location: Range,
    pub message: String,
}

/// Code action that can fix a diagnostic
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct CodeAction {
    pub title: String,
    pub kind: String,
    pub edit: Option<WorkspaceEdit>,
}

/// Text edit for code actions
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct TextEdit {
    pub range: Range,
    pub new_text: String,
}

/// Workspace edit containing text changes
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct WorkspaceEdit {
    pub changes: HashMap<String, Vec<TextEdit>>,
}
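// The edit types above are never populated inside this module (`code_actions`
// is always left empty), so the following free function is only a minimal
// sketch of how they compose into an LSP-style quick fix. The function name and
// the "quickfix" kind string are illustrative assumptions, not part of the
// existing API.
/// Example helper (illustrative): build a quick fix that deletes the text in `range`.
pub fn example_delete_quick_fix(file_path: &str, range: Range, title: &str) -> CodeAction {
    // Replace the flagged range with an empty string.
    let edit = TextEdit {
        range,
        new_text: String::new(),
    };

    // LSP workspace edits map a file path (or URI) to its list of text edits.
    let mut changes = HashMap::new();
    changes.insert(file_path.to_string(), vec![edit]);

    CodeAction {
        title: title.to_string(),
        kind: "quickfix".to_string(),
        edit: Some(WorkspaceEdit { changes }),
    }
}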
/// A single diagnostic issue
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The range at which the message applies
    pub range: Range,
    /// The diagnostic's severity
    pub severity: DiagnosticSeverity,
    /// The diagnostic's code
    pub code: DiagnosticCode,
    /// A human-readable string describing the source of this diagnostic
    pub source: String,
    /// The diagnostic's message
    pub message: String,
    /// Additional metadata about the diagnostic
    pub related_information: Vec<DiagnosticRelatedInformation>,
    /// Code actions that can address this diagnostic
    pub code_actions: Vec<CodeAction>,
    /// Tags providing additional metadata
    pub tags: Vec<String>,
}

impl Diagnostic {
    pub fn new(
        range: Range,
        severity: DiagnosticSeverity,
        code: DiagnosticCode,
        message: String,
    ) -> Self {
        Self {
            range,
            severity,
            code,
            source: "nff".to_string(),
            message,
            related_information: Vec::new(),
            code_actions: Vec::new(),
            tags: Vec::new(),
        }
    }
}

/// Collection of diagnostics for a file
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DiagnosticCollection {
    pub diagnostics: Vec<Diagnostic>,
    pub file_path: String,
    pub source_text: String,
}

impl DiagnosticCollection {
    pub fn new(file_path: String, source_text: String) -> Self {
        Self {
            diagnostics: Vec::new(),
            file_path,
            source_text,
        }
    }

    pub fn extend(&mut self, diagnostics: Vec<Diagnostic>) {
        self.diagnostics.extend(diagnostics);
    }

    pub fn errors(&self) -> impl Iterator<Item = &Diagnostic> {
        self.diagnostics
            .iter()
            .filter(|d| d.severity == DiagnosticSeverity::Error)
    }

    pub fn has_errors(&self) -> bool {
        self.errors().count() > 0
    }

    /// Convert to JSON for LSP or tooling integration
    pub fn to_json(&self) -> serde_json::Result<String> {
        serde_json::to_string_pretty(self)
    }

    /// Convert to a human-readable format
    pub fn to_human_readable(&self) -> String {
        let mut output = String::new();

        if self.diagnostics.is_empty() {
            output.push_str("No issues found.\n");
            return output;
        }

        output.push_str(&format!(
            "Found {} issues in {}:\n\n",
            self.diagnostics.len(),
            self.file_path
        ));

        for diagnostic in &self.diagnostics {
            output.push_str(&format!(
                "{}:{}:{}: {}: {} [{}]\n",
                self.file_path,
                diagnostic.range.start.line + 1,
                diagnostic.range.start.character + 1,
                diagnostic.severity,
                diagnostic.message,
                diagnostic.code
            ));

            // Add code snippet context
            if let Some(context) = self.get_context_lines(&diagnostic.range, 2) {
                for line in context {
                    output.push_str(&format!(" {}\n", line));
                }
                output.push('\n');
            }
        }

        output
    }

    fn get_context_lines(&self, range: &Range, context_lines: usize) -> Option<Vec<String>> {
        let lines: Vec<&str> = self.source_text.lines().collect();
        let start_line = range.start.line as usize;
        let end_line = range.end.line as usize;

        if start_line >= lines.len() {
            return None;
        }

        let context_start = start_line.saturating_sub(context_lines);
        let context_end = std::cmp::min(end_line + context_lines + 1, lines.len());

        let mut result = Vec::new();
        for (i, line) in lines[context_start..context_end].iter().enumerate() {
            let line_num = context_start + i + 1;
            if i + context_start == start_line {
                result.push(format!("→ {:4}: {}", line_num, line));
            } else {
                result.push(format!("  {:4}: {}", line_num, line));
            }
        }

        Some(result)
    }
}

/// Configuration for diagnostic analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiagnosticConfig {
    /// Enable style warnings
    pub enable_style_warnings: bool,
    /// Enable best practice checks
    pub enable_best_practices: bool,
    /// Enable performance hints
    pub enable_performance_hints: bool,
    /// Enable security warnings
    pub enable_security_warnings: bool,
    /// Maximum line length for style checks
    pub max_line_length: usize,
    /// Maximum consecutive empty lines
    pub max_empty_lines: usize,
    /// Preferred indentation style
    pub preferred_indent: Option<String>,
}

impl Default for DiagnosticConfig {
    fn default() -> Self {
        Self {
            enable_style_warnings: true,
            enable_best_practices: true,
            enable_performance_hints: true,
            enable_security_warnings: true,
            max_line_length: 120,
            max_empty_lines: 2,
            preferred_indent: Some("tabs".to_string()),
        }
    }
}

/// Trait for specialized diagnostic analyzers
pub trait AnalyzerModule {
    fn analyze(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic>;
    fn name(&self) -> &'static str;
}
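// A minimal sketch of how an additional module would plug into the trait above.
// The `CommentCoverageAnalyzer` name and its heuristic are illustrative
// assumptions, not part of the existing analyzer set, and nothing registers it
// in `DiagnosticAnalyzer::analyze_with_modules`; it only shows the shape of an
// implementation.
pub struct CommentCoverageAnalyzer;

impl AnalyzerModule for CommentCoverageAnalyzer {
    fn analyze(&self, source: &str, _config: &DiagnosticConfig) -> Vec<Diagnostic> {
        // Look for comment lines, ignoring the shebang line.
        let has_comments = source.lines().any(|line| {
            let trimmed = line.trim_start();
            trimmed.starts_with('#') && !trimmed.starts_with("#!")
        });

        if has_comments {
            return Vec::new();
        }

        // Reuse an existing diagnostic code so downstream tooling needs no changes.
        vec![Diagnostic::new(
            Range::single_position(Position::new(0, 0)),
            DiagnosticSeverity::Hint,
            DiagnosticCode::MissingDocumentation,
            "File contains no comments; consider documenting rule intent".to_string(),
        )]
    }

    fn name(&self) -> &'static str {
        "comment-coverage"
    }
}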
/// Lexical analysis module
pub struct LexicalAnalyzer;

impl AnalyzerModule for LexicalAnalyzer {
    fn analyze(&self, source: &str, _config: &DiagnosticConfig) -> Vec<Diagnostic> {
        use crate::lexer::NftablesLexer;

        let mut diagnostics = Vec::new();
        let mut lexer = NftablesLexer::new(source);

        match lexer.tokenize() {
            Ok(_) => {
                // No lexical errors
            }
            Err(lex_error) => {
                let diagnostic = Self::lex_error_to_diagnostic(&lex_error, source);
                diagnostics.push(diagnostic);
            }
        }

        diagnostics
    }

    fn name(&self) -> &'static str {
        "lexical"
    }
}

impl LexicalAnalyzer {
    fn lex_error_to_diagnostic(error: &LexError, source: &str) -> Diagnostic {
        match error {
            LexError::InvalidToken { position, text } => {
                let pos = Position::from_text_size(TextSize::from(*position as u32), source);
                let range = Range::new(
                    pos.clone(),
                    Position::new(pos.line, pos.character + text.len() as u32),
                );
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::InvalidToken,
                    format!("Invalid token: '{}'", text),
                )
            }
            LexError::UnterminatedString { position } => {
                let pos = Position::from_text_size(TextSize::from(*position as u32), source);
                let range = Range::single_position(pos);
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::UnterminatedString,
                    "Unterminated string literal".to_string(),
                )
            }
            LexError::InvalidNumber { text } => {
                if let Some(pos) = source.find(text) {
                    let start_pos = Position::from_text_size(TextSize::from(pos as u32), source);
                    let end_pos =
                        Position::new(start_pos.line, start_pos.character + text.len() as u32);
                    let range = Range::new(start_pos, end_pos);
                    Diagnostic::new(
                        range,
                        DiagnosticSeverity::Error,
                        DiagnosticCode::InvalidNumber,
                        format!("Invalid number: '{}'", text),
                    )
                } else {
                    let range = Range::single_position(Position::new(0, 0));
                    Diagnostic::new(
                        range,
                        DiagnosticSeverity::Error,
                        DiagnosticCode::InvalidNumber,
                        format!("Invalid number: '{}'", text),
                    )
                }
            }
        }
    }
}

/// Syntax analysis module
pub struct SyntaxAnalyzer;

impl AnalyzerModule for SyntaxAnalyzer {
    fn analyze(&self, source: &str, _config: &DiagnosticConfig) -> Vec<Diagnostic> {
        use crate::lexer::NftablesLexer;

        let mut diagnostics = Vec::new();
        let mut lexer = NftablesLexer::new(source);

        match lexer.tokenize() {
            Ok(tokens) => {
                let mut parser = Parser::new(tokens);
                match parser.parse() {
                    Ok(_) => {
                        // No parse errors
                    }
                    Err(parse_error) => {
                        let diagnostic = Self::parse_error_to_diagnostic(&parse_error, source);
                        diagnostics.push(diagnostic);
                    }
                }
            }
            Err(_) => {
                // Already handled in lexical analysis
            }
        }

        diagnostics
    }

    fn name(&self) -> &'static str {
        "syntax"
    }
}

impl SyntaxAnalyzer {
    fn parse_error_to_diagnostic(error: &ParseError, _source: &str) -> Diagnostic {
        match error {
            ParseError::UnexpectedToken {
                line,
                column,
                expected,
                found,
            } => {
                let pos = Position::new(*line as u32, *column as u32);
                let range = Range::single_position(pos);
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::UnexpectedToken,
                    format!("Expected {}, found '{}'", expected, found),
                )
            }
            ParseError::MissingToken { expected } => {
                let range = Range::single_position(Position::new(0, 0));
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::MissingToken,
                    format!("Missing token: expected {}", expected),
                )
            }
            ParseError::InvalidExpression { message } => {
                let range = Range::single_position(Position::new(0, 0));
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::SyntaxError,
                    format!("Invalid expression: {}", message),
                )
            }
            ParseError::InvalidStatement { message } => {
                let range = Range::single_position(Position::new(0, 0));
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::SyntaxError,
                    format!("Invalid statement: {}", message),
                )
            }
            ParseError::SemanticError { message } => {
                let range = Range::single_position(Position::new(0, 0));
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::SyntaxError,
                    format!("Semantic error: {}", message),
                )
            }
            ParseError::LexError(lex_error) => {
                LexicalAnalyzer::lex_error_to_diagnostic(lex_error, _source)
            }
            ParseError::AnyhowError(anyhow_error) => {
                let range = Range::single_position(Position::new(0, 0));
                Diagnostic::new(
                    range,
                    DiagnosticSeverity::Error,
                    DiagnosticCode::SyntaxError,
                    format!("Parse error: {}", anyhow_error),
                )
            }
        }
    }
}

/// Style and formatting analysis module
pub struct StyleAnalyzer;

impl AnalyzerModule for StyleAnalyzer {
    fn analyze(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();

        // Check for missing shebang
        if !source.starts_with("#!") {
            let range = Range::new(Position::new(0, 0), Position::new(0, 0));
            let diagnostic = Diagnostic::new(
                range,
                DiagnosticSeverity::Warning,
                DiagnosticCode::MissingShebang,
                "Consider adding a shebang line (e.g., #!/usr/sbin/nft -f)".to_string(),
            );
            diagnostics.push(diagnostic);
        }

        diagnostics.extend(self.analyze_line_issues(source, config));
        diagnostics.extend(self.analyze_whitespace_issues(source, config));
        diagnostics.extend(self.analyze_indentation(source, config));

        diagnostics
    }

    fn name(&self) -> &'static str {
        "style"
    }
}

impl StyleAnalyzer {
    fn analyze_line_issues(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;

            // Long lines
            if line.len() > config.max_line_length {
                let start = Position::new(line_num, config.max_line_length as u32);
                let end = Position::new(line_num, line.len() as u32);
                let range = Range::new(start, end);
                let diagnostic = Diagnostic::new(
                    range,
                    DiagnosticSeverity::Warning,
                    DiagnosticCode::LongLine,
                    format!(
                        "Line too long ({} > {} characters)",
                        line.len(),
                        config.max_line_length
                    ),
                );
                diagnostics.push(diagnostic);
            }

            // Trailing whitespace
            if line.ends_with(' ') || line.ends_with('\t') {
                let trimmed_len = line.trim_end().len();
                let start = Position::new(line_num, trimmed_len as u32);
                let end = Position::new(line_num, line.len() as u32);
                let range = Range::new(start, end);
                let diagnostic = Diagnostic::new(
                    range,
                    DiagnosticSeverity::Warning,
                    DiagnosticCode::TrailingWhitespace,
                    "Trailing whitespace".to_string(),
                );
                diagnostics.push(diagnostic);
            }
        }

        diagnostics
    }

    fn analyze_whitespace_issues(
        &self,
        source: &str,
        config: &DiagnosticConfig,
    ) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();
        let lines: Vec<&str> = source.lines().collect();
        let mut empty_count = 0;
        let mut empty_start = 0;

        for (line_idx, line) in lines.iter().enumerate() {
            if line.trim().is_empty() {
                if empty_count == 0 {
                    empty_start = line_idx;
                }
                empty_count += 1;
            } else {
                if empty_count > config.max_empty_lines {
                    let start = Position::new(empty_start as u32, 0);
                    let end = Position::new((empty_start + empty_count - 1) as u32, 0);
                    let range = Range::new(start, end);
                    let diagnostic = Diagnostic::new(
                        range,
                        DiagnosticSeverity::Warning,
                        DiagnosticCode::TooManyEmptyLines,
                        format!(
                            "Too many consecutive empty lines ({} > {})",
                            empty_count, config.max_empty_lines
                        ),
                    );
                    diagnostics.push(diagnostic);
                }
                empty_count = 0;
            }
        }

        diagnostics
    }

    fn analyze_indentation(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();
        let mut has_tabs = false;
        let mut has_spaces = false;

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;

            if line.trim().is_empty() {
                continue;
            }

            let leading_whitespace: String = line
                .chars()
                .take_while(|&c| c == ' ' || c == '\t')
                .collect();

            if leading_whitespace.contains('\t') {
                has_tabs = true;
            }
            if leading_whitespace.contains(' ') {
                has_spaces = true;
            }

            // Check for mixed indentation in a single line
            if leading_whitespace.contains('\t') && leading_whitespace.contains(' ') {
                let range = Range::new(
                    Position::new(line_num, 0),
                    Position::new(line_num, leading_whitespace.len() as u32),
                );
                let diagnostic = Diagnostic::new(
                    range,
                    DiagnosticSeverity::Warning,
                    DiagnosticCode::MixedIndentation,
                    "Mixed tabs and spaces in indentation".to_string(),
                );
                diagnostics.push(diagnostic);
            }
        }

        // Check for mixed indentation across the file
        if has_tabs && has_spaces {
            if let Some(preferred) = &config.preferred_indent {
                let range = Range::single_position(Position::new(0, 0));
                let diagnostic = Diagnostic::new(
                    range,
                    DiagnosticSeverity::Information,
                    DiagnosticCode::InconsistentIndentation,
                    format!("File uses mixed indentation; prefer {}", preferred),
                );
                diagnostics.push(diagnostic);
            }
        }

        diagnostics
    }
}

/// Semantic analysis module for nftables-specific validation
pub struct SemanticAnalyzer;

impl AnalyzerModule for SemanticAnalyzer {
    fn analyze(&self, source: &str, _config: &DiagnosticConfig) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();

        // Parse and validate nftables-specific constructs
        diagnostics.extend(self.validate_table_declarations(source));
        diagnostics.extend(self.validate_chain_declarations(source));
        diagnostics.extend(self.validate_cidr_notation(source));
        diagnostics.extend(self.check_for_redundant_rules(source));

        diagnostics
    }

    fn name(&self) -> &'static str {
        "semantic"
    }
}

impl SemanticAnalyzer {
    fn validate_table_declarations(&self, source: &str) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();
        let mut seen_tables = HashSet::new();

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;
            let trimmed = line.trim();

            if trimmed.starts_with("table ") {
                let parts: Vec<&str> = trimmed.split_whitespace().collect();
                if parts.len() >= 3 {
                    let family = parts[1];
                    let name = parts[2];

                    // Validate family
                    match family {
                        "ip" | "ip6" | "inet" | "arp" | "bridge" | "netdev" => {
                            // Valid family
                        }
                        _ => {
                            let start_col = line.find(family).unwrap_or(0);
                            let range = Range::new(
                                Position::new(line_num, start_col as u32),
                                Position::new(line_num, (start_col + family.len()) as u32),
                            );
                            let diagnostic = Diagnostic::new(
                                range,
                                DiagnosticSeverity::Error,
                                DiagnosticCode::InvalidTableFamily,
                                format!("Unknown table family: '{}'", family),
                            );
                            diagnostics.push(diagnostic);
                        }
                    }

                    // Check for duplicate table names
                    let table_key = format!("{}:{}", family, name);
                    if seen_tables.contains(&table_key) {
                        let start_col = line.find(name).unwrap_or(0);
                        let range = Range::new(
                            Position::new(line_num, start_col as u32),
                            Position::new(line_num, (start_col + name.len()) as u32),
                        );
                        let diagnostic = Diagnostic::new(
                            range,
                            DiagnosticSeverity::Error,
                            DiagnosticCode::DuplicateTableName,
                            format!("Duplicate table name: '{}'", name),
                        );
                        diagnostics.push(diagnostic);
                    } else {
                        seen_tables.insert(table_key);
                    }
                }
            }
        }

        diagnostics
    }

    fn validate_chain_declarations(&self, source: &str) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;
            let trimmed = line.trim();

            if trimmed.starts_with("type ") && trimmed.contains("hook") {
                // Validate chain type and hook
                if let Some(hook_pos) = trimmed.find("hook") {
                    let hook_part = &trimmed[hook_pos..];
                    let hook_words: Vec<&str> = hook_part.split_whitespace().collect();
                    if hook_words.len() >= 2 {
                        let hook = hook_words[1];
                        match hook {
                            "input" | "output" | "forward" | "prerouting" | "postrouting" => {
                                // Valid hook
                            }
                            _ => {
                                let start_col = line.find(hook).unwrap_or(0);
                                let range = Range::new(
                                    Position::new(line_num, start_col as u32),
                                    Position::new(line_num, (start_col + hook.len()) as u32),
                                );
                                let diagnostic = Diagnostic::new(
                                    range,
                                    DiagnosticSeverity::Error,
                                    DiagnosticCode::UnknownHook,
                                    format!("Unknown hook: '{}'", hook),
                                );
                                diagnostics.push(diagnostic);
                            }
                        }
                    }
                }

                // Check for missing policy in filter chains
                if trimmed.contains("type filter") && !trimmed.contains("policy") {
                    let range = Range::new(
                        Position::new(line_num, 0),
                        Position::new(line_num, line.len() as u32),
                    );
                    let diagnostic = Diagnostic::new(
                        range,
                        DiagnosticSeverity::Warning,
                        DiagnosticCode::ChainWithoutPolicy,
                        "Filter chain should have an explicit policy".to_string(),
                    );
                    diagnostics.push(diagnostic);
                }
            }
        }

        diagnostics
    }

    fn validate_cidr_notation(&self, source: &str) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;

            // Simple CIDR validation without regex dependency
            let words: Vec<&str> = line.split_whitespace().collect();
            for word in words {
                if word.contains('/') && word.chars().any(|c| c.is_ascii_digit()) {
                    if let Some(slash_pos) = word.find('/') {
                        let (ip_part, prefix_part) = word.split_at(slash_pos);
                        let prefix_part = &prefix_part[1..]; // Remove the '/'

                        // Basic IPv4 validation
                        let ip_parts: Vec<&str> = ip_part.split('.').collect();
                        if ip_parts.len() == 4 {
                            let mut valid_ip = true;
                            for part in ip_parts {
                                if part.parse::<u8>().is_err() {
                                    valid_ip = false;
                                    break;
                                }
                            }

                            // Validate prefix length
                            if valid_ip {
                                if let Ok(prefix) = prefix_part.parse::<u8>() {
                                    if prefix > 32 {
                                        if let Some(start_col) = line.find(word) {
                                            let range = Range::new(
                                                Position::new(line_num, start_col as u32),
                                                Position::new(
                                                    line_num,
                                                    (start_col + word.len()) as u32,
                                                ),
                                            );
                                            let diagnostic = Diagnostic::new(
                                                range,
                                                DiagnosticSeverity::Error,
                                                DiagnosticCode::InvalidCidrNotation,
                                                format!(
                                                    "Invalid CIDR prefix length: '{}' (max 32 for IPv4)",
                                                    prefix
                                                ),
                                            );
                                            diagnostics.push(diagnostic);
                                        }
                                    }
                                } else if !prefix_part.is_empty() {
                                    if let Some(start_col) = line.find(word) {
                                        let range = Range::new(
                                            Position::new(line_num, start_col as u32),
                                            Position::new(
                                                line_num,
                                                (start_col + word.len()) as u32,
                                            ),
                                        );
                                        let diagnostic = Diagnostic::new(
                                            range,
                                            DiagnosticSeverity::Error,
                                            DiagnosticCode::InvalidCidrNotation,
                                            format!("Invalid CIDR notation: '{}'", word),
                                        );
                                        diagnostics.push(diagnostic);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        diagnostics
    }

    fn check_for_redundant_rules(&self, source: &str) -> Vec<Diagnostic> {
        let mut diagnostics = Vec::new();
        // Track each rule together with the line it first appeared on so the
        // message can point at a real source line.
        let mut rules: Vec<(u32, String)> = Vec::new();

        for (line_idx, line) in source.lines().enumerate() {
            let line_num = line_idx as u32;
            let trimmed = line.trim();

            // Simple rule detection (lines that contain actions)
            if trimmed.contains(" accept")
                || trimmed.contains(" drop")
                || trimmed.contains(" reject")
            {
                for (first_line, existing_rule) in &rules {
                    if existing_rule == trimmed {
                        let range = Range::new(
                            Position::new(line_num, 0),
                            Position::new(line_num, line.len() as u32),
                        );
                        let diagnostic = Diagnostic::new(
                            range,
                            DiagnosticSeverity::Warning,
                            DiagnosticCode::RedundantRule,
                            format!(
                                "Duplicate rule found (first occurrence at line {})",
                                first_line + 1
                            ),
                        );
                        diagnostics.push(diagnostic);
                        break;
                    }
                }
                rules.push((line_num, trimmed.to_string()));
            }
        }

        diagnostics
    }
}

/// Main diagnostic analyzer
pub struct DiagnosticAnalyzer {
    config: DiagnosticConfig,
}

impl DiagnosticAnalyzer {
    pub fn new(config: DiagnosticConfig) -> Self {
        Self { config }
    }
    /// Analyze source code with all available modules
    pub fn analyze(&self, source: &str, file_path: &str) -> DiagnosticCollection {
        self.analyze_with_modules(
            source,
            file_path,
            &["lexical", "syntax", "style", "semantic"],
        )
    }

    /// Analyze source code with specific modules
    pub fn analyze_with_modules(
        &self,
        source: &str,
        file_path: &str,
        module_names: &[&str],
    ) -> DiagnosticCollection {
        let mut collection = DiagnosticCollection::new(file_path.to_string(), source.to_string());

        let modules: Vec<Box<dyn AnalyzerModule>> = vec![
            Box::new(LexicalAnalyzer),
            Box::new(SyntaxAnalyzer),
            Box::new(StyleAnalyzer),
            Box::new(SemanticAnalyzer),
        ];

        for module in modules {
            if module_names.contains(&module.name()) {
                let diagnostics = module.analyze(source, &self.config);
                collection.extend(diagnostics);
            }
        }

        collection
    }
}

impl Default for DiagnosticAnalyzer {
    fn default() -> Self {
        Self::new(DiagnosticConfig::default())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_diagnostic_creation() {
        let range = Range::new(Position::new(0, 0), Position::new(0, 10));
        let diagnostic = Diagnostic::new(
            range,
            DiagnosticSeverity::Error,
            DiagnosticCode::SyntaxError,
            "Test error".to_string(),
        );

        assert_eq!(diagnostic.severity, DiagnosticSeverity::Error);
        assert_eq!(diagnostic.code, DiagnosticCode::SyntaxError);
        assert_eq!(diagnostic.message, "Test error");
    }

    #[test]
    fn test_position_from_text_size() {
        let source = "line 1\nline 2\nline 3";
        let pos = Position::from_text_size(TextSize::from(8), source);

        assert_eq!(pos.line, 1);
        assert_eq!(pos.character, 1);
    }

    #[test]
    fn test_style_analysis() {
        let analyzer = DiagnosticAnalyzer::default();
        let source = "table inet filter {\n chain input \n chain output\n}";
        let diagnostics = analyzer.analyze(source, "test.nft");

        // Should find missing shebang and trailing whitespace
        assert!(!diagnostics.diagnostics.is_empty());
        assert!(
            diagnostics
                .diagnostics
                .iter()
                .any(|d| d.code == DiagnosticCode::MissingShebang)
        );
        assert!(
            diagnostics
                .diagnostics
                .iter()
                .any(|d| d.code == DiagnosticCode::TrailingWhitespace)
        );
    }
}
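// A short end-to-end sketch of the public entry points, kept separate from the
// unit tests above. The nftables snippet and the "example.nft" path are
// illustrative; the assertions stick to behavior defined in this module (style
// and semantic checks, JSON and human-readable rendering) and avoid assumptions
// about the lexer and parser implementations.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn test_analyze_clean_config_and_render() {
        let source = "#!/usr/sbin/nft -f\ntable inet filter {\n\tchain input {\n\t\ttype filter hook input priority 0; policy drop;\n\t\tip saddr 10.0.0.0/8 accept\n\t}\n}\n";
        let analyzer = DiagnosticAnalyzer::default();
        let collection = analyzer.analyze(source, "example.nft");

        // The shebang is present and the CIDR prefix is valid, so neither check should fire.
        assert!(
            !collection
                .diagnostics
                .iter()
                .any(|d| d.code == DiagnosticCode::MissingShebang)
        );
        assert!(
            !collection
                .diagnostics
                .iter()
                .any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
        );

        // Both output formats should render regardless of how many findings exist.
        let json = collection.to_json().expect("diagnostics serialize to JSON");
        assert!(json.contains("example.nft"));
        assert!(!collection.to_human_readable().is_empty());
    }

    #[test]
    fn test_semantic_module_flags_bad_cidr() {
        // Restrict the run to the semantic module so lexer/parser behavior
        // does not affect the assertion.
        let analyzer = DiagnosticAnalyzer::default();
        let collection = analyzer.analyze_with_modules(
            "ip saddr 10.0.0.0/33 drop",
            "example.nft",
            &["semantic"],
        );

        assert!(
            collection
                .diagnostics
                .iter()
                .any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
        );
    }
}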