docs: update rustdoc comments

raf 2025-06-02 19:03:12 +03:00
commit ee54f8f8d3
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
3 changed files with 410 additions and 60 deletions


@@ -150,7 +150,7 @@ pub enum SyntaxKind {
 // Error recovery
 Error = 106,
-// Additional protocol keywords
+// Protocol keywords for nftables
 VmapKw = 107,
 NdRouterAdvertKw = 108,
 NdNeighborSolicitKw = 109,
@@ -237,7 +237,7 @@ impl From<TokenKind> for SyntaxKind {
 New => SyntaxKind::NewKw,
 Invalid => SyntaxKind::InvalidKw,
-// Additional protocol keywords
+// Protocol keywords for ICMP/ICMPv6
 Vmap => SyntaxKind::VmapKw,
 NdRouterAdvert => SyntaxKind::NdRouterAdvertKw,
 NdNeighborSolicit => SyntaxKind::NdNeighborSolicitKw,
@@ -358,7 +358,7 @@ pub enum CstError {
 /// Result type for CST operations
 pub type CstResult<T> = Result<T, CstError>;
-/// Basic CST builder
+/// CST builder for nftables syntax
 pub struct CstBuilder;
 impl CstBuilder {
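Note: the `From<TokenKind>` mapping touched above follows the usual lexer-token-to-CST-kind pattern. A minimal, self-contained sketch of that pattern (abbreviated stand-in enums, not the crate's actual definitions):

```rust
// Stand-in enums, reduced to two keywords for illustration.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    Vmap,
    NdRouterAdvert,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SyntaxKind {
    Error = 106,
    VmapKw = 107,
    NdRouterAdvertKw = 108,
}

impl From<TokenKind> for SyntaxKind {
    fn from(token: TokenKind) -> Self {
        // Each lexer token maps onto exactly one CST node kind.
        match token {
            TokenKind::Vmap => SyntaxKind::VmapKw,
            TokenKind::NdRouterAdvert => SyntaxKind::NdRouterAdvertKw,
        }
    }
}
```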


@@ -10,9 +10,9 @@
 use crate::lexer::LexError;
 use crate::parser::{ParseError, Parser};
 use serde::{Deserialize, Serialize};
-use serde_json;
 use std::collections::{HashMap, HashSet};
 use std::fmt;
+use std::net::IpAddr;
 use text_size::TextSize;
 /// Diagnostic severity levels following LSP specification
@@ -171,10 +171,23 @@ pub struct Position {
 }
 impl Position {
+/// Creates a new position with line and character coordinates
+///
+/// # Parameters
+/// * `line` - Zero-based line number
+/// * `character` - Zero-based character offset in the line
 pub fn new(line: u32, character: u32) -> Self {
 Self { line, character }
 }
+/// Converts a text offset to line and character coordinates
+///
+/// # Parameters
+/// * `text_size` - Byte offset in the source code
+/// * `source` - Complete source text to analyze for line breaks
+///
+/// # Returns
+/// A position with line and character coordinates corresponding to the text offset
 pub fn from_text_size(text_size: TextSize, source: &str) -> Self {
 let mut line = 0;
 let mut character = 0;
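The offset-to-position conversion documented above is a linear scan counting newlines. A self-contained sketch of the same idea, using a plain `usize` byte offset in place of `TextSize` (names illustrative):

```rust
/// Illustrative offset-to-position conversion mirroring the logic above.
fn position_from_offset(offset: usize, source: &str) -> (u32, u32) {
    let (mut line, mut character) = (0u32, 0u32);
    for (i, ch) in source.char_indices() {
        if i >= offset {
            break; // reached the target offset
        }
        if ch == '\n' {
            line += 1; // a newline starts a new line
            character = 0;
        } else {
            character += 1;
        }
    }
    (line, character)
}

// e.g. position_from_offset(7, "table inet\nfilter") == (0, 7)
```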
@@ -204,10 +217,21 @@ pub struct Range {
 }
 impl Range {
+/// Creates a new range with start and end positions
+///
+/// # Parameters
+/// * `start` - Starting position (line and character)
+/// * `end` - Ending position (line and character)
 pub fn new(start: Position, end: Position) -> Self {
 Self { start, end }
 }
+/// Creates a range that covers only a single position
+///
+/// Useful for diagnostics that point to a specific location rather than a range
+///
+/// # Parameters
+/// * `position` - The position to create a single-point range for
 pub fn single_position(position: Position) -> Self {
 Self {
 start: position.clone(),
@@ -266,6 +290,16 @@ pub struct Diagnostic {
 }
 impl Diagnostic {
+/// Creates a new diagnostic with essential information
+///
+/// # Parameters
+/// * `range` - The source code range the diagnostic applies to
+/// * `severity` - The severity level of the diagnostic
+/// * `code` - The diagnostic code indicating the type of issue
+/// * `message` - A human-readable description of the diagnostic
+///
+/// # Returns
+/// A new diagnostic with default values for other fields
 pub fn new(
 range: Range,
 severity: DiagnosticSeverity,
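`Range::single_position` plus `Diagnostic::new` cover the common case of flagging one exact spot. A simplified, self-contained sketch (field set reduced; not the crate's full types):

```rust
#[derive(Debug, Clone)]
struct Position { line: u32, character: u32 }

#[derive(Debug, Clone)]
struct Range { start: Position, end: Position }

impl Range {
    fn single_position(position: Position) -> Self {
        // start == end: a zero-width range pointing at one spot
        Range { start: position.clone(), end: position }
    }
}

#[derive(Debug)]
struct Diagnostic { range: Range, severity: &'static str, message: String }

fn main() {
    let at = Position { line: 3, character: 12 };
    let d = Diagnostic {
        range: Range::single_position(at),
        severity: "error",
        message: "unexpected token".into(),
    };
    println!("{d:?}");
}
```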
@@ -294,6 +328,11 @@ pub struct DiagnosticCollection {
 }
 impl DiagnosticCollection {
+/// Creates a new diagnostic collection for a file
+///
+/// # Parameters
+/// * `file_path` - Path to the file being analyzed
+/// * `source_text` - The content of the file
 pub fn new(file_path: String, source_text: String) -> Self {
 Self {
 diagnostics: Vec::new(),
@@ -302,26 +341,44 @@ impl DiagnosticCollection {
 }
 }
+/// Adds multiple diagnostics to the collection
+///
+/// # Parameters
+/// * `diagnostics` - Vector of diagnostics to add
 pub fn extend(&mut self, diagnostics: Vec<Diagnostic>) {
 self.diagnostics.extend(diagnostics);
 }
+/// Returns an iterator over all error-level diagnostics in the collection
 pub fn errors(&self) -> impl Iterator<Item = &Diagnostic> {
 self.diagnostics
 .iter()
 .filter(|d| d.severity == DiagnosticSeverity::Error)
 }
+/// Checks if the collection contains any error-level diagnostics
 pub fn has_errors(&self) -> bool {
 self.errors().count() > 0
 }
-/// Convert to JSON for LSP or tooling integration
+/// Converts the diagnostic collection to JSON format
+///
+/// Useful for integrating with Language Server Protocol (LSP) clients
+/// or other tools that consume structured diagnostic data.
+///
+/// # Returns
+/// A JSON string representation of the diagnostic collection, or an error if serialization fails
 pub fn to_json(&self) -> serde_json::Result<String> {
 serde_json::to_string_pretty(self)
 }
-/// Convert to a human-readable format
+/// Converts the diagnostic collection to a human-readable text format
+///
+/// Produces a formatted report suitable for display in a terminal,
+/// with file locations, error codes, and code snippets for context.
+///
+/// # Returns
+/// A formatted string containing all diagnostics with relevant context
 pub fn to_human_readable(&self) -> String {
 let mut output = String::new();
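`to_json` relies on serde's derive support. A minimal sketch of the same serialization shape with reduced fields (serde with the `derive` feature and serde_json assumed as dependencies, as in this file's imports):

```rust
use serde::Serialize;

// Simplified stand-ins for the crate's types; fields reduced for brevity.
#[derive(Serialize)]
struct Diagnostic { message: String, severity: String }

#[derive(Serialize)]
struct DiagnosticCollection {
    file_path: String,
    diagnostics: Vec<Diagnostic>,
}

fn main() -> serde_json::Result<()> {
    let collection = DiagnosticCollection {
        file_path: "firewall.nft".into(),
        diagnostics: vec![Diagnostic {
            message: "unknown hook 'inptu'".into(),
            severity: "error".into(),
        }],
    };
    // Same call the method above uses: pretty-printed JSON for tooling.
    println!("{}", serde_json::to_string_pretty(&collection)?);
    Ok(())
}
```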
@@ -359,6 +416,18 @@ impl DiagnosticCollection {
 output
 }
+/// Extracts source code lines around a diagnostic location
+///
+/// Provides context for a diagnostic by showing the lines of code surrounding
+/// the issue, with line numbers and a marker pointing to the problematic line.
+///
+/// # Parameters
+/// * `range` - The range in the source code to provide context for
+/// * `context_lines` - Number of lines to include before and after the range
+///
+/// # Returns
+/// A vector of formatted strings containing the context lines with line numbers,
+/// or None if the range is invalid
 fn get_context_lines(&self, range: &Range, context_lines: usize) -> Option<Vec<String>> {
 let lines: Vec<&str> = self.source_text.lines().collect();
 let start_line = range.start.line as usize;
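The windowing in `get_context_lines` is plain slice arithmetic. An illustrative, self-contained version over a zero-based target line (name and formatting are illustrative, not the private method above):

```rust
/// Illustrative context-window extraction around a zero-based target line.
fn context_lines(source: &str, target: usize, context: usize) -> Option<Vec<String>> {
    let lines: Vec<&str> = source.lines().collect();
    if target >= lines.len() {
        return None; // range points past the end of the file
    }
    let start = target.saturating_sub(context);
    let end = (target + context + 1).min(lines.len());
    Some(
        (start..end)
            .map(|i| {
                // Mark the offending line; show 1-based line numbers.
                let marker = if i == target { ">" } else { " " };
                format!("{} {:>4} | {}", marker, i + 1, lines[i])
            })
            .collect(),
    )
}
```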
@@ -435,9 +504,7 @@ impl AnalyzerModule for LexicalAnalyzer {
 let mut lexer = NftablesLexer::new(source);
 match lexer.tokenize() {
-Ok(_) => {
-// No lexical errors
-}
+Ok(_) => {}
 Err(lex_error) => {
 let diagnostic = Self::lex_error_to_diagnostic(&lex_error, source);
 diagnostics.push(diagnostic);
@@ -453,6 +520,17 @@
 }
 impl LexicalAnalyzer {
+/// Converts a lexical error to a diagnostic
+///
+/// Translates lexer-specific errors into the generic diagnostic format
+/// with appropriate severity, code, and location information.
+///
+/// # Parameters
+/// * `error` - The lexical error to convert
+/// * `source` - Source code for position calculation
+///
+/// # Returns
+/// A diagnostic describing the lexical error
 pub fn lex_error_to_diagnostic(error: &LexError, source: &str) -> Diagnostic {
 match error {
 LexError::InvalidToken { position, text } => {
@@ -508,18 +586,14 @@ impl AnalyzerModule for SyntaxAnalyzer {
 Ok(tokens) => {
 let mut parser = Parser::new(tokens);
 match parser.parse() {
-Ok(_) => {
-// No parse errors
-}
+Ok(_) => {}
 Err(parse_error) => {
 let diagnostic = Self::parse_error_to_diagnostic(&parse_error, source);
 diagnostics.push(diagnostic);
 }
 }
 }
-Err(_) => {
-// Already handled in lexical analysis
-}
+Err(_) => {}
 }
 diagnostics
@@ -531,6 +605,17 @@ impl AnalyzerModule for SyntaxAnalyzer {
 }
 impl SyntaxAnalyzer {
+/// Converts a parse error to a diagnostic
+///
+/// Translates parser-specific errors into the generic diagnostic format
+/// with appropriate severity, code, and meaningful error messages.
+///
+/// # Parameters
+/// * `error` - The parse error to convert
+/// * `_source` - Source code for position calculation
+///
+/// # Returns
+/// A diagnostic describing the syntax error
 fn parse_error_to_diagnostic(error: &ParseError, _source: &str) -> Diagnostic {
 match error {
 ParseError::UnexpectedToken {
@@ -607,12 +692,9 @@ impl AnalyzerModule for StyleAnalyzer {
 fn analyze(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
-// Only perform style analysis if enabled
 if !config.enable_style_warnings {
 return diagnostics;
 }
-// Check for missing shebang
 if !source.starts_with("#!") {
 let range = Range::new(Position::new(0, 0), Position::new(0, 0));
 let diagnostic = Diagnostic::new(
@@ -637,6 +719,11 @@ impl AnalyzerModule for StyleAnalyzer {
 }
 impl StyleAnalyzer {
+/// Analyzes line issues in nftables configuration
+///
+/// Detects:
+/// - Lines exceeding maximum length
+/// - Trailing whitespace at end of lines
 fn analyze_line_issues(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
@@ -680,6 +767,11 @@ impl StyleAnalyzer {
 diagnostics
 }
+/// Analyzes whitespace issues in nftables configuration
+///
+/// Detects:
+/// - Too many consecutive empty lines
+/// - Trailing empty lines at end of file
 fn analyze_whitespace_issues(
 &self,
 source: &str,
@@ -736,6 +828,12 @@ impl StyleAnalyzer {
 diagnostics
 }
+/// Analyzes indentation consistency in nftables configuration
+///
+/// Checks for:
+/// - Mixed tabs and spaces in a single line
+/// - Inconsistent indentation styles across the file
+/// - Adherence to preferred indentation style if specified
 fn analyze_indentation(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
 let mut has_tabs = false;
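The three style checks above reduce to simple per-line scans. A std-only sketch that distills them (threshold and message texts illustrative):

```rust
/// Illustrative style checks: long lines, trailing whitespace, mixed indentation.
fn style_issues(source: &str, max_line_length: usize) -> Vec<String> {
    let mut issues = Vec::new();
    for (idx, line) in source.lines().enumerate() {
        let n = idx + 1; // report 1-based line numbers
        if line.len() > max_line_length {
            issues.push(format!("line {n}: exceeds {max_line_length} characters"));
        }
        if line.ends_with(' ') || line.ends_with('\t') {
            issues.push(format!("line {n}: trailing whitespace"));
        }
        // Leading run of whitespace; flagging a mix of tabs and spaces.
        let indent: String = line.chars().take_while(|c| *c == ' ' || *c == '\t').collect();
        if indent.contains(' ') && indent.contains('\t') {
            issues.push(format!("line {n}: mixed tabs and spaces"));
        }
    }
    issues
}
```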
@@ -810,23 +908,19 @@ impl AnalyzerModule for SemanticAnalyzer {
 fn analyze(&self, source: &str, config: &DiagnosticConfig) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
-// Always run semantic validation (syntax/semantic errors)
 diagnostics.extend(self.validate_table_declarations(source));
 diagnostics.extend(self.validate_chain_declarations_semantic(source));
 diagnostics.extend(self.validate_cidr_notation(source));
-// Best practices checks (only if enabled)
 if config.enable_best_practices {
 diagnostics.extend(self.validate_chain_best_practices(source));
 diagnostics.extend(self.check_for_redundant_rules(source));
 }
-// Performance hints (only if enabled)
 if config.enable_performance_hints {
 diagnostics.extend(self.check_performance_hints(source));
 }
-// Security warnings (only if enabled)
 if config.enable_security_warnings {
 diagnostics.extend(self.check_security_warnings(source));
 }
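The dispatch is plain flag-gating on the configuration. A reduced sketch of that structure (types simplified to the flags visible in this diff, with stub checks standing in for the real ones):

```rust
// Reduced sketch of the config-gated dispatch above; types simplified.
struct DiagnosticConfig {
    enable_best_practices: bool,
    enable_performance_hints: bool,
    enable_security_warnings: bool,
}

fn analyze(source: &str, config: &DiagnosticConfig) -> Vec<String> {
    let mut diagnostics = Vec::new();
    // Semantic validation always runs.
    diagnostics.extend(validate_cidr(source));
    // Opt-in categories are gated on their flags.
    if config.enable_best_practices {
        diagnostics.extend(best_practices(source));
    }
    if config.enable_performance_hints {
        diagnostics.extend(performance_hints(source));
    }
    if config.enable_security_warnings {
        diagnostics.extend(security_warnings(source));
    }
    diagnostics
}

// Stubs standing in for the real checks.
fn validate_cidr(_: &str) -> Vec<String> { Vec::new() }
fn best_practices(_: &str) -> Vec<String> { Vec::new() }
fn performance_hints(_: &str) -> Vec<String> { Vec::new() }
fn security_warnings(_: &str) -> Vec<String> { Vec::new() }
```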
@@ -840,6 +934,11 @@ impl AnalyzerModule for SemanticAnalyzer {
 }
 impl SemanticAnalyzer {
+/// Validates table declarations in nftables configuration
+///
+/// Checks for:
+/// - Valid table family (ip, ip6, inet, arp, bridge, netdev)
+/// - Duplicate table names
 fn validate_table_declarations(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
 let mut seen_tables = HashSet::new();
@@ -854,11 +953,8 @@ impl SemanticAnalyzer {
 let family = parts[1];
 let name = parts[2];
-// Validate family
 match family {
-"ip" | "ip6" | "inet" | "arp" | "bridge" | "netdev" => {
-// Valid family
-}
+"ip" | "ip6" | "inet" | "arp" | "bridge" | "netdev" => {}
 _ => {
 let start_col = line.find(family).unwrap_or(0);
 let range = Range::new(
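The family check is a match over the token that follows `table`. A std-only sketch of the same validation, including the duplicate-name check documented above (diagnostics reduced to strings; the duplicate key is illustrative):

```rust
use std::collections::HashSet;

/// Illustrative table-declaration check: unknown family or duplicate name.
fn check_tables(source: &str) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut issues = Vec::new();
    for line in source.lines() {
        let parts: Vec<&str> = line.split_whitespace().collect();
        if parts.len() >= 3 && parts[0] == "table" {
            let (family, name) = (parts[1], parts[2]);
            match family {
                "ip" | "ip6" | "inet" | "arp" | "bridge" | "netdev" => {}
                _ => issues.push(format!("invalid table family '{family}'")),
            }
            // insert() returns false when the key was already present.
            if !seen.insert((family.to_string(), name.to_string())) {
                issues.push(format!("duplicate table '{family} {name}'"));
            }
        }
    }
    issues
}
```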
@@ -900,6 +996,11 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Validates chain declarations for correct hook types
+///
+/// Checks for valid hooks in chain declarations:
+/// - input, output, forward, prerouting, postrouting
+/// - Reports unknown hooks as errors
 fn validate_chain_declarations_semantic(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
@@ -908,7 +1009,6 @@ impl SemanticAnalyzer {
 let trimmed = line.trim();
 if trimmed.starts_with("type ") && trimmed.contains("hook") {
-// Validate chain type and hook (semantic validation)
 if let Some(hook_pos) = trimmed.find("hook") {
 let hook_part = &trimmed[hook_pos..];
 let hook_words: Vec<&str> = hook_part.split_whitespace().collect();
@@ -916,9 +1016,7 @@ impl SemanticAnalyzer {
 if hook_words.len() >= 2 {
 let hook = hook_words[1];
 match hook {
-"input" | "output" | "forward" | "prerouting" | "postrouting" => {
-// Valid hook
-}
+"input" | "output" | "forward" | "prerouting" | "postrouting" => {}
 _ => {
 let start_col = line.find(hook).unwrap_or(0);
 let range = Range::new(
@@ -942,6 +1040,10 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Checks for best practices in nftables chain definitions
+///
+/// Verifies that:
+/// - Filter chains have explicit policies defined
 fn validate_chain_best_practices(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
@@ -949,7 +1051,6 @@ impl SemanticAnalyzer {
 let line_num = line_idx as u32;
 let trimmed = line.trim();
-// Check for missing policy in filter chains (best practice)
 if trimmed.contains("type filter") && !trimmed.contains("policy") {
 let range = Range::new(
 Position::new(line_num, 0),
@@ -968,6 +1069,11 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Analyzes nftables configuration for performance optimizations
+///
+/// Checks for:
+/// - Large sets without timeouts
+/// - Rules without counters for monitoring
 fn check_performance_hints(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
@@ -975,9 +1081,7 @@ impl SemanticAnalyzer {
 let line_num = line_idx as u32;
 let trimmed = line.trim();
-// Check for large sets without timeout (performance hint)
 if trimmed.contains("set ") && trimmed.contains("{") && !trimmed.contains("timeout") {
-// Simple heuristic: if set definition is long, suggest timeout
 if trimmed.len() > 100 {
 let range = Range::new(
 Position::new(line_num, 0),
@@ -1015,6 +1119,10 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Checks for security risks in nftables configuration
+///
+/// Identifies:
+/// - Overly permissive rules that accept traffic from anywhere (0.0.0.0/0 or ::/0)
 fn check_security_warnings(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
@@ -1044,35 +1152,62 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Validates CIDR notation in nftables configuration
+///
+/// Checks for:
+/// - Valid IP address format (both IPv4 and IPv6)
+/// - Valid prefix length (0-32 for IPv4, 0-128 for IPv6)
+/// - Valid numeric prefix format
 fn validate_cidr_notation(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
 for (line_idx, line) in source.lines().enumerate() {
 let line_num = line_idx as u32;
-// Simple CIDR validation without regex dependency
+// Look for potential CIDR notation patterns
 let words: Vec<&str> = line.split_whitespace().collect();
 for word in words {
 if word.contains('/') && word.chars().any(|c| c.is_ascii_digit()) {
 if let Some(slash_pos) = word.find('/') {
 let (ip_part, prefix_part) = word.split_at(slash_pos);
-let prefix_part = &prefix_part[1..]; // Remove the '/'
-// Basic IPv4 validation
-let ip_parts: Vec<&str> = ip_part.split('.').collect();
-if ip_parts.len() == 4 {
-let mut valid_ip = true;
-for part in ip_parts {
-if part.parse::<u8>().is_err() {
-valid_ip = false;
-break;
-}
-}
+let prefix_part = &prefix_part[1..];
+match ip_part.parse::<IpAddr>() {
+Ok(ip_addr) => match prefix_part.parse::<u8>() {
+Ok(prefix) => {
+let max_prefix = match ip_addr {
+IpAddr::V4(_) => 32,
+IpAddr::V6(_) => 128,
+};
+if prefix > max_prefix {
+if let Some(start_col) = line.find(word) {
+let range = Range::new(
+Position::new(line_num, start_col as u32),
+Position::new(
+line_num,
+(start_col + word.len()) as u32,
+),
+);
+let ip_type = match ip_addr {
+IpAddr::V4(_) => "IPv4",
+IpAddr::V6(_) => "IPv6",
+};
+let diagnostic = Diagnostic::new(
+range,
+DiagnosticSeverity::Error,
+DiagnosticCode::InvalidCidrNotation,
+format!(
+"Invalid CIDR prefix length: '{}' (max {} for {})",
+prefix, max_prefix, ip_type
+),
+);
+diagnostics.push(diagnostic);
+}
+}
+}
 }
-}
-// Validate prefix length
-if valid_ip {
-if let Ok(prefix) = prefix_part.parse::<u8>() {
-if prefix > 32 {
+Err(_) => {
+if !prefix_part.is_empty() {
 if let Some(start_col) = line.find(word) {
 let range = Range::new(
 Position::new(line_num, start_col as u32),
@@ -1086,14 +1221,23 @@ impl SemanticAnalyzer {
 DiagnosticSeverity::Error,
 DiagnosticCode::InvalidCidrNotation,
 format!(
-"Invalid CIDR prefix length: '{}' (max 32 for IPv4)",
-prefix
+"Invalid CIDR prefix: '{}' must be a number",
+prefix_part
 ),
 );
 diagnostics.push(diagnostic);
 }
 }
-} else if !prefix_part.is_empty() {
+}
+},
+Err(_) => {
+if (ip_part.contains('.')
+&& ip_part.chars().any(|c| c.is_ascii_digit()))
+|| (ip_part.contains(':')
+&& ip_part
+.chars()
+.any(|c| c.is_ascii_digit() || c.is_ascii_hexdigit()))
+{
 if let Some(start_col) = line.find(word) {
 let range = Range::new(
 Position::new(line_num, start_col as u32),
@@ -1106,7 +1250,10 @@ impl SemanticAnalyzer {
 range,
 DiagnosticSeverity::Error,
 DiagnosticCode::InvalidCidrNotation,
-format!("Invalid CIDR notation: '{}'", word),
+format!(
+"Invalid IP address in CIDR notation: '{}'",
+ip_part
+),
 );
 diagnostics.push(diagnostic);
 }
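The rewrite delegates address parsing to `std::net::IpAddr` instead of hand-rolling octet checks, which is what makes the IPv6 paths above possible. A condensed, runnable sketch of the same validation (function name and error strings illustrative):

```rust
use std::net::IpAddr;

/// Condensed form of the CIDR check above: parse the address with the
/// standard library, then bound the prefix by address family.
fn check_cidr(word: &str) -> Result<(), String> {
    let Some((ip_part, prefix_part)) = word.split_once('/') else {
        return Err(format!("not CIDR notation: '{word}'"));
    };
    let ip: IpAddr = ip_part
        .parse()
        .map_err(|_| format!("Invalid IP address in CIDR notation: '{ip_part}'"))?;
    let prefix: u8 = prefix_part
        .parse()
        .map_err(|_| format!("Invalid CIDR prefix: '{prefix_part}' must be a number"))?;
    let (max, family) = match ip {
        IpAddr::V4(_) => (32, "IPv4"),
        IpAddr::V6(_) => (128, "IPv6"),
    };
    if prefix > max {
        return Err(format!(
            "Invalid CIDR prefix length: '{prefix}' (max {max} for {family})"
        ));
    }
    Ok(())
}

// check_cidr("192.168.1.0/24") == Ok(()); check_cidr("2001:db8::/129") is an Err.
```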
@@ -1121,6 +1268,10 @@ impl SemanticAnalyzer {
 diagnostics
 }
+/// Identifies redundant rules in nftables configuration
+///
+/// Detects:
+/// - Duplicate accept/drop/reject rules
 fn check_for_redundant_rules(&self, source: &str) -> Vec<Diagnostic> {
 let mut diagnostics = Vec::new();
 let mut seen_rules = HashSet::new();
@@ -1129,12 +1280,10 @@ impl SemanticAnalyzer {
 let line_num = line_idx as u32;
 let trimmed = line.trim();
-// Simple rule detection (lines that contain actions)
 if trimmed.contains(" accept")
 || trimmed.contains(" drop")
 || trimmed.contains(" reject")
 {
-// Check if rule already exists in the HashSet
 if seen_rules.contains(trimmed) {
 let range = Range::new(
 Position::new(line_num, 0),
@@ -1148,7 +1297,6 @@ impl SemanticAnalyzer {
 );
 diagnostics.push(diagnostic);
 } else {
-// Add the rule to the HashSet if it's not a duplicate
 seen_rules.insert(trimmed.to_string());
 }
 }
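Duplicate detection is a single-pass `HashSet` membership test over trimmed rule lines. A std-only sketch:

```rust
use std::collections::HashSet;

/// Illustrative duplicate-rule detection: flags repeated accept/drop/reject lines.
fn redundant_rules(source: &str) -> Vec<usize> {
    let mut seen = HashSet::new();
    let mut duplicates = Vec::new();
    for (idx, line) in source.lines().enumerate() {
        let trimmed = line.trim();
        let is_rule = trimmed.contains(" accept")
            || trimmed.contains(" drop")
            || trimmed.contains(" reject");
        if is_rule && !seen.insert(trimmed.to_string()) {
            duplicates.push(idx); // insert() returns false on a repeat
        }
    }
    duplicates
}
```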
@@ -1164,11 +1312,25 @@ pub struct DiagnosticAnalyzer {
 }
 impl DiagnosticAnalyzer {
+/// Creates a new diagnostic analyzer with the specified configuration
+///
+/// # Parameters
+/// * `config` - Configuration settings for the analyzer
 pub fn new(config: DiagnosticConfig) -> Self {
 Self { config }
 }
-/// Analyze source code with all available modules
+/// Analyzes source code with all standard analysis modules
+///
+/// This is the main entry point for a complete analysis of nftables configurations.
+/// It runs lexical, syntax, style, and semantic analysis on the provided source.
+///
+/// # Parameters
+/// * `source` - Source code to analyze
+/// * `file_path` - Path to the file being analyzed
+///
+/// # Returns
+/// A collection of all diagnostics found in the source
 pub fn analyze(&self, source: &str, file_path: &str) -> DiagnosticCollection {
 self.analyze_with_modules(
 source,
@@ -1177,7 +1339,17 @@ impl DiagnosticAnalyzer {
 )
 }
-/// Analyze source code with specific modules
+/// Analyzes source code with specific analysis modules
+///
+/// Allows running only selected analysis modules for more targeted diagnostics.
+///
+/// # Parameters
+/// * `source` - Source code to analyze
+/// * `file_path` - Path to the file being analyzed
+/// * `module_names` - Names of modules to run ("lexical", "syntax", "style", "semantic")
+///
+/// # Returns
+/// A collection of diagnostics from the selected modules
 pub fn analyze_with_modules(
 &self,
 source: &str,
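Putting the pieces together, consumption of this API looks roughly like the following. This is a hypothetical usage sketch based only on the signatures visible in this diff; `DiagnosticConfig::default()` and the slice argument to `analyze_with_modules` are assumptions:

```rust
// Hypothetical usage sketch; DiagnosticConfig::default() is assumed to exist,
// and the module-names argument is assumed to accept a &[&str].
fn report(source: &str) {
    let analyzer = DiagnosticAnalyzer::new(DiagnosticConfig::default());

    // Full analysis: lexical, syntax, style, and semantic modules.
    let all = analyzer.analyze(source, "firewall.nft");
    if all.has_errors() {
        eprintln!("{}", all.to_human_readable());
    }

    // Targeted analysis: only the semantic module.
    let semantic_only =
        analyzer.analyze_with_modules(source, "firewall.nft", &["semantic"]);
    println!("{}", semantic_only.to_json().unwrap_or_default());
}
```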
@@ -1258,4 +1430,184 @@ mod tests {
 .any(|d| d.code == DiagnosticCode::TrailingWhitespace)
 );
 }
+
+#[test]
+fn test_cidr_validation_ipv4_valid() {
+let analyzer = SemanticAnalyzer;
+let source = "ip saddr 192.168.1.0/24 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify valid IPv4 CIDR notation doesn't produce errors
+assert!(
+!diagnostics
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+}
+
+#[test]
+fn test_cidr_validation_ipv4_invalid_prefix() {
+let analyzer = SemanticAnalyzer;
+let source = "ip saddr 192.168.1.0/33 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify detection of IPv4 prefix exceeding max (32)
+assert!(diagnostics.iter().any(|d| {
+d.code == DiagnosticCode::InvalidCidrNotation
+&& d.message
+.contains("Invalid CIDR prefix length: '33' (max 32 for IPv4)")
+}));
+}
+
+#[test]
+fn test_cidr_validation_ipv6_valid() {
+let analyzer = SemanticAnalyzer;
+let source = "ip6 saddr 2001:db8::/32 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify valid IPv6 CIDR notation doesn't produce errors
+assert!(
+!diagnostics
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+}
+
+#[test]
+fn test_cidr_validation_ipv6_invalid_prefix() {
+let analyzer = SemanticAnalyzer;
+let source = "ip6 saddr 2001:db8::/129 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify detection of IPv6 prefix exceeding max (128)
+assert!(diagnostics.iter().any(|d| {
+d.code == DiagnosticCode::InvalidCidrNotation
+&& d.message
+.contains("Invalid CIDR prefix length: '129' (max 128 for IPv6)")
+}));
+}
+
+#[test]
+fn test_cidr_validation_invalid_ip_address() {
+let analyzer = SemanticAnalyzer;
+let source = "ip saddr 192.168.1.256/24 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify detection of invalid IP address format
+assert!(diagnostics.iter().any(|d| {
+d.code == DiagnosticCode::InvalidCidrNotation
+&& d.message
+.contains("Invalid IP address in CIDR notation: '192.168.1.256'")
+}));
+}
+
+#[test]
+fn test_cidr_validation_invalid_prefix_format() {
+let analyzer = SemanticAnalyzer;
+let source = "ip saddr 192.168.1.0/abc accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify detection of non-numeric CIDR prefix
+assert!(diagnostics.iter().any(|d| {
+d.code == DiagnosticCode::InvalidCidrNotation
+&& d.message
+.contains("Invalid CIDR prefix: 'abc' must be a number")
+}));
+}
+
+#[test]
+fn test_cidr_validation_ipv6_compressed_notation() {
+let analyzer = SemanticAnalyzer;
+let source = "ip6 saddr ::1/128 accept";
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify compressed IPv6 notation is properly handled
+assert!(
+!diagnostics
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+}
+
+#[test]
+fn test_cidr_validation_multiple_cidrs() {
+let analyzer = SemanticAnalyzer;
+let source = r#"
+ip saddr 192.168.1.0/24 accept
+ip6 saddr 2001:db8::/32 accept
+ip saddr 10.0.0.0/8 drop
+ip6 saddr fe80::/64 accept
+"#;
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify multiple valid CIDRs are properly parsed
+assert!(
+!diagnostics
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+}
+
+#[test]
+fn test_cidr_validation_mixed_valid_invalid() {
+let analyzer = SemanticAnalyzer;
+let source = r#"
+ip saddr 192.168.1.0/24 accept
+ip saddr 192.168.1.0/33 drop
+ip6 saddr 2001:db8::/32 accept
+ip6 saddr 2001:db8::/129 drop
+"#;
+let diagnostics = analyzer.validate_cidr_notation(source);
+// Verify detection of specific invalid prefixes in mixed content
+let cidr_errors: Vec<_> = diagnostics
+.iter()
+.filter(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+.collect();
+assert_eq!(cidr_errors.len(), 2);
+// Check for IPv4 error
+assert!(
+cidr_errors
+.iter()
+.any(|d| d.message.contains("33") && d.message.contains("IPv4"))
+);
+// Check for IPv6 error
+assert!(
+cidr_errors
+.iter()
+.any(|d| d.message.contains("129") && d.message.contains("IPv6"))
+);
+}
+
+#[test]
+fn test_cidr_validation_edge_cases() {
+let analyzer = SemanticAnalyzer;
+// Test edge case: maximum allowed prefix lengths
+let source_ipv4_max = "ip saddr 192.168.1.0/32 accept";
+let diagnostics_ipv4 = analyzer.validate_cidr_notation(source_ipv4_max);
+assert!(
+!diagnostics_ipv4
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+
+let source_ipv6_max = "ip6 saddr 2001:db8::/128 accept";
+let diagnostics_ipv6 = analyzer.validate_cidr_notation(source_ipv6_max);
+assert!(
+!diagnostics_ipv6
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+
+// Test edge case: minimum prefix (catch-all address)
+let source_zero = "ip saddr 0.0.0.0/0 accept";
+let diagnostics_zero = analyzer.validate_cidr_notation(source_zero);
+assert!(
+!diagnostics_zero
+.iter()
+.any(|d| d.code == DiagnosticCode::InvalidCidrNotation)
+);
+}
 }


@@ -506,7 +506,6 @@ fn convert_parse_error_to_formatter_error(
 suggestion: None,
 },
 ParseError::MissingToken { expected } => {
-// Try to find current position from last token
 let (line, column) = if let Some(last_token) = tokens.last() {
 position_from_range(&last_token.range, source)
 } else {
@@ -520,7 +519,6 @@
 }
 }
 ParseError::InvalidExpression { message } => {
-// Try to find the current token position
 let (line, column) = find_current_parse_position(tokens, source);
 FormatterError::SyntaxError {
 line,