initial working implementation
This commit is contained in:
parent
b865832523
commit
c4beb3e65f
8 changed files with 3858 additions and 126 deletions
389
src/ast.rs
Normal file
389
src/ast.rs
Normal file
|
@ -0,0 +1,389 @@
|
|||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
|
||||
/// Address family of an nftables table (`ip`, `ip6`, `inet`, ...).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Family {
    Ip,
    Ip6,
    Inet,
    Arp,
    Bridge,
    Netdev,
}

impl fmt::Display for Family {
    /// Writes the lowercase keyword used for this family in nftables source.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Family::Ip => "ip",
            Family::Ip6 => "ip6",
            Family::Inet => "inet",
            Family::Arp => "arp",
            Family::Bridge => "bridge",
            Family::Netdev => "netdev",
        };
        f.write_str(keyword)
    }
}
|
||||
|
||||
/// Kind of base chain (`filter`, `nat`, or `route`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ChainType {
    Filter,
    Nat,
    Route,
}

impl fmt::Display for ChainType {
    /// Writes the lowercase nftables keyword for this chain type.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            ChainType::Filter => "filter",
            ChainType::Nat => "nat",
            ChainType::Route => "route",
        };
        f.write_str(keyword)
    }
}
|
||||
|
||||
/// Netfilter hook a base chain attaches to.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Hook {
    Input,
    Output,
    Forward,
    Prerouting,
    Postrouting,
}

impl fmt::Display for Hook {
    /// Writes the lowercase nftables keyword for this hook.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Hook::Input => "input",
            Hook::Output => "output",
            Hook::Forward => "forward",
            Hook::Prerouting => "prerouting",
            Hook::Postrouting => "postrouting",
        };
        f.write_str(keyword)
    }
}
|
||||
|
||||
/// Default verdict of a base chain for packets no rule matched.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Policy {
    Accept,
    Drop,
}

impl fmt::Display for Policy {
    /// Writes the lowercase nftables keyword for this policy.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Policy::Accept => "accept",
            Policy::Drop => "drop",
        })
    }
}
|
||||
|
||||
/// Represents expressions in nftables rules.
///
/// An expression is either a literal value, a match on a packet field,
/// or a composite (binary comparison, set, range) built from other
/// expressions.
#[derive(Debug, Clone, PartialEq)]
pub enum Expression {
    // Literals
    /// Bare word, e.g. a chain, set, or variable name.
    Identifier(String),
    /// Quoted string literal (quotes and escapes already processed by the lexer).
    String(String),
    /// Unsigned integer literal.
    Number(u64),
    /// IPv4 address kept as the raw source text (e.g. "192.168.1.1").
    IpAddress(String),
    /// IPv6 address kept as the raw source text.
    Ipv6Address(String),
    /// MAC address kept as the raw source text (six colon-separated octets).
    MacAddress(String),

    // Binary operations
    /// Comparison of two sub-expressions, e.g. `tcp dport != 22`.
    Binary {
        left: Box<Expression>,
        operator: BinaryOperator,
        right: Box<Expression>,
    },

    // Protocol matches
    /// Protocol name match (e.g. "tcp", "udp").
    Protocol(String),
    /// Source/destination port match; `value` may be a number, set, or range.
    Port {
        direction: PortDirection,
        value: Box<Expression>,
    },
    /// Source/destination address match; `value` may be an address, set, or range.
    Address {
        direction: AddressDirection,
        value: Box<Expression>,
    },

    // Interface matches
    /// Interface-name match (`iifname`/`oifname`).
    Interface {
        direction: InterfaceDirection,
        name: String,
    },

    // Connection tracking
    /// Conntrack match, e.g. `ct state established` (`field` = "state").
    ConnTrack {
        field: String,
        value: Box<Expression>,
    },

    // Set expressions
    /// Anonymous set of expressions, e.g. `{ 22, 80, 443 }`.
    Set(Vec<Expression>),

    // Range expressions
    /// Inclusive range, e.g. `1024-65535`.
    Range {
        start: Box<Expression>,
        end: Box<Expression>,
    },
}
|
||||
|
||||
/// Comparison operator usable in a [`Expression::Binary`] node.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BinaryOperator {
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
}

impl fmt::Display for BinaryOperator {
    /// Writes the operator exactly as it appears in nftables source.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let symbol = match self {
            BinaryOperator::Eq => "==",
            BinaryOperator::Ne => "!=",
            BinaryOperator::Lt => "<",
            BinaryOperator::Le => "<=",
            BinaryOperator::Gt => ">",
            BinaryOperator::Ge => ">=",
        };
        f.write_str(symbol)
    }
}
|
||||
|
||||
/// Whether a port match applies to the source (`sport`) or
/// destination (`dport`) port.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PortDirection {
    Source,
    Destination,
}

/// Whether an address match applies to the source (`saddr`) or
/// destination (`daddr`) address.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AddressDirection {
    Source,
    Destination,
}

/// Whether an interface match applies to the input (`iifname`) or
/// output (`oifname`) interface.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InterfaceDirection {
    Input,
    Output,
}
|
||||
|
||||
/// Verdict or side effect applied to packets matching a rule.
#[derive(Debug, Clone, PartialEq)]
pub enum Action {
    Accept,
    Drop,
    Reject,
    Return,
    /// Jump to another chain; control returns afterwards.
    Jump(String),
    /// Go to another chain without returning.
    Goto(String),
    Continue,
    /// Log the packet, optionally with a prefix string and syslog level.
    Log {
        prefix: Option<String>,
        level: Option<String>,
    },
    /// Attach a comment to the rule.
    Comment(String),
}

impl fmt::Display for Action {
    /// Renders the action in nftables source syntax.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Action::Accept => f.write_str("accept"),
            Action::Drop => f.write_str("drop"),
            Action::Reject => f.write_str("reject"),
            Action::Return => f.write_str("return"),
            Action::Continue => f.write_str("continue"),
            Action::Jump(target) => write!(f, "jump {}", target),
            Action::Goto(target) => write!(f, "goto {}", target),
            Action::Log { prefix, level } => {
                f.write_str("log")?;
                if let Some(p) = prefix {
                    write!(f, " prefix \"{}\"", p)?;
                }
                if let Some(l) = level {
                    write!(f, " level {}", l)?;
                }
                Ok(())
            }
            Action::Comment(text) => write!(f, "comment \"{}\"", text),
        }
    }
}
|
||||
|
||||
/// Represents a rule in a chain
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Rule {
|
||||
pub expressions: Vec<Expression>,
|
||||
pub action: Action,
|
||||
pub handle: Option<u64>,
|
||||
}
|
||||
|
||||
impl Rule {
|
||||
pub fn new(expressions: Vec<Expression>, action: Action) -> Self {
|
||||
Self {
|
||||
expressions,
|
||||
action,
|
||||
handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_handle(mut self, handle: u64) -> Self {
|
||||
self.handle = Some(handle);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a chain in a table
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Chain {
|
||||
pub name: String,
|
||||
pub chain_type: Option<ChainType>,
|
||||
pub hook: Option<Hook>,
|
||||
pub priority: Option<i32>,
|
||||
pub policy: Option<Policy>,
|
||||
pub device: Option<String>,
|
||||
pub rules: Vec<Rule>,
|
||||
pub handle: Option<u64>,
|
||||
}
|
||||
|
||||
impl Chain {
|
||||
pub fn new(name: String) -> Self {
|
||||
Self {
|
||||
name,
|
||||
chain_type: None,
|
||||
hook: None,
|
||||
priority: None,
|
||||
policy: None,
|
||||
device: None,
|
||||
rules: Vec::new(),
|
||||
handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_type(mut self, chain_type: ChainType) -> Self {
|
||||
self.chain_type = Some(chain_type);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_hook(mut self, hook: Hook) -> Self {
|
||||
self.hook = Some(hook);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_priority(mut self, priority: i32) -> Self {
|
||||
self.priority = Some(priority);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_policy(mut self, policy: Policy) -> Self {
|
||||
self.policy = Some(policy);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_device(mut self, device: String) -> Self {
|
||||
self.device = Some(device);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_rule(mut self, rule: Rule) -> Self {
|
||||
self.rules.push(rule);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a table containing chains
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Table {
|
||||
pub family: Family,
|
||||
pub name: String,
|
||||
pub chains: HashMap<String, Chain>,
|
||||
pub handle: Option<u64>,
|
||||
}
|
||||
|
||||
impl Table {
|
||||
pub fn new(family: Family, name: String) -> Self {
|
||||
Self {
|
||||
family,
|
||||
name,
|
||||
chains: HashMap::new(),
|
||||
handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_chain(mut self, chain: Chain) -> Self {
|
||||
self.chains.insert(chain.name.clone(), chain);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents an include statement.
#[derive(Debug, Clone, PartialEq)]
pub struct Include {
    /// Path exactly as written in the `include` statement.
    pub path: String,
}

/// Represents a define statement (a named constant expression).
#[derive(Debug, Clone, PartialEq)]
pub struct Define {
    /// Name of the variable being defined.
    pub name: String,
    /// Expression the name expands to.
    pub value: Expression,
}
|
||||
|
||||
/// Represents the root of an nftables configuration
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Ruleset {
|
||||
pub includes: Vec<Include>,
|
||||
pub defines: Vec<Define>,
|
||||
pub tables: HashMap<(Family, String), Table>,
|
||||
pub shebang: Option<String>,
|
||||
pub comments: Vec<String>,
|
||||
}
|
||||
|
||||
impl Ruleset {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
includes: Vec::new(),
|
||||
defines: Vec::new(),
|
||||
tables: HashMap::new(),
|
||||
shebang: None,
|
||||
comments: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_shebang(mut self, shebang: String) -> Self {
|
||||
self.shebang = Some(shebang);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_include(mut self, include: Include) -> Self {
|
||||
self.includes.push(include);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_define(mut self, define: Define) -> Self {
|
||||
self.defines.push(define);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_table(mut self, table: Table) -> Self {
|
||||
let key = (table.family.clone(), table.name.clone());
|
||||
self.tables.insert(key, table);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn add_comment(mut self, comment: String) -> Self {
|
||||
self.comments.push(comment);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Ruleset {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
1197
src/cst.rs
Normal file
1197
src/cst.rs
Normal file
File diff suppressed because it is too large
Load diff
416
src/lexer.rs
Normal file
416
src/lexer.rs
Normal file
|
@ -0,0 +1,416 @@
|
|||
use logos::{Lexer, Logos};
|
||||
use std::fmt;
|
||||
use text_size::{TextRange, TextSize};
|
||||
use thiserror::Error;
|
||||
|
||||
/// Lexical analysis errors.
///
/// All positions are byte offsets into the source text, taken from the
/// lexer's span at the point of failure.
#[derive(Error, Debug, PartialEq)]
pub enum LexError {
    /// The lexer could not match any token at `position`; `text` is the
    /// offending slice.
    #[error("Invalid token at position {position}: {text}")]
    InvalidToken { position: usize, text: String },
    /// A `"` opened a string literal that was never closed.
    #[error("Unterminated string literal starting at position {position}")]
    UnterminatedString { position: usize },
    /// A digit sequence that does not form a valid number.
    #[error("Invalid numeric literal: {text}")]
    InvalidNumber { text: String },
}

/// Result type for lexical analysis
pub type LexResult<T> = Result<T, LexError>;
|
||||
|
||||
/// Token kinds for nftables configuration files.
///
/// Keyword variants are matched via `#[token]` literals; literal and
/// identifier variants use `#[regex]` with explicit priorities so that
/// e.g. an IPv6 address is preferred over a MAC address when both could
/// match (NOTE(review): the priority interplay between `Ipv6Address`,
/// `MacAddress`, `NumberLiteral`, and `Identifier` is subtle — verify
/// with logos' ambiguity diagnostics if tokens shift unexpectedly).
#[derive(Logos, Debug, Clone, PartialEq, Eq, Hash)]
#[logos(skip r"[ \t\f]+")] // Skip whitespace but not newlines
pub enum TokenKind {
    // Keywords
    #[token("table")]
    Table,
    #[token("chain")]
    Chain,
    #[token("rule")]
    Rule,
    #[token("set")]
    Set,
    #[token("map")]
    Map,
    #[token("element")]
    Element,
    #[token("include")]
    Include,
    #[token("define")]
    Define,
    #[token("flush")]
    Flush,
    #[token("add")]
    Add,
    #[token("delete")]
    Delete,
    #[token("insert")]
    Insert,
    #[token("replace")]
    Replace,

    // Chain types
    #[token("filter")]
    Filter,
    #[token("nat")]
    Nat,
    #[token("route")]
    Route,

    // Hooks
    #[token("input")]
    Input,
    #[token("output")]
    Output,
    #[token("forward")]
    Forward,
    #[token("prerouting")]
    Prerouting,
    #[token("postrouting")]
    Postrouting,

    // Protocols and families
    #[token("ip")]
    Ip,
    #[token("ip6")]
    Ip6,
    #[token("inet")]
    Inet,
    #[token("arp")]
    Arp,
    #[token("bridge")]
    Bridge,
    #[token("netdev")]
    Netdev,
    #[token("tcp")]
    Tcp,
    #[token("udp")]
    Udp,
    #[token("icmp")]
    Icmp,
    #[token("icmpv6")]
    Icmpv6,

    // Match keywords
    #[token("sport")]
    Sport,
    #[token("dport")]
    Dport,
    #[token("saddr")]
    Saddr,
    #[token("daddr")]
    Daddr,
    #[token("protocol")]
    Protocol,
    #[token("nexthdr")]
    Nexthdr,
    #[token("type")]
    Type,
    #[token("hook")]
    Hook,
    #[token("priority")]
    Priority,
    #[token("policy")]
    Policy,
    #[token("iifname")]
    Iifname,
    #[token("oifname")]
    Oifname,
    #[token("ct")]
    Ct,
    #[token("state")]
    State,
    #[token("established")]
    Established,
    #[token("related")]
    Related,
    #[token("invalid")]
    Invalid,
    #[token("new")]
    New,

    // Actions
    #[token("accept")]
    Accept,
    #[token("drop")]
    Drop,
    #[token("reject")]
    Reject,
    #[token("return")]
    Return,
    #[token("jump")]
    Jump,
    #[token("goto")]
    Goto,
    #[token("continue")]
    Continue,
    #[token("log")]
    Log,
    #[token("comment")]
    Comment,

    // Operators
    #[token("==")]
    Eq,
    #[token("!=")]
    Ne,
    #[token("<=")]
    Le,
    #[token(">=")]
    Ge,
    #[token("<")]
    Lt,
    #[token(">")]
    Gt,

    // Punctuation
    #[token("{")]
    LeftBrace,
    #[token("}")]
    RightBrace,
    #[token("(")]
    LeftParen,
    #[token(")")]
    RightParen,
    #[token("[")]
    LeftBracket,
    #[token("]")]
    RightBracket,
    #[token(",")]
    Comma,
    #[token(";")]
    Semicolon,
    #[token(":")]
    Colon,
    #[token("=")]
    Assign,
    #[token("-")]
    Dash,
    #[token("/")]
    Slash,
    #[token(".")]
    Dot,

    // Literals
    /// Double-quoted string; payload has quotes stripped and `\"`/`\\`
    /// escapes decoded by `string_literal`.
    #[regex(r#""([^"\\]|\\.)*""#, string_literal)]
    StringLiteral(String),
    /// Decimal integer; the callback returns `None` (→ error) if the
    /// digits do not fit in a `u64`.
    #[regex(r"[0-9]+", number_literal, priority = 2)]
    NumberLiteral(u64),
    /// Dotted-quad IPv4 address, kept as raw text (octet ranges are not
    /// validated here).
    #[regex(r"[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+", |lex| lex.slice().to_owned())]
    IpAddress(String),
    /// IPv6 address (full form or with `::`), kept as raw text.
    #[regex(r"(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:)*::[0-9a-fA-F:]*", ipv6_address, priority = 5)]
    Ipv6Address(String),
    /// MAC address: six colon-separated hex octets, kept as raw text.
    #[regex(r"[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}", |lex| lex.slice().to_owned())]
    MacAddress(String),

    // Identifiers
    /// Bare word not matching any keyword; may contain `-` and `_`.
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_-]*", |lex| lex.slice().to_owned(), priority = 1)]
    Identifier(String),

    // Special tokens
    /// Newlines are significant (statement separators), so they are
    /// tokens rather than skipped whitespace.
    #[token("\n")]
    Newline,
    /// `#` comment to end of line; payload excludes the `#`.
    /// NOTE(review): `#!...` also matches this pattern — confirm logos
    /// resolves the overlap with `Shebang` as intended.
    #[regex(r"#[^\n]*", comment_literal)]
    CommentLine(String),
    /// `#!` shebang line; payload excludes the `#!`.
    #[regex(r"#![^\n]*", shebang_literal)]
    Shebang(String),

    // Error token
    /// Synthetic kind used by `tokenize_with_errors` for unrecognized input.
    Error,
}
|
||||
|
||||
impl fmt::Display for TokenKind {
    /// Human-readable token name for diagnostics.
    ///
    /// NOTE(review): only a subset of variants have explicit lowercase
    /// names; everything else falls through to the `Debug` form (e.g.
    /// `Inet`, `Sport`), so the output casing is inconsistent across
    /// keywords — extend the match if uniform messages are wanted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TokenKind::Table => write!(f, "table"),
            TokenKind::Chain => write!(f, "chain"),
            TokenKind::Rule => write!(f, "rule"),
            // Braces must be doubled to escape them in a format string.
            TokenKind::LeftBrace => write!(f, "{{"),
            TokenKind::RightBrace => write!(f, "}}"),
            // Carrier variants print their category, not their payload.
            TokenKind::Identifier(_) => write!(f, "identifier"),
            TokenKind::StringLiteral(_) => write!(f, "string"),
            TokenKind::NumberLiteral(_) => write!(f, "number"),
            TokenKind::IpAddress(_) => write!(f, "ip_address"),
            TokenKind::Ipv6Address(_) => write!(f, "ipv6_address"),
            TokenKind::MacAddress(_) => write!(f, "mac_address"),
            TokenKind::Newline => write!(f, "newline"),
            TokenKind::CommentLine(_) => write!(f, "comment"),
            TokenKind::Shebang(_) => write!(f, "shebang"),
            TokenKind::Error => write!(f, "error"),
            // Fallback: Debug representation (UpperCamelCase variant name).
            _ => write!(f, "{:?}", self),
        }
    }
}
|
||||
|
||||
/// Logos callback for `StringLiteral`: strips the surrounding quotes
/// and decodes the `\"` and `\\` escape sequences in a single pass.
/// Any other backslash sequence (e.g. `\n`) is kept verbatim, matching
/// the previous chained-`replace` behavior.
fn string_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    let inner = &slice[1..slice.len() - 1];
    let mut out = String::with_capacity(inner.len());
    let mut chars = inner.chars();
    while let Some(c) = chars.next() {
        if c != '\\' {
            out.push(c);
            continue;
        }
        match chars.next() {
            Some('"') => out.push('"'),
            Some('\\') => out.push('\\'),
            // Unknown escape: preserve both characters unchanged.
            Some(other) => {
                out.push('\\');
                out.push(other);
            }
            // Trailing backslash cannot occur given the token regex,
            // but keep it rather than drop it.
            None => out.push('\\'),
        }
    }
    out
}
|
||||
|
||||
/// Logos callback for `NumberLiteral`: parses the matched digits;
/// returning `None` makes logos produce an error for the slice (e.g.
/// on `u64` overflow).
fn number_literal(lex: &mut Lexer<TokenKind>) -> Option<u64> {
    lex.slice().parse().ok()
}

/// Logos callback for `Ipv6Address`: accepts the slice when it contains
/// `::` or at least two `:` separators, rejecting it otherwise.
///
/// NOTE(review): the regex that invokes this callback already requires
/// colons, so the check is largely redundant and does not fully
/// validate the address — confirm whether stricter validation belongs
/// in the parser instead.
fn ipv6_address(lex: &mut Lexer<TokenKind>) -> Option<String> {
    let slice = lex.slice();
    // Basic validation for IPv6 address format
    if slice.contains("::") || slice.matches(':').count() >= 2 {
        Some(slice.to_owned())
    } else {
        None
    }
}

/// Logos callback for `CommentLine`: returns the comment text without
/// the leading `#` (byte slicing is safe: `#` is one byte).
fn comment_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    slice[1..].to_owned() // Remove the '#' prefix
}

/// Logos callback for `Shebang`: returns the line without the leading
/// `#!` (two ASCII bytes).
fn shebang_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    slice[2..].to_owned() // Remove the '#!' prefix
}
|
||||
|
||||
/// A token with its kind and range in the source text.
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    /// Classified kind (keyword, literal, punctuation, ...).
    pub kind: TokenKind,
    /// Byte range of the token within the original source.
    pub range: TextRange,
    /// Raw source text the token was produced from (unprocessed, unlike
    /// any payload carried inside `kind`).
    pub text: String,
}

impl Token {
    /// Bundles a kind with its source range and raw text.
    pub fn new(kind: TokenKind, range: TextRange, text: String) -> Self {
        Self { kind, range, text }
    }
}
|
||||
|
||||
/// Tokenizer for nftables configuration files
|
||||
pub struct NftablesLexer<'a> {
|
||||
lexer: Lexer<'a, TokenKind>,
|
||||
source: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> NftablesLexer<'a> {
|
||||
pub fn new(source: &'a str) -> Self {
|
||||
Self {
|
||||
lexer: TokenKind::lexer(source),
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
/// Tokenize the source text, returning all tokens or the first error encountered
|
||||
pub fn tokenize(&mut self) -> LexResult<Vec<Token>> {
|
||||
let mut tokens = Vec::new();
|
||||
|
||||
while let Some(result) = self.lexer.next() {
|
||||
let span = self.lexer.span();
|
||||
let text = &self.source[span.clone()];
|
||||
let range = TextRange::new(
|
||||
TextSize::from(span.start as u32),
|
||||
TextSize::from(span.end as u32)
|
||||
);
|
||||
|
||||
match result {
|
||||
Ok(kind) => {
|
||||
tokens.push(Token::new(kind, range, text.to_owned()));
|
||||
}
|
||||
Err(_) => {
|
||||
// Analyze the text to determine specific error type
|
||||
if text.starts_with('"') && !text.ends_with('"') {
|
||||
return Err(LexError::UnterminatedString {
|
||||
position: span.start,
|
||||
});
|
||||
} else if text.chars().any(|c| c.is_ascii_digit()) &&
|
||||
text.chars().any(|c| !c.is_ascii_digit() && c != '.' && c != 'x' && c != 'X') {
|
||||
return Err(LexError::InvalidNumber {
|
||||
text: text.to_owned(),
|
||||
});
|
||||
} else {
|
||||
return Err(LexError::InvalidToken {
|
||||
position: span.start,
|
||||
text: text.to_owned(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(tokens)
|
||||
}
|
||||
|
||||
/// Get all tokens including error tokens for error recovery
|
||||
pub fn tokenize_with_errors(&mut self) -> Vec<Token> {
|
||||
let mut tokens = Vec::new();
|
||||
|
||||
while let Some(result) = self.lexer.next() {
|
||||
let span = self.lexer.span();
|
||||
let text = &self.source[span.clone()];
|
||||
let range = TextRange::new(
|
||||
TextSize::from(span.start as u32),
|
||||
TextSize::from(span.end as u32)
|
||||
);
|
||||
|
||||
let kind = result.unwrap_or(TokenKind::Error);
|
||||
tokens.push(Token::new(kind, range, text.to_owned()));
|
||||
}
|
||||
|
||||
tokens
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Keywords, families, and braces are recognized in order.
    #[test]
    fn test_basic_tokenization() {
        let source = "table inet filter {\n chain input {\n type filter hook input priority 0;\n }\n}";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");

        assert!(!tokens.is_empty());

        assert_eq!(tokens[0].kind, TokenKind::Table);
        assert_eq!(tokens[1].kind, TokenKind::Inet);
        assert_eq!(tokens[2].kind, TokenKind::Filter);
        assert_eq!(tokens[3].kind, TokenKind::LeftBrace);
    }

    // A dotted quad lexes as one IpAddress token, not four numbers.
    #[test]
    fn test_ip_address_tokenization() {
        let source = "192.168.1.1";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");

        assert_eq!(tokens.len(), 1);
        assert!(matches!(tokens[0].kind, TokenKind::IpAddress(_)));
        assert_eq!(tokens[0].text, "192.168.1.1");
    }

    // Comments coexist with regular tokens on following lines.
    #[test]
    fn test_comment_tokenization() {
        let source = "# This is a comment\ntable inet test";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");

        assert!(tokens.iter().any(|t| matches!(t.kind, TokenKind::CommentLine(_))));
        assert!(tokens.iter().any(|t| t.kind == TokenKind::Table));
    }

    // Unmatchable input surfaces as InvalidToken with a byte position.
    #[test]
    fn test_error_handling() {
        let source = "table ∞ filter"; // Invalid character
        let mut lexer = NftablesLexer::new(source);
        let result = lexer.tokenize();

        assert!(result.is_err());
        if let Err(LexError::InvalidToken { position, text }) = result {
            // Byte offset of '∞': "table " occupies bytes 0..6.
            assert_eq!(position, 6); // Position of the invalid character
            assert_eq!(text, "∞");
        } else {
            panic!("Expected InvalidToken error");
        }
    }
}
|
236
src/main.rs
236
src/main.rs
|
@ -1,47 +1,33 @@
|
|||
use std::fs::{self, File};
|
||||
use std::io::{self, BufRead, BufReader, Write};
|
||||
mod ast;
|
||||
mod cst;
|
||||
mod lexer;
|
||||
mod parser;
|
||||
mod syntax;
|
||||
|
||||
use std::fs;
|
||||
use std::io::{self, Write};
|
||||
use std::path::Path;
|
||||
use clap::Parser;
|
||||
use anyhow::{Context, Result};
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::lexer::NftablesLexer;
|
||||
use crate::parser::Parser as NftablesParser;
|
||||
use crate::syntax::{FormatConfig, IndentStyle, NftablesFormatter};
|
||||
use crate::cst::CstBuilder;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
enum FormatterError {
|
||||
#[error("File not found: {0}")]
|
||||
FileNotFound(String),
|
||||
#[error("Invalid file: {0}")]
|
||||
InvalidFile(String),
|
||||
#[error("Parse error: {0}")]
|
||||
ParseError(String),
|
||||
#[error("IO error: {0}")]
|
||||
Io(#[from] io::Error),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
enum IndentStyle {
|
||||
Tabs,
|
||||
Spaces,
|
||||
}
|
||||
|
||||
impl IndentStyle {
|
||||
fn format(&self, level: usize, spaces_per_level: usize) -> String {
|
||||
match self {
|
||||
Self::Tabs => "\t".repeat(level),
|
||||
Self::Spaces => " ".repeat(spaces_per_level * level),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for IndentStyle {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"tabs" | "tab" => Ok(Self::Tabs),
|
||||
"spaces" | "space" => Ok(Self::Spaces),
|
||||
_ => Err(format!("Invalid indent style: {}. Use 'tabs' or 'spaces'", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(
|
||||
name = "nff",
|
||||
|
@ -69,87 +55,14 @@ struct Args {
|
|||
/// Number of spaces per indentation level (only used with --indent=spaces)
|
||||
#[arg(long, default_value = "2", value_name = "N")]
|
||||
spaces: usize,
|
||||
}
|
||||
|
||||
struct NftablesFormatter {
|
||||
indent_style: IndentStyle,
|
||||
spaces_per_level: usize,
|
||||
optimize: bool,
|
||||
}
|
||||
/// Show debug information (tokens, AST, etc.)
|
||||
#[arg(long)]
|
||||
debug: bool,
|
||||
|
||||
impl NftablesFormatter {
|
||||
fn new(indent_style: IndentStyle, spaces_per_level: usize, optimize: bool) -> Self {
|
||||
Self {
|
||||
indent_style,
|
||||
spaces_per_level,
|
||||
optimize,
|
||||
}
|
||||
}
|
||||
|
||||
fn format_lines(&self, lines: Vec<String>) -> Vec<String> {
|
||||
let mut output_lines = Vec::new();
|
||||
let mut level = 0;
|
||||
let mut prev_was_empty = false;
|
||||
|
||||
for (i, line) in lines.iter().enumerate() {
|
||||
let line = line.trim();
|
||||
|
||||
// Handle empty lines
|
||||
if line.is_empty() {
|
||||
if self.optimize {
|
||||
if prev_was_empty {
|
||||
continue;
|
||||
}
|
||||
prev_was_empty = true;
|
||||
} else {
|
||||
prev_was_empty = false;
|
||||
}
|
||||
output_lines.push(String::new());
|
||||
continue;
|
||||
} else {
|
||||
prev_was_empty = false;
|
||||
}
|
||||
|
||||
// Skip lines that contain both opening and closing braces (single-line blocks)
|
||||
if line.contains('{') && line.contains('}') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Adjust indentation level before formatting if this line closes a block
|
||||
if line.ends_with('}') || line == "}" {
|
||||
if level > 0 {
|
||||
level -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Generate indentation
|
||||
let indentation = self.indent_style.format(level, self.spaces_per_level);
|
||||
|
||||
// Format the line
|
||||
let formatted_line = format!("{}{}", indentation, line);
|
||||
|
||||
// Skip empty lines before closing braces if optimizing
|
||||
if self.optimize && i > 0 && lines[i-1].trim().is_empty() {
|
||||
if line.ends_with('}') || line == "}" {
|
||||
// Remove the last empty line
|
||||
if let Some(last) = output_lines.last() {
|
||||
if last.trim().is_empty() {
|
||||
output_lines.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output_lines.push(formatted_line);
|
||||
|
||||
// Adjust indentation level after formatting if this line opens a block
|
||||
if line.ends_with('{') {
|
||||
level += 1;
|
||||
}
|
||||
}
|
||||
|
||||
output_lines
|
||||
}
|
||||
/// Check syntax only, don't format
|
||||
#[arg(long)]
|
||||
check: bool,
|
||||
}
|
||||
|
||||
fn process_nftables_config(args: Args) -> Result<()> {
|
||||
|
@ -162,33 +75,92 @@ fn process_nftables_config(args: Args) -> Result<()> {
|
|||
return Err(FormatterError::InvalidFile("Not a regular file".to_string()).into());
|
||||
}
|
||||
|
||||
let file = File::open(&args.file)
|
||||
.with_context(|| format!("Failed to open file: {}", args.file))?;
|
||||
// Read file contents
|
||||
let source = fs::read_to_string(&args.file)
|
||||
.with_context(|| format!("Failed to read file: {}", args.file))?;
|
||||
|
||||
let reader = BufReader::new(file);
|
||||
let lines: Result<Vec<String>, io::Error> = reader.lines().collect();
|
||||
let lines = lines.with_context(|| "Failed to read file contents")?;
|
||||
|
||||
let formatter = NftablesFormatter::new(args.indent, args.spaces, args.optimize);
|
||||
let formatted_lines = formatter.format_lines(lines);
|
||||
|
||||
// Create output content
|
||||
let output_content = formatted_lines.join("\n");
|
||||
let output_content = if !output_content.ends_with('\n') && !output_content.is_empty() {
|
||||
format!("{}\n", output_content)
|
||||
// Tokenize
|
||||
let mut lexer = NftablesLexer::new(&source);
|
||||
let tokens = if args.debug {
|
||||
// Use error-recovery tokenization for debug mode
|
||||
lexer.tokenize_with_errors()
|
||||
} else {
|
||||
output_content
|
||||
lexer.tokenize()
|
||||
.map_err(|e| FormatterError::ParseError(e.to_string()))?
|
||||
};
|
||||
|
||||
if args.debug {
|
||||
eprintln!("=== TOKENS ===");
|
||||
for (i, token) in tokens.iter().enumerate() {
|
||||
eprintln!("{:3}: {:?} @ {:?} = '{}'", i, token.kind, token.range, token.text);
|
||||
}
|
||||
eprintln!();
|
||||
|
||||
// Build and validate CST
|
||||
eprintln!("=== CST ===");
|
||||
let cst_tree = CstBuilder::build_tree(&tokens);
|
||||
match CstBuilder::validate_tree(&cst_tree) {
|
||||
Ok(()) => eprintln!("CST validation passed"),
|
||||
Err(e) => eprintln!("CST validation error: {}", e),
|
||||
}
|
||||
|
||||
// Also test parse_to_cst
|
||||
match CstBuilder::parse_to_cst(&tokens) {
|
||||
Ok(_) => eprintln!("CST parsing successful"),
|
||||
Err(e) => eprintln!("CST parsing error: {}", e),
|
||||
}
|
||||
eprintln!();
|
||||
}
|
||||
|
||||
// Parse
|
||||
let ruleset = if args.debug {
|
||||
// Use error-recovery parsing for debug mode
|
||||
let (parsed_ruleset, errors) = NftablesParser::parse_with_errors(&source);
|
||||
if !errors.is_empty() {
|
||||
eprintln!("=== PARSE ERRORS ===");
|
||||
for error in &errors {
|
||||
eprintln!("Parse error: {}", error);
|
||||
}
|
||||
eprintln!();
|
||||
}
|
||||
parsed_ruleset.unwrap_or_else(|| crate::ast::Ruleset::new())
|
||||
} else {
|
||||
let mut parser = NftablesParser::new(tokens.clone());
|
||||
parser.parse()
|
||||
.map_err(|e| FormatterError::ParseError(e.to_string()))?
|
||||
};
|
||||
|
||||
if args.debug {
|
||||
eprintln!("=== AST ===");
|
||||
eprintln!("{:#?}", ruleset);
|
||||
eprintln!();
|
||||
}
|
||||
|
||||
if args.check {
|
||||
println!("Syntax check passed for: {}", args.file);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Format
|
||||
let config = FormatConfig {
|
||||
indent_style: args.indent,
|
||||
spaces_per_level: args.spaces,
|
||||
optimize: args.optimize,
|
||||
max_empty_lines: if args.optimize { 1 } else { 2 },
|
||||
};
|
||||
|
||||
let formatter = NftablesFormatter::new(config);
|
||||
let formatted_output = formatter.format_ruleset(&ruleset);
|
||||
|
||||
// Write output
|
||||
match &args.output {
|
||||
Some(output_file) => {
|
||||
fs::write(output_file, output_content)
|
||||
fs::write(output_file, &formatted_output)
|
||||
.with_context(|| format!("Failed to write to output file: {}", output_file))?;
|
||||
println!("Formatted output written to: {}", output_file);
|
||||
}
|
||||
None => {
|
||||
io::stdout().write_all(output_content.as_bytes())
|
||||
io::stdout().write_all(formatted_output.as_bytes())
|
||||
.with_context(|| "Failed to write to stdout")?;
|
||||
}
|
||||
}
|
||||
|
@ -198,5 +170,19 @@ fn process_nftables_config(args: Args) -> Result<()> {
|
|||
|
||||
fn main() -> Result<()> {
|
||||
let args = Args::parse();
|
||||
process_nftables_config(args)
|
||||
|
||||
if let Err(e) = process_nftables_config(args) {
|
||||
eprintln!("Error: {}", e);
|
||||
|
||||
// Print the error chain
|
||||
let mut current = e.source();
|
||||
while let Some(cause) = current {
|
||||
eprintln!(" Caused by: {}", cause);
|
||||
current = cause.source();
|
||||
}
|
||||
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
1044
src/parser.rs
Normal file
1044
src/parser.rs
Normal file
File diff suppressed because it is too large
Load diff
316
src/syntax.rs
Normal file
316
src/syntax.rs
Normal file
|
@ -0,0 +1,316 @@
|
|||
use crate::ast::*;
|
||||
use std::fmt::Write;
|
||||
|
||||
/// Configuration for formatting output.
#[derive(Debug, Clone)]
pub struct FormatConfig {
    /// Tabs or spaces for indentation.
    pub indent_style: IndentStyle,
    /// Width of one indent level when `indent_style` is `Spaces`.
    pub spaces_per_level: usize,
    /// Whether to aggressively collapse blank lines.
    pub optimize: bool,
    /// Maximum consecutive blank lines to keep.
    pub max_empty_lines: usize,
}

impl Default for FormatConfig {
    /// Tabs, two spaces per level, no optimization, at most one blank line.
    fn default() -> Self {
        Self {
            indent_style: IndentStyle::Tabs,
            spaces_per_level: 2,
            optimize: false,
            max_empty_lines: 1,
        }
    }
}

/// Indentation character choice.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IndentStyle {
    Tabs,
    Spaces,
}

impl IndentStyle {
    /// Returns the indentation prefix for `level` nesting levels.
    pub fn format(&self, level: usize, spaces_per_level: usize) -> String {
        let (unit, count) = match self {
            IndentStyle::Tabs => ("\t", level),
            IndentStyle::Spaces => (" ", spaces_per_level * level),
        };
        unit.repeat(count)
    }
}
|
||||
|
||||
/// Formatter for nftables AST
///
/// Renders a parsed ruleset back to nftables configuration text using the
/// settings carried in `config`.
pub struct NftablesFormatter {
    /// Formatting options (indent style, blank-line policy, compaction).
    config: FormatConfig,
}
|
||||
|
||||
impl NftablesFormatter {
|
||||
pub fn new(config: FormatConfig) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
|
||||
/// Add appropriate number of empty lines based on configuration
|
||||
fn add_separator(&self, output: &mut String) {
|
||||
if self.config.optimize {
|
||||
output.push('\n');
|
||||
} else {
|
||||
// Add newlines based on max_empty_lines setting
|
||||
for _ in 0..=self.config.max_empty_lines {
|
||||
output.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format_ruleset(&self, ruleset: &Ruleset) -> String {
|
||||
let mut output = String::new();
|
||||
|
||||
// Format shebang
|
||||
if let Some(shebang) = &ruleset.shebang {
|
||||
writeln!(output, "#!{}", shebang).unwrap();
|
||||
}
|
||||
|
||||
// Format includes
|
||||
for include in &ruleset.includes {
|
||||
self.format_include(&mut output, include, 0);
|
||||
}
|
||||
|
||||
// Add separator if we have includes
|
||||
if !ruleset.includes.is_empty() {
|
||||
self.add_separator(&mut output);
|
||||
}
|
||||
|
||||
// Format defines
|
||||
for define in &ruleset.defines {
|
||||
self.format_define(&mut output, define, 0);
|
||||
}
|
||||
|
||||
// Add separator if we have defines
|
||||
if !ruleset.defines.is_empty() {
|
||||
self.add_separator(&mut output);
|
||||
}
|
||||
|
||||
// Format tables
|
||||
let mut table_iter = ruleset.tables.values().peekable();
|
||||
while let Some(table) = table_iter.next() {
|
||||
self.format_table(&mut output, table, 0); // Add separator between tables
|
||||
if table_iter.peek().is_some() {
|
||||
self.add_separator(&mut output);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure file ends with newline
|
||||
if !output.ends_with('\n') {
|
||||
output.push('\n');
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
fn format_include(&self, output: &mut String, include: &Include, level: usize) {
|
||||
let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
|
||||
writeln!(output, "{}include \"{}\"", indent, include.path).unwrap();
|
||||
}
|
||||
|
||||
fn format_define(&self, output: &mut String, define: &Define, level: usize) {
|
||||
let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
|
||||
write!(output, "{}define {} = ", indent, define.name).unwrap();
|
||||
self.format_expression(output, &define.value);
|
||||
output.push('\n');
|
||||
}
|
||||
|
||||
fn format_table(&self, output: &mut String, table: &Table, level: usize) {
|
||||
let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
|
||||
|
||||
writeln!(output, "{}table {} {} {{", indent, table.family, table.name).unwrap();
|
||||
|
||||
// Format chains
|
||||
let mut chain_iter = table.chains.values().peekable();
|
||||
while let Some(chain) = chain_iter.next() {
|
||||
self.format_chain(output, chain, level + 1); // Add separator between chains
|
||||
if chain_iter.peek().is_some() {
|
||||
self.add_separator(output);
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(output, "{}}}", indent).unwrap();
|
||||
}
|
||||
|
||||
fn format_chain(&self, output: &mut String, chain: &Chain, level: usize) {
|
||||
let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
|
||||
|
||||
writeln!(output, "{}chain {} {{", indent, chain.name).unwrap();
|
||||
|
||||
// Format chain properties
|
||||
if let Some(chain_type) = &chain.chain_type {
|
||||
write!(output, "{}type {}",
|
||||
self.config.indent_style.format(level + 1, self.config.spaces_per_level),
|
||||
chain_type).unwrap();
|
||||
|
||||
if let Some(hook) = &chain.hook {
|
||||
write!(output, " hook {}", hook).unwrap();
|
||||
}
|
||||
|
||||
if let Some(priority) = chain.priority {
|
||||
write!(output, " priority {}", priority).unwrap();
|
||||
}
|
||||
|
||||
// Add semicolon after type/hook/priority
|
||||
output.push_str(";");
|
||||
|
||||
// Add policy on the same line if present
|
||||
if let Some(policy) = &chain.policy {
|
||||
write!(output, " policy {}", policy).unwrap();
|
||||
}
|
||||
|
||||
output.push_str(";\n");
|
||||
|
||||
if !chain.rules.is_empty() && !self.config.optimize {
|
||||
output.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
// Format rules
|
||||
for (i, rule) in chain.rules.iter().enumerate() {
|
||||
// Add spacing between rules (but not before first rule)
|
||||
if i > 0 && !self.config.optimize && self.config.max_empty_lines > 0 {
|
||||
output.push('\n');
|
||||
}
|
||||
self.format_rule(output, rule, level + 1);
|
||||
}
|
||||
|
||||
writeln!(output, "{}}}", indent).unwrap();
|
||||
}
|
||||
|
||||
fn format_rule(&self, output: &mut String, rule: &Rule, level: usize) {
|
||||
let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
|
||||
|
||||
write!(output, "{}", indent).unwrap();
|
||||
|
||||
// Format expressions
|
||||
for (i, expr) in rule.expressions.iter().enumerate() {
|
||||
if i > 0 {
|
||||
output.push(' ');
|
||||
}
|
||||
self.format_expression(output, expr);
|
||||
}
|
||||
|
||||
// Add action
|
||||
if !rule.expressions.is_empty() {
|
||||
output.push(' ');
|
||||
}
|
||||
write!(output, "{}", rule.action).unwrap();
|
||||
|
||||
output.push('\n');
|
||||
|
||||
// Only add extra newline between rules, not after the last rule
|
||||
// We'll handle this in the chain formatting instead
|
||||
}
|
||||
|
||||
fn format_expression(&self, output: &mut String, expr: &Expression) {
|
||||
match expr {
|
||||
Expression::Identifier(name) => write!(output, "{}", name).unwrap(),
|
||||
Expression::String(s) => write!(output, "\"{}\"", s).unwrap(),
|
||||
Expression::Number(n) => write!(output, "{}", n).unwrap(),
|
||||
Expression::IpAddress(addr) => write!(output, "{}", addr).unwrap(),
|
||||
Expression::Ipv6Address(addr) => write!(output, "{}", addr).unwrap(),
|
||||
Expression::MacAddress(addr) => write!(output, "{}", addr).unwrap(),
|
||||
|
||||
Expression::Binary { left, operator, right } => {
|
||||
self.format_expression(output, left);
|
||||
write!(output, " {} ", operator).unwrap();
|
||||
self.format_expression(output, right);
|
||||
}
|
||||
|
||||
Expression::Protocol(proto) => write!(output, "protocol {}", proto).unwrap(),
|
||||
|
||||
Expression::Port { direction, value } => {
|
||||
match direction {
|
||||
PortDirection::Source => write!(output, "sport ").unwrap(),
|
||||
PortDirection::Destination => write!(output, "dport ").unwrap(),
|
||||
}
|
||||
self.format_expression(output, value);
|
||||
}
|
||||
|
||||
Expression::Address { direction, value } => {
|
||||
// Include the protocol family when formatting addresses
|
||||
write!(output, "ip ").unwrap();
|
||||
match direction {
|
||||
AddressDirection::Source => write!(output, "saddr ").unwrap(),
|
||||
AddressDirection::Destination => write!(output, "daddr ").unwrap(),
|
||||
}
|
||||
self.format_expression(output, value);
|
||||
}
|
||||
|
||||
Expression::Interface { direction, name } => {
|
||||
match direction {
|
||||
InterfaceDirection::Input => write!(output, "iifname ").unwrap(),
|
||||
InterfaceDirection::Output => write!(output, "oifname ").unwrap(),
|
||||
}
|
||||
write!(output, "{}", name).unwrap();
|
||||
}
|
||||
|
||||
Expression::ConnTrack { field, value } => {
|
||||
write!(output, "ct {} ", field).unwrap();
|
||||
self.format_expression(output, value);
|
||||
}
|
||||
|
||||
Expression::Set(elements) => {
|
||||
output.push_str("{ ");
|
||||
for (i, element) in elements.iter().enumerate() {
|
||||
if i > 0 {
|
||||
output.push_str(", ");
|
||||
}
|
||||
self.format_expression(output, element);
|
||||
}
|
||||
output.push_str(" }");
|
||||
}
|
||||
|
||||
Expression::Range { start, end } => {
|
||||
self.format_expression(output, start);
|
||||
output.push('-');
|
||||
self.format_expression(output, end);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert from string-based IndentStyle to our enum
|
||||
impl std::str::FromStr for IndentStyle {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"tabs" | "tab" => Ok(IndentStyle::Tabs),
|
||||
"spaces" | "space" => Ok(IndentStyle::Spaces),
|
||||
_ => Err(format!("Invalid indent style: {}. Use 'tabs' or 'spaces'", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: formatting a minimal table must not panic and must
    /// produce non-empty output.
    #[test]
    fn test_format_simple_table() {
        // A base input chain with a single accept-loopback rule.
        let input_chain = Chain::new("input".to_string())
            .with_type(ChainType::Filter)
            .with_hook(Hook::Input)
            .with_priority(0)
            .with_policy(Policy::Accept)
            .add_rule(Rule::new(
                vec![Expression::Interface {
                    direction: InterfaceDirection::Input,
                    name: "lo".to_string(),
                }],
                Action::Accept,
            ));
        let table = Table::new(Family::Inet, "test".to_string()).add_chain(input_chain);

        let formatter = NftablesFormatter::new(FormatConfig::default());
        let mut rendered = String::new();
        formatter.format_table(&mut rendered, &table, 0);

        // Just verify it doesn't panic and produces some output
        assert!(!rendered.is_empty());
    }
}
|
Loading…
Add table
Add a link
Reference in a new issue