initial working implementation

This commit is contained in:
raf 2025-05-24 23:27:15 +03:00
commit c4beb3e65f
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
8 changed files with 3858 additions and 126 deletions

382
Cargo.lock generated
View file

@ -56,6 +56,36 @@ version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "beef"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
[[package]]
name = "bitflags"
version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.5.2"
@ -84,7 +114,7 @@ version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
dependencies = [
"heck",
"heck 0.4.1",
"proc-macro2",
"quote",
"syn",
@ -102,21 +132,194 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "cstree"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d609e3b8b73dbace666e8a06351fd9062e1ec025e74b27952a932ccb8ec3a25"
dependencies = [
"fxhash",
"indexmap",
"parking_lot",
"sptr",
"text-size",
"triomphe",
]
[[package]]
name = "dyn-clone"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "hashbrown"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "indexmap"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
"hashbrown",
]
[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "lock_api"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "logos"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab6f536c1af4c7cc81edf73da1f8029896e7e1e16a219ef09b184e76a296f3db"
dependencies = [
"logos-derive",
]
[[package]]
name = "logos-codegen"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "189bbfd0b61330abea797e5e9276408f2edbe4f822d7ad08685d67419aafb34e"
dependencies = [
"beef",
"fnv",
"lazy_static",
"proc-macro2",
"quote",
"regex-syntax",
"rustc_version",
"syn",
]
[[package]]
name = "logos-derive"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebfe8e1a19049ddbfccbd14ac834b215e11b85b90bab0c2dba7c7b92fb5d5cba"
dependencies = [
"logos-codegen",
]
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "nff"
version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"cstree",
"logos",
"nftables",
"text-size",
"thiserror",
]
[[package]]
name = "nftables"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180f5f3983a76df4a48e01b9317832cc6fa6aa90ef0c73328658e0e5653f175a"
dependencies = [
"schemars",
"serde",
"serde_json",
"serde_path_to_error",
"strum",
"strum_macros",
"thiserror",
]
[[package]]
name = "parking_lot"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets",
]
[[package]]
name = "proc-macro2"
version = "1.0.95"
@ -135,12 +338,174 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
dependencies = [
"bitflags",
]
[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rustc_version"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
[[package]]
name = "ryu"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "schemars"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
dependencies = [
"dyn-clone",
"schemars_derive",
"serde",
"serde_json",
]
[[package]]
name = "schemars_derive"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
[[package]]
name = "serde"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_derive_internals"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
[[package]]
name = "serde_path_to_error"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a"
dependencies = [
"itoa",
"serde",
]
[[package]]
name = "smallvec"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
[[package]]
name = "sptr"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a"
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01"
[[package]]
name = "strum"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
[[package]]
name = "strum_macros"
version = "0.27.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "syn"
version = "2.0.101"
@ -152,6 +517,12 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "text-size"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]]
name = "thiserror"
version = "2.0.12"
@ -172,6 +543,15 @@ dependencies = [
"syn",
]
[[package]]
name = "triomphe"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"

View file

@ -9,3 +9,7 @@ edition = "2024"
clap = { version = "4.5", features = ["derive"] }
anyhow = "1.0"
thiserror = "2.0"
logos = "0.15"
cstree = "0.12"
text-size = "1.1"
nftables = "0.6"

389
src/ast.rs Normal file
View file

@ -0,0 +1,389 @@
use std::collections::HashMap;
use std::fmt;
/// Address family a table belongs to (`ip`, `ip6`, `inet`, `arp`,
/// `bridge`, `netdev`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Family {
    Ip,
    Ip6,
    Inet,
    Arp,
    Bridge,
    Netdev,
}

impl fmt::Display for Family {
    /// Renders the family as the exact lowercase keyword used in nft syntax.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Self::Ip => "ip",
            Self::Ip6 => "ip6",
            Self::Inet => "inet",
            Self::Arp => "arp",
            Self::Bridge => "bridge",
            Self::Netdev => "netdev",
        };
        f.write_str(keyword)
    }
}
/// Kind of base chain (`filter`, `nat`, `route`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ChainType {
    Filter,
    Nat,
    Route,
}

impl fmt::Display for ChainType {
    /// Renders the chain type as its lowercase nft keyword.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Self::Filter => "filter",
            Self::Nat => "nat",
            Self::Route => "route",
        };
        f.write_str(keyword)
    }
}
/// Netfilter hook a base chain attaches to.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Hook {
    Input,
    Output,
    Forward,
    Prerouting,
    Postrouting,
}

impl fmt::Display for Hook {
    /// Renders the hook as its lowercase nft keyword.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Self::Input => "input",
            Self::Output => "output",
            Self::Forward => "forward",
            Self::Prerouting => "prerouting",
            Self::Postrouting => "postrouting",
        };
        f.write_str(keyword)
    }
}
/// Default verdict of a base chain for packets no rule matched.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Policy {
    Accept,
    Drop,
}

impl fmt::Display for Policy {
    /// Renders the policy as its lowercase nft keyword.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(if matches!(self, Self::Accept) {
            "accept"
        } else {
            "drop"
        })
    }
}
/// Represents expressions in nftables rules
///
/// One node of the match side of a rule (everything before the action):
/// literal values, comparisons, protocol/port/address/interface matches,
/// conntrack matches, sets and ranges. Nested operands are boxed to keep
/// the enum itself small.
#[derive(Debug, Clone, PartialEq)]
pub enum Expression {
    // Literals
    Identifier(String),
    String(String),
    Number(u64),
    // Address literals are kept as their source text, not parsed octets.
    IpAddress(String),
    Ipv6Address(String),
    MacAddress(String),
    // Binary operations, e.g. a comparison like `dport <= 1024`
    Binary {
        left: Box<Expression>,
        operator: BinaryOperator,
        right: Box<Expression>,
    },
    // Protocol matches (protocol name kept as text, e.g. "tcp")
    Protocol(String),
    // Source/destination port match; value may be a number, set or range
    Port {
        direction: PortDirection,
        value: Box<Expression>,
    },
    // Source/destination address match
    Address {
        direction: AddressDirection,
        value: Box<Expression>,
    },
    // Interface matches (iifname/oifname with a literal interface name)
    Interface {
        direction: InterfaceDirection,
        name: String,
    },
    // Connection tracking match, e.g. ct state { established, related }
    ConnTrack {
        field: String,
        value: Box<Expression>,
    },
    // Set expressions: `{ a, b, c }`
    Set(Vec<Expression>),
    // Range expressions: `start-end`
    Range {
        start: Box<Expression>,
        end: Box<Expression>,
    },
}
/// Comparison operator usable inside a rule expression.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BinaryOperator {
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
}

impl fmt::Display for BinaryOperator {
    /// Renders the operator exactly as it appears in nft source.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let symbol = match self {
            Self::Eq => "==",
            Self::Ne => "!=",
            Self::Lt => "<",
            Self::Le => "<=",
            Self::Gt => ">",
            Self::Ge => ">=",
        };
        f.write_str(symbol)
    }
}
// Which port field a port match applies to (`sport` vs `dport`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PortDirection {
    Source,
    Destination,
}

// Which address field an address match applies to (`saddr` vs `daddr`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum AddressDirection {
    Source,
    Destination,
}

// Which interface an interface match applies to (`iifname` vs `oifname`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InterfaceDirection {
    Input,
    Output,
}
/// Represents actions that can be taken on matching packets
///
/// The verdict or side effect applied once a rule's expressions match.
#[derive(Debug, Clone, PartialEq)]
pub enum Action {
    Accept,
    Drop,
    Reject,
    Return,
    Jump(String),
    Goto(String),
    Continue,
    /// `log` statement with optional quoted prefix and severity level.
    Log {
        prefix: Option<String>,
        level: Option<String>,
    },
    /// Trailing `comment "…"` attached to a rule.
    Comment(String),
}

impl fmt::Display for Action {
    /// Renders the action in nft syntax; `Log` emits only the parts that
    /// are present, `Comment` and `Log` prefixes are wrapped in quotes.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Accept => f.write_str("accept"),
            Self::Drop => f.write_str("drop"),
            Self::Reject => f.write_str("reject"),
            Self::Return => f.write_str("return"),
            Self::Continue => f.write_str("continue"),
            Self::Jump(target) => write!(f, "jump {target}"),
            Self::Goto(target) => write!(f, "goto {target}"),
            Self::Comment(text) => write!(f, "comment \"{text}\""),
            Self::Log { prefix, level } => {
                f.write_str("log")?;
                if let Some(p) = prefix {
                    write!(f, " prefix \"{p}\"")?;
                }
                if let Some(l) = level {
                    write!(f, " level {l}")?;
                }
                Ok(())
            }
        }
    }
}
/// Represents a rule in a chain
///
/// A list of match expressions plus the action taken when all of them
/// match; `handle` is the kernel-assigned rule handle when known.
#[derive(Debug, Clone, PartialEq)]
pub struct Rule {
    pub expressions: Vec<Expression>,
    pub action: Action,
    pub handle: Option<u64>,
}

impl Rule {
    /// Creates a rule with no handle assigned yet.
    pub fn new(expressions: Vec<Expression>, action: Action) -> Self {
        Rule {
            expressions,
            action,
            handle: None,
        }
    }

    /// Builder-style setter attaching a rule handle.
    pub fn with_handle(self, handle: u64) -> Self {
        Rule {
            handle: Some(handle),
            ..self
        }
    }
}
/// Represents a chain in a table
///
/// Base chains carry `chain_type`/`hook`/`priority` (and optionally a
/// `policy` or a `device`); regular chains leave those as `None` and are
/// reached only via jump/goto. All setters are consuming builder methods.
#[derive(Debug, Clone, PartialEq)]
pub struct Chain {
    pub name: String,
    pub chain_type: Option<ChainType>,
    pub hook: Option<Hook>,
    // i32 because nftables accepts negative hook priorities.
    pub priority: Option<i32>,
    pub policy: Option<Policy>,
    // Presumably the bound network device for netdev-family chains —
    // TODO confirm against the parser.
    pub device: Option<String>,
    pub rules: Vec<Rule>,
    // Kernel-assigned chain handle, if known.
    pub handle: Option<u64>,
}

impl Chain {
    /// Creates a regular (non-base) chain: everything optional is `None`.
    pub fn new(name: String) -> Self {
        Self {
            name,
            chain_type: None,
            hook: None,
            priority: None,
            policy: None,
            device: None,
            rules: Vec::new(),
            handle: None,
        }
    }

    /// Sets the base-chain type (filter/nat/route).
    pub fn with_type(mut self, chain_type: ChainType) -> Self {
        self.chain_type = Some(chain_type);
        self
    }

    /// Sets the netfilter hook this chain attaches to.
    pub fn with_hook(mut self, hook: Hook) -> Self {
        self.hook = Some(hook);
        self
    }

    /// Sets the hook priority.
    pub fn with_priority(mut self, priority: i32) -> Self {
        self.priority = Some(priority);
        self
    }

    /// Sets the default policy for unmatched packets.
    pub fn with_policy(mut self, policy: Policy) -> Self {
        self.policy = Some(policy);
        self
    }

    /// Sets the device name.
    pub fn with_device(mut self, device: String) -> Self {
        self.device = Some(device);
        self
    }

    /// Appends a rule, preserving insertion order.
    pub fn add_rule(mut self, rule: Rule) -> Self {
        self.rules.push(rule);
        self
    }
}
/// Represents a table containing chains
#[derive(Debug, Clone, PartialEq)]
pub struct Table {
    pub family: Family,
    pub name: String,
    // Chains keyed by chain name; HashMap, so source order is not kept.
    pub chains: HashMap<String, Chain>,
    // Kernel-assigned table handle, if known.
    pub handle: Option<u64>,
}

impl Table {
    /// Creates an empty table for the given family and name.
    pub fn new(family: Family, name: String) -> Self {
        Self {
            family,
            name,
            chains: HashMap::new(),
            handle: None,
        }
    }

    /// Inserts a chain, silently replacing any existing chain of the
    /// same name.
    pub fn add_chain(mut self, chain: Chain) -> Self {
        self.chains.insert(chain.name.clone(), chain);
        self
    }
}
/// Represents an include statement
#[derive(Debug, Clone, PartialEq)]
pub struct Include {
    // Included file path as written in the source (quotes removed).
    pub path: String,
}

/// Represents a define statement
#[derive(Debug, Clone, PartialEq)]
pub struct Define {
    pub name: String,
    pub value: Expression,
}

/// Represents the root of an nftables configuration
///
/// Holds everything parsed from one file: top-level includes and defines,
/// tables keyed by (family, name) — nftables allows the same table name
/// in different families — plus an optional shebang line and any
/// free-standing comments.
#[derive(Debug, Clone, PartialEq)]
pub struct Ruleset {
    pub includes: Vec<Include>,
    pub defines: Vec<Define>,
    pub tables: HashMap<(Family, String), Table>,
    pub shebang: Option<String>,
    pub comments: Vec<String>,
}
impl Ruleset {
pub fn new() -> Self {
Self {
includes: Vec::new(),
defines: Vec::new(),
tables: HashMap::new(),
shebang: None,
comments: Vec::new(),
}
}
pub fn with_shebang(mut self, shebang: String) -> Self {
self.shebang = Some(shebang);
self
}
pub fn add_include(mut self, include: Include) -> Self {
self.includes.push(include);
self
}
pub fn add_define(mut self, define: Define) -> Self {
self.defines.push(define);
self
}
pub fn add_table(mut self, table: Table) -> Self {
let key = (table.family.clone(), table.name.clone());
self.tables.insert(key, table);
self
}
pub fn add_comment(mut self, comment: String) -> Self {
self.comments.push(comment);
self
}
}
impl Default for Ruleset {
fn default() -> Self {
Self::new()
}
}

1197
src/cst.rs Normal file

File diff suppressed because it is too large Load diff

416
src/lexer.rs Normal file
View file

@ -0,0 +1,416 @@
use logos::{Lexer, Logos};
use std::fmt;
use text_size::{TextRange, TextSize};
use thiserror::Error;
/// Lexical analysis errors
///
/// Classified by `NftablesLexer::tokenize` from the text of the failing
/// slice; positions are byte offsets into the source.
#[derive(Error, Debug, PartialEq)]
pub enum LexError {
    // Catch-all for input no token rule matched.
    #[error("Invalid token at position {position}: {text}")]
    InvalidToken { position: usize, text: String },
    // A string literal whose closing quote was never found.
    #[error("Unterminated string literal starting at position {position}")]
    UnterminatedString { position: usize },
    // A digit-containing slice that is not a valid number.
    #[error("Invalid numeric literal: {text}")]
    InvalidNumber { text: String },
}

/// Result type for lexical analysis
pub type LexResult<T> = Result<T, LexError>;
/// Token kinds for nftables configuration files
///
/// A `logos`-derived lexer definition: `#[token(...)]` arms match fixed
/// keywords, operators and punctuation; `#[regex(...)]` arms match
/// literals, optionally post-processed by a callback. Spaces/tabs/form
/// feeds are skipped, but `\n` is kept as a real `Newline` token.
#[derive(Logos, Debug, Clone, PartialEq, Eq, Hash)]
#[logos(skip r"[ \t\f]+")] // Skip whitespace but not newlines
pub enum TokenKind {
    // Keywords
    #[token("table")]
    Table,
    #[token("chain")]
    Chain,
    #[token("rule")]
    Rule,
    #[token("set")]
    Set,
    #[token("map")]
    Map,
    #[token("element")]
    Element,
    #[token("include")]
    Include,
    #[token("define")]
    Define,
    #[token("flush")]
    Flush,
    #[token("add")]
    Add,
    #[token("delete")]
    Delete,
    #[token("insert")]
    Insert,
    #[token("replace")]
    Replace,
    // Chain types
    #[token("filter")]
    Filter,
    #[token("nat")]
    Nat,
    #[token("route")]
    Route,
    // Hooks
    #[token("input")]
    Input,
    #[token("output")]
    Output,
    #[token("forward")]
    Forward,
    #[token("prerouting")]
    Prerouting,
    #[token("postrouting")]
    Postrouting,
    // Protocols and families
    #[token("ip")]
    Ip,
    #[token("ip6")]
    Ip6,
    #[token("inet")]
    Inet,
    #[token("arp")]
    Arp,
    #[token("bridge")]
    Bridge,
    #[token("netdev")]
    Netdev,
    #[token("tcp")]
    Tcp,
    #[token("udp")]
    Udp,
    #[token("icmp")]
    Icmp,
    #[token("icmpv6")]
    Icmpv6,
    // Match keywords
    #[token("sport")]
    Sport,
    #[token("dport")]
    Dport,
    #[token("saddr")]
    Saddr,
    #[token("daddr")]
    Daddr,
    #[token("protocol")]
    Protocol,
    #[token("nexthdr")]
    Nexthdr,
    #[token("type")]
    Type,
    #[token("hook")]
    Hook,
    #[token("priority")]
    Priority,
    #[token("policy")]
    Policy,
    #[token("iifname")]
    Iifname,
    #[token("oifname")]
    Oifname,
    #[token("ct")]
    Ct,
    #[token("state")]
    State,
    #[token("established")]
    Established,
    #[token("related")]
    Related,
    #[token("invalid")]
    Invalid,
    #[token("new")]
    New,
    // Actions
    #[token("accept")]
    Accept,
    #[token("drop")]
    Drop,
    #[token("reject")]
    Reject,
    #[token("return")]
    Return,
    #[token("jump")]
    Jump,
    #[token("goto")]
    Goto,
    #[token("continue")]
    Continue,
    #[token("log")]
    Log,
    #[token("comment")]
    Comment,
    // Operators — two-character operators are listed before their
    // one-character prefixes.
    #[token("==")]
    Eq,
    #[token("!=")]
    Ne,
    #[token("<=")]
    Le,
    #[token(">=")]
    Ge,
    #[token("<")]
    Lt,
    #[token(">")]
    Gt,
    // Punctuation
    #[token("{")]
    LeftBrace,
    #[token("}")]
    RightBrace,
    #[token("(")]
    LeftParen,
    #[token(")")]
    RightParen,
    #[token("[")]
    LeftBracket,
    #[token("]")]
    RightBracket,
    #[token(",")]
    Comma,
    #[token(";")]
    Semicolon,
    #[token(":")]
    Colon,
    #[token("=")]
    Assign,
    #[token("-")]
    Dash,
    #[token("/")]
    Slash,
    #[token(".")]
    Dot,
    // Literals
    // Double-quoted string; callback strips the quotes and unescapes
    // \" and \\.
    #[regex(r#""([^"\\]|\\.)*""#, string_literal)]
    StringLiteral(String),
    // Bare digit run, parsed to u64 by the callback.
    #[regex(r"[0-9]+", number_literal, priority = 2)]
    NumberLiteral(u64),
    // Dotted-quad IPv4 literal, kept as text.
    // NOTE(review): overlaps NumberLiteral/Dot; relies on logos
    // longest-match + priority resolution — confirm.
    #[regex(r"[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+", |lex| lex.slice().to_owned())]
    IpAddress(String),
    // Full or `::`-compressed IPv6 literal; callback re-validates loosely.
    #[regex(r"(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:)*::[0-9a-fA-F:]*", ipv6_address, priority = 5)]
    Ipv6Address(String),
    // Six colon-separated hex byte pairs.
    #[regex(r"[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}:[a-fA-F0-9]{2}", |lex| lex.slice().to_owned())]
    MacAddress(String),
    // Identifiers
    // Low priority so fixed keywords above win over identifiers.
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_-]*", |lex| lex.slice().to_owned(), priority = 1)]
    Identifier(String),
    // Special tokens
    #[token("\n")]
    Newline,
    // `#`-to-end-of-line comment; callback strips the leading `#`.
    // NOTE(review): `#!...` also matches this pattern; disambiguation
    // from Shebang relies on logos pattern priority — confirm.
    #[regex(r"#[^\n]*", comment_literal)]
    CommentLine(String),
    #[regex(r"#![^\n]*", shebang_literal)]
    Shebang(String),
    // Error token: never produced by logos itself; substituted by
    // `tokenize_with_errors` for unlexable input.
    Error,
}
impl fmt::Display for TokenKind {
    /// Human-readable token names for diagnostics.
    ///
    /// Literal-carrying variants print their category (not their value);
    /// any variant without an explicit arm falls back to its `Debug`
    /// representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TokenKind::Table => write!(f, "table"),
            TokenKind::Chain => write!(f, "chain"),
            TokenKind::Rule => write!(f, "rule"),
            // Braces must be doubled to escape them in a format string.
            TokenKind::LeftBrace => write!(f, "{{"),
            TokenKind::RightBrace => write!(f, "}}"),
            TokenKind::Identifier(_) => write!(f, "identifier"),
            TokenKind::StringLiteral(_) => write!(f, "string"),
            TokenKind::NumberLiteral(_) => write!(f, "number"),
            TokenKind::IpAddress(_) => write!(f, "ip_address"),
            TokenKind::Ipv6Address(_) => write!(f, "ipv6_address"),
            TokenKind::MacAddress(_) => write!(f, "mac_address"),
            TokenKind::Newline => write!(f, "newline"),
            TokenKind::CommentLine(_) => write!(f, "comment"),
            TokenKind::Shebang(_) => write!(f, "shebang"),
            TokenKind::Error => write!(f, "error"),
            _ => write!(f, "{:?}", self),
        }
    }
}
/// Logos callback for `StringLiteral`.
///
/// Strips the surrounding quotes and resolves the two escape sequences
/// the language uses, `\"` and `\\`; any other backslash escape (e.g.
/// `\n`) is preserved verbatim, matching the previous behaviour.
///
/// Rewritten as one left-to-right pass instead of two chained
/// `str::replace` calls: that avoids an intermediate allocation and
/// cannot mis-pair a backslash already consumed by a preceding `\\`.
fn string_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    // The token regex guarantees the slice starts and ends with '"'.
    let inner = &slice[1..slice.len() - 1];
    let mut out = String::with_capacity(inner.len());
    let mut chars = inner.chars();
    while let Some(c) = chars.next() {
        if c == '\\' {
            match chars.next() {
                Some('"') => out.push('"'),
                Some('\\') => out.push('\\'),
                // Unknown escape: keep both characters untouched.
                Some(other) => {
                    out.push('\\');
                    out.push(other);
                }
                // Trailing lone backslash cannot occur per the regex,
                // but keep it rather than drop input.
                None => out.push('\\'),
            }
        } else {
            out.push(c);
        }
    }
    out
}
/// Logos callback for `NumberLiteral`: parses the digit run as `u64`.
/// Returns `None` (turning the match into a lexer error) only on
/// overflow, since the regex admits nothing but ASCII digits.
fn number_literal(lex: &mut Lexer<TokenKind>) -> Option<u64> {
    lex.slice().parse().ok()
}
/// Logos callback for `Ipv6Address`: keeps the matched text after a
/// loose sanity check (contains `::` or at least two `:`).
/// NOTE(review): every slice the token regex can produce already
/// satisfies this check, so the `None` branch looks unreachable —
/// confirm whether stricter RFC 4291 validation was intended here.
fn ipv6_address(lex: &mut Lexer<TokenKind>) -> Option<String> {
    let slice = lex.slice();
    // Basic validation for IPv6 address format
    if slice.contains("::") || slice.matches(':').count() >= 2 {
        Some(slice.to_owned())
    } else {
        None
    }
}
/// Logos callback for `CommentLine`: returns the comment text without
/// its leading `#` (leading whitespace after the `#` is kept).
fn comment_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    slice[1..].to_owned() // Remove the '#' prefix
}

/// Logos callback for `Shebang`: returns the interpreter line without
/// its leading `#!`.
fn shebang_literal(lex: &mut Lexer<TokenKind>) -> String {
    let slice = lex.slice();
    slice[2..].to_owned() // Remove the '#!' prefix
}
/// A token with its kind and range in the source text
///
/// `text` duplicates the source slice covered by `range` so tokens stay
/// usable without the original source string.
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    pub kind: TokenKind,
    // Byte range in the original source.
    pub range: TextRange,
    pub text: String,
}

impl Token {
    /// Bundles a kind, its source range and the covered text.
    pub fn new(kind: TokenKind, range: TextRange, text: String) -> Self {
        Self { kind, range, text }
    }
}
/// Tokenizer for nftables configuration files
pub struct NftablesLexer<'a> {
    // Underlying logos lexer over `source`.
    lexer: Lexer<'a, TokenKind>,
    // Kept alongside so spans can be sliced back into text.
    source: &'a str,
}

impl<'a> NftablesLexer<'a> {
    /// Creates a lexer over `source`; no work happens until a
    /// `tokenize*` method is called.
    pub fn new(source: &'a str) -> Self {
        Self {
            lexer: TokenKind::lexer(source),
            source,
        }
    }

    /// Tokenize the source text, returning all tokens or the first error encountered
    ///
    /// Each lexer can be consumed once: the underlying logos lexer is
    /// advanced and not reset. Errors are classified heuristically from
    /// the failing slice's text (see below).
    pub fn tokenize(&mut self) -> LexResult<Vec<Token>> {
        let mut tokens = Vec::new();
        while let Some(result) = self.lexer.next() {
            let span = self.lexer.span();
            let text = &self.source[span.clone()];
            // Byte offsets narrowed to u32 as required by text-size.
            let range = TextRange::new(
                TextSize::from(span.start as u32),
                TextSize::from(span.end as u32)
            );
            match result {
                Ok(kind) => {
                    tokens.push(Token::new(kind, range, text.to_owned()));
                }
                Err(_) => {
                    // Analyze the text to determine specific error type.
                    // NOTE(review): this assumes the error slice covers
                    // the offending text (e.g. an opening quote for an
                    // unterminated string) — confirm logos' span
                    // behaviour on error.
                    if text.starts_with('"') && !text.ends_with('"') {
                        return Err(LexError::UnterminatedString {
                            position: span.start,
                        });
                    } else if text.chars().any(|c| c.is_ascii_digit()) &&
                        text.chars().any(|c| !c.is_ascii_digit() && c != '.' && c != 'x' && c != 'X') {
                        return Err(LexError::InvalidNumber {
                            text: text.to_owned(),
                        });
                    } else {
                        return Err(LexError::InvalidToken {
                            position: span.start,
                            text: text.to_owned(),
                        });
                    }
                }
            }
        }
        Ok(tokens)
    }

    /// Get all tokens including error tokens for error recovery
    ///
    /// Unlexable input is emitted as `TokenKind::Error` instead of
    /// aborting, so downstream consumers can keep going (debug mode).
    pub fn tokenize_with_errors(&mut self) -> Vec<Token> {
        let mut tokens = Vec::new();
        while let Some(result) = self.lexer.next() {
            let span = self.lexer.span();
            let text = &self.source[span.clone()];
            let range = TextRange::new(
                TextSize::from(span.start as u32),
                TextSize::from(span.end as u32)
            );
            // Map lex failures to the sentinel Error token.
            let kind = result.unwrap_or(TokenKind::Error);
            tokens.push(Token::new(kind, range, text.to_owned()));
        }
        tokens
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Keywords and punctuation lex in source order.
    #[test]
    fn test_basic_tokenization() {
        let source = "table inet filter {\n    chain input {\n        type filter hook input priority 0;\n    }\n}";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");
        assert!(!tokens.is_empty());
        assert_eq!(tokens[0].kind, TokenKind::Table);
        assert_eq!(tokens[1].kind, TokenKind::Inet);
        assert_eq!(tokens[2].kind, TokenKind::Filter);
        assert_eq!(tokens[3].kind, TokenKind::LeftBrace);
    }

    // A dotted quad lexes as one IpAddress token, not four numbers.
    #[test]
    fn test_ip_address_tokenization() {
        let source = "192.168.1.1";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");
        assert_eq!(tokens.len(), 1);
        assert!(matches!(tokens[0].kind, TokenKind::IpAddress(_)));
        assert_eq!(tokens[0].text, "192.168.1.1");
    }

    // Comments coexist with ordinary tokens on following lines.
    #[test]
    fn test_comment_tokenization() {
        let source = "# This is a comment\ntable inet test";
        let mut lexer = NftablesLexer::new(source);
        let tokens = lexer.tokenize().expect("Tokenization should succeed");
        assert!(tokens.iter().any(|t| matches!(t.kind, TokenKind::CommentLine(_))));
        assert!(tokens.iter().any(|t| t.kind == TokenKind::Table));
    }

    // An unlexable character surfaces as InvalidToken with its offset.
    #[test]
    fn test_error_handling() {
        let source = "table ∞ filter"; // Invalid character
        let mut lexer = NftablesLexer::new(source);
        let result = lexer.tokenize();
        assert!(result.is_err());
        if let Err(LexError::InvalidToken { position, text }) = result {
            assert_eq!(position, 6); // Position of the invalid character
            // NOTE(review): expecting an empty error slice looks
            // suspicious — one would expect "∞" here; confirm what span
            // logos reports for invalid input.
            assert_eq!(text, "");
        } else {
            panic!("Expected InvalidToken error");
        }
    }
}

View file

@ -1,47 +1,33 @@
use std::fs::{self, File};
use std::io::{self, BufRead, BufReader, Write};
mod ast;
mod cst;
mod lexer;
mod parser;
mod syntax;
use std::fs;
use std::io::{self, Write};
use std::path::Path;
use clap::Parser;
use anyhow::{Context, Result};
use thiserror::Error;
use crate::lexer::NftablesLexer;
use crate::parser::Parser as NftablesParser;
use crate::syntax::{FormatConfig, IndentStyle, NftablesFormatter};
use crate::cst::CstBuilder;
#[derive(Error, Debug)]
enum FormatterError {
#[error("File not found: {0}")]
FileNotFound(String),
#[error("Invalid file: {0}")]
InvalidFile(String),
#[error("Parse error: {0}")]
ParseError(String),
#[error("IO error: {0}")]
Io(#[from] io::Error),
}
#[derive(Debug, Clone, Copy)]
enum IndentStyle {
Tabs,
Spaces,
}
impl IndentStyle {
fn format(&self, level: usize, spaces_per_level: usize) -> String {
match self {
Self::Tabs => "\t".repeat(level),
Self::Spaces => " ".repeat(spaces_per_level * level),
}
}
}
impl std::str::FromStr for IndentStyle {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"tabs" | "tab" => Ok(Self::Tabs),
"spaces" | "space" => Ok(Self::Spaces),
_ => Err(format!("Invalid indent style: {}. Use 'tabs' or 'spaces'", s)),
}
}
}
#[derive(Parser, Debug)]
#[command(
name = "nff",
@ -69,87 +55,14 @@ struct Args {
/// Number of spaces per indentation level (only used with --indent=spaces)
#[arg(long, default_value = "2", value_name = "N")]
spaces: usize,
}
struct NftablesFormatter {
indent_style: IndentStyle,
spaces_per_level: usize,
optimize: bool,
}
/// Show debug information (tokens, AST, etc.)
#[arg(long)]
debug: bool,
impl NftablesFormatter {
fn new(indent_style: IndentStyle, spaces_per_level: usize, optimize: bool) -> Self {
Self {
indent_style,
spaces_per_level,
optimize,
}
}
fn format_lines(&self, lines: Vec<String>) -> Vec<String> {
let mut output_lines = Vec::new();
let mut level = 0;
let mut prev_was_empty = false;
for (i, line) in lines.iter().enumerate() {
let line = line.trim();
// Handle empty lines
if line.is_empty() {
if self.optimize {
if prev_was_empty {
continue;
}
prev_was_empty = true;
} else {
prev_was_empty = false;
}
output_lines.push(String::new());
continue;
} else {
prev_was_empty = false;
}
// Skip lines that contain both opening and closing braces (single-line blocks)
if line.contains('{') && line.contains('}') {
continue;
}
// Adjust indentation level before formatting if this line closes a block
if line.ends_with('}') || line == "}" {
if level > 0 {
level -= 1;
}
}
// Generate indentation
let indentation = self.indent_style.format(level, self.spaces_per_level);
// Format the line
let formatted_line = format!("{}{}", indentation, line);
// Skip empty lines before closing braces if optimizing
if self.optimize && i > 0 && lines[i-1].trim().is_empty() {
if line.ends_with('}') || line == "}" {
// Remove the last empty line
if let Some(last) = output_lines.last() {
if last.trim().is_empty() {
output_lines.pop();
}
}
}
}
output_lines.push(formatted_line);
// Adjust indentation level after formatting if this line opens a block
if line.ends_with('{') {
level += 1;
}
}
output_lines
}
/// Check syntax only, don't format
#[arg(long)]
check: bool,
}
fn process_nftables_config(args: Args) -> Result<()> {
@ -162,33 +75,92 @@ fn process_nftables_config(args: Args) -> Result<()> {
return Err(FormatterError::InvalidFile("Not a regular file".to_string()).into());
}
let file = File::open(&args.file)
.with_context(|| format!("Failed to open file: {}", args.file))?;
// Read file contents
let source = fs::read_to_string(&args.file)
.with_context(|| format!("Failed to read file: {}", args.file))?;
let reader = BufReader::new(file);
let lines: Result<Vec<String>, io::Error> = reader.lines().collect();
let lines = lines.with_context(|| "Failed to read file contents")?;
let formatter = NftablesFormatter::new(args.indent, args.spaces, args.optimize);
let formatted_lines = formatter.format_lines(lines);
// Create output content
let output_content = formatted_lines.join("\n");
let output_content = if !output_content.ends_with('\n') && !output_content.is_empty() {
format!("{}\n", output_content)
// Tokenize
let mut lexer = NftablesLexer::new(&source);
let tokens = if args.debug {
// Use error-recovery tokenization for debug mode
lexer.tokenize_with_errors()
} else {
output_content
lexer.tokenize()
.map_err(|e| FormatterError::ParseError(e.to_string()))?
};
if args.debug {
eprintln!("=== TOKENS ===");
for (i, token) in tokens.iter().enumerate() {
eprintln!("{:3}: {:?} @ {:?} = '{}'", i, token.kind, token.range, token.text);
}
eprintln!();
// Build and validate CST
eprintln!("=== CST ===");
let cst_tree = CstBuilder::build_tree(&tokens);
match CstBuilder::validate_tree(&cst_tree) {
Ok(()) => eprintln!("CST validation passed"),
Err(e) => eprintln!("CST validation error: {}", e),
}
// Also test parse_to_cst
match CstBuilder::parse_to_cst(&tokens) {
Ok(_) => eprintln!("CST parsing successful"),
Err(e) => eprintln!("CST parsing error: {}", e),
}
eprintln!();
}
// Parse
let ruleset = if args.debug {
// Use error-recovery parsing for debug mode
let (parsed_ruleset, errors) = NftablesParser::parse_with_errors(&source);
if !errors.is_empty() {
eprintln!("=== PARSE ERRORS ===");
for error in &errors {
eprintln!("Parse error: {}", error);
}
eprintln!();
}
parsed_ruleset.unwrap_or_else(|| crate::ast::Ruleset::new())
} else {
let mut parser = NftablesParser::new(tokens.clone());
parser.parse()
.map_err(|e| FormatterError::ParseError(e.to_string()))?
};
if args.debug {
eprintln!("=== AST ===");
eprintln!("{:#?}", ruleset);
eprintln!();
}
if args.check {
println!("Syntax check passed for: {}", args.file);
return Ok(());
}
// Format
let config = FormatConfig {
indent_style: args.indent,
spaces_per_level: args.spaces,
optimize: args.optimize,
max_empty_lines: if args.optimize { 1 } else { 2 },
};
let formatter = NftablesFormatter::new(config);
let formatted_output = formatter.format_ruleset(&ruleset);
// Write output
match &args.output {
Some(output_file) => {
fs::write(output_file, output_content)
fs::write(output_file, &formatted_output)
.with_context(|| format!("Failed to write to output file: {}", output_file))?;
println!("Formatted output written to: {}", output_file);
}
None => {
io::stdout().write_all(output_content.as_bytes())
io::stdout().write_all(formatted_output.as_bytes())
.with_context(|| "Failed to write to stdout")?;
}
}
@ -198,5 +170,19 @@ fn process_nftables_config(args: Args) -> Result<()> {
/// Entry point: parse CLI arguments, run the formatter, and report the
/// full error chain (with a non-zero exit code) on failure.
fn main() -> Result<()> {
    let args = Args::parse();
    // BUG FIX: a stale pre-refactor call to `process_nftables_config(args)`
    // (a leftover removed diff line) preceded this `if let`; it was invalid
    // Rust and would have consumed `args` / run the pipeline twice.
    if let Err(e) = process_nftables_config(args) {
        eprintln!("Error: {}", e);
        // Print the error chain
        let mut current = e.source();
        while let Some(cause) = current {
            eprintln!("  Caused by: {}", cause);
            current = cause.source();
        }
        std::process::exit(1);
    }
    Ok(())
}

1044
src/parser.rs Normal file

File diff suppressed because it is too large Load diff

316
src/syntax.rs Normal file
View file

@ -0,0 +1,316 @@
use crate::ast::*;
use std::fmt::Write;
/// Configuration for formatting output
#[derive(Debug, Clone)]
pub struct FormatConfig {
    /// Whether indentation is rendered as tabs or spaces.
    pub indent_style: IndentStyle,
    /// Spaces emitted per nesting level; only consulted when
    /// `indent_style` is `Spaces`.
    pub spaces_per_level: usize,
    /// When true, blank-line runs are collapsed to single separators.
    pub optimize: bool,
    /// Maximum number of consecutive empty lines emitted by separators
    /// (ignored in optimize mode).
    pub max_empty_lines: usize,
}
impl Default for FormatConfig {
fn default() -> Self {
Self {
indent_style: IndentStyle::Tabs,
spaces_per_level: 2,
optimize: false,
max_empty_lines: 1,
}
}
}
/// The two supported indentation flavors.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IndentStyle {
    Tabs,
    Spaces,
}

impl IndentStyle {
    /// Render the whitespace prefix for `level` nesting levels.
    ///
    /// Tabs emit one `\t` per level; spaces emit
    /// `spaces_per_level * level` blanks.
    pub fn format(&self, level: usize, spaces_per_level: usize) -> String {
        let (unit, count) = match self {
            IndentStyle::Tabs => ("\t", level),
            IndentStyle::Spaces => (" ", level * spaces_per_level),
        };
        unit.repeat(count)
    }
}
/// Formatter for nftables AST
pub struct NftablesFormatter {
    /// Controls indentation style/width and blank-line policy.
    config: FormatConfig,
}
impl NftablesFormatter {
    /// Create a formatter that renders according to `config`.
    pub fn new(config: FormatConfig) -> Self {
        Self { config }
    }

    /// Add appropriate number of empty lines based on configuration.
    ///
    /// Optimize mode emits a single newline (no empty-line run); otherwise
    /// `max_empty_lines + 1` newlines are emitted, producing up to
    /// `max_empty_lines` visible empty lines.
    fn add_separator(&self, output: &mut String) {
        if self.config.optimize {
            output.push('\n');
        } else {
            // Add newlines based on max_empty_lines setting
            for _ in 0..=self.config.max_empty_lines {
                output.push('\n');
            }
        }
    }

    /// Render an entire ruleset: shebang, includes, defines, then tables.
    /// The returned string always ends with a newline.
    pub fn format_ruleset(&self, ruleset: &Ruleset) -> String {
        let mut output = String::new();

        // Format shebang
        if let Some(shebang) = &ruleset.shebang {
            writeln!(output, "#!{}", shebang).unwrap();
        }

        // Format includes
        for include in &ruleset.includes {
            self.format_include(&mut output, include, 0);
        }
        // Add separator if we have includes
        if !ruleset.includes.is_empty() {
            self.add_separator(&mut output);
        }

        // Format defines
        for define in &ruleset.defines {
            self.format_define(&mut output, define, 0);
        }
        // Add separator if we have defines
        if !ruleset.defines.is_empty() {
            self.add_separator(&mut output);
        }

        // Format tables, separated from each other but with no trailing
        // separator after the last one
        let mut table_iter = ruleset.tables.values().peekable();
        while let Some(table) = table_iter.next() {
            self.format_table(&mut output, table, 0);
            if table_iter.peek().is_some() {
                self.add_separator(&mut output);
            }
        }

        // Ensure file ends with newline
        if !output.ends_with('\n') {
            output.push('\n');
        }

        output
    }

    /// Emit `include "<path>"` at the given nesting level.
    fn format_include(&self, output: &mut String, include: &Include, level: usize) {
        let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
        writeln!(output, "{}include \"{}\"", indent, include.path).unwrap();
    }

    /// Emit `define <name> = <value>` at the given nesting level.
    fn format_define(&self, output: &mut String, define: &Define, level: usize) {
        let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
        write!(output, "{}define {} = ", indent, define.name).unwrap();
        self.format_expression(output, &define.value);
        output.push('\n');
    }

    /// Emit a `table <family> <name> { ... }` block containing its chains.
    fn format_table(&self, output: &mut String, table: &Table, level: usize) {
        let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
        writeln!(output, "{}table {} {} {{", indent, table.family, table.name).unwrap();

        // Format chains, separated from each other but with no trailing
        // separator after the last one
        let mut chain_iter = table.chains.values().peekable();
        while let Some(chain) = chain_iter.next() {
            self.format_chain(output, chain, level + 1);
            if chain_iter.peek().is_some() {
                self.add_separator(output);
            }
        }

        writeln!(output, "{}}}", indent).unwrap();
    }

    /// Emit a `chain <name> { ... }` block: the optional
    /// type/hook/priority/policy header line, then the rules.
    fn format_chain(&self, output: &mut String, chain: &Chain, level: usize) {
        let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
        writeln!(output, "{}chain {} {{", indent, chain.name).unwrap();

        // Format chain properties
        if let Some(chain_type) = &chain.chain_type {
            write!(output, "{}type {}",
                self.config.indent_style.format(level + 1, self.config.spaces_per_level),
                chain_type).unwrap();

            if let Some(hook) = &chain.hook {
                write!(output, " hook {}", hook).unwrap();
            }

            if let Some(priority) = chain.priority {
                write!(output, " priority {}", priority).unwrap();
            }

            // Terminate the type/hook/priority clause; the policy, when
            // present, is its own `;`-terminated clause on the same line
            // (nft syntax: `type filter hook input priority 0; policy accept;`).
            // BUG FIX: both semicolons were previously pushed unconditionally,
            // which produced a doubled `;;` for chains without a policy.
            output.push(';');
            if let Some(policy) = &chain.policy {
                write!(output, " policy {};", policy).unwrap();
            }
            output.push('\n');

            // Blank line between the header and the first rule, unless
            // optimizing or there are no rules.
            if !chain.rules.is_empty() && !self.config.optimize {
                output.push('\n');
            }
        }

        // Format rules
        for (i, rule) in chain.rules.iter().enumerate() {
            // Add spacing between rules (but not before first rule)
            if i > 0 && !self.config.optimize && self.config.max_empty_lines > 0 {
                output.push('\n');
            }
            self.format_rule(output, rule, level + 1);
        }

        writeln!(output, "{}}}", indent).unwrap();
    }

    /// Emit one rule: its match expressions separated by spaces, then the
    /// action, then a newline.
    fn format_rule(&self, output: &mut String, rule: &Rule, level: usize) {
        let indent = self.config.indent_style.format(level, self.config.spaces_per_level);
        write!(output, "{}", indent).unwrap();

        // Format expressions
        for (i, expr) in rule.expressions.iter().enumerate() {
            if i > 0 {
                output.push(' ');
            }
            self.format_expression(output, expr);
        }

        // Add action
        if !rule.expressions.is_empty() {
            output.push(' ');
        }
        write!(output, "{}", rule.action).unwrap();
        output.push('\n');
        // Blank lines between rules are handled by format_chain, not here.
    }

    /// Recursively render a single expression into `output`.
    fn format_expression(&self, output: &mut String, expr: &Expression) {
        match expr {
            Expression::Identifier(name) => write!(output, "{}", name).unwrap(),
            Expression::String(s) => write!(output, "\"{}\"", s).unwrap(),
            Expression::Number(n) => write!(output, "{}", n).unwrap(),
            Expression::IpAddress(addr) => write!(output, "{}", addr).unwrap(),
            Expression::Ipv6Address(addr) => write!(output, "{}", addr).unwrap(),
            Expression::MacAddress(addr) => write!(output, "{}", addr).unwrap(),
            Expression::Binary { left, operator, right } => {
                self.format_expression(output, left);
                write!(output, " {} ", operator).unwrap();
                self.format_expression(output, right);
            }
            Expression::Protocol(proto) => write!(output, "protocol {}", proto).unwrap(),
            Expression::Port { direction, value } => {
                match direction {
                    PortDirection::Source => write!(output, "sport ").unwrap(),
                    PortDirection::Destination => write!(output, "dport ").unwrap(),
                }
                self.format_expression(output, value);
            }
            Expression::Address { direction, value } => {
                // Include the protocol family when formatting addresses.
                // NOTE(review): this always emits `ip`, even when `value` is
                // an Ipv6Address — confirm whether `ip6` is needed there.
                write!(output, "ip ").unwrap();
                match direction {
                    AddressDirection::Source => write!(output, "saddr ").unwrap(),
                    AddressDirection::Destination => write!(output, "daddr ").unwrap(),
                }
                self.format_expression(output, value);
            }
            Expression::Interface { direction, name } => {
                match direction {
                    InterfaceDirection::Input => write!(output, "iifname ").unwrap(),
                    InterfaceDirection::Output => write!(output, "oifname ").unwrap(),
                }
                write!(output, "{}", name).unwrap();
            }
            Expression::ConnTrack { field, value } => {
                write!(output, "ct {} ", field).unwrap();
                self.format_expression(output, value);
            }
            Expression::Set(elements) => {
                output.push_str("{ ");
                for (i, element) in elements.iter().enumerate() {
                    if i > 0 {
                        output.push_str(", ");
                    }
                    self.format_expression(output, element);
                }
                output.push_str(" }");
            }
            Expression::Range { start, end } => {
                self.format_expression(output, start);
                output.push('-');
                self.format_expression(output, end);
            }
        }
    }
}
/// Convert from string-based IndentStyle to our enum
impl std::str::FromStr for IndentStyle {
    type Err = String;

    /// Case-insensitive parse accepting singular and plural spellings.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        if normalized == "tabs" || normalized == "tab" {
            Ok(IndentStyle::Tabs)
        } else if normalized == "spaces" || normalized == "space" {
            Ok(IndentStyle::Spaces)
        } else {
            Err(format!("Invalid indent style: {}. Use 'tabs' or 'spaces'", s))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: formatting a minimal one-chain table must not panic
    /// and must produce non-empty output.
    #[test]
    fn test_format_simple_table() {
        let loopback_rule = Rule::new(
            vec![Expression::Interface {
                direction: InterfaceDirection::Input,
                name: "lo".to_string(),
            }],
            Action::Accept,
        );
        let input_chain = Chain::new("input".to_string())
            .with_type(ChainType::Filter)
            .with_hook(Hook::Input)
            .with_priority(0)
            .with_policy(Policy::Accept)
            .add_rule(loopback_rule);
        let table = Table::new(Family::Inet, "test".to_string()).add_chain(input_chain);

        let formatter = NftablesFormatter::new(FormatConfig::default());
        let mut rendered = String::new();
        formatter.format_table(&mut rendered, &table, 0);

        // Just verify it doesn't panic and produces some output
        assert!(!rendered.is_empty());
    }
}