Compare commits

10 commits, `e0f93d0307...92735f3eee`: `92735f3eee`, `3e1003ba94`, `ef10784998`, `c0002f5806`, `728483d84f`, `363f9cac86`, `fc8e42c096`, `0ed74c6a51`, `16586e9927`, `6f08d27a59`

7 changed files with 595 additions and 163 deletions.

**README.md** — 374 changes
The first hunk adds a feature list for the new diagnostics and reworks the CLI documentation around `format` and `lint` subcommands, replacing the old `nff -f` flag style:

````diff
@@ -30,26 +30,68 @@ supported, I cannot guarantee that _everything_ is supported just yet.
 - **Validation** - Syntax checking with precise error locations
 - **Optimization** - Configurable empty line reduction and whitespace control
 
+### Diagnostics & Analysis
+
+- **Comprehensive diagnostics** - Syntax, semantic, style, and best practice
+  analysis
+- **Modular analysis** - Run specific diagnostic modules (`lexical`, `syntax`,
+  `style`, `semantic`)
+- **LSP-compatible output** - JSON format for editor integration
+- **Human-readable reports** - Detailed error messages with context and
+  location information
+- **Configurable severity** - Control which diagnostic categories to
+  enable/disable
+
 ## Usage
 
+### Formatting
+
 ```bash
-# Basic formatting
-nff -f /etc/nftables.conf
+# Format a specific file (in place)
+nff format /etc/nftables.conf
+
+# Format all .nft files in current directory (in place)
+nff format
 
 # Custom indentation (4 spaces)
-nff -f config.nft --indent spaces --spaces 4
+nff format config.nft --indent spaces --spaces 4
 
 # Optimize formatting (reduce empty lines)
-nff -f config.nft --optimize
+nff format config.nft --optimize
 
-# Output to file
-nff -f config.nft -o formatted.nft
+# Output to stdout instead of modifying files
+nff format config.nft --stdout
 
 # Syntax validation only
-nff -f config.nft --check
+nff format config.nft --check
 
 # Debug output for development (or debugging)
-nff -f config.nft --debug
+nff format config.nft --debug
 ```
+
+### Linting and Diagnostics
+
+```bash
+# Run comprehensive diagnostics on a file
+nff lint /etc/nftables.conf
+
+# Lint all .nft files in current directory
+nff lint
+
+# JSON output for editor integration
+nff lint config.nft --json
+
+# Run specific diagnostic modules
+nff lint config.nft --modules syntax,style
+
+# Available modules: lexical, syntax, style, semantic
+nff lint config.nft --modules semantic
+
+# Configure diagnostic settings (note: flags are enabled by default)
+nff lint config.nft --style-warnings=false --best-practices=false
+
+# Debug output with diagnostics
+nff lint config.nft --debug
+```
 
 ## Architecture
````
The second hunk extends the architecture diagram with the diagnostic pipeline and documents Neovim integration, the diagnostic categories, and the JSON output format:

````diff
@@ -69,12 +111,235 @@ graph TD
     AST --> Formatter
     Formatter --> Output
     CST --> Formatter
+
+    Input --> Diagnostics[Diagnostic System]
+    Diagnostics --> LexAnalyzer[Lexical Analyzer]
+    Diagnostics --> SyntaxAnalyzer[Syntax Analyzer]
+    Diagnostics --> StyleAnalyzer[Style Analyzer]
+    Diagnostics --> SemanticAnalyzer[Semantic Analyzer]
+
+    LexAnalyzer --> DiagOutput[JSON/Human Output]
+    SyntaxAnalyzer --> DiagOutput
+    StyleAnalyzer --> DiagOutput
+    SemanticAnalyzer --> DiagOutput
 ```
 
 ## Installation
 
 Recommended way of installing nff is to use Nix.
+
+### Editor Integration
+
+#### Neovim Setup
+
+nff can be integrated into Neovim as a diagnostics source for nftables files.
+Here are several setup approaches:
+
+##### Option 1: Using none-ls
+
+```lua
+local null_ls = require("null-ls")
+
+null_ls.setup({
+  sources = {
+    -- nftables diagnostics
+    null_ls.builtins.diagnostics.nff.with({
+      command = "nff",
+      args = { "lint", "$FILENAME", "--json" },
+      format = "json",
+      check_exit_code = false,
+      filetypes = { "nftables" },
+    }),
+
+    -- nftables formatting
+    null_ls.builtins.formatting.nff.with({
+      command = "nff",
+      args = { "format", "$FILENAME", "--stdout" },
+      filetypes = { "nftables" },
+    }),
+  },
+})
+```
+
+##### Option 2: Using nvim-lint (recommended)
+
+```lua
+-- ~/.config/nvim/lua/config/lint.lua
+require('lint').linters.nff = {
+  cmd = 'nff',
+  stdin = false,
+  args = { 'lint', '%s', '--json' },
+  stream = 'stdout',
+  ignore_exitcode = true,
+  parser = function(output)
+    local diagnostics = {}
+    local ok, decoded = pcall(vim.fn.json_decode, output)
+
+    if not ok or not decoded.diagnostics then
+      return diagnostics
+    end
+
+    for _, diagnostic in ipairs(decoded.diagnostics) do
+      table.insert(diagnostics, {
+        lnum = diagnostic.range.start.line,
+        col = diagnostic.range.start.character,
+        severity = diagnostic.severity == "Error" and vim.diagnostic.severity.ERROR or vim.diagnostic.severity.WARN,
+        message = diagnostic.message,
+        source = "nff",
+        code = diagnostic.code,
+      })
+    end
+
+    return diagnostics
+  end,
+}
+
+-- Set up linting for nftables files
+vim.api.nvim_create_autocmd({ "BufEnter", "BufWritePost" }, {
+  pattern = "*.nft",
+  callback = function()
+    require("lint").try_lint("nff")
+  end,
+})
+```
+
+##### Option 3: Custom Lua Function
+
+For a simple custom solution:
+
+```lua
+-- ~/.config/nvim/lua/nff.lua
+local M = {}
+
+function M.lint_nftables()
+  local filename = vim.fn.expand('%:p')
+  if vim.bo.filetype ~= 'nftables' then
+    return
+  end
+
+  local cmd = { 'nff', 'lint', filename, '--json' }
+
+  vim.fn.jobstart(cmd, {
+    stdout_buffered = true,
+    on_stdout = function(_, data)
+      if data then
+        local output = table.concat(data, '\n')
+        local ok, result = pcall(vim.fn.json_decode, output)
+
+        if ok and result.diagnostics then
+          local diagnostics = {}
+          for _, diag in ipairs(result.diagnostics) do
+            table.insert(diagnostics, {
+              lnum = diag.range.start.line,
+              col = diag.range.start.character,
+              severity = diag.severity == "Error" and vim.diagnostic.severity.ERROR or vim.diagnostic.severity.WARN,
+              message = diag.message,
+              source = "nff",
+            })
+          end
+
+          vim.diagnostic.set(vim.api.nvim_create_namespace('nff'), 0, diagnostics)
+        end
+      end
+    end,
+  })
+end
+
+-- Auto-run on save
+vim.api.nvim_create_autocmd("BufWritePost", {
+  pattern = "*.nft",
+  callback = M.lint_nftables,
+})
+
+return M
+```
+
+## Diagnostic Categories
+
+nff provides comprehensive analysis across multiple categories:
+
+### Syntax Errors
+
+- Parse errors with precise location information
+- Missing tokens (semicolons, braces, etc.)
+- Unexpected tokens
+- Unterminated strings
+- Invalid numbers
+
+### Semantic Validation
+
+- Unknown table families (families other than `inet`, `ip`, `ip6`, etc.)
+- Invalid chain types and hooks
+- Incorrect priority values
+- Missing chain policies
+- Duplicate table/chain names
+- Invalid CIDR notation
+- Invalid port ranges
+
+### Style Warnings
+
+- Missing shebang line
+- Inconsistent indentation (mixed tabs/spaces)
+- Trailing whitespace
+- Lines exceeding maximum length (configurable)
+- Excessive empty lines
+- Preferred syntax alternatives
+
+### Best Practices
+
+- Chains without explicit policies
+- Rules without actions
+- Overly permissive rules
+- Duplicate or conflicting rules
+- Unused variables or sets
+- Deprecated syntax usage
+- Missing documentation
+- Security risks
+
+### Performance Hints
+
+- Inefficient rule ordering
+- Large sets without timeouts
+- Missing counters where beneficial
+
+## JSON Output Format
+
+When using `--json`, nff outputs LSP-compatible diagnostics:
+
+```json
+{
+  "diagnostics": [
+    {
+      "range": {
+        "start": { "line": 5, "character": 10 },
+        "end": { "line": 5, "character": 20 }
+      },
+      "severity": "Error",
+      "code": "NFT001",
+      "source": "nff",
+      "message": "Expected ';' after policy",
+      "related_information": [],
+      "code_actions": [],
+      "tags": []
+    }
+  ],
+  "file_path": "config.nft",
+  "source_text": "..."
+}
+```
````
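For consumers of this `--json` mode, here is a minimal deserialization sketch, assuming the `serde` and `serde_json` crates; the struct and field names mirror the JSON example above, not nff's internal types:

```rust
use serde::Deserialize;

// Field names follow the JSON example above. Fields not declared here
// (`related_information`, `code_actions`, `tags`) are ignored by serde
// by default, so they are simply omitted from this sketch.
#[derive(Debug, Deserialize)]
struct LintOutput {
    diagnostics: Vec<Diagnostic>,
    file_path: String,
    source_text: String,
}

#[derive(Debug, Deserialize)]
struct Diagnostic {
    range: Range,
    severity: String, // "Error" or "Warning" in the examples shown
    code: String,     // e.g. "NFT001"
    source: String,   // "nff"
    message: String,
}

#[derive(Debug, Deserialize)]
struct Range {
    start: Position,
    end: Position,
}

#[derive(Debug, Deserialize)]
struct Position {
    line: u32,      // zero-based, as in the LSP
    character: u32,
}

// Parse a full `nff lint --json` payload.
fn parse_lint_output(json: &str) -> serde_json::Result<LintOutput> {
    serde_json::from_str(json)
}
```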
The hunk continues with the code numbering scheme:

```diff
+
+### Diagnostic Codes
+
+nff uses structured diagnostic codes for categorization:
+
+- **NFT001-NFT099**: Syntax errors
+- **NFT101-NFT199**: Semantic errors
+- **NFT201-NFT299**: Style warnings
+- **NFT301-NFT399**: Best practice recommendations
+- **NFT401-NFT499**: Performance hints
+- **NFT501-NFT599**: Formatting issues
+- **NFT601-NFT699**: nftables-specific validations
```
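The ranges are regular enough that a consumer can classify a code string numerically. A purely illustrative sketch (nff's own `DiagnosticCode` enum, which appears later in this diff, may organize this differently):

```rust
// Map an "NFTxxx" code string onto the category ranges documented above.
// Illustrative only; this is not nff's internal representation.
fn category_of(code: &str) -> Option<&'static str> {
    let n: u16 = code.strip_prefix("NFT")?.parse().ok()?;
    Some(match n {
        1..=99 => "syntax error",
        101..=199 => "semantic error",
        201..=299 => "style warning",
        301..=399 => "best practice recommendation",
        401..=499 => "performance hint",
        501..=599 => "formatting issue",
        601..=699 => "nftables-specific validation",
        _ => return None,
    })
}
```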
A third README hunk adds worked examples of the lint output:

````diff
 
 ## Development
 
 ### Testing
@@ -196,6 +461,88 @@ table inet protection {
 }
 ```
+
+## Diagnostics Examples
+
+### Error Detection
+
+Input file with issues:
+
+```nftables
+table inet firewall {
+  chain input {
+    type filter hook input priority 100
+    tcp dport 22 accept
+  }
+}
+```
+
+Human-readable output:
+
+```
+Found 2 issues in config.nft:
+config.nft:3:37: error: Expected ';' after policy [NFT001]
+   1: table inet firewall {
+   2:   chain input {
+→  3:     type filter hook input priority 100
+   4:     tcp dport 22 accept
+   5:   }
+
+config.nft:3:1: warning: Filter chain should have an explicit policy [NFT301]
+   1: table inet firewall {
+   2:   chain input {
+→  3:     type filter hook input priority 100
+   4:     tcp dport 22 accept
+   5:   }
+```
+
+JSON output:
+
+```json
+{
+  "diagnostics": [
+    {
+      "range": {
+        "start": { "line": 2, "character": 37 },
+        "end": { "line": 2, "character": 37 }
+      },
+      "severity": "Error",
+      "code": "NFT001",
+      "source": "nff",
+      "message": "Expected ';' after policy"
+    },
+    {
+      "range": {
+        "start": { "line": 2, "character": 0 },
+        "end": { "line": 2, "character": 37 }
+      },
+      "severity": "Warning",
+      "code": "NFT301",
+      "source": "nff",
+      "message": "Filter chain should have an explicit policy"
+    }
+  ],
+  "file_path": "config.nft",
+  "source_text": "..."
+}
+```
+
+### Style Analysis
+
+Input with style issues:
+
+```nftables
+table inet test{chain input{type filter hook input priority 0;policy drop;tcp dport 22 accept;}}
+```
+
+Style warnings:
+
+```
+Found 3 issues in style.nft:
+style.nft:1:1: warning: Consider adding a shebang line [NFT201]
+style.nft:1:121: warning: Line too long (98 > 80 characters) [NFT205]
+style.nft:1:16: warning: Missing space after '{' [NFT503]
+```
````
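The human-readable and JSON outputs above describe the same diagnostics; note that JSON line numbers are zero-based while the human-readable prefix is one-based (`"line": 2` prints as `config.nft:3:…`). A sketch of the mapping, reusing the `Diagnostic` struct sketched earlier; the examples are ambiguous about the column base (character 37 prints as 37, but 0 prints as 1), so the column is passed through as-is:

```rust
// Illustrative only: reconstruct the `file:line:col: severity: message [CODE]`
// prefix from a JSON diagnostic. Lines are converted from zero- to one-based;
// the column conversion is unclear from the examples, so it is not adjusted.
fn human_prefix(file: &str, d: &Diagnostic) -> String {
    format!(
        "{}:{}:{}: {}: {} [{}]",
        file,
        d.range.start.line + 1,
        d.range.start.character,
        d.severity.to_lowercase(),
        d.message,
        d.code,
    )
}
```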
```diff
 
 ## Contributing
 
 ### Code Style
```
The final README hunk documents the design of the diagnostic system:

```diff
@@ -237,6 +584,17 @@ Below are the design goals of nff's architecture.
 - **Memory efficiency**: Streaming token processing where possible
 - **Grammar completeness**: Covers full nftables syntax specification
+
+### Diagnostic Architecture
+
+The diagnostic system uses a modular architecture with specialized analyzers:
+
+- **Modular design**: Each analyzer focuses on specific concerns (lexical,
+  syntax, style, semantic)
+- **Configurable analysis**: Enable/disable specific diagnostic categories
+- **LSP compatibility**: JSON output follows Language Server Protocol standards
+- **Performance optimized**: Concurrent analysis when possible
+- **Extensible**: Easy to add new diagnostic rules and categories
+
```
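The source diff further down shows `impl AnalyzerModule for LexicalAnalyzer`, so the analyzers listed above plug into a shared trait. Its exact shape is not visible in this view; a hypothetical sketch, where only the trait name and that one implementor are confirmed by the diff:

```rust
// Hypothetical shape of the shared analyzer interface.
struct Diagnostic; // placeholder for the crate's Diagnostic type

trait AnalyzerModule {
    /// Name used for `--modules` selection: "lexical", "syntax", ...
    fn name(&self) -> &'static str;
    /// Scan the source text and return any findings.
    fn analyze(&self, source: &str) -> Vec<Diagnostic>;
}
```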
```diff
 
 ## License
 
 nff is licensed under [MPL v2.0](LICENSE). See license file for more details on
```
**Nix dev shell** (file name and change count not captured in this view) — adds `cargo-machete` and `cargo-nextest` to the development environment:

```diff
@@ -4,6 +4,8 @@
   rustfmt,
   clippy,
   cargo,
+  cargo-machete,
+  cargo-nextest,
   rustPlatform,
 }:
 mkShell {
@@ -13,6 +15,8 @@ mkShell {
     rustfmt
     clippy
     cargo
+    cargo-machete
+    cargo-nextest
   ];
 
   RUST_SRC_PATH = "${rustPlatform.rustLibSrc}";
```
**src/cst.rs** — 244 changes. Gives every `SyntaxKind` variant an explicit discriminant, moves the "additional protocol keywords" block to the end of the enum, and replaces the unsound `transmute` in `from_raw` with an exhaustive match:

```diff
@@ -13,150 +13,150 @@ use thiserror::Error;
 pub enum SyntaxKind {
     // Root and containers
     Root = 0,
-    Table,
-    Chain,
-    Rule,
-    Set,
-    Map,
-    Element,
+    Table = 1,
+    Chain = 2,
+    Rule = 3,
+    Set = 4,
+    Map = 5,
+    Element = 6,
 
     // Expressions
-    Expression,
-    BinaryExpr,
-    UnaryExpr,
-    CallExpr,
-    SetExpr,
-    RangeExpr,
+    Expression = 7,
+    BinaryExpr = 8,
+    UnaryExpr = 9,
+    CallExpr = 10,
+    SetExpr = 11,
+    RangeExpr = 12,
 
     // Statements
-    Statement,
-    IncludeStmt,
-    DefineStmt,
-    FlushStmt,
-    AddStmt,
-    DeleteStmt,
+    Statement = 13,
+    IncludeStmt = 14,
+    DefineStmt = 15,
+    FlushStmt = 16,
+    AddStmt = 17,
+    DeleteStmt = 18,
 
     // Literals and identifiers
-    Identifier,
-    StringLiteral,
-    NumberLiteral,
-    IpAddress,
-    Ipv6Address,
-    MacAddress,
+    Identifier = 19,
+    StringLiteral = 20,
+    NumberLiteral = 21,
+    IpAddress = 22,
+    Ipv6Address = 23,
+    MacAddress = 24,
 
     // Keywords
-    TableKw,
-    ChainKw,
-    RuleKw,
-    SetKw,
-    MapKw,
-    ElementKw,
-    IncludeKw,
-    DefineKw,
-    FlushKw,
-    AddKw,
-    DeleteKw,
-    InsertKw,
-    ReplaceKw,
+    TableKw = 25,
+    ChainKw = 26,
+    RuleKw = 27,
+    SetKw = 28,
+    MapKw = 29,
+    ElementKw = 30,
+    IncludeKw = 31,
+    DefineKw = 32,
+    FlushKw = 33,
+    AddKw = 34,
+    DeleteKw = 35,
+    InsertKw = 36,
+    ReplaceKw = 37,
 
     // Chain types and hooks
-    FilterKw,
-    NatKw,
-    RouteKw,
-    InputKw,
-    OutputKw,
-    ForwardKw,
-    PreroutingKw,
-    PostroutingKw,
+    FilterKw = 38,
+    NatKw = 39,
+    RouteKw = 40,
+    InputKw = 41,
+    OutputKw = 42,
+    ForwardKw = 43,
+    PreroutingKw = 44,
+    PostroutingKw = 45,
 
     // Protocols and families
-    IpKw,
-    Ip6Kw,
-    InetKw,
-    ArpKw,
-    BridgeKw,
-    NetdevKw,
-    TcpKw,
-    UdpKw,
-    IcmpKw,
-    Icmpv6Kw,
+    IpKw = 46,
+    Ip6Kw = 47,
+    InetKw = 48,
+    ArpKw = 49,
+    BridgeKw = 50,
+    NetdevKw = 51,
+    TcpKw = 52,
+    UdpKw = 53,
+    IcmpKw = 54,
+    Icmpv6Kw = 55,
 
     // Match keywords
-    SportKw,
-    DportKw,
-    SaddrKw,
-    DaddrKw,
-    ProtocolKw,
-    NexthdrKw,
-    TypeKw,
-    HookKw,
-    PriorityKw,
-    PolicyKw,
-    IifnameKw,
-    OifnameKw,
-    CtKw,
-    StateKw,
+    SportKw = 56,
+    DportKw = 57,
+    SaddrKw = 58,
+    DaddrKw = 59,
+    ProtocolKw = 60,
+    NexthdrKw = 61,
+    TypeKw = 62,
+    HookKw = 63,
+    PriorityKw = 64,
+    PolicyKw = 65,
+    IifnameKw = 66,
+    OifnameKw = 67,
+    CtKw = 68,
+    StateKw = 69,
 
     // Actions
-    AcceptKw,
-    DropKw,
-    RejectKw,
-    ReturnKw,
-    JumpKw,
-    GotoKw,
-    ContinueKw,
-    LogKw,
-    CommentKw,
+    AcceptKw = 70,
+    DropKw = 71,
+    RejectKw = 72,
+    ReturnKw = 73,
+    JumpKw = 74,
+    GotoKw = 75,
+    ContinueKw = 76,
+    LogKw = 77,
+    CommentKw = 78,
 
     // States
-    EstablishedKw,
-    RelatedKw,
-    NewKw,
-    InvalidKw,
-
-    // Additional protocol keywords
-    VmapKw,
-    NdRouterAdvertKw,
-    NdNeighborSolicitKw,
-    NdNeighborAdvertKw,
-    EchoRequestKw,
-    DestUnreachableKw,
-    RouterAdvertisementKw,
-    TimeExceededKw,
-    ParameterProblemKw,
-    PacketTooBigKw,
+    EstablishedKw = 79,
+    RelatedKw = 80,
+    NewKw = 81,
+    InvalidKw = 82,
 
     // Operators
-    EqOp,
-    NeOp,
-    LeOp,
-    GeOp,
-    LtOp,
-    GtOp,
+    EqOp = 83,
+    NeOp = 84,
+    LeOp = 85,
+    GeOp = 86,
+    LtOp = 87,
+    GtOp = 88,
 
     // Punctuation
-    LeftBrace,
-    RightBrace,
-    LeftParen,
-    RightParen,
-    LeftBracket,
-    RightBracket,
-    Comma,
-    Semicolon,
-    Colon,
-    Assign,
-    Dash,
-    Slash,
-    Dot,
+    LeftBrace = 89,
+    RightBrace = 90,
+    LeftParen = 91,
+    RightParen = 92,
+    LeftBracket = 93,
+    RightBracket = 94,
+    Comma = 95,
+    Semicolon = 96,
+    Colon = 97,
+    Assign = 98,
+    Dash = 99,
+    Slash = 100,
+    Dot = 101,
 
     // Trivia
-    Whitespace,
-    Newline,
-    Comment,
-    Shebang,
+    Whitespace = 102,
+    Newline = 103,
+    Comment = 104,
+    Shebang = 105,
 
     // Error recovery
-    Error,
+    Error = 106,
+
+    // Additional protocol keywords
+    VmapKw = 107,
+    NdRouterAdvertKw = 108,
+    NdNeighborSolicitKw = 109,
+    NdNeighborAdvertKw = 110,
+    EchoRequestKw = 111,
+    DestUnreachableKw = 112,
+    RouterAdvertisementKw = 113,
+    TimeExceededKw = 114,
+    ParameterProblemKw = 115,
+    PacketTooBigKw = 116,
 }
 
 impl From<TokenKind> for SyntaxKind {
@@ -324,7 +324,13 @@ impl SyntaxKind {
     }
 
     pub fn from_raw(raw: RawSyntaxKind) -> Self {
-        unsafe { std::mem::transmute(raw.0 as u16) }
+        match raw.0 {
+            0 => SyntaxKind::Root,
+            1 => SyntaxKind::Table,
+            // ... other variants ...
+            116 => SyntaxKind::PacketTooBigKw,
+            _ => SyntaxKind::Error, // Fallback to Error for invalid values
+        }
     }
 }
```
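The `from_raw` change deserves a note: the old `unsafe { std::mem::transmute(raw.0 as u16) }` is undefined behavior whenever `raw.0` has no corresponding variant, which is exactly the case the new `_ => SyntaxKind::Error` arm handles, and the explicit discriminants pin the numbering so the match and the enum cannot drift apart silently. A hypothetical regression test, assuming a cstree-style `RawSyntaxKind(u32)` wrapper and that `SyntaxKind` derives `Debug` and `PartialEq` (neither derive is visible in this diff):

```rust
#[cfg(test)]
mod from_raw_tests {
    use super::*;

    #[test]
    fn from_raw_is_total() {
        // In-range values map to their explicit discriminants.
        assert_eq!(SyntaxKind::from_raw(RawSyntaxKind(1)), SyntaxKind::Table);
        assert_eq!(
            SyntaxKind::from_raw(RawSyntaxKind(116)),
            SyntaxKind::PacketTooBigKw
        );
        // Out-of-range values fall back to Error instead of being
        // transmuted into an invalid enum value (undefined behavior).
        assert_eq!(SyntaxKind::from_raw(RawSyntaxKind(9999)), SyntaxKind::Error);
    }
}
```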
**src/diagnostic.rs** (file name and change count not captured; the name is inferred from the `crate::diagnostic` import in the lexer tests below) — makes `lex_error_to_diagnostic` public, uses the lexer's new position information for `InvalidNumber` instead of searching the source for the first matching text, reports excessive trailing empty lines at end of file, and always reports mixed indentation (informational when a preferred style is configured, otherwise a warning):

```diff
@@ -453,7 +453,7 @@ impl AnalyzerModule for LexicalAnalyzer {
 }
 
 impl LexicalAnalyzer {
-    fn lex_error_to_diagnostic(error: &LexError, source: &str) -> Diagnostic {
+    pub fn lex_error_to_diagnostic(error: &LexError, source: &str) -> Diagnostic {
         match error {
             LexError::InvalidToken { position, text } => {
                 let pos = Position::from_text_size(TextSize::from(*position as u32), source);
@@ -478,9 +478,8 @@ impl LexicalAnalyzer {
                     "Unterminated string literal".to_string(),
                 )
             }
-            LexError::InvalidNumber { text } => {
-                if let Some(pos) = source.find(text) {
-                    let start_pos = Position::from_text_size(TextSize::from(pos as u32), source);
+            LexError::InvalidNumber { position, text } => {
+                let start_pos = Position::from_text_size(TextSize::from(*position as u32), source);
                 let end_pos =
                     Position::new(start_pos.line, start_pos.character + text.len() as u32);
                 let range = Range::new(start_pos, end_pos);
@@ -490,15 +489,6 @@ impl LexicalAnalyzer {
                     DiagnosticCode::InvalidNumber,
                     format!("Invalid number: '{}'", text),
                 )
-                } else {
-                    let range = Range::single_position(Position::new(0, 0));
-                    Diagnostic::new(
-                        range,
-                        DiagnosticSeverity::Error,
-                        DiagnosticCode::InvalidNumber,
-                        format!("Invalid number: '{}'", text),
-                    )
-                }
             }
         }
     }
@@ -721,6 +711,23 @@ impl StyleAnalyzer {
             }
         }
 
+        // Check for trailing empty lines at the end of the file
+        if empty_count > config.max_empty_lines {
+            let start = Position::new(empty_start as u32, 0);
+            let end = Position::new((empty_start + empty_count - 1) as u32, 0);
+            let range = Range::new(start, end);
+            let diagnostic = Diagnostic::new(
+                range,
+                DiagnosticSeverity::Warning,
+                DiagnosticCode::TooManyEmptyLines,
+                format!(
+                    "Too many consecutive empty lines at end of file ({} > {})",
+                    empty_count, config.max_empty_lines
+                ),
+            );
+            diagnostics.push(diagnostic);
+        }
+
         diagnostics
     }
@@ -766,17 +773,26 @@ impl StyleAnalyzer {
 
         // Check for mixed indentation across the file
         if has_tabs && has_spaces {
-            if let Some(preferred) = &config.preferred_indent {
             let range = Range::single_position(Position::new(0, 0));
+            let (severity, message) = if let Some(preferred) = &config.preferred_indent {
+                (
+                    DiagnosticSeverity::Information,
+                    format!("File uses mixed indentation; prefer {}", preferred),
+                )
+            } else {
+                (
+                    DiagnosticSeverity::Warning,
+                    "File uses mixed indentation (tabs and spaces)".to_string(),
+                )
+            };
             let diagnostic = Diagnostic::new(
                 range,
-                DiagnosticSeverity::Information,
+                severity,
                 DiagnosticCode::InconsistentIndentation,
-                format!("File uses mixed indentation; prefer {}", preferred),
+                message,
             );
             diagnostics.push(diagnostic);
         }
-        }
 
         diagnostics
     }
```
**src/lexer.rs** — 39 changes. `LexError::InvalidNumber` now carries the byte position of the offending token, so diagnostics point at the right occurrence even when the same invalid text appears more than once:

```diff
@@ -10,8 +10,8 @@ pub enum LexError {
     InvalidToken { position: usize, text: String },
     #[error("Unterminated string literal starting at position {position}")]
     UnterminatedString { position: usize },
-    #[error("Invalid numeric literal: {text}")]
-    InvalidNumber { text: String },
+    #[error("Invalid numeric literal at position {position}: {text}")]
+    InvalidNumber { position: usize, text: String },
 }
 
 /// Result type for lexical analysis
@@ -356,6 +356,7 @@ impl<'a> NftablesLexer<'a> {
             .any(|c| !c.is_ascii_digit() && c != '.' && c != 'x' && c != 'X')
         {
             return Err(LexError::InvalidNumber {
+                position: span.start,
                 text: text.to_owned(),
             });
         } else {
@@ -448,4 +449,38 @@ mod tests {
             panic!("Expected InvalidToken error");
         }
     }
+
+    #[test]
+    fn test_invalid_number_with_position() {
+        // Test that we can create a proper diagnostic with position information
+        use crate::diagnostic::LexicalAnalyzer;
+
+        // Create a source with the same invalid pattern at different positions
+        let source = "123abc normal 123abc end";
+
+        // Since normal tokenization splits "123abc" into "123" + "abc",
+        // test the diagnostic creation directly with a mock error
+        let error1 = LexError::InvalidNumber {
+            position: 0,
+            text: "123abc".to_string(),
+        };
+        let error2 = LexError::InvalidNumber {
+            position: 14,
+            text: "123abc".to_string(),
+        };
+
+        // Test that diagnostics are created with correct positions
+        let diagnostic1 = LexicalAnalyzer::lex_error_to_diagnostic(&error1, source);
+        let diagnostic2 = LexicalAnalyzer::lex_error_to_diagnostic(&error2, source);
+
+        // First occurrence should be at position 0
+        assert_eq!(diagnostic1.range.start.line, 0);
+        assert_eq!(diagnostic1.range.start.character, 0);
+        assert_eq!(diagnostic1.message, "Invalid number: '123abc'");
+
+        // Second occurrence should be at position 14 (not 0)
+        assert_eq!(diagnostic2.range.start.line, 0);
+        assert_eq!(diagnostic2.range.start.character, 14);
+        assert_eq!(diagnostic2.message, "Invalid number: '123abc'");
+    }
 }
```
**src/main.rs** — 12 changes. `process_single_file_lint` now returns an error instead of calling `std::process::exit` directly, and `process_lint_command` aggregates failures so that linting multiple files still visits every file before exiting non-zero:

```diff
@@ -351,6 +351,8 @@ fn process_lint_command(
     };
 
     let is_multiple_files = files.len() > 1;
+    let mut has_errors = false;
 
     for file_path in files {
         if let Err(e) = process_single_file_lint(
             &file_path,
@@ -364,12 +366,18 @@ fn process_lint_command(
             is_multiple_files,
         ) {
             eprintln!("Error processing {}: {}", file_path, e);
+            has_errors = true;
             if !is_multiple_files {
                 return Err(e);
             }
         }
     }
 
+    // Exit with non-zero code if any file had errors
+    if has_errors {
+        std::process::exit(1);
+    }
+
     Ok(())
 }
@@ -459,9 +467,9 @@ fn process_single_file_lint(
         println!("{}", diagnostics.to_human_readable());
     }
 
-    // Exit with non-zero code if there are errors
+    // Return an error if the diagnostics contain any errors
     if diagnostics.has_errors() {
-        std::process::exit(1);
+        return Err(anyhow::anyhow!("Diagnostics found errors in file"));
     }
 
     Ok(())
```
**src/parser.rs** (file name and change count not captured) — two parser fixes: newline tokens no longer terminate the binary-operator scan, and a parsed `vmap` is assigned back into `expr` instead of returning early, so comparison operators after a vmap expression are still picked up:

```diff
@@ -466,6 +466,10 @@ impl Parser {
 
         // Check for operators
         while let Some(token) = self.peek() {
+            if matches!(token.kind, TokenKind::Newline) {
+                self.advance();
+                continue;
+            }
             // Check for comparison operators
             let operator = match &token.kind {
                 TokenKind::Eq => BinaryOperator::Eq,
@@ -509,10 +513,11 @@ impl Parser {
                 self.consume(TokenKind::RightBrace, "Expected '}' to close vmap")?;
 
                 // Return a vmap expression with the previous expression as the mapping target
-                return Ok(Expression::Vmap {
+                expr = Expression::Vmap {
                     expr: Some(Box::new(expr)),
                     map,
-                });
+                };
+                continue; // allow the outer `while` to detect ==, != … afterwards
             }
             break;
         }
```