Compare commits

...

12 commits

Author SHA1 Message Date
815f4a4725
docs: document using just in the codebase
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I56d1de8a88bb28e49e6387a320f318c86a6a6964
2026-02-23 02:26:48 +03:00
7584eb76e1
meta: switch to justfile for task organization
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ib4000ab597f94b2dd3dccf1e31fce3a76a6a6964
2026-02-23 02:26:47 +03:00
ae505188fc
tests: move fixtures to dedicated dir
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I9d6ce6a264780f215b1b57d947b5264c6a6a6964
2026-02-23 02:26:46 +03:00
3347699a8c
tests/benchmark: make benchmark cases... bigger
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Iabd307b475f6568cff4d1ae6e5ae56ef6a6a6964
2026-02-23 02:26:45 +03:00
84cf5fdf68
irc: add timing measurements; formatting
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Id4402547e18b6569464850c3753383396a6a6964
2026-02-23 02:26:44 +03:00
b6fd2326a6
irc/evaluator: fix variable lookup, recursive let, and value handling
Bunch of things:

- Decode depth and offset from encoded variable indices
- Pre-allocate Values for recursive let bindings before eval
- Use mk* methods for value copying instead of direct assignment
- Evaluate attrset values immediately to avoid dangling thunks

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I4dd40c93d74df5973a642fb9f123e70e6a6a6964
2026-02-23 02:26:43 +03:00
6612479286
irc: add ListNode support; fix recursive attrset scoping
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I1657bc6a05c264f0ae0dd2c94d32b1046a6a6964
2026-02-23 02:26:42 +03:00
6587d07833
irc/parser: fix list parsing and function application
Fixes bug where `concat [1 2 3] [4 5 6]` tried to apply integer 1
as a function.

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I6f373dd83bcac9e59286b0448472200b6a6a6964
2026-02-23 02:26:41 +03:00
8bce6c27b5
tests: initial integration tests
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I09ed2eea568edfaecdb800197bc36c416a6a6964
2026-02-23 02:26:40 +03:00
347175bb86
tests/benchmark: fine-grain timing reports
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ia481b0129193540636665340bd257d136a6a6964
2026-02-23 02:26:39 +03:00
68873352f9
tests/benchmark: rename runner script; compare compilation with native eval
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I6ef30732f875ab134a35282eb2cd66a36a6a6964
2026-02-23 02:26:38 +03:00
f385eebc99
tests: initial benchmarking setup
Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If0ed2dd4279abf155a8ddc678ca047736a6a6964
2026-02-23 02:26:37 +03:00
94 changed files with 1442 additions and 492 deletions

View file

@ -78,6 +78,7 @@ install(TARGETS nix-ir-plugin LIBRARY DESTINATION "${CMAKE_INSTALL_PREFIX}/lib/n
add_executable(regression_test
tests/regression_test.cpp
src/irc/serializer.cpp
src/irc/parser.cpp
)
target_include_directories(regression_test PRIVATE

View file

@ -169,27 +169,44 @@ Entry:
### Building
```bash
# Configure
$ cmake -B build
# Using just (recommended)
$ just build
# Build
$ make
# Or manually with CMake
$ cmake -B build -G Ninja
$ cmake --build build
# The nix-irc executable will be in the project root
$ ./nix-irc --help
# The nix-irc executable will be in build/
$ ./build/nix-irc --help
```
### Available Tasks
Run `just` to see all available tasks:
- `just build` - Build all targets
- `just test` - Run all tests (unit, compile, integration)
- `just bench` - Run performance benchmarks
- `just clean` - Clean build artifacts
- `just smoke` - Run quick smoke test
- `just stats` - Show project statistics
See `just --list` for the complete list of available commands.
### Compiling Nix to IR
```bash
# Basic compilation
$ nix-irc input.nix output.nixir
$ ./build/nix-irc input.nix output.nixir
# With import search paths
$ nix-irc -I ./lib -I /nix/store/... input.nix output.nixir
$ ./build/nix-irc -I ./lib -I /nix/store/... input.nix output.nixir
# Disable import resolution
$ nix-irc --no-imports input.nix output.nixir
$ ./build/nix-irc --no-imports input.nix output.nixir
# Using just
$ just compile input.nix output.nixir
```
### Runtime Evaluation (Plugin)
@ -212,13 +229,21 @@ $ nix --plugin-files ./nix-ir-plugin.so eval --expr 'builtins.nixIR_info'
### Running Tests
```bash
# Test all sample files
for f in tests/*.nix; do
./nix-irc "$f" "${f%.nix}.nixir"
# Run all tests
$ just test
# Run specific test suites
$ just test-unit # Unit tests only
$ just test-compile # Compilation tests only
$ just test-integration # Integration tests only
# Manually test all fixtures
$ for f in tests/fixtures/*.nix; do
./build/nix-irc "$f" "${f%.nix}.nixir"
done
# Verify IR format
$ hexdump -C tests/simple.nixir | head -3
$ hexdump -C tests/fixtures/simple.nixir | head -3
```
## Contributing

View file

@ -30,6 +30,8 @@
ninja
bear
clang-tools
just
entr
];
env.NIX_PLUGINABI = "0.2";

98
justfile Normal file
View file

@ -0,0 +1,98 @@
# Task runner for the nix-ir project. Run `just` (or `just --list`) to see
# all recipes. Recipes whose first body line is a bash shebang run as a
# single shell script; a leading `@` on a command suppresses echoing.

# Default recipe, show available commands
default:
@just --list
# Build all targets
build:
cmake --build build
# Clean build artifacts
clean:
rm -rf build
find tests -name '*.nixir' -delete
# Configure and build from scratch
rebuild: clean
cmake -B build -G Ninja
cmake --build build
# Run unit tests
test-unit:
./build/regression_test
# Run compilation tests (do all fixtures compile?)
# Counts fixtures whose compiler output contains "Done!"; the trailing
# test makes the recipe fail unless every fixture compiled.
test-compile:
#!/usr/bin/env bash
total=0
success=0
for f in tests/fixtures/*.nix; do
total=$((total+1))
if ./build/nix-irc "$f" "${f%.nix}.nixir" 2>&1 | grep -q "Done!"; then
success=$((success+1))
fi
done
echo "Compiled: $success/$total test files"
[ $success -eq $total ]
# Run integration tests
test-integration:
./tests/integration/run.sh
# Run all tests
test: test-unit test-compile test-integration
@echo "All tests passed"
# Run benchmarks
bench:
./tests/benchmark/run.sh
# Compile a single Nix file to IR
# OUTPUT defaults to FILE with its .nix suffix replaced by .nixir.
compile FILE OUTPUT="":
#!/usr/bin/env bash
if [ -z "{{OUTPUT}}" ]; then
file="{{FILE}}"
output="${file%.nix}.nixir"
else
output="{{OUTPUT}}"
fi
./build/nix-irc "{{FILE}}" "$output"
# Load plugin and evaluate Nix expression
eval FILE:
nix-instantiate --plugin-files ./build/nix-ir-plugin.so --eval --strict "{{FILE}}"
# Format C++ code with clang-format
format:
find src tests -name '*.cpp' -o -name '*.h' | xargs clang-format -i
# Run clang-tidy on source files
lint:
find src -name '*.cpp' | xargs clang-tidy --fix
# Show project statistics
stats:
@echo "Lines of code:"
@find src -name '*.cpp' -o -name '*.h' | xargs wc -l | tail -1
@echo ""
@echo "Test files:"
@find tests/fixtures -name '*.nix' | wc -l
@echo ""
@echo "Build status:"
@ls -lh build/nix-irc build/nix-ir-plugin.so build/regression_test 2>/dev/null || echo "Not built"
# Run a quick smoke test
smoke:
./build/nix-irc tests/fixtures/simple.nix /tmp/smoke.nixir
nix-instantiate --plugin-files ./build/nix-ir-plugin.so --eval tests/integration/simple_eval.nix
# Generate IR from a Nix file and inspect it
inspect FILE:
./build/nix-irc "{{FILE}}" /tmp/inspect.nixir
@echo "IR bundle size:"
@ls -lh /tmp/inspect.nixir | awk '{print $5}'
@echo "Magic number:"
@xxd -l 4 /tmp/inspect.nixir
# Watch mode, rebuild on file changes
# Requires `entr` (provided by the dev shell).
watch:
find src tests -name '*.cpp' -o -name '*.h' | entr -c just build test-unit

View file

@ -21,15 +21,20 @@ struct IREnvironment {
void bind(Value* val) { bindings.push_back(val); }
Value* lookup(uint32_t index) {
Value* lookup(uint32_t encoded_index) {
// Decode the index: high 16 bits = depth, low 16 bits = offset
uint32_t depth = encoded_index >> 16;
uint32_t offset = encoded_index & 0xFFFF;
IREnvironment* env = this;
while (env) {
if (index < env->bindings.size()) {
return env->bindings[index];
}
index -= env->bindings.size();
// Skip 'depth' levels to get to the right scope
for (uint32_t i = 0; i < depth && env; i++) {
env = env->parent;
}
if (env && offset < env->bindings.size()) {
return env->bindings[offset];
}
return nullptr;
}
@ -147,7 +152,35 @@ struct Evaluator::Impl {
state.error<EvalError>("variable not found").debugThrow();
}
force(bound);
v = *bound;
// Copy the forced value's data into v
// For simple types, use mk* methods to ensure proper initialization
// For complex types (attrs, lists, functions), direct assignment is safe
state.forceValue(*bound, noPos);
switch (bound->type()) {
case nInt:
v.mkInt(bound->integer());
break;
case nBool:
v.mkBool(bound->boolean());
break;
case nString:
v.mkString(bound->c_str());
break;
case nPath:
v.mkPath(bound->path());
break;
case nNull:
v.mkNull();
break;
case nFloat:
v.mkFloat(bound->fpoint());
break;
default:
// For attrs, lists, functions, etc., direct assignment is safe
// as they use reference counting internally
v = *bound;
break;
}
} else if (auto* n = node->get_if<LambdaNode>()) {
auto lambda_env = env;
auto body = n->body;
@ -422,20 +455,33 @@ struct Evaluator::Impl {
}
} else if (auto* n = node->get_if<LetNode>()) {
auto let_env = make_env(env);
// Nix's let is recursive: bind all names first, then evaluate
// We allocate Values immediately and evaluate into them
std::vector<Value*> values;
for (const auto& [name, expr] : n->bindings) {
// Create thunks in let_env so bindings can reference each other
Value* val = make_thunk(expr, let_env);
Value* val = state.allocValue();
values.push_back(val);
let_env->bind(val);
}
// Now evaluate each binding expression into its pre-allocated Value
size_t idx = 0;
for (const auto& [name, expr] : n->bindings) {
eval_node(expr, *values[idx++], let_env);
}
eval_node(n->body, v, let_env);
} else if (auto* n = node->get_if<LetRecNode>()) {
auto letrec_env = make_env(env);
// Same as LetNode - both are recursive in Nix
std::vector<Value*> values;
for (const auto& [name, expr] : n->bindings) {
Value* val = make_thunk(expr, letrec_env);
Value* val = state.allocValue();
values.push_back(val);
letrec_env->bind(val);
}
size_t idx = 0;
for (const auto& [name, expr] : n->bindings) {
eval_node(expr, *values[idx++], letrec_env);
}
eval_node(n->body, v, letrec_env);
} else if (auto* n = node->get_if<AttrsetNode>()) {
auto bindings = state.buildBindings(n->attrs.size());
@ -453,9 +499,12 @@ struct Evaluator::Impl {
}
}
// Attributes should be lazy, so store as thunks and not evaluated values
// Evaluate attribute values immediately to avoid dangling thunks
// Our thunk system is tied to the Evaluator lifetime, so we can't
// return lazy thunks that outlive the evaluator
for (const auto& binding : n->attrs) {
Value* attr_val = make_thunk(binding.value, attr_env);
Value* attr_val = state.allocValue();
eval_node(binding.value, *attr_val, attr_env);
if (binding.is_dynamic()) {
// Evaluate key expression to get attribute name
@ -498,7 +547,35 @@ struct Evaluator::Impl {
if (attr) {
Value* val = attr->value;
force(val);
v = *val;
// Copy the forced value's data into v
// For simple types, use mk* methods to ensure proper initialization
// For complex types (attrs, lists, functions), direct assignment is safe
state.forceValue(*val, noPos);
switch (val->type()) {
case nInt:
v.mkInt(val->integer());
break;
case nBool:
v.mkBool(val->boolean());
break;
case nString:
v.mkString(val->c_str());
break;
case nPath:
v.mkPath(val->path());
break;
case nNull:
v.mkNull();
break;
case nFloat:
v.mkFloat(val->fpoint());
break;
default:
// For attrs, lists, functions, etc., direct assignment is safe
// as they use reference counting internally
v = *val;
break;
}
} else if (n->default_expr) {
eval_node(*n->default_expr, v, env);
} else {

View file

@ -1,5 +1,6 @@
#include "ir_gen.h"
#include <algorithm>
#include <iostream>
#include <stack>
#include <unordered_map>
@ -97,7 +98,8 @@ struct IRGenerator::Impl {
return std::make_shared<Node>(*n);
}
if (auto* n = node.get_if<VarNode>()) {
uint32_t idx = name_resolver.resolve(n->name.value_or(""));
std::string var_name = n->name.value_or("");
uint32_t idx = name_resolver.resolve(var_name);
VarNode converted(idx);
converted.name = n->name;
converted.line = n->line;
@ -121,12 +123,17 @@ struct IRGenerator::Impl {
}
if (auto* n = node.get_if<AttrsetNode>()) {
AttrsetNode attrs(n->recursive, n->line);
name_resolver.enter_scope();
for (const auto& binding : n->attrs) {
if (!binding.is_dynamic()) {
name_resolver.bind(binding.static_name.value());
// Only enter a new scope for recursive attrsets
if (n->recursive) {
name_resolver.enter_scope();
for (const auto& binding : n->attrs) {
if (!binding.is_dynamic()) {
name_resolver.bind(binding.static_name.value());
}
}
}
for (const auto& binding : n->attrs) {
if (binding.is_dynamic()) {
attrs.attrs.push_back(AttrBinding(convert(binding.dynamic_name), convert(binding.value)));
@ -134,7 +141,10 @@ struct IRGenerator::Impl {
attrs.attrs.push_back(AttrBinding(binding.static_name.value(), convert(binding.value)));
}
}
name_resolver.exit_scope();
if (n->recursive) {
name_resolver.exit_scope();
}
return std::make_shared<Node>(attrs);
}
if (auto* n = node.get_if<SelectNode>()) {
@ -208,6 +218,14 @@ struct IRGenerator::Impl {
auto operand = convert(n->operand);
return std::make_shared<Node>(UnaryOpNode(n->op, operand, n->line));
}
if (auto* n = node.get_if<ListNode>()) {
std::vector<std::shared_ptr<Node>> elements;
elements.reserve(n->elements.size());
for (const auto& elem : n->elements) {
elements.push_back(convert(elem));
}
return std::make_shared<Node>(ListNode(std::move(elements), n->line));
}
return std::make_shared<Node>(ConstNullNode{});
}
};

View file

@ -636,8 +636,9 @@ private:
void tokenize_ident() {
size_t start = pos;
while (pos < input.size() && (isalnum(input[pos]) || input[pos] == '_' || input[pos] == '-' ||
input[pos] == '+' || input[pos] == '.'))
// Note: Don't include '.' here - it's used for selection (a.b.c)
// URIs are handled separately by checking for '://' pattern
while (pos < input.size() && (isalnum(input[pos]) || input[pos] == '_' || input[pos] == '-'))
pos++;
std::string ident = input.substr(start, pos - start);
@ -927,16 +928,25 @@ public:
std::shared_ptr<Node> left = parse_expr3();
while (true) {
if (current().type == Token::LBRACKET) {
advance();
auto arg = parse_expr();
expect(Token::RBRACKET);
left = std::make_shared<Node>(AppNode(left, arg));
} else if (current().type == Token::STRING) {
if (current().type == Token::STRING) {
Token s = current();
advance();
auto arg = std::make_shared<Node>(ConstStringNode(s.value));
left = std::make_shared<Node>(AppNode(left, arg));
} else if (current().type == Token::LPAREN) {
// Function application with parenthesized argument: func (expr)
advance();
auto arg = parse_expr();
expect(Token::RPAREN);
left = std::make_shared<Node>(AppNode(left, arg));
} else if (current().type == Token::IDENT || current().type == Token::INT ||
current().type == Token::FLOAT || current().type == Token::BOOL ||
current().type == Token::PATH || current().type == Token::LOOKUP_PATH ||
current().type == Token::URI || current().type == Token::LBRACKET) {
// Juxtaposition application: f x
// Parse the argument as a primary expression (which handles lists, etc.)
auto arg = parse_expr3();
left = std::make_shared<Node>(AppNode(left, arg));
} else {
break;
}
@ -969,6 +979,16 @@ public:
return expr;
}
// Handle rec { ... } syntax
if (consume(Token::REC)) {
expect(Token::LBRACE);
auto attrs = parse_attrs();
if (auto* attrset = attrs->get_if<AttrsetNode>()) {
attrset->recursive = true;
}
return attrs;
}
if (consume(Token::LBRACE)) {
return parse_attrs();
}
@ -1151,6 +1171,35 @@ public:
return std::make_shared<Node>(std::move(attrs));
}
// Parse a list element: supports selections but NOT juxtaposition application
// This prevents [1 2 3] from being parsed as ((1 2) 3)
std::shared_ptr<Node> parse_list_element() {
auto left = parse_expr3();
// Handle selections (a.b.c)
while (current().type == Token::DOT) {
advance();
Token name = current();
if (name.type == Token::IDENT) {
advance();
auto attr = std::make_shared<Node>(ConstStringNode(name.value));
left = std::make_shared<Node>(SelectNode(left, attr));
continue;
}
break;
}
// Check for 'or' default value
if (left->get_if<SelectNode>() && current().type == Token::IDENT && current().value == "or") {
advance();
auto default_expr = parse_expr3();
auto* select = left->get_if<SelectNode>();
select->default_expr = default_expr;
}
return left;
}
std::shared_ptr<Node> parse_list() {
std::vector<std::shared_ptr<Node>> elements;
@ -1158,18 +1207,14 @@ public:
return std::make_shared<Node>(ListNode(elements));
}
while (current().type != Token::RBRACKET) {
elements.push_back(parse_expr());
if (!consume(Token::RBRACKET)) {
// Elements are whitespace-separated in Nix, no comma required
// But we'll continue parsing until we hit ]
} else {
// Found closing bracket
return std::make_shared<Node>(ListNode(elements));
while (current().type != Token::RBRACKET && current().type != Token::EOF_) {
elements.push_back(parse_list_element());
if (current().type == Token::RBRACKET) {
break;
}
}
// Unreachable, but for safety
expect(Token::RBRACKET);
return std::make_shared<Node>(ListNode(elements));
}

View file

@ -388,7 +388,7 @@ struct Deserializer::Impl {
uint32_t num_elements = read_u32();
std::vector<std::shared_ptr<Node>> elements;
elements.reserve(num_elements);
for (uint32_t i = 0; i < num_elements; i++) {
for (uint32_t i = 0; i < num_elements; i++) {
elements.push_back(read_node());
}
return std::make_shared<Node>(ListNode(std::move(elements), line));

View file

@ -12,6 +12,7 @@
#include "irc/serializer.h"
#include "irc/types.h"
#include <chrono>
#include <iostream>
namespace nix_ir_plugin {
@ -29,6 +30,8 @@ static void prim_loadIR(EvalState& state, const PosIdx pos, Value** args, Value&
std::string pathStr(path);
auto t_start = std::chrono::high_resolution_clock::now();
Deserializer deserializer;
IRModule module;
@ -38,6 +41,8 @@ static void prim_loadIR(EvalState& state, const PosIdx pos, Value** args, Value&
state.error<EvalError>("failed to deserialize IR bundle: %s", e.what()).atPos(pos).debugThrow();
}
auto t_deser = std::chrono::high_resolution_clock::now();
if (!module.entry) {
state.error<EvalError>("IR bundle has no entry point").atPos(pos).debugThrow();
}
@ -48,6 +53,14 @@ static void prim_loadIR(EvalState& state, const PosIdx pos, Value** args, Value&
} catch (const std::exception& e) {
state.error<EvalError>("failed to evaluate IR: %s", e.what()).atPos(pos).debugThrow();
}
auto t_eval = std::chrono::high_resolution_clock::now();
auto deser_us = std::chrono::duration_cast<std::chrono::microseconds>(t_deser - t_start).count();
auto eval_us = std::chrono::duration_cast<std::chrono::microseconds>(t_eval - t_deser).count();
std::cerr << "nixIR timing: deser=" << deser_us << "us eval=" << eval_us
<< "us total=" << (deser_us + eval_us) << "us" << std::endl;
}
/**
@ -139,7 +152,7 @@ static RegisterPrimOp rp_info({
} // namespace nix_ir_plugin
// Plugin initialization message
// Plugin initialization
__attribute__((constructor)) static void init_plugin() {
std::cerr << "nix-ir-plugin loaded" << std::endl;
// Plugin loads silently...
}

237
tests/benchmark/large.nix Normal file
View file

@ -0,0 +1,237 @@
# Large benchmark for comprehensive stress testing
# Exercises deep recursion, higher-order functions, wide and deeply
# nested attrsets, `rec` attrsets, and `//` merges in one expression.
let
# Build the list [start .. end-1] via recursion
range = start: end:
if start >= end
then []
else [start] ++ range (start + 1) end;
concat = a: b: a ++ b;
factorial = n:
if n <= 1
then 1
else n * factorial (n - 1);
# Ackermann function (highly recursive)
ackermann = m: n:
if m == 0
then n + 1
else if n == 0
then ackermann (m - 1) 1
else ackermann (m - 1) (ackermann m (n - 1));
# Greatest common divisor
gcd = a: b:
if b == 0
then a
else gcd b (a - (a / b) * b);
# Power function
pow = base: exp:
if exp == 0
then 1
else if exp == 1
then base
else base * pow base (exp - 1);
# Function composition helpers
compose = f: g: x: f (g x);
double = x: x * 2;
addTen = x: x + 10;
square = x: x * x;
pipeline = compose square (compose double addTen);
list_100 = range 1 101;
list_50 = range 1 51;
list_25 = range 1 26;
# Wide attrset with 50 static keys
largeAttrs = {
a1 = 1;
a2 = 2;
a3 = 3;
a4 = 4;
a5 = 5;
a6 = 6;
a7 = 7;
a8 = 8;
a9 = 9;
a10 = 10;
b1 = 11;
b2 = 12;
b3 = 13;
b4 = 14;
b5 = 15;
b6 = 16;
b7 = 17;
b8 = 18;
b9 = 19;
b10 = 20;
c1 = 21;
c2 = 22;
c3 = 23;
c4 = 24;
c5 = 25;
c6 = 26;
c7 = 27;
c8 = 28;
c9 = 29;
c10 = 30;
d1 = 31;
d2 = 32;
d3 = 33;
d4 = 34;
d5 = 35;
d6 = 36;
d7 = 37;
d8 = 38;
d9 = 39;
d10 = 40;
e1 = 41;
e2 = 42;
e3 = 43;
e4 = 44;
e5 = 45;
e6 = 46;
e7 = 47;
e8 = 48;
e9 = 49;
e10 = 50;
};
# Very deep nesting (10 levels)
deepNest = {
level1 = {
level2 = {
level3 = {
level4 = {
level5 = {
level6 = {
level7 = {
level8 = {
level9 = {
level10 = {
treasure = "found";
value = 12345;
};
};
};
};
};
};
};
};
};
};
# rec attrset whose attributes reference each other
recursiveComplex = rec {
base = 10;
doubled = base * 2;
tripled = base * 3;
sum = doubled + tripled;
product = doubled * tripled;
x = base * 4;
y = x + doubled;
z = y * tripled;
total = sum + product + z;
final = total * base;
};
# Chain of rec attrsets, each referencing the previous one
config1 = rec {
multiplier = 5;
base = 100;
result = base * multiplier;
};
config2 = rec {
offset = 50;
scaled = config1.result + offset;
doubled = scaled * 2;
};
config3 = rec {
factor = 3;
combined = config2.doubled * factor;
final = combined + config1.multiplier;
};
baseConfig = {
system = {
arch = "x86_64";
os = "linux";
};
settings = {
enabled = true;
level = 5;
};
};
overrides = {
system = {
kernel = "6.1";
};
settings = {
level = 10;
extra = "custom";
};
newSection = {
value = 42;
};
};
# Deep merge: `//` is shallow, so nested sets are merged explicitly
merged =
baseConfig
// overrides
// {
system = baseConfig.system // overrides.system;
settings =
baseConfig.settings
// overrides.settings
// {
combined = baseConfig.settings.level + overrides.settings.level;
};
};
fact10 = factorial 10;
fact7 = factorial 7;
ack_3_3 = ackermann 3 3;
gcd_48_18 = gcd 48 18;
gcd_100_35 = gcd 100 35;
pow_2_10 = pow 2 10;
pow_3_5 = pow 3 5;
pipelineResult = pipeline 5; # ((5 + 10) * 2)^2 = 900
# List operations
concatenated = concat [1 2 3] [4 5 6];
multilevel = concat (concat [1] [2 3]) [4 5];
in {
# Lists
inherit list_100 list_50 list_25 concatenated multilevel;
# Math results
inherit fact10 fact7 ack_3_3 gcd_48_18 gcd_100_35 pow_2_10 pow_3_5 pipelineResult;
# Data structures
inherit largeAttrs merged;
deepValue = deepNest.level1.level2.level3.level4.level5.level6.level7.level8.level9.level10.value;
deepTreasure = deepNest.level1.level2.level3.level4.level5.level6.level7.level8.level9.level10.treasure;
# Recursive attrsets
recursiveTotal = recursiveComplex.total;
recursiveFinal = recursiveComplex.final;
computedZ = recursiveComplex.z;
# Config chain
config1Result = config1.result;
config2Doubled = config2.doubled;
config3Final = config3.final;
# Merged config
mergedCombined = merged.settings.combined;
mergedArch = merged.system.arch;
mergedKernel = merged.system.kernel;
}

View file

@ -0,0 +1,75 @@
# Medium-complexity benchmark: recursion, currying, nested let,
# attrset merges, and a rec attrset.
let
# Recursive factorial
factorial = n:
if n <= 1
then 1
else n * factorial (n - 1);
# Fibonacci sequence generator
fib = n:
if n <= 1
then n
else fib (n - 1) + fib (n - 2);
# List concatenation test
range = start: end:
if start >= end
then []
else [start] ++ range (start + 1) end;
# Curried function application
add = x: y: x + y;
add5 = add 5;
# Complex computation with a nested let
compute = x: y: let
a = x * 2;
b = y + 10;
c = a * b;
in
c / 2;
# Data structures
numbers = range 1 11;
# Nested attribute operations
base = {
config = {
enable = true;
value = 42;
};
data = {
items = [1 2 3];
};
};
extended =
base
// {
config =
base.config
// {
extra = "test";
multiplied = base.config.value * 2;
};
computed = base.config.value + 100;
};
# Recursive attrset with selections
recursive = rec {
x = 10;
y = x * 2;
z = y + x;
result = z * 3;
final = result + x;
};
in {
fact5 = factorial 5; # 120
fib7 = fib 7; # 13
sum15 = add5 10; # 15
computed = compute 10 20; # (10*2) * (20+10) / 2 = 300
inherit numbers extended;
deepValue = extended.config.multiplied; # 84
recursiveResult = recursive.result; # 90
recursiveFinal = recursive.final; # 100
}

158
tests/benchmark/run.sh Executable file
View file

@ -0,0 +1,158 @@
#!/usr/bin/env bash
# Benchmark runner for the nix-ir toolchain.
#
# For each fixture in tests/benchmark it reports:
#   - IR compilation time (nix-irc alone)
#   - IR load time (parsed from the plugin's "nixIR timing" stderr line,
#     averaged over 10 runs)
#   - full pipeline latency (compile + nix-instantiate + IR load)
#   - source vs. IR bundle sizes
#   - native nix-instantiate evaluation as a baseline (avg of 5 runs)
set -e

echo "# Running benchmarks..."
echo ""

BENCH_DIR="$(pwd)/tests/benchmark"
IRC_BIN="$(pwd)/build/nix-irc"

# ANSI colors for the report
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
NC='\033[0m'

# Convert a bash `time` string such as "0m1.234s" into milliseconds.
# Prints "0" when the input does not match the expected format.
get_ms() {
    local time_str="$1"
    if [[ $time_str =~ ([0-9]+)m([0-9.]+)s ]]; then
        local mins="${BASH_REMATCH[1]}"
        local secs="${BASH_REMATCH[2]}"
        local ms
        ms=$(awk "BEGIN {printf \"%.1f\", ($mins * 60000) + ($secs * 1000)}")
        echo "$ms"
    else
        echo "0"
    fi
}

# Run the full benchmark suite for one fixture.
# $1 = human-readable name, $2 = path to the .nix file
run_benchmark() {
    local name="$1"
    local file="$2"
    echo -e "${BLUE}=== $name ===${NC}"
    echo ""

    # Measure compilation time only
    echo -n " Compilation only: "
    local compile_start
    compile_start=$(date +%s%N)
    "$IRC_BIN" "$file" /tmp/bench.nixir >/dev/null 2>&1
    local compile_end
    compile_end=$(date +%s%N)
    local compile_ms=$(((compile_end - compile_start) / 1000000))
    echo -e "${YELLOW}${compile_ms}ms${NC}"

    # Measure IR loading only (deserialization + evaluation)
    echo -n " IR load only: "
    PLUGIN_PATH="$(pwd)/build/nix-ir-plugin.so"
    if [ ! -f "$PLUGIN_PATH" ]; then
        echo -e "${YELLOW}skipped${NC} (plugin not built)"
    else
        # Pre-compile the IR
        "$IRC_BIN" "$file" /tmp/bench.nixir >/dev/null 2>&1
        # Measure just the loading (average of 10 runs to reduce noise)
        local total_load_us=0
        for _ in {1..10}; do
            local load_output
            # "2>&1 >/dev/null" keeps stderr (where the plugin prints its
            # timing line) and discards stdout. `|| true` plus the :-0
            # default below keep `set -e` from aborting when no timing
            # line is emitted.
            load_output=$(nix-instantiate --plugin-files "$PLUGIN_PATH" --eval --expr "builtins.nixIR_loadIR \"/tmp/bench.nixir\"" 2>&1 >/dev/null | grep "nixIR timing" | grep -oP 'total=\K[0-9]+' || true)
            total_load_us=$((total_load_us + ${load_output:-0}))
        done
        local avg_load_us=$((total_load_us / 10))
        local avg_load_ms_frac
        avg_load_ms_frac=$(awk "BEGIN {printf \"%.3f\", $avg_load_us / 1000}")
        echo -e "${GREEN}${avg_load_ms_frac}ms${NC} avg (10 runs)"
    fi

    # Measure full pipeline (compile + nix-instantiate overhead + IR load)
    echo -n " Full pipeline: "
    if [ ! -f "$PLUGIN_PATH" ]; then
        echo -e "${YELLOW}skipped${NC}"
    else
        local pipeline_start
        pipeline_start=$(date +%s%N)
        "$IRC_BIN" "$file" /tmp/bench.nixir >/dev/null 2>&1
        nix-instantiate --plugin-files "$PLUGIN_PATH" --eval --expr "builtins.nixIR_loadIR \"/tmp/bench.nixir\"" >/dev/null 2>&1
        local pipeline_end
        pipeline_end=$(date +%s%N)
        local pipeline_ms=$(((pipeline_end - pipeline_start) / 1000000))
        echo -e "${YELLOW}${pipeline_ms}ms${NC}"
    fi

    # Source and IR sizes (stat -c is GNU, stat -f is BSD/macOS)
    local src_size
    src_size=$(stat -c%s "$file" 2>/dev/null || stat -f%z "$file" 2>/dev/null)
    local ir_size
    ir_size=$(stat -c%s /tmp/bench.nixir 2>/dev/null || stat -f%z /tmp/bench.nixir 2>/dev/null)
    local ratio=0
    if [[ "$src_size" -gt 0 ]]; then
        ratio=$((ir_size * 100 / src_size))
    fi
    echo -e " Source size: ${src_size}B"
    echo -e " IR bundle size: ${ir_size}B (${ratio}% of source)"
    echo ""

    # Native Nix evaluation (baseline)
    echo -n " Native Nix eval: "
    local native_total=0
    for _ in {1..5}; do
        local t
        t=$( (time nix-instantiate --eval --strict "$file" >/dev/null 2>&1) 2>&1 | grep "real" | awk '{print $2}')
        local ms
        ms=$(get_ms "$t")
        native_total=$(awk "BEGIN {print $native_total + $ms}")
    done
    local native_avg
    native_avg=$(awk "BEGIN {printf \"%.1f\", $native_total / 5}")
    echo -e "${GREEN}${native_avg}ms${NC} avg (5 runs)"
    echo ""
}

echo "Measuring IR compilation speed and bundle size characteristics."
echo ""

run_benchmark "Simple Expression" "$BENCH_DIR/simple.nix"
run_benchmark "Medium Complexity" "$BENCH_DIR/medium.nix"
run_benchmark "Large Expression" "$BENCH_DIR/large.nix"

# Overall statistics
echo -e "${BLUE}=== Overall Statistics ===${NC}"
echo ""

testdir=$(mktemp -d)
total_nix=0
total_ir=0
total_compile_time=0
file_count=0
for f in "$BENCH_DIR"/*.nix; do
    nixsize=$(stat -c%s "$f" 2>/dev/null || stat -f%z "$f" 2>/dev/null)
    base=$(basename "$f" .nix)
    irfile="${testdir}/${base}.nixir"
    start=$(date +%s%N)
    "$IRC_BIN" "$f" "$irfile" >/dev/null 2>&1
    end=$(date +%s%N)
    compile_time=$(((end - start) / 1000000))
    if [ -f "$irfile" ]; then
        irsize=$(stat -c%s "$irfile" 2>/dev/null || stat -f%z "$irfile" 2>/dev/null)
        total_nix=$((total_nix + nixsize))
        total_ir=$((total_ir + irsize))
        total_compile_time=$((total_compile_time + compile_time))
        file_count=$((file_count + 1))
    fi
done

# Average over the files that actually compiled (was previously a
# hardcoded divide-by-3), and guard both divisions against zero so an
# empty/missing benchmark dir cannot crash the script under `set -e`.
total_ratio=0
if [ "$total_nix" -gt 0 ]; then
    total_ratio=$((total_ir * 100 / total_nix))
fi
avg_compile_time=0
if [ "$file_count" -gt 0 ]; then
    avg_compile_time=$((total_compile_time / file_count))
fi

# TBH those are entirely unnecessary. However, I'm a sucker for data
# and those are trivial to compile. Might as well. Who knows, maybe it'll
# come in handy in the future.
echo " Total source size: ${total_nix}B"
echo " Total IR size: ${total_ir}B"
echo " Compression ratio: ${total_ratio}% of source"
echo " Average compile time: ${avg_compile_time}ms"
echo ""
rm -rf "$testdir"

View file

@ -0,0 +1,13 @@
# Simple benchmark fixture: let bindings, arithmetic, a list literal,
# and a nested attrset.
let
x = 10;
y = 20;
z = x + y; # 30
in {
result = z * 2; # 60
list = [1 2 3 4 5];
attrs = {
a = 1;
b = 2;
c = 3;
};
}

View file

@ -1,12 +0,0 @@
# Test block comments /* */
/* This is a block comment */
let
x = 42; /* inline block comment */
/* Multi-line
block
comment */
y = 100;
in
/* Comment before expression */
x + y
/* Trailing comment */

24
tests/fixtures/block_comments.nix vendored Normal file
View file

@ -0,0 +1,24 @@
# Test block comments /* */
# The whole expression must evaluate to x + y = 142 regardless of the
# comments sprinkled through it.
/*
This is a block comment
*/
let
x = 42;
/*
inline block comment
*/
/*
Multi-line
block
comment
*/
y = 100;
in
/*
Comment before expression
*/
x + y
/*
Trailing comment
*/

14
tests/fixtures/dynamic_attr_full.nix vendored Normal file
View file

@ -0,0 +1,14 @@
# Test dynamic attribute names
# Expected result: { mykey = 42; mykey_suffix = 43; static = 100; }
let
key = "mykey";
value = 42;
in {
# Dynamic attribute with string interpolation
"${key}" = value;
# Another dynamic attribute
"${key}_suffix" = value + 1;
# Static attribute for comparison
static = 100;
}

View file

@ -1,11 +1,9 @@
# Test import expression
# Import evaluates the file and returns its value
# Import a file that returns a simple value (42)
import ./simple.nix
# Can also import lookup paths:
# import <nixpkgs> { }
# Import with path expressions:
# import (./dir + "/file.nix")

8
tests/fixtures/list_simple.nix vendored Normal file
View file

@ -0,0 +1,8 @@
# Test basic list support
let
x = [1 2 3];
y = [4 5 6];
z = x ++ y; # List concatenation -> [1 2 3 4 5 6]
in {
inherit x y z;
}

View file

@ -1,9 +1,8 @@
# Test lookup path syntax
# Lookup paths resolve via NIX_PATH environment variable
# Example: <nixpkgs> -> /nix/var/nix/profiles/per-user/root/channels/nixpkgs
# Simple lookup path
<nixpkgs>
# Nested lookup path (common pattern)
# <nixpkgs/lib>

View file

@ -1,6 +1,6 @@
# Test 'or' in attrset context
let
attrs = { a = 1; };
attrs = {a = 1;};
in {
test = attrs.a or 999;
}

5
tests/fixtures/or_simple.nix vendored Normal file
View file

@ -0,0 +1,5 @@
# Simplest 'or' test
# `x.a` exists, so this evaluates to 1; the `or 2` default is unused.
let
x = {a = 1;};
in
x.a or 2

View file

@ -1,6 +1,9 @@
# Test selection with 'or' default
let
attrs = { a = 1; b = 2; };
attrs = {
a = 1;
b = 2;
};
in {
# Attribute exists - should use value from attrs
has_attr = attrs.a or 999;

View file

@ -0,0 +1,7 @@
# Test that import builtin still works
# imported_module.nix defines foo = 42 and bar.baz = "hello",
# so value = 142 and nested = "hello".
let
imported = import ./imported_module.nix;
in {
value = imported.foo + 100;
nested = imported.bar.baz;
}

View file

@ -0,0 +1,7 @@
# Module to be imported
# Consumed by import_test.nix, which reads `foo` and `bar.baz`.
{
foo = 42;
bar = {
baz = "hello";
};
}

View file

@ -0,0 +1,13 @@
# Test our custom IR builtins
# Run with the plugin loaded (nix-instantiate --plugin-files ...), which
# registers the nixIR_* builtins used below.
let
# Test nixIR_info
info = builtins.nixIR_info;
# Test nixIR_compile
compiled = builtins.nixIR_compile "let x = 10; in x + 5";
# Test that normal builtins still work
list = builtins.map (x: x * 2) [1 2 3];
in {
inherit info compiled list;
}

View file

@ -0,0 +1,39 @@
# Test that normal Nix evaluation is not broken
# This file should work identically with or without the plugin
let
# Basic arithmetic (* binds tighter than +): 7
math = 1 + 2 * 3;
# String operations
str = "hello" + " " + "world";
# List operations
list = [1 2 3] ++ [4 5 6];
# Attrset operations (// merge adds c)
attrs =
{
a = 1;
b = 2;
}
// {c = 3;};
# Functions: result = 42
double = x: x * 2;
result = double 21;
# Conditionals
cond =
if true
then "yes"
else "no";
# Let bindings: nested = 30
nested = let
x = 10;
y = 20;
in
x + y;
in {
inherit math str list attrs result cond nested;
}

76
tests/integration/run.sh Executable file
View file

@ -0,0 +1,76 @@
#!/usr/bin/env bash
# Phase 6 integration driver for the nix-ir plugin.
# Run from the repository root: expects the built plugin at
# build/nix-ir-plugin.so and the fixtures under tests/integration/.
set -euo pipefail

TEST_DIR="$PWD/tests/integration"
PLUGIN_PATH="$PWD/build/nix-ir-plugin.so"

echo "=== Phase 6: Integration Testing ==="
echo ""

if [[ ! -f $PLUGIN_PATH ]]; then
  echo "ERROR: Plugin not found at $PLUGIN_PATH"
  exit 1
fi

echo "Plugin path: $PLUGIN_PATH"
echo ""

echo "Test 1: Plugin Loading"
echo "----------------------"
# simple_eval.nix evaluates to 30; presence of "30" in the output means
# the plugin loaded and evaluation succeeded.
if ! nix-instantiate --plugin-files "$PLUGIN_PATH" --eval "$TEST_DIR/simple_eval.nix" 2>&1 | grep -q "30"; then
  echo "[FAIL] Plugin failed to load or evaluate"
  exit 1
fi
echo "[PASS] Plugin loads and evaluates correctly"
echo ""

echo "Test 2: Normal Nix Evaluation (No Plugin)"
echo "------------------------------------------"
output=$(nix-instantiate --eval --strict --json "$TEST_DIR/regression_normal_nix.nix" 2>&1)
if ! grep -q '"math":7' <<<"$output"; then
  echo "[FAIL] Normal Nix evaluation broken"
  echo "$output"
  exit 1
fi
echo "[PASS] Normal Nix evaluation works without plugin"
echo ""

echo "Test 3: Normal Nix Evaluation (With Plugin)"
echo "--------------------------------------------"
output=$(nix-instantiate --plugin-files "$PLUGIN_PATH" --eval --strict --json "$TEST_DIR/regression_normal_nix.nix" 2>&1)
if ! grep -q '"math":7' <<<"$output"; then
  echo "[FAIL] Plugin breaks normal Nix evaluation"
  echo "$output"
  exit 1
fi
echo "[PASS] Normal Nix evaluation works with plugin loaded"
echo ""

echo "Test 4: Import Builtin"
echo "----------------------"
# import_test.nix imports via a relative path, so evaluate from inside
# the fixture directory and return afterwards.
cd "$TEST_DIR"
output=$(nix-instantiate --plugin-files "$PLUGIN_PATH" --eval --strict --json import_test.nix 2>&1)
if ! grep -q '"value":142' <<<"$output"; then
  echo "[FAIL] Import builtin broken"
  echo "$output"
  exit 1
fi
echo "[PASS] Import builtin works correctly"
cd - >/dev/null
echo ""

echo "Test 5: IR Builtins Available"
echo "------------------------------"
output=$(nix-instantiate --plugin-files "$PLUGIN_PATH" --eval "$TEST_DIR/ir_builtins_test.nix" 2>&1)
if grep -q "info.*=" <<<"$output"; then
  echo "[PASS] IR builtins (nixIR_info, nixIR_compile, nixIR_loadIR) available"
else
  # Non-fatal: only warn, since builtin registration may vary by build.
  echo "[WARN] IR builtins may not be available (check plugin initialization)"
fi
echo ""

echo "Integration Tests Complete"

View file

@ -0,0 +1,6 @@
# Simple expression to test plugin loading.
# run.sh Test 1 greps the evaluator output for "30".
let
x = 10;
y = 20;
in
x + y

View file

@ -1,4 +0,0 @@
# Simplest 'or' test
let
x = { a = 1; };
in x.a or 2

View file

@ -1,3 +1,4 @@
#include "irc/parser.h"
#include "irc/serializer.h"
#include "irc/types.h"
#include <cassert>
@ -7,21 +8,21 @@ using namespace nix_irc;
int failures = 0;
#define TEST_CHECK(cond, msg) \
do { \
if (!(cond)) { \
std::cerr << " FAIL: " << msg << std::endl; \
failures++; \
} else { \
std::cout << " PASS: " << msg << std::endl; \
} \
#define TEST_CHECK(cond, msg) \
do { \
if (!(cond)) { \
std::cerr << " FAIL: " << msg << std::endl; \
failures++; \
} else { \
std::cout << " PASS: " << msg << std::endl; \
} \
} while (0)
#define TEST_PASS(msg) std::cout << " PASS: " << msg << std::endl
#define TEST_FAIL(msg) \
do { \
std::cerr << " FAIL: " << msg << std::endl; \
failures++; \
#define TEST_FAIL(msg) \
do { \
std::cerr << " FAIL: " << msg << std::endl; \
failures++; \
} while (0)
void test_enum_compatibility() {
@ -30,33 +31,28 @@ void test_enum_compatibility() {
if (static_cast<uint8_t>(NodeType::WITH) == 0x32) {
std::cout << " PASS: WITH has correct value 0x32" << std::endl;
} else {
std::cerr << " FAIL: WITH should be 0x32, got "
<< static_cast<uint8_t>(NodeType::WITH) << std::endl;
std::cerr << " FAIL: WITH should be 0x32, got " << static_cast<uint8_t>(NodeType::WITH)
<< std::endl;
}
if (static_cast<uint8_t>(NodeType::HAS_ATTR) == 0x34) {
std::cout << " PASS: HAS_ATTR has value 0x34 (new slot after WITH bump)"
<< std::endl;
std::cout << " PASS: HAS_ATTR has value 0x34 (new slot after WITH bump)" << std::endl;
} else if (static_cast<uint8_t>(NodeType::HAS_ATTR) == 0x33 &&
static_cast<uint8_t>(NodeType::WITH) == 0x32) {
std::cout << " PASS: HAS_ATTR has value 0x33 (restored original with WITH "
"at 0x32)"
<< std::endl;
} else {
std::cerr << " FAIL: HAS_ATTR value is "
<< static_cast<uint8_t>(NodeType::HAS_ATTR)
std::cerr << " FAIL: HAS_ATTR value is " << static_cast<uint8_t>(NodeType::HAS_ATTR)
<< " (expected 0x34 or 0x33 with WITH=0x32)" << std::endl;
}
if (IR_VERSION == 2) {
std::cout << " PASS: IR_VERSION bumped to 2 for breaking change"
<< std::endl;
std::cout << " PASS: IR_VERSION bumped to 2 for breaking change" << std::endl;
} else if (static_cast<uint8_t>(NodeType::WITH) == 0x32) {
std::cout << " PASS: IR_VERSION unchanged but WITH restored to 0x32"
<< std::endl;
std::cout << " PASS: IR_VERSION unchanged but WITH restored to 0x32" << std::endl;
} else {
std::cerr << " FAIL: Either bump IR_VERSION or fix enum values"
<< std::endl;
std::cerr << " FAIL: Either bump IR_VERSION or fix enum values" << std::endl;
}
}
@ -80,19 +76,16 @@ void test_serializer_select_with_default() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_select = loaded.entry->get_if<SelectNode>();
if (loaded_select && loaded_select->default_expr &&
*loaded_select->default_expr) {
auto *def_val = (*loaded_select->default_expr)->get_if<ConstIntNode>();
auto* loaded_select = loaded.entry->get_if<SelectNode>();
if (loaded_select && loaded_select->default_expr && *loaded_select->default_expr) {
auto* def_val = (*loaded_select->default_expr)->get_if<ConstIntNode>();
if (def_val && def_val->value == 100) {
std::cout << " PASS: SELECT with default_expr round-trips correctly"
<< std::endl;
std::cout << " PASS: SELECT with default_expr round-trips correctly" << std::endl;
} else {
std::cerr << " FAIL: default_expr value incorrect" << std::endl;
}
} else {
std::cerr << " FAIL: default_expr not deserialized (missing u8 flag read)"
<< std::endl;
std::cerr << " FAIL: default_expr not deserialized (missing u8 flag read)" << std::endl;
}
}
@ -114,11 +107,9 @@ void test_serializer_select_without_default() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_select = loaded.entry->get_if<SelectNode>();
if (loaded_select &&
(!loaded_select->default_expr || !*loaded_select->default_expr)) {
std::cout << " PASS: SELECT without default_expr round-trips correctly"
<< std::endl;
auto* loaded_select = loaded.entry->get_if<SelectNode>();
if (loaded_select && (!loaded_select->default_expr || !*loaded_select->default_expr)) {
std::cout << " PASS: SELECT without default_expr round-trips correctly" << std::endl;
} else {
std::cerr << " FAIL: default_expr should be null/absent" << std::endl;
}
@ -127,34 +118,53 @@ void test_serializer_select_without_default() {
void test_parser_brace_depth_in_strings() {
std::cout << "> Parser brace depth handling in strings..." << std::endl;
std::string test_input = R"(
let s = "test}"; in ${s}
)";
std::string test_input = R"(let s = "test}"; in s)";
std::cout << " Test input contains '}' inside string - should not end "
"interpolation"
<< std::endl;
std::cout << " NOTE: This test requires running through actual parser"
<< std::endl;
try {
Parser parser;
auto ast = parser.parse(test_input);
TEST_PASS("Brace inside string does not confuse parser");
} catch (const std::exception& e) {
TEST_FAIL("Parser should handle '}' inside strings");
}
}
void test_parser_has_ellipsis_usage() {
std::cout << "> Parser has_ellipsis usage..." << std::endl;
std::cout << " NOTE: LambdaNode should have strict_pattern field when "
"has_ellipsis is false"
<< std::endl;
std::cout << " This requires checking the parser output for strict patterns"
<< std::endl;
std::string with_ellipsis = "{ a, ... }: a";
std::string without_ellipsis = "{ a, b }: a + b";
try {
Parser parser1;
auto ast1 = parser1.parse(with_ellipsis);
TEST_PASS("Pattern with ellipsis parses correctly");
Parser parser2;
auto ast2 = parser2.parse(without_ellipsis);
TEST_PASS("Pattern without ellipsis parses correctly");
} catch (const std::exception& e) {
TEST_FAIL("Pattern parsing failed");
}
}
void test_parser_expect_in_speculative_parsing() {
std::cout << "> Parser expect() in speculative parsing..." << std::endl;
std::cout << " NOTE: try_parse_lambda should not throw on non-lambda input"
<< std::endl;
std::cout << " This requires testing parser with invalid lambda patterns"
<< std::endl;
std::string not_a_lambda = "1 + 2";
std::string actual_lambda = "x: x + 1";
try {
Parser parser1;
auto ast1 = parser1.parse(not_a_lambda);
TEST_PASS("Non-lambda input does not cause parser to throw");
Parser parser2;
auto ast2 = parser2.parse(actual_lambda);
TEST_PASS("Actual lambda parses correctly");
} catch (const std::exception& e) {
TEST_FAIL("Parser should handle both lambda and non-lambda input");
}
}
void test_lookup_path_node() {
@ -170,10 +180,9 @@ void test_lookup_path_node() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_lookup = loaded.entry->get_if<ConstLookupPathNode>();
auto* loaded_lookup = loaded.entry->get_if<ConstLookupPathNode>();
TEST_CHECK(loaded_lookup != nullptr, "Deserialized node is ConstLookupPathNode");
TEST_CHECK(loaded_lookup && loaded_lookup->value == "nixpkgs",
"Lookup path value is 'nixpkgs'");
TEST_CHECK(loaded_lookup && loaded_lookup->value == "nixpkgs", "Lookup path value is 'nixpkgs'");
}
void test_import_node() {
@ -190,16 +199,14 @@ void test_import_node() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_import = loaded.entry->get_if<ImportNode>();
auto* loaded_import = loaded.entry->get_if<ImportNode>();
TEST_CHECK(loaded_import != nullptr, "Deserialized node is ImportNode");
TEST_CHECK(loaded_import && loaded_import->path != nullptr,
"Import node has path");
TEST_CHECK(loaded_import && loaded_import->path != nullptr, "Import node has path");
if (loaded_import && loaded_import->path) {
auto *path_node = loaded_import->path->get_if<ConstPathNode>();
auto* path_node = loaded_import->path->get_if<ConstPathNode>();
TEST_CHECK(path_node != nullptr, "Import path is ConstPathNode");
TEST_CHECK(path_node && path_node->value == "./test.nix",
"Import path value is './test.nix'");
TEST_CHECK(path_node && path_node->value == "./test.nix", "Import path value is './test.nix'");
}
}
@ -217,14 +224,13 @@ void test_import_with_lookup_path() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_import = loaded.entry->get_if<ImportNode>();
auto* loaded_import = loaded.entry->get_if<ImportNode>();
TEST_CHECK(loaded_import != nullptr, "Deserialized node is ImportNode");
if (loaded_import && loaded_import->path) {
auto *lookup_node = loaded_import->path->get_if<ConstLookupPathNode>();
auto* lookup_node = loaded_import->path->get_if<ConstLookupPathNode>();
TEST_CHECK(lookup_node != nullptr, "Import path is ConstLookupPathNode");
TEST_CHECK(lookup_node && lookup_node->value == "nixpkgs",
"Lookup path value is 'nixpkgs'");
TEST_CHECK(lookup_node && lookup_node->value == "nixpkgs", "Lookup path value is 'nixpkgs'");
}
}
@ -241,7 +247,7 @@ void test_uri_node() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_uri = loaded.entry->get_if<ConstURINode>();
auto* loaded_uri = loaded.entry->get_if<ConstURINode>();
TEST_CHECK(loaded_uri != nullptr, "Deserialized node is ConstURINode");
TEST_CHECK(loaded_uri && loaded_uri->value == "https://example.com",
"URI value is 'https://example.com'");
@ -260,10 +266,9 @@ void test_float_node() {
Deserializer deser;
auto loaded = deser.deserialize(bytes);
auto *loaded_float = loaded.entry->get_if<ConstFloatNode>();
auto* loaded_float = loaded.entry->get_if<ConstFloatNode>();
TEST_CHECK(loaded_float != nullptr, "Deserialized node is ConstFloatNode");
TEST_CHECK(loaded_float && loaded_float->value > 3.14 &&
loaded_float->value < 3.15,
TEST_CHECK(loaded_float && loaded_float->value > 3.14 && loaded_float->value < 3.15,
"Float value is approximately 3.14159");
}