Test harness #6

Merged
mikael-lovqvist merged 7 commits from mikael-lovqvists-claude-agent/fa2json:planning into main 2026-03-05 19:16:04 +00:00
4 changed files with 217 additions and 0 deletions
Showing only changes of commit 448e27ed8e - Show all commits

21
Makefile Normal file
View File

@@ -0,0 +1,21 @@
# Build and test harness for fa2json (fanotify -> JSON event stream).
SHELL := /bin/bash
CC := cc
CFLAGS := -Wall -O2
TARGET := fa2json
# NOTE(review): both sources are linked into one binary; assumes exactly one
# of them defines main() — confirm.
SRCS := fs-watcher.c json-writer.c
.PHONY: all test dev clean
all: $(TARGET)
# Compile and link both translation units directly (no intermediate .o files).
$(TARGET): $(SRCS)
	$(CC) $(CFLAGS) -o $@ $^
# Automated pass/fail suite (needs sudo for mount + fanotify).
test: $(TARGET)
	node test/test.mjs
# Interactive developer session; pass extra flags via ARGS="...".
dev: $(TARGET)
	node test/dev.mjs $(ARGS)
clean:
	rm -f $(TARGET)

43
test/dev.mjs Normal file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env node
// Developer mode: set up loop device, stream fa2json output through jq,
// tear down on exit. Optionally launch a terminal at the mount point.
//
// Usage:
// sudo node test/dev.mjs
// sudo node test/dev.mjs --terminal konsole
// sudo node test/dev.mjs --terminal "konsole -e bash"
import { spawn } from 'node:child_process';
import { createInterface } from 'node:readline';
import { setup, spawnFa2json } from './lib/setup.mjs';
const terminalArg = (() => {
const i = process.argv.indexOf('--terminal');
return i !== -1 ? process.argv.slice(i + 1).join(' ') : null;
})();
const { mnt, teardown } = await setup();
console.error(`Mount point: ${mnt}`);
// Pipe fa2json stdout through jq for pretty coloured output
const fa2json = spawnFa2json(mnt);
const jq = spawn('jq', ['-C', '--unbuffered', '.'], { stdio: ['pipe', 'inherit', 'inherit'] });
fa2json.stdout.pipe(jq.stdin);
fa2json.on('exit', async () => {
jq.stdin.end();
await teardown();
process.exit(0);
});
// Launch optional terminal at mount point
if (terminalArg) {
const [cmd, ...args] = terminalArg.split(' ');
spawn(cmd, [...args, '--workdir', mnt], { detached: true, stdio: 'ignore' }).unref();
console.error(`Launched: ${terminalArg} --workdir ${mnt}`);
}
// Clean teardown on Ctrl+C
process.on('SIGINT', async () => {
fa2json.kill('SIGTERM');
});

34
test/lib/setup.mjs Normal file
View File

@@ -0,0 +1,34 @@
import { execSync, spawn } from 'node:child_process';
import { mkdtempSync, mkdirSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
// Absolute path to the fa2json binary at the repo root (two levels up from
// test/lib/). NOTE(review): URL.pathname keeps percent-encoding; fine for
// plain paths, would break on paths containing spaces — confirm if relevant.
const FA2JSON = new URL('../../fa2json', import.meta.url).pathname;
// Create a 10 MiB ext4 image, loop-mount it, and hand it to the caller.
// Returns { img, mnt, teardown }: image path, mount point, and an async
// best-effort cleanup function. Requires passwordless sudo for mount/chown.
export async function setup() {
  // One scratch workspace holds both the image and the mount point, using
  // the fs/os/path helpers imported above instead of shelling out to mktemp.
  const work = mkdtempSync(join(tmpdir(), 'fa2json-test-'));
  const img = join(work, 'disk.img');
  const mnt = join(work, 'mnt');
  mkdirSync(mnt);
  // Build the image and format it
  execSync(`truncate -s 10M ${img}`);
  execSync(`mkfs.ext4 -q ${img}`);
  // -o loop is explicit for portability; older mount(8) does not auto-attach
  // a loop device for regular files.
  execSync(`sudo mount -o loop ${img} ${mnt}`);
  // Hand ownership to current user, then sync before fa2json starts
  execSync(`sudo chown ${process.getuid()} ${mnt}`);
  execSync('sync');
  // Best-effort cleanup: unmount, then drop the whole workspace.
  async function teardown() {
    try { execSync(`sudo umount ${mnt}`); } catch {}
    try { execSync(`rm -rf ${work}`); } catch {}
  }
  return { img, mnt, teardown };
}
// Start the watcher on `mnt` and return the ChildProcess. It runs via sudo
// (the runner comments note fa2json needs root); stdout is captured for the
// JSON stream while stderr passes straight through for diagnostics.
export function spawnFa2json(mnt) {
  const stdio = ['ignore', 'pipe', 'inherit'];
  return spawn('sudo', [FA2JSON, mnt], { stdio });
}

119
test/test.mjs Normal file
View File

@@ -0,0 +1,119 @@
#!/usr/bin/env node
// Automated test runner. Exit 0 = pass, exit 1 = fail.
// Requires root (sudo) for mount and fa2json.
import { createInterface } from 'node:readline';
import { promises as fs } from 'node:fs';
import { join } from 'node:path';
import { setup, spawnFa2json } from './lib/setup.mjs';
// Fanotify mask flags — bit values mirror <linux/fanotify.h>; fa2json
// reports the raw kernel mask in each JSON event.
const FAN_ATTRIB = 0x4;
const FAN_CLOSE_WRITE = 0x8;
const FAN_CREATE = 0x100;
const FAN_DELETE = 0x200;
const FAN_RENAME = 0x10000000;
const FAN_ONDIR = 0x40000000;
// Mount a fresh ext4 image and start the watcher on it.
const { mnt, teardown } = await setup();
const fa2json = spawnFa2json(mnt);
// Event buffer and marker machinery: events accumulate in `events` until
// doOp() drains them. After each operation doOp() creates ".marker_<n>";
// seeing that marker's CREATE event proves every earlier event has been
// flushed through the pipe.
const events = [];
let markerResolve = null;
let markerName = null;
let markerCounter = 0;
const rl = createInterface({ input: fa2json.stdout });
rl.on('line', line => {
  const event = JSON.parse(line);
  // Strip mount prefix from all path fields
  for (const key of ['name', 'old', 'new']) {
    if (event[key]) event[key] = event[key].slice(mnt.length);
  }
  events.push(event);
  // Resolve the pending doOp() wait once the current marker's CREATE event
  // shows up. The resolver must be armed before the marker file is created,
  // otherwise this event can arrive while markerResolve is still null and
  // the run hangs.
  if (markerResolve && event.name === `/.marker_${markerName}` && (event.mask & FAN_CREATE)) {
    markerResolve();
    markerResolve = null;
  }
});
// Perform a FS operation, drop a marker, collect events up to that marker
async function doOp(label, fn) {
const id = markerCounter++;
await fn();
await fs.writeFile(join(mnt, `.marker_${id}`), '');
await new Promise(resolve => {
markerName = id;
markerResolve = resolve;
});
// Collect all events since previous marker (excluding marker events themselves)
const batch = events.splice(0).filter(e => !e.name?.startsWith('/.marker_'));
return { label, batch };
}
// Run `check` against the event batch; on failure, dump the batch for
// debugging and abort the test run with a descriptive error.
function assert(label, batch, check) {
  if (check(batch)) return;
  console.error(`FAIL: ${label}`);
  console.error('Batch:', JSON.stringify(batch, null, 2));
  throw new Error(`Assertion failed: ${label}`);
}
// True when some event in `batch` carries *all* bits of `flags` in its mask
// and matches `path` in the given field ('name' by default; pass 'old' or
// 'new' for rename events).
function hasEvent(batch, flags, path, field = 'name') {
  for (const event of batch) {
    if ((event.mask & flags) === flags && event[field] === path) {
      return true;
    }
  }
  return false;
}
try {
  // Table-driven scenarios: [label, filesystem operation, batch predicate].
  // Order matters — later operations act on paths created by earlier ones.
  const cases = [
    ['mkdir /dir_a', () => fs.mkdir(join(mnt, 'dir_a')),
      b => hasEvent(b, FAN_CREATE | FAN_ONDIR, '/dir_a')],
    ['touch /file_a.txt', () => fs.writeFile(join(mnt, 'file_a.txt'), ''),
      b => hasEvent(b, FAN_CREATE, '/file_a.txt')],
    ['write /file_a.txt', () => fs.writeFile(join(mnt, 'file_a.txt'), 'content'),
      b => hasEvent(b, FAN_CLOSE_WRITE, '/file_a.txt')],
    ['mkdir /dir_b', () => fs.mkdir(join(mnt, 'dir_b')),
      b => hasEvent(b, FAN_CREATE | FAN_ONDIR, '/dir_b')],
    ['touch /dir_b/nested.txt', () => fs.writeFile(join(mnt, 'dir_b', 'nested.txt'), ''),
      b => hasEvent(b, FAN_CREATE, '/dir_b/nested.txt')],
    ['mv /file_a.txt /file_b.txt', () => fs.rename(join(mnt, 'file_a.txt'), join(mnt, 'file_b.txt')),
      b => b.some(e => (e.mask & FAN_RENAME) && e.old === '/file_a.txt' && e.new === '/file_b.txt')],
    ['mv /dir_b /dir_a/dir_b_moved', () => fs.rename(join(mnt, 'dir_b'), join(mnt, 'dir_a', 'dir_b_moved')),
      b => b.some(e => (e.mask & FAN_RENAME) && (e.mask & FAN_ONDIR) && e.old === '/dir_b' && e.new === '/dir_a/dir_b_moved')],
    ['chmod 600 /file_b.txt', () => fs.chmod(join(mnt, 'file_b.txt'), 0o600),
      b => hasEvent(b, FAN_ATTRIB, '/file_b.txt')],
    ['touch -m /file_b.txt', () => fs.utimes(join(mnt, 'file_b.txt'), new Date(), new Date()),
      b => hasEvent(b, FAN_ATTRIB, '/file_b.txt')],
    ['chmod 755 /dir_a', () => fs.chmod(join(mnt, 'dir_a'), 0o755),
      b => hasEvent(b, FAN_ATTRIB | FAN_ONDIR, '/dir_a')],
    ['rm /file_b.txt', () => fs.unlink(join(mnt, 'file_b.txt')),
      b => hasEvent(b, FAN_DELETE, '/file_b.txt')],
    ['rm /dir_a/dir_b_moved/nested.txt', () => fs.unlink(join(mnt, 'dir_a', 'dir_b_moved', 'nested.txt')),
      b => hasEvent(b, FAN_DELETE, '/dir_a/dir_b_moved/nested.txt')],
    ['rmdir /dir_a/dir_b_moved', () => fs.rmdir(join(mnt, 'dir_a', 'dir_b_moved')),
      b => hasEvent(b, FAN_DELETE | FAN_ONDIR, '/dir_a/dir_b_moved')],
    ['rmdir /dir_a', () => fs.rmdir(join(mnt, 'dir_a')),
      b => hasEvent(b, FAN_DELETE | FAN_ONDIR, '/dir_a')],
  ];
  // Run sequentially: each doOp() waits for its marker before the next starts.
  for (const [label, run, check] of cases) {
    const { batch } = await doOp(label, run);
    assert(label, batch, check);
  }
  console.log('PASS');
} catch (e) {
  console.error(e.message);
  process.exitCode = 1;
} finally {
  // Always stop the watcher and unmount, even on assertion failure.
  fa2json.kill();
  await teardown();
}