mirror of https://github.com/affaan-m/everything-claude-code.git
synced 2026-04-30 22:13:28 +08:00

Compare commits: fix/insait ... fix/mcp-he (4 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 015b00b8fc | |
| | 51511461f6 | |
| | aaaf52fb1e | |
| | 33edfd3bb3 | |
@@ -422,7 +422,7 @@ async function runTests() {
        CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );

@@ -458,7 +458,7 @@ async function runTests() {
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
        ECC_MCP_HEALTH_FAIL_OPEN: '1',
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );

@@ -490,7 +490,7 @@ async function runTests() {
        CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );
    const missingCommand = runHook(

@@ -499,7 +499,7 @@ async function runTests() {
        CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );

@@ -597,7 +597,7 @@ async function runTests() {
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
        ECC_MCP_RECONNECT_COMMAND: `${JSON.stringify(process.execPath)} ${JSON.stringify(reconnectScript)}`,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100',
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000',
        ECC_MCP_HEALTH_BACKOFF_MS: '10'
      }
    );

@@ -660,7 +660,7 @@ async function runTests() {
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
        ECC_MCP_RECONNECT_COMMAND: `node ${JSON.stringify(reconnectScript)}`,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );
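The only change in these hunks is the probe timeout the tests inject: ECC_MCP_HEALTH_TIMEOUT_MS goes from '100' to '1000', presumably to keep slow CI machines from tripping the health check. For orientation, a minimal sketch of how such an env override is typically consumed; the function name and the fallback value below are assumptions, not the hook's actual code:

```js
// Hypothetical helper: parse ECC_MCP_HEALTH_TIMEOUT_MS with a safe fallback.
function resolveHealthTimeoutMs(env = process.env) {
  const raw = Number.parseInt(env.ECC_MCP_HEALTH_TIMEOUT_MS ?? '', 10);
  // Ignore unset, empty, or non-positive values and fall back to a default.
  return Number.isFinite(raw) && raw > 0 ? raw : 5000;
}

// Under the updated tests this resolves to 1000 (ms) instead of 100.
console.log(resolveHealthTimeoutMs({ ECC_MCP_HEALTH_TIMEOUT_MS: '1000' }));
```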
@@ -40,6 +40,48 @@ function inTempRepo(fn) {
  }
}

function captureConsoleError(fn) {
  const previousError = console.error;
  const lines = [];
  console.error = (...args) => {
    lines.push(args.join(' '));
  };

  try {
    const result = fn();
    return { result, stderr: lines.join('\n') };
  } finally {
    console.error = previousError;
  }
}

function writeAndStage(repoDir, relativePath, content) {
  const filePath = path.join(repoDir, relativePath);
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  fs.writeFileSync(filePath, content, 'utf8');
  spawnSync('git', ['add', relativePath], { cwd: repoDir, stdio: 'pipe', encoding: 'utf8' });
}

function withEnv(overrides, fn) {
  const previous = {};
  for (const key of Object.keys(overrides)) {
    previous[key] = process.env[key];
    process.env[key] = overrides[key];
  }

  try {
    return fn();
  } finally {
    for (const key of Object.keys(overrides)) {
      if (typeof previous[key] === 'string') {
        process.env[key] = previous[key];
      } else {
        delete process.env[key];
      }
    }
  }
}

let passed = 0;
let failed = 0;
@@ -77,5 +119,159 @@ if (test('evaluate inspects staged snapshot instead of newer working tree conten
  });
})) passed++; else failed++;

if (test('passes through non-commit amend malformed JSON and run wrapper paths', () => {
  const readInput = JSON.stringify({ tool_input: { command: 'git status --short' } });
  assert.deepStrictEqual(hook.evaluate(readInput), { output: readInput, exitCode: 0 });

  const amendInput = JSON.stringify({ tool_input: { command: 'git commit --amend -m "fix: update"' } });
  assert.deepStrictEqual(hook.evaluate(amendInput), { output: amendInput, exitCode: 0 });

  const malformed = 'not json {{{';
  const malformedResult = captureConsoleError(() => hook.run(malformed));
  assert.deepStrictEqual(malformedResult.result, { stdout: malformed, exitCode: 0 });
  assert.ok(malformedResult.stderr.includes('[Hook] Error:'), 'should log JSON parse errors without blocking');
})) passed++; else failed++;

if (test('allows git commit when no files are staged', () => {
  inTempRepo(() => {
    const input = JSON.stringify({ tool_input: { command: 'git commit -m "fix: no staged files"' } });
    const { result, stderr } = captureConsoleError(() => hook.evaluate(input));

    assert.strictEqual(result.output, input);
    assert.strictEqual(result.exitCode, 0);
    assert.ok(stderr.includes('No staged files found'), `expected no-staged warning, got: ${stderr}`);
  });
})) passed++; else failed++;

if (test('allows warning-only issues while reporting console TODO and message warnings', () => {
  inTempRepo(repoDir => {
    writeAndStage(repoDir, 'index.js', [
      'console.log("debug only");',
      '// TODO: clean this up',
      '// TODO: tracked in issue #123',
      '// console.log("commented out");',
      '* console.log("doc comment");',
      'const ok = true;',
      ''
    ].join('\n'));

    const input = JSON.stringify({
      tool_input: {
        command: 'git commit -m "fix: Uppercase subject."'
      }
    });
    const { result, stderr } = captureConsoleError(() => hook.evaluate(input));

    assert.strictEqual(result.output, input);
    assert.strictEqual(result.exitCode, 0, 'warning-only issues should not block');
    assert.ok(stderr.includes('WARNING Line 1'), `expected console warning, got: ${stderr}`);
    assert.ok(stderr.includes('INFO Line 2'), `expected TODO info warning, got: ${stderr}`);
    assert.ok(stderr.includes('Subject should start with lowercase'), `expected capitalization warning, got: ${stderr}`);
    assert.ok(stderr.includes('should not end with a period'), `expected punctuation warning, got: ${stderr}`);
    assert.ok(stderr.includes('Warnings found'), `expected warning summary, got: ${stderr}`);
  });
})) passed++; else failed++;

if (test('reports invalid and long commit messages without blocking when files are clean', () => {
  inTempRepo(repoDir => {
    writeAndStage(repoDir, 'index.js', 'const clean = true;\n');

    const longMessage = `Bad message ${'x'.repeat(80)}`;
    const input = JSON.stringify({
      tool_input: {
        command: `git commit --message="${longMessage}"`
      }
    });
    const { result, stderr } = captureConsoleError(() => hook.evaluate(input));

    assert.strictEqual(result.output, input);
    assert.strictEqual(result.exitCode, 0);
    assert.ok(stderr.includes('does not follow conventional commit format'), `expected format warning, got: ${stderr}`);
    assert.ok(stderr.includes('Commit message too long'), `expected length warning, got: ${stderr}`);
  });
})) passed++; else failed++;

if (test('blocks commits with staged secret patterns across checkable files', () => {
  inTempRepo(repoDir => {
    writeAndStage(repoDir, 'index.js', [
      "const openai = 'sk-abcdefghijklmnopqrstuvwxyz';",
      "const token = 'ghp_abcdefghijklmnopqrstuvwxyzABCDEFGHIJ';",
      ''
    ].join('\n'));
    writeAndStage(repoDir, 'app.py', [
      'aws = "AKIAABCDEFGHIJKLMNOP"',
      'api_key = "secret-value"',
      ''
    ].join('\n'));

    const input = JSON.stringify({ tool_input: { command: 'git commit -m "fix: block secrets"' } });
    const { result, stderr } = captureConsoleError(() => hook.evaluate(input));

    assert.strictEqual(result.output, input);
    assert.strictEqual(result.exitCode, 2);
    assert.ok(stderr.includes('Potential OpenAI API key'), `expected OpenAI secret warning, got: ${stderr}`);
    assert.ok(stderr.includes('Potential GitHub PAT'), `expected GitHub PAT warning, got: ${stderr}`);
    assert.ok(stderr.includes('Potential AWS Access Key'), `expected AWS key warning, got: ${stderr}`);
    assert.ok(stderr.includes('Potential API key'), `expected generic API key warning, got: ${stderr}`);
  });
})) passed++; else failed++;

if (test('reports eslint pylint and golint failures from staged files', () => {
  inTempRepo(repoDir => {
    writeAndStage(repoDir, 'index.js', 'const lint = true;\n');
    writeAndStage(repoDir, 'app.py', 'print("lint")\n');
    writeAndStage(repoDir, 'main.go', 'package main\n');

    const eslintPath = path.join(repoDir, 'node_modules', '.bin', process.platform === 'win32' ? 'eslint.cmd' : 'eslint');
    fs.mkdirSync(path.dirname(eslintPath), { recursive: true });
    fs.writeFileSync(eslintPath, '#!/bin/sh\necho "eslint failed"\nexit 1\n', 'utf8');
    fs.chmodSync(eslintPath, 0o755);

    const binDir = path.join(repoDir, 'fake-bin');
    fs.mkdirSync(binDir, { recursive: true });
    const pylintPath = path.join(binDir, 'pylint');
    const golintPath = path.join(binDir, 'golint');
    fs.writeFileSync(pylintPath, '#!/bin/sh\necho "pylint failed"\nexit 1\n', 'utf8');
    fs.writeFileSync(golintPath, '#!/bin/sh\necho "main.go:1: lint failed"\nexit 0\n', 'utf8');
    fs.chmodSync(pylintPath, 0o755);
    fs.chmodSync(golintPath, 0o755);

    withEnv({ PATH: `${binDir}${path.delimiter}${process.env.PATH || ''}` }, () => {
      const input = JSON.stringify({ tool_input: { command: 'git commit -m "fix: lint failures"' } });
      const { result, stderr } = captureConsoleError(() => hook.evaluate(input));

      assert.strictEqual(result.output, input);
      assert.strictEqual(result.exitCode, 2);
      assert.ok(stderr.includes('ESLint Issues'), `expected ESLint output, got: ${stderr}`);
      assert.ok(stderr.includes('eslint failed'), `expected ESLint failure text, got: ${stderr}`);
      assert.ok(stderr.includes('Pylint Issues'), `expected Pylint output, got: ${stderr}`);
      assert.ok(stderr.includes('pylint failed'), `expected Pylint failure text, got: ${stderr}`);
      assert.ok(stderr.includes('golint Issues'), `expected golint output, got: ${stderr}`);
      assert.ok(stderr.includes('main.go:1: lint failed'), `expected golint failure text, got: ${stderr}`);
    });
  });
})) passed++; else failed++;

if (test('stdin entry point truncates oversized input and preserves pass-through output', () => {
  const oversized = JSON.stringify({
    tool_input: {
      command: 'git status',
      filler: 'x'.repeat(1024 * 1024 + 1024)
    }
  });
  const result = spawnSync('node', [path.join(__dirname, '..', '..', 'scripts', 'hooks', 'pre-bash-commit-quality.js')], {
    input: oversized,
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe'],
    timeout: 10000
  });

  assert.strictEqual(result.status, 0);
  assert.ok(result.stdout.length > 0, 'expected truncated payload to pass through');
  assert.ok(result.stdout.length <= 1024 * 1024, 'expected stdout to stay within hook input limit');
  assert.strictEqual(result.stdout, oversized.slice(0, result.stdout.length));
  assert.ok(result.stderr.includes('[Hook] Error:'), 'truncated JSON should be logged and allowed');
})) passed++; else failed++;

console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0);
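Taken together, the new tests pin down the hook's module contract: `evaluate()` receives the raw JSON payload written to the hook's stdin and returns `{ output, exitCode }`, where exit code 0 passes the command through (warnings go to `console.error`) and exit code 2 blocks it, while `run()` wraps the same logic for already-read stdin text. A minimal sketch of driving the module directly, assuming it can be required the way these tests use it; the relative path mirrors the one used in the stdin test above and may need adjusting, and the commit message is illustrative:

```js
const path = require('path');

// Path assumption: same script the stdin test spawns, relative to the test file's directory.
const hook = require(path.join(__dirname, '..', '..', 'scripts', 'hooks', 'pre-bash-commit-quality.js'));

// A hypothetical commit command to evaluate.
const input = JSON.stringify({ tool_input: { command: 'git commit -m "feat: add login form"' } });

const { output, exitCode } = hook.evaluate(input);
// exitCode 0 => allowed (output echoes the input payload); exitCode 2 => blocked
// (e.g. staged secrets or lint failures, with details written to console.error).
console.log(exitCode === 0 ? 'allowed' : 'blocked', output === input);
```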
@@ -16,6 +16,13 @@ const script = path.join(
  'hooks',
  'session-activity-tracker.js'
);
const {
  buildActivityRow,
  extractFileEvents,
  extractFilePaths,
  summarizeOutput,
  run,
} = require(script);

function test(name, fn) {
  try {

@@ -52,6 +59,15 @@ function runScript(input, envOverrides = {}, options = {}) {
  return { code: result.status || 0, stdout: result.stdout || '', stderr: result.stderr || '' };
}

function readMetricRows(homeDir) {
  const metricsFile = path.join(homeDir, '.claude', 'metrics', 'tool-usage.jsonl');
  return fs.readFileSync(metricsFile, 'utf8')
    .trim()
    .split(/\r?\n/)
    .filter(Boolean)
    .map(line => JSON.parse(line));
}

function runTests() {
  console.log('\n=== Testing session-activity-tracker.js ===\n');

@@ -405,6 +421,246 @@ function runTests() {
    fs.rmSync(tmpHome, { recursive: true, force: true });
  }) ? passed++ : failed++);

  (test('skips non-PostToolUse events and rows without required identifiers', () => {
    assert.strictEqual(buildActivityRow(
      { tool_name: 'Read', tool_input: { file_path: 'README.md' } },
      { CLAUDE_HOOK_EVENT_NAME: 'PreToolUse', ECC_SESSION_ID: 'sess' }
    ), null);
    assert.strictEqual(buildActivityRow(
      { tool_name: 'Read', tool_input: { file_path: 'README.md' } },
      { CLAUDE_HOOK_EVENT_NAME: 'PostToolUse' }
    ), null);
    assert.strictEqual(buildActivityRow(
      { tool_input: { file_path: 'README.md' } },
      { CLAUDE_HOOK_EVENT_NAME: 'PostToolUse', ECC_SESSION_ID: 'sess' }
    ), null);
  }) ? passed++ : failed++);

  (test('sanitizes nested params, long summaries, and output variants', () => {
    const longValue = `start ${'x'.repeat(260)} ghp_${'A'.repeat(20)}`;
    const row = buildActivityRow(
      {
        tool_name: 'Lookup',
        tool_input: {
          query: longValue,
          secret: `gho_${'B'.repeat(20)}`,
          count: 3,
          enabled: false,
          omitted: null,
          nested: { a: { b: { c: { d: 'too deep' } } } },
          list: [1, true, null, 4],
        },
        tool_output: `line one\nline two ${'y'.repeat(260)}`,
      },
      { CLAUDE_HOOK_EVENT_NAME: 'PostToolUse', CLAUDE_SESSION_ID: 'claude-fallback' }
    );

    assert.strictEqual(row.session_id, 'claude-fallback');
    assert.strictEqual(row.file_paths.length, 0);
    assert.ok(row.input_summary.endsWith('...'), 'Expected long shallow summary to be truncated');
    assert.ok(!row.input_summary.includes('ghp_'), 'Expected GitHub token redaction in input summary');
    assert.ok(row.output_summary.endsWith('...'), 'Expected long output summary to be truncated');
    assert.ok(!row.output_summary.includes('\n'), 'Expected output summary to normalize whitespace');

    const params = JSON.parse(row.input_params_json);
    assert.strictEqual(params.count, 3);
    assert.strictEqual(params.enabled, false);
    assert.strictEqual(params.omitted, null);
    assert.strictEqual(params.secret, '<REDACTED>');
    assert.strictEqual(params.nested.a.b.c, '[Truncated]');
    assert.deepStrictEqual(params.list.slice(0, 3), [1, true, null]);
    assert.strictEqual(params.list[3], 4);
    assert.ok(params.query.endsWith('...'), 'Expected long param value to be truncated');

    assert.strictEqual(summarizeOutput(null), '');
    assert.strictEqual(summarizeOutput(undefined), '');
    assert.strictEqual(summarizeOutput('hello\nworld'), 'hello world');
    assert.strictEqual(summarizeOutput({ ok: true }), '{"ok":true}');
  }) ? passed++ : failed++);

  (test('extracts file paths from nested arrays while filtering duplicates and remote URIs', () => {
    const paths = extractFilePaths({
      file_paths: [
        'src/a.js',
        'src/a.js',
        'https://example.com/file.js',
        '',
        { file_path: 'src/b.js' },
      ],
      nested: {
        source_path: 'app://connector/item',
        deep: [
          { new_file_path: 'src/c.js' },
          { old_file_path: 'plugin://plugin/item' },
          42,
        ],
      },
      ignored: 'not-a-path-field',
    });

    assert.deepStrictEqual(paths, ['src/a.js', 'src/b.js', 'src/c.js']);
    assert.deepStrictEqual(extractFilePaths(null), []);
    assert.deepStrictEqual(extractFilePaths('src/not-collected.js'), []);
  }) ? passed++ : failed++);

  (test('extracts file event previews for create delete and one-sided edits', () => {
    const events = extractFileEvents('Write', {
      files: [
        {
          file_path: 'src/new.ts',
          content: 'first line\nsecond line',
        },
        {
          file_path: 'src/new.ts',
          content: 'first line\nsecond line',
        },
        {
          file_path: 'https://example.com/remote.ts',
          content: 'ignored',
        },
      ],
    });
    assert.deepStrictEqual(events, [
      {
        path: 'src/new.ts',
        action: 'create',
        diff_preview: '+ first line second line',
        patch_preview: '+ first line second line',
      },
    ]);

    assert.deepStrictEqual(extractFileEvents('Remove', {
      file_path: 'src/old.ts',
      content: 'legacy line',
    }), [
      {
        path: 'src/old.ts',
        action: 'delete',
        patch_preview: '- legacy line',
      },
    ]);

    assert.deepStrictEqual(extractFileEvents('Edit', {
      edits: [
        { file_path: 'src/before.ts', old_string: 'legacy', new_string: '' },
        { file_path: 'src/after.ts', old_string: '', new_string: 'modern' },
        { file_path: 'src/no-preview.ts', old_string: '', new_string: '' },
      ],
    }), [
      {
        path: 'src/before.ts',
        action: 'modify',
        diff_preview: 'legacy ->',
        patch_preview: '@@\n- legacy',
      },
      {
        path: 'src/after.ts',
        action: 'modify',
        diff_preview: '-> modern',
        patch_preview: '@@\n+ modern',
      },
      { path: 'src/no-preview.ts', action: 'modify' },
    ]);

    assert.deepStrictEqual(extractFileEvents('Rename', {
      old_file_path: 'src/old-name.ts',
      new_file_path: 'src/new-name.ts',
    }), [
      { path: 'src/old-name.ts', action: 'move' },
      { path: 'src/new-name.ts', action: 'move' },
    ]);

    assert.deepStrictEqual(extractFileEvents('Read', null), []);
    assert.deepStrictEqual(extractFileEvents('Touch', { file_path: 'src/touched.ts' }), [
      { path: 'src/touched.ts', action: 'touch' },
    ]);
  }) ? passed++ : failed++);

  (test('records creation previews unchanged when running outside a git repository', () => {
    const tmpHome = makeTempDir();
    const tmpCwd = makeTempDir();

    const input = {
      tool_name: 'Write',
      tool_input: {
        file_path: 'created.txt',
        content: 'alpha\nbeta',
      },
      tool_output: 17,
    };
    const result = runScript(input, {
      ...withTempHome(tmpHome),
      CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
      ECC_SESSION_ID: 'ecc-session-non-git-create',
    }, {
      cwd: tmpCwd,
    });

    assert.strictEqual(result.code, 0);
    const [row] = readMetricRows(tmpHome);
    assert.strictEqual(row.output_summary, '17');
    assert.deepStrictEqual(row.file_events, [
      {
        path: 'created.txt',
        action: 'create',
        diff_preview: '+ alpha beta',
        patch_preview: '+ alpha beta',
      },
    ]);

    fs.rmSync(tmpHome, { recursive: true, force: true });
    fs.rmSync(tmpCwd, { recursive: true, force: true });
  }) ? passed++ : failed++);

  (test('preserves absolute paths outside the repo without git enrichment', () => {
    const tmpHome = makeTempDir();
    const outsideDir = makeTempDir();
    const outsideFile = path.join(outsideDir, 'outside.txt');
    fs.writeFileSync(outsideFile, 'outside', 'utf8');

    const input = {
      tool_name: 'Read',
      tool_input: {
        file_path: outsideFile,
      },
      tool_output: 'read outside',
    };
    const result = runScript(input, {
      ...withTempHome(tmpHome),
      CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
      ECC_SESSION_ID: 'ecc-session-absolute-outside',
    });

    assert.strictEqual(result.code, 0);
    const [row] = readMetricRows(tmpHome);
    assert.deepStrictEqual(row.file_paths, [outsideFile]);
    assert.deepStrictEqual(row.file_events, [
      { path: outsideFile, action: 'read' },
    ]);

    fs.rmSync(tmpHome, { recursive: true, force: true });
    fs.rmSync(outsideDir, { recursive: true, force: true });
  }) ? passed++ : failed++);

  (test('passes empty stdin through without creating metrics', () => {
    const tmpHome = makeTempDir();
    const result = runScript('', {
      ...withTempHome(tmpHome),
      CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
      ECC_SESSION_ID: 'sess-empty',
    });

    assert.strictEqual(result.code, 0);
    assert.strictEqual(result.stdout, '');
    assert.strictEqual(run(''), '');
    assert.strictEqual(
      fs.existsSync(path.join(tmpHome, '.claude', 'metrics', 'tool-usage.jsonl')),
      false
    );

    fs.rmSync(tmpHome, { recursive: true, force: true });
  }) ? passed++ : failed++);

  console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
  process.exit(failed > 0 ? 1 : 0);
}
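The readMetricRows helper added above parses ~/.claude/metrics/tool-usage.jsonl, and the new tests collectively describe what a row contains. A hedged sketch of one row, with field names taken from the assertions; any extra fields the tracker actually writes (timestamps, tool names, and so on) are omitted because this diff does not show them:

```js
// Inferred from the test assertions, not from the tracker's implementation.
const exampleRow = {
  session_id: 'ecc-session-non-git-create',          // ECC_SESSION_ID, falling back to CLAUDE_SESSION_ID
  input_summary: '...',                              // truncated, token-redacted summary of tool_input
  input_params_json: '{"file_path":"created.txt"}',  // sanitized params: depth-limited, secrets become <REDACTED>
  output_summary: '17',                              // tool_output flattened to one line and truncated
  file_paths: ['created.txt'],                       // deduplicated local paths; remote and app:// URIs are dropped
  file_events: [
    { path: 'created.txt', action: 'create', diff_preview: '+ alpha beta', patch_preview: '+ alpha beta' }
  ]
};
```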
@@ -577,7 +577,7 @@ async function runTests() {
        CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
        ECC_MCP_CONFIG_PATH: configPath,
        ECC_MCP_HEALTH_STATE_PATH: statePath,
-       ECC_MCP_HEALTH_TIMEOUT_MS: '100'
+       ECC_MCP_HEALTH_TIMEOUT_MS: '1000'
      }
    );
@@ -6,8 +6,13 @@ const os = require('os');
const path = require('path');

const {
  SESSION_SCHEMA_VERSION,
  buildAggregates,
  getFallbackSessionRecordingPath,
-  persistCanonicalSnapshot
+  normalizeClaudeHistorySession,
+  normalizeDmuxSnapshot,
+  persistCanonicalSnapshot,
+  validateCanonicalSnapshot
} = require('../../scripts/lib/session-adapters/canonical-session');
const { createClaudeHistoryAdapter } = require('../../scripts/lib/session-adapters/claude-history');
const { createDmuxTmuxAdapter } = require('../../scripts/lib/session-adapters/dmux-tmux');

@@ -55,6 +60,75 @@ function withHome(homeDir, fn) {
  }
}

function canonicalSnapshot(overrides = {}) {
  const snapshot = {
    schemaVersion: SESSION_SCHEMA_VERSION,
    adapterId: 'test-adapter',
    session: {
      id: 'session-1',
      kind: 'test',
      state: 'active',
      repoRoot: null,
      sourceTarget: {
        type: 'session',
        value: 'session-1'
      }
    },
    workers: [{
      id: 'worker-1',
      label: 'Worker 1',
      state: 'running',
      health: 'healthy',
      branch: null,
      worktree: null,
      runtime: {
        kind: 'test-runtime',
        command: null,
        pid: null,
        active: true,
        dead: false
      },
      intent: {
        objective: 'Test objective',
        seedPaths: []
      },
      outputs: {
        summary: [],
        validation: [],
        remainingRisks: []
      },
      artifacts: {}
    }]
  };

  snapshot.aggregates = buildAggregates(snapshot.workers);

  if (overrides.session) {
    snapshot.session = { ...snapshot.session, ...overrides.session };
  }
  if (overrides.sourceTarget) {
    snapshot.session.sourceTarget = {
      ...snapshot.session.sourceTarget,
      ...overrides.sourceTarget
    };
  }
  if (Object.prototype.hasOwnProperty.call(overrides, 'workers')) {
    snapshot.workers = overrides.workers;
    snapshot.aggregates = buildAggregates(Array.isArray(overrides.workers) ? overrides.workers : []);
  }
  if (overrides.aggregates) {
    snapshot.aggregates = { ...snapshot.aggregates, ...overrides.aggregates };
  }

  for (const [key, value] of Object.entries(overrides)) {
    if (!['session', 'sourceTarget', 'workers', 'aggregates'].includes(key)) {
      snapshot[key] = value;
    }
  }

  return snapshot;
}

test('dmux adapter normalizes orchestration snapshots into canonical form', () => {
  const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));

@@ -509,6 +583,324 @@ test('adapter registry lists adapter metadata and target types', () => {
  );
});

test('canonical snapshot validation rejects malformed required fields', () => {
  const invalidCases = [
    [null, /must be an object/],
    [canonicalSnapshot({ schemaVersion: 'ecc.session.v0' }), /Unsupported canonical session schema version/],
    [canonicalSnapshot({ adapterId: '' }), /adapterId/],
    [canonicalSnapshot({ session: { id: '' } }), /session.id/],
    [canonicalSnapshot({ session: { repoRoot: 42 } }), /session.repoRoot/],
    [canonicalSnapshot({ sourceTarget: { type: '' } }), /session.sourceTarget.type/],
    [(() => {
      const snapshot = canonicalSnapshot();
      snapshot.workers = [null];
      snapshot.aggregates = { workerCount: 1, states: { unknown: 1 }, healths: { unknown: 1 } };
      return snapshot;
    })(), /workers\[0\] to be an object/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        branch: 7
      }]
    }), /workers\[0\].branch/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        runtime: {
          ...canonicalSnapshot().workers[0].runtime,
          command: 123
        }
      }]
    }), /workers\[0\].runtime.command/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        runtime: {
          ...canonicalSnapshot().workers[0].runtime,
          active: 'yes'
        }
      }]
    }), /workers\[0\].runtime.active/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        intent: {
          objective: 'ok',
          seedPaths: ['README.md', 123]
        }
      }]
    }), /workers\[0\].intent.seedPaths/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        outputs: {
          summary: [],
          validation: 'nope',
          remainingRisks: []
        }
      }]
    }), /workers\[0\].outputs.validation/],
    [canonicalSnapshot({ aggregates: { workerCount: 99 } }), /aggregates.workerCount to match/],
    [canonicalSnapshot({ aggregates: { states: [] } }), /aggregates.states to be an object/],
    [canonicalSnapshot({ aggregates: { states: { running: -1 } } }), /aggregates.states.running/],
    [canonicalSnapshot({ aggregates: { healths: null } }), /aggregates.healths to be an object/]
  ];

  for (const [snapshot, pattern] of invalidCases) {
    assert.throws(() => validateCanonicalSnapshot(snapshot), pattern);
  }
});

function dmuxWorker(workerSlug, status = {}, overrides = {}) {
  return {
    workerSlug,
    workerDir: `/tmp/${workerSlug}`,
    status: {
      state: 'running',
      updated: new Date().toISOString(),
      branch: null,
      worktree: null,
      ...status
    },
    task: {
      objective: `${workerSlug} objective`,
      seedPaths: ['README.md'],
      ...(overrides.task || {})
    },
    handoff: {
      summary: ['summary'],
      validation: ['validation'],
      remainingRisks: ['risk'],
      ...(overrides.handoff || {})
    },
    files: {
      status: `/tmp/${workerSlug}/status.md`,
      task: `/tmp/${workerSlug}/task.md`,
      handoff: `/tmp/${workerSlug}/handoff.md`,
      ...(overrides.files || {})
    },
    pane: Object.prototype.hasOwnProperty.call(overrides, 'pane')
      ? overrides.pane
      : {
          currentCommand: 'codex',
          pid: 123,
          active: true,
          dead: false
        }
  };
}

function dmuxSnapshot(overrides = {}) {
  return {
    sessionName: 'edge-session',
    repoRoot: '/tmp/repo',
    sessionActive: false,
    workerStates: {},
    workerCount: 0,
    workers: [],
    ...overrides
  };
}

test('dmux normalization covers missing failed idle and stale worker states', () => {
  const sourceTarget = { type: 'session', value: 'edge-session' };

  const missing = normalizeDmuxSnapshot(dmuxSnapshot(), sourceTarget);
  assert.strictEqual(missing.session.state, 'missing');
  assert.strictEqual(missing.aggregates.workerCount, 0);

  const failed = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: { failed: 1 },
    workerCount: 1,
    workers: [
      dmuxWorker('failure', { state: 'failed' }, { pane: null })
    ]
  }), sourceTarget);
  assert.strictEqual(failed.session.state, 'failed');
  assert.strictEqual(failed.workers[0].health, 'degraded');
  assert.strictEqual(failed.workers[0].runtime.active, false);
  assert.strictEqual(failed.workers[0].runtime.dead, false);

  const idle = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: { running: 1, queued: 1 },
    workerCount: 2,
    workers: [
      dmuxWorker('missing-update', { state: 'running', updated: undefined }),
      dmuxWorker('stale-update', { state: 'active', updated: '2001-01-01T00:00:00Z' }),
      dmuxWorker('dead-pane', { state: 'running' }, { pane: { dead: true, active: false } }),
      dmuxWorker('mystery', { state: 'queued' }, {
        task: { seedPaths: 'not-array' },
        handoff: { summary: 'not-array', validation: null, remainingRisks: undefined },
        pane: null
      })
    ]
  }), sourceTarget);

  assert.strictEqual(idle.session.state, 'idle');
  assert.deepStrictEqual(
    idle.workers.map(worker => worker.health),
    ['stale', 'stale', 'degraded', 'unknown']
  );
  assert.deepStrictEqual(idle.workers[3].intent.seedPaths, []);
  assert.deepStrictEqual(idle.workers[3].outputs.summary, []);

  const completed = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: null,
    workerCount: 2,
    workers: [
      dmuxWorker('done-a', { state: 'done' }),
      dmuxWorker('done-b', { state: 'success' })
    ]
  }), sourceTarget);
  assert.strictEqual(completed.session.state, 'completed');
  assert.deepStrictEqual(completed.workers.map(worker => worker.health), ['healthy', 'healthy']);
});

test('claude history normalization falls back to filename ids and empty metadata defaults', () => {
  const snapshot = normalizeClaudeHistorySession({
    shortId: 'no-id',
    filename: '2026-03-13-no-id-session.tmp',
    sessionPath: '/tmp/2026-03-13-no-id-session.tmp',
    metadata: {
      title: '',
      completed: 'not-array',
      inProgress: ['Resume from filename fallback'],
      context: '',
      notes: ''
    }
  }, {
    type: 'claude-history',
    value: 'latest'
  });

  assert.strictEqual(snapshot.session.id, '2026-03-13-no-id-session');
  assert.strictEqual(snapshot.workers[0].id, '2026-03-13-no-id-session');
  assert.strictEqual(snapshot.workers[0].label, '2026-03-13-no-id-session.tmp');
  assert.strictEqual(snapshot.workers[0].intent.objective, 'Resume from filename fallback');
  assert.deepStrictEqual(snapshot.workers[0].intent.seedPaths, []);
  assert.deepStrictEqual(snapshot.workers[0].outputs.summary, []);
  assert.deepStrictEqual(snapshot.workers[0].outputs.remainingRisks, []);

  const pathOnly = normalizeClaudeHistorySession({
    sessionPath: '/tmp/path-only-session.tmp',
    metadata: {
      title: 'Path Only',
      inProgress: ['Continue work'],
      context: ' README.md \n\n scripts/ecc.js ',
      notes: 'No risks'
    }
  }, {
    type: 'claude-history',
    value: '/tmp/path-only-session.tmp'
  });

  assert.strictEqual(pathOnly.session.id, 'path-only-session');
  assert.strictEqual(pathOnly.workers[0].intent.objective, 'Continue work');
  assert.deepStrictEqual(pathOnly.workers[0].intent.seedPaths, ['README.md', 'scripts/ecc.js']);
  assert.deepStrictEqual(pathOnly.workers[0].outputs.remainingRisks, ['No risks']);
});

test('fallback recordings sanitize paths, use env dirs, and preserve changed history', () => {
  const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-env-'));
  const previousRecordingDir = process.env.ECC_SESSION_RECORDING_DIR;

  try {
    process.env.ECC_SESSION_RECORDING_DIR = recordingDir;
    const first = canonicalSnapshot({
      adapterId: 'adapter with spaces',
      session: { id: 'session id/with:chars' }
    });
    const recordingPath = getFallbackSessionRecordingPath(first);
    assert.ok(recordingPath.includes(`${path.sep}adapter_with_spaces${path.sep}`));
    assert.ok(recordingPath.endsWith(`${path.sep}session_id_with_chars.json`));

    fs.mkdirSync(path.dirname(recordingPath), { recursive: true });
    fs.writeFileSync(recordingPath, '{not json', 'utf8');

    const firstPersistence = persistCanonicalSnapshot(first, {
      loadStateStoreImpl: () => null
    });
    const changed = canonicalSnapshot({
      adapterId: 'adapter with spaces',
      session: { id: 'session id/with:chars', state: 'idle' }
    });
    persistCanonicalSnapshot(changed, { loadStateStoreImpl: () => null });
    persistCanonicalSnapshot(changed, { loadStateStoreImpl: () => null });

    const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
    assert.strictEqual(firstPersistence.backend, 'json-file');
    assert.strictEqual(firstPersistence.path, recordingPath);
    assert.strictEqual(persisted.schemaVersion, 'ecc.session.recording.v1');
    assert.strictEqual(persisted.latest.session.state, 'idle');
    assert.strictEqual(persisted.history.length, 2);
    assert.strictEqual(persisted.history[0].snapshot.session.state, 'active');
    assert.strictEqual(persisted.history[1].snapshot.session.state, 'idle');
    assert.strictEqual(persisted.createdAt, persisted.history[0].recordedAt);
  } finally {
    if (typeof previousRecordingDir === 'string') {
      process.env.ECC_SESSION_RECORDING_DIR = previousRecordingDir;
    } else {
      delete process.env.ECC_SESSION_RECORDING_DIR;
    }
    fs.rmSync(recordingDir, { recursive: true, force: true });
  }
});

test('persistence supports skip mode, writer variants, and missing state-store fallback', () => {
  const snapshot = canonicalSnapshot();
  const skipped = persistCanonicalSnapshot(snapshot, { persist: false });
  assert.deepStrictEqual(skipped, {
    backend: 'skipped',
    path: null,
    recordedAt: null
  });

  const topLevelStore = {
    calls: [],
    recordCanonicalSessionSnapshot(snapshotArg, metadata) {
      this.calls.push({ snapshot: snapshotArg, metadata });
    }
  };
  const stateStoreResult = persistCanonicalSnapshot(snapshot, { stateStore: topLevelStore });
  assert.strictEqual(stateStoreResult.backend, 'state-store');
  assert.strictEqual(topLevelStore.calls.length, 1);
  assert.strictEqual(topLevelStore.calls[0].metadata.sessionId, 'session-1');

  const nestedStore = {
    sessions: {
      calls: [],
      recordSessionSnapshot(snapshotArg, metadata) {
        this.calls.push({ snapshot: snapshotArg, metadata });
      }
    }
  };
  persistCanonicalSnapshot(snapshot, { stateStore: nestedStore });
  assert.strictEqual(nestedStore.sessions.calls.length, 1);

  const noWriterDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-no-writer-'));
  const missingModuleDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-missing-module-'));
  try {
    const noWriter = persistCanonicalSnapshot(snapshot, {
      recordingDir: noWriterDir,
      stateStore: { createStateStore() {} }
    });
    assert.strictEqual(noWriter.backend, 'json-file');

    const missingModule = new Error("Cannot find module '../state-store'");
    missingModule.code = 'MODULE_NOT_FOUND';
    const fallback = persistCanonicalSnapshot(snapshot, {
      recordingDir: missingModuleDir,
      loadStateStoreImpl() {
        throw missingModule;
      }
    });
    assert.strictEqual(fallback.backend, 'json-file');
  } finally {
    fs.rmSync(noWriterDir, { recursive: true, force: true });
    fs.rmSync(missingModuleDir, { recursive: true, force: true });
  }
});

test('persistence only falls back when the state-store module is missing', () => {
  const snapshot = {
    schemaVersion: 'ecc.session.v1',
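The fallback-recording and persistence tests above also pin down the on-disk format: when no usable state store is available, persistCanonicalSnapshot writes an ecc.session.recording.v1 document whose createdAt matches the first history entry, and persisting an unchanged snapshot again does not grow the history. A hedged sketch of that file's shape as a JS literal; nested snapshot fields are elided and the timestamp values are illustrative:

```js
// Shape inferred from the assertions; real recordings embed full canonical snapshots.
const exampleRecording = {
  schemaVersion: 'ecc.session.recording.v1',
  createdAt: '2026-04-30T00:00:00.000Z',             // equals history[0].recordedAt
  latest: { session: { state: 'idle' /* ... */ } },  // most recently persisted snapshot
  history: [
    { recordedAt: '2026-04-30T00:00:00.000Z', snapshot: { session: { state: 'active' /* ... */ } } },
    { recordedAt: '2026-04-30T00:00:05.000Z', snapshot: { session: { state: 'idle' /* ... */ } } }
  ]
};
```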
@@ -19,7 +19,6 @@ const fs = require('fs');
const path = require('path');

const repoRoot = path.resolve(__dirname, '..');
-const repoRootWithSep = `${repoRoot}${path.sep}`;
const packageJsonPath = path.join(repoRoot, 'package.json');
const packageLockPath = path.join(repoRoot, 'package-lock.json');
const rootAgentsPath = path.join(repoRoot, 'AGENTS.md');

@@ -70,16 +69,6 @@ function loadJsonObject(filePath, label) {
  return parsed;
}

-function assertSafeRepoRelativePath(relativePath, label) {
-  const normalized = path.posix.normalize(relativePath.replace(/\\/g, '/'));
-
-  assert.ok(!path.isAbsolute(relativePath), `${label} must not be absolute: ${relativePath}`);
-  assert.ok(
-    !normalized.startsWith('../') && !normalized.includes('/../'),
-    `${label} must not traverse directories: ${relativePath}`,
-  );
-}
-
function collectMarkdownFiles(rootPath) {
  if (!fs.existsSync(rootPath)) {
    return [];