mirror of
https://github.com/affaan-m/everything-claude-code.git
synced 2026-04-30 22:13:28 +08:00
Compare commits
6 Commits
fix/ecc2-w
...
fix/ecc2-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aaaf52fb1e | ||
|
|
33edfd3bb3 | ||
|
|
f92dc544c4 | ||
|
|
1c2d5dd389 | ||
|
|
b40de37ccb | ||
|
|
63485a26bf |
@@ -15,6 +15,7 @@ const path = require('path');
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
const MAX_STDIN = 1024 * 1024;
|
||||
const WINDOWS_SHELL_UNSAFE_PATH_CHARS = /[&|<>^%!]/;
|
||||
|
||||
function isEnabled(value) {
|
||||
return ['1', 'true', 'yes', 'on'].includes(String(value || '').toLowerCase());
|
||||
@@ -37,17 +38,33 @@ process.stdin.on('end', () => {
|
||||
const scriptDir = __dirname;
|
||||
const pyScript = path.join(scriptDir, 'insaits-security-monitor.py');
|
||||
|
||||
// Try python3 first (macOS/Linux), fall back to python (Windows)
|
||||
const pythonCandidates = ['python3', 'python'];
|
||||
// Prefer real Windows executables before .cmd shims so shell execution is
|
||||
// only used for wrapper scripts such as pyenv/npm-style shims.
|
||||
const pythonCandidates = process.platform === 'win32'
|
||||
? ['python3.exe', 'python.exe', 'python3.cmd', 'python.cmd', 'python3', 'python']
|
||||
: ['python3', 'python'];
|
||||
let result;
|
||||
|
||||
for (const pythonBin of pythonCandidates) {
|
||||
const useWindowsShell = process.platform === 'win32' && /\.(cmd|bat)$/i.test(pythonBin);
|
||||
if (useWindowsShell && (
|
||||
WINDOWS_SHELL_UNSAFE_PATH_CHARS.test(pythonBin)
|
||||
|| WINDOWS_SHELL_UNSAFE_PATH_CHARS.test(pyScript)
|
||||
)) {
|
||||
result = {
|
||||
error: new Error(`Unsafe Windows Python shim path: ${pythonBin}`),
|
||||
};
|
||||
break;
|
||||
}
|
||||
|
||||
result = spawnSync(pythonBin, [pyScript], {
|
||||
input: raw,
|
||||
encoding: 'utf8',
|
||||
env: process.env,
|
||||
cwd: process.cwd(),
|
||||
timeout: 14000,
|
||||
shell: useWindowsShell,
|
||||
windowsHide: true,
|
||||
});
|
||||
|
||||
// ENOENT means binary not found - try next candidate
|
||||
@@ -81,6 +98,16 @@ process.stdin.on('end', () => {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// The monitor only uses 0 (pass) and 2 (block). Other statuses usually
|
||||
// mean Python launcher/dependency/runtime failure, so keep the hook fail-open.
|
||||
if (result.status !== 0 && result.status !== 2) {
|
||||
const detail = (result.stderr || result.stdout || '').trim();
|
||||
const suffix = detail ? `: ${detail}` : '';
|
||||
process.stderr.write(`[InsAIts] Security monitor exited with status ${result.status}${suffix}\n`);
|
||||
process.stdout.write(raw);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (result.stdout) {
|
||||
process.stdout.write(result.stdout);
|
||||
} else if (result.status === 0) {
|
||||
|
||||
@@ -21,8 +21,40 @@ function cleanup(dirPath) {
|
||||
}
|
||||
|
||||
function writeFakePython(binDir) {
|
||||
const fakePython = path.join(binDir, 'python3');
|
||||
fs.mkdirSync(binDir, { recursive: true });
|
||||
if (process.platform === 'win32') {
|
||||
const fakePythonJs = path.join(binDir, 'fake-python.js');
|
||||
const fakePythonCmd = path.join(binDir, 'python3.cmd');
|
||||
fs.writeFileSync(fakePythonJs, [
|
||||
"'use strict';",
|
||||
"const fs = require('fs');",
|
||||
"const mode = process.env.FAKE_INSAITS_MODE || 'clean';",
|
||||
"if (mode === 'clean') {",
|
||||
" fs.readFileSync(0, 'utf8');",
|
||||
" process.exit(0);",
|
||||
"}",
|
||||
"if (mode === 'echo') {",
|
||||
" process.stdout.write(fs.readFileSync(0, 'utf8'));",
|
||||
" process.exit(0);",
|
||||
"}",
|
||||
"if (mode === 'block') {",
|
||||
" process.stdout.write('blocked by monitor\\n');",
|
||||
" process.stderr.write('monitor warning\\n');",
|
||||
" process.exit(2);",
|
||||
"}",
|
||||
"if (mode === 'error') {",
|
||||
" process.stderr.write('spawned but failed\\n');",
|
||||
" process.exit(1);",
|
||||
"}",
|
||||
].join('\n'), 'utf8');
|
||||
fs.writeFileSync(fakePythonCmd, [
|
||||
'@echo off',
|
||||
`"${process.execPath}" "%~dp0fake-python.js" %*`,
|
||||
].join('\r\n'), 'utf8');
|
||||
return;
|
||||
}
|
||||
|
||||
const fakePython = path.join(binDir, 'python3');
|
||||
fs.writeFileSync(fakePython, [
|
||||
'#!/bin/sh',
|
||||
'mode="${FAKE_INSAITS_MODE:-clean}"',
|
||||
@@ -134,6 +166,29 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('enabled monitor unexpected failure fails open with warning and raw stdin', () => {
|
||||
const tempDir = createTempDir();
|
||||
try {
|
||||
writeFakePython(path.join(tempDir, 'bin'));
|
||||
|
||||
const result = run({
|
||||
input: 'raw-input',
|
||||
env: {
|
||||
ECC_ENABLE_INSAITS: '1',
|
||||
FAKE_INSAITS_MODE: 'error',
|
||||
PATH: path.join(tempDir, 'bin'),
|
||||
},
|
||||
});
|
||||
|
||||
assert.strictEqual(result.status, 0);
|
||||
assert.strictEqual(result.stdout, 'raw-input');
|
||||
assert.ok(result.stderr.includes('Security monitor exited with status 1'));
|
||||
assert.ok(result.stderr.includes('spawned but failed'));
|
||||
} finally {
|
||||
cleanup(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('missing Python fails open with warning and raw stdin', () => {
|
||||
const result = run({
|
||||
input: 'raw-input',
|
||||
@@ -145,7 +200,10 @@ function runTests() {
|
||||
|
||||
assert.strictEqual(result.status, 0);
|
||||
assert.strictEqual(result.stdout, 'raw-input');
|
||||
assert.ok(result.stderr.includes('python3/python not found'));
|
||||
assert.ok(
|
||||
result.stderr.includes('python3/python not found')
|
||||
|| result.stderr.includes('Security monitor exited with status')
|
||||
);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
|
||||
@@ -61,15 +61,29 @@ function createCommandConfig(scriptPath) {
|
||||
};
|
||||
}
|
||||
|
||||
function runHook(input, env = {}) {
|
||||
function buildHookEnv(env = {}) {
|
||||
const merged = {
|
||||
...process.env,
|
||||
ECC_HOOK_PROFILE: 'standard'
|
||||
};
|
||||
|
||||
for (const [key, value] of Object.entries(env)) {
|
||||
if (value === null || value === undefined) {
|
||||
delete merged[key];
|
||||
} else {
|
||||
merged[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
function runHook(input, env = {}, options = {}) {
|
||||
const result = spawnSync('node', [script], {
|
||||
input: JSON.stringify(input),
|
||||
encoding: 'utf8',
|
||||
env: {
|
||||
...process.env,
|
||||
ECC_HOOK_PROFILE: 'standard',
|
||||
...env
|
||||
},
|
||||
cwd: options.cwd || process.cwd(),
|
||||
env: buildHookEnv(env),
|
||||
timeout: 15000,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
@@ -81,15 +95,12 @@ function runHook(input, env = {}) {
|
||||
};
|
||||
}
|
||||
|
||||
function runRawHook(rawInput, env = {}) {
|
||||
function runRawHook(rawInput, env = {}, options = {}) {
|
||||
const result = spawnSync('node', [script], {
|
||||
input: rawInput,
|
||||
encoding: 'utf8',
|
||||
env: {
|
||||
...process.env,
|
||||
ECC_HOOK_PROFILE: 'standard',
|
||||
...env
|
||||
},
|
||||
cwd: options.cwd || process.cwd(),
|
||||
env: buildHookEnv(env),
|
||||
timeout: 15000,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
@@ -173,6 +184,192 @@ async function runTests() {
|
||||
assert.ok(result.stderr.includes('Hook input exceeded 512 bytes'), `Expected size warning, got: ${result.stderr}`);
|
||||
assert.ok(/blocking search/i.test(result.stderr), `Expected blocking message, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('allows truncated MCP hook input when fail-open mode is enabled', () => {
|
||||
const rawInput = JSON.stringify({ tool_name: 'mcp__flaky__search', tool_input: {} });
|
||||
const result = runRawHook(rawInput, {
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_HOOK_INPUT_TRUNCATED: 'true',
|
||||
ECC_HOOK_INPUT_MAX_BYTES: '256',
|
||||
ECC_MCP_HEALTH_FAIL_OPEN: 'yes'
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected fail-open mode to allow truncated MCP input');
|
||||
assert.strictEqual(result.stdout, rawInput, 'Expected raw input passthrough on stdout');
|
||||
assert.ok(result.stderr.includes('Hook input exceeded 256 bytes'), `Expected size warning, got: ${result.stderr}`);
|
||||
assert.ok(/fail-open mode is enabled/i.test(result.stderr), `Expected fail-open log, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('uses default cwd config path and default home state path', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const homeDir = path.join(tempDir, 'home');
|
||||
const configDir = path.join(tempDir, '.claude');
|
||||
const configPath = path.join(configDir, 'settings.json');
|
||||
const expectedStatePath = path.join(homeDir, '.claude', 'mcp-health-cache.json');
|
||||
const serverScript = path.join(tempDir, 'default-path-server.js');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(configDir, { recursive: true });
|
||||
fs.mkdirSync(homeDir, { recursive: true });
|
||||
fs.writeFileSync(serverScript, "setInterval(() => {}, 1000);\n");
|
||||
writeConfig(configPath, {
|
||||
mcpServers: {
|
||||
cwddefault: createCommandConfig(serverScript)
|
||||
}
|
||||
});
|
||||
|
||||
const input = { tool_name: 'mcp__cwddefault__list', tool_input: {} };
|
||||
const result = runHook(
|
||||
input,
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: null,
|
||||
ECC_MCP_HEALTH_STATE_PATH: null,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100',
|
||||
HOME: homeDir,
|
||||
USERPROFILE: homeDir
|
||||
},
|
||||
{ cwd: tempDir }
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, `Expected default-path server to pass, got ${result.code}: ${result.stderr}`);
|
||||
assert.strictEqual(result.stdout.trim(), JSON.stringify(input), 'Expected original JSON on stdout');
|
||||
|
||||
const state = readState(expectedStatePath);
|
||||
assert.strictEqual(state.servers.cwddefault.status, 'healthy', 'Expected default home state path to be used');
|
||||
assert.strictEqual(
|
||||
fs.realpathSync(state.servers.cwddefault.source),
|
||||
fs.realpathSync(configPath),
|
||||
'Expected cwd .claude/settings.json config source'
|
||||
);
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('uses cached healthy and unhealthy states without probing configs', () => {
|
||||
const tempDir = createTempDir();
|
||||
const now = Date.now();
|
||||
const healthyStatePath = path.join(tempDir, 'healthy-state.json');
|
||||
const unhealthyStatePath = path.join(tempDir, 'unhealthy-state.json');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(healthyStatePath, JSON.stringify({
|
||||
version: 1,
|
||||
servers: {
|
||||
cached: {
|
||||
status: 'healthy',
|
||||
checkedAt: now,
|
||||
expiresAt: now + 60000,
|
||||
failureCount: 0,
|
||||
nextRetryAt: now
|
||||
}
|
||||
}
|
||||
}));
|
||||
fs.writeFileSync(unhealthyStatePath, JSON.stringify({
|
||||
version: 1,
|
||||
servers: {
|
||||
blocked: {
|
||||
status: 'unhealthy',
|
||||
checkedAt: now,
|
||||
expiresAt: now,
|
||||
failureCount: 1,
|
||||
nextRetryAt: now + 60000,
|
||||
lastError: 'cached outage'
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
const healthy = runHook(
|
||||
{ tool_name: 'mcp__cached__list', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: path.join(tempDir, 'missing.json'),
|
||||
ECC_MCP_HEALTH_STATE_PATH: healthyStatePath
|
||||
}
|
||||
);
|
||||
const unhealthy = runHook(
|
||||
{ tool_name: 'mcp__blocked__query', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: path.join(tempDir, 'missing.json'),
|
||||
ECC_MCP_HEALTH_STATE_PATH: unhealthyStatePath
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(healthy.code, 0, 'Expected cached healthy server to pass without config lookup');
|
||||
assert.strictEqual(healthy.stderr, '', 'Expected cached healthy server to skip logging');
|
||||
assert.strictEqual(unhealthy.code, 2, 'Expected cached unhealthy server to block before retry time');
|
||||
assert.ok(unhealthy.stderr.includes('marked unhealthy until'), `Expected cached unhealthy log, got: ${unhealthy.stderr}`);
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('ignores malformed state files and allows missing MCP configs', () => {
|
||||
const tempDir = createTempDir();
|
||||
const statePath = path.join(tempDir, 'malformed-state.json');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(statePath, '[]');
|
||||
|
||||
const result = runHook(
|
||||
{
|
||||
tool_name: 'Invoke',
|
||||
server: 'ghost',
|
||||
tool: 'lookup',
|
||||
tool_input: {}
|
||||
},
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: path.join(tempDir, 'missing.json'),
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected missing config to be non-blocking');
|
||||
assert.ok(result.stderr.includes('No MCP config found for ghost'), `Expected missing config log, got: ${result.stderr}`);
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('supports explicit tool_input server targets and mcp_servers config aliases', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
const serverScript = path.join(tempDir, 'alias-server.js');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(serverScript, "setInterval(() => {}, 1000);\n");
|
||||
writeConfig(configPath, {
|
||||
mcp_servers: {
|
||||
alias: createCommandConfig(serverScript)
|
||||
}
|
||||
});
|
||||
|
||||
const input = {
|
||||
tool_name: 'GenericMcpTool',
|
||||
tool_input: {
|
||||
connector: 'alias',
|
||||
mcp_tool: 'lookup'
|
||||
}
|
||||
};
|
||||
const result = runHook(input, {
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: configPath,
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100'
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0, `Expected explicit MCP target to pass, got ${result.code}: ${result.stderr}`);
|
||||
const state = readState(statePath);
|
||||
assert.strictEqual(state.servers.alias.status, 'healthy', 'Expected alias server to be marked healthy');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('marks healthy command MCP servers and allows the tool call', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
@@ -272,6 +469,151 @@ async function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('blocks unsupported MCP configs and command spawn failures', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
|
||||
try {
|
||||
writeConfig(configPath, {
|
||||
mcpServers: {
|
||||
unsupported: {},
|
||||
missingcmd: {
|
||||
command: path.join(tempDir, 'missing-mcp-server')
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const unsupported = runHook(
|
||||
{ tool_name: 'mcp__unsupported__search', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: configPath,
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100'
|
||||
}
|
||||
);
|
||||
const missingCommand = runHook(
|
||||
{ tool_name: 'mcp__missingcmd__search', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: configPath,
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100'
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(unsupported.code, 2, 'Expected unsupported config to block');
|
||||
assert.ok(unsupported.stderr.includes('unsupported MCP server config'), `Expected unsupported reason, got: ${unsupported.stderr}`);
|
||||
assert.strictEqual(missingCommand.code, 2, 'Expected missing command to block');
|
||||
assert.ok(/ENOENT|spawn/i.test(missingCommand.stderr), `Expected spawn failure reason, got: ${missingCommand.stderr}`);
|
||||
|
||||
const state = readState(statePath);
|
||||
assert.strictEqual(state.servers.unsupported.status, 'unhealthy', 'Expected unsupported server state');
|
||||
assert.strictEqual(state.servers.missingcmd.status, 'unhealthy', 'Expected missing command server state');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('includes command stderr and config env in unhealthy probe reasons', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
const serverScript = path.join(tempDir, 'stderr-server.js');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
serverScript,
|
||||
"console.error(`probe failed with ${process.env.ECC_MCP_TEST_MARKER}`); process.exit(1);\n"
|
||||
);
|
||||
writeConfig(configPath, {
|
||||
mcpServers: {
|
||||
stderrprobe: {
|
||||
command: process.execPath,
|
||||
args: [serverScript],
|
||||
env: {
|
||||
ECC_MCP_TEST_MARKER: 'marker-from-config'
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const result = runHook(
|
||||
{ tool_name: 'mcp__stderrprobe__search', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: configPath,
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100'
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 2, 'Expected stderr probe failure to block');
|
||||
assert.ok(result.stderr.includes('marker-from-config'), `Expected command stderr in reason, got: ${result.stderr}`);
|
||||
|
||||
const state = readState(statePath);
|
||||
assert.ok(state.servers.stderrprobe.lastError.includes('marker-from-config'), 'Expected stderr reason in state');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('records reconnect reprobe failures for previously unhealthy servers', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
const serverScript = path.join(tempDir, 'still-down-server.js');
|
||||
const reconnectScript = path.join(tempDir, 'noop-reconnect.js');
|
||||
const now = Date.now();
|
||||
|
||||
try {
|
||||
fs.writeFileSync(serverScript, "console.error('503 Service Unavailable'); process.exit(1);\n");
|
||||
fs.writeFileSync(reconnectScript, "process.exit(0);\n");
|
||||
fs.writeFileSync(statePath, JSON.stringify({
|
||||
version: 1,
|
||||
servers: {
|
||||
sticky: {
|
||||
status: 'unhealthy',
|
||||
checkedAt: now - 60000,
|
||||
expiresAt: now - 60000,
|
||||
failureCount: 2,
|
||||
lastError: 'previous outage',
|
||||
nextRetryAt: now - 1000,
|
||||
lastRestoredAt: now - 120000
|
||||
}
|
||||
}
|
||||
}));
|
||||
writeConfig(configPath, {
|
||||
mcpServers: {
|
||||
sticky: createCommandConfig(serverScript)
|
||||
}
|
||||
});
|
||||
|
||||
const result = runHook(
|
||||
{ tool_name: 'mcp__sticky__search', tool_input: {} },
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PreToolUse',
|
||||
ECC_MCP_CONFIG_PATH: configPath,
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_RECONNECT_COMMAND: `${JSON.stringify(process.execPath)} ${JSON.stringify(reconnectScript)}`,
|
||||
ECC_MCP_HEALTH_TIMEOUT_MS: '100',
|
||||
ECC_MCP_HEALTH_BACKOFF_MS: '10'
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 2, 'Expected still-unhealthy server to block');
|
||||
assert.ok(result.stderr.includes('reconnect reprobe failed'), `Expected reprobe failure reason, got: ${result.stderr}`);
|
||||
assert.ok(result.stderr.includes('Reconnect attempt: ok'), `Expected reconnect attempt suffix, got: ${result.stderr}`);
|
||||
|
||||
const state = readState(statePath);
|
||||
assert.strictEqual(state.servers.sticky.failureCount, 3, 'Expected failure count to increment');
|
||||
assert.strictEqual(state.servers.sticky.lastRestoredAt, now - 120000, 'Expected previous restore timestamp to survive');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('post-failure reconnect command restores server health when a reprobe succeeds', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
@@ -334,6 +676,131 @@ async function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('ignores post-failure events without a reconnect-worthy failure code', () => {
|
||||
const tempDir = createTempDir();
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
|
||||
try {
|
||||
const result = runHook(
|
||||
{
|
||||
tool_name: 'mcp__quiet__messages',
|
||||
tool_input: {},
|
||||
error: 'tool returned an application-level validation error'
|
||||
},
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUseFailure',
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected unmatched post-failure to remain non-blocking');
|
||||
assert.strictEqual(result.stderr, '', 'Expected no logs for unmatched post-failure');
|
||||
assert.strictEqual(fs.existsSync(statePath), false, 'Expected no state write for unmatched post-failure');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('post-failure marks servers unhealthy and skips reconnect when no command is configured', () => {
|
||||
const tempDir = createTempDir();
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
|
||||
try {
|
||||
const result = runHook(
|
||||
{
|
||||
tool_name: 'mcp__noplan__messages',
|
||||
tool_input: {},
|
||||
tool_output: {
|
||||
stderr: '403 Forbidden from upstream MCP'
|
||||
}
|
||||
},
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUseFailure',
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_RECONNECT_COMMAND: null
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected post-failure hook to remain non-blocking');
|
||||
assert.ok(result.stderr.includes('reported 403'), `Expected detected failure code log, got: ${result.stderr}`);
|
||||
assert.ok(result.stderr.includes('reconnect skipped'), `Expected reconnect skipped log, got: ${result.stderr}`);
|
||||
|
||||
const state = readState(statePath);
|
||||
assert.strictEqual(state.servers.noplan.status, 'unhealthy', 'Expected post-failure to mark server unhealthy');
|
||||
assert.strictEqual(state.servers.noplan.lastFailureCode, 403, 'Expected detected status code in state');
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('post-failure reports failed reconnect commands', () => {
|
||||
const tempDir = createTempDir();
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
const reconnectScript = path.join(tempDir, 'failed-reconnect.js');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(reconnectScript, "console.error('cannot reconnect'); process.exit(7);\n");
|
||||
|
||||
const result = runHook(
|
||||
{
|
||||
tool_name: 'mcp__badreconnect__messages',
|
||||
tool_input: {},
|
||||
tool_response: 'service unavailable 503'
|
||||
},
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUseFailure',
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_RECONNECT_COMMAND: `${JSON.stringify(process.execPath)} ${JSON.stringify(reconnectScript)}`
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected reconnect failure hook to remain non-blocking');
|
||||
assert.ok(result.stderr.includes('reported 503'), `Expected detected failure code log, got: ${result.stderr}`);
|
||||
assert.ok(result.stderr.includes('reconnect failed: cannot reconnect'), `Expected reconnect failure reason, got: ${result.stderr}`);
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('post-failure expands per-server reconnect commands before follow-up config checks', () => {
|
||||
const tempDir = createTempDir();
|
||||
const statePath = path.join(tempDir, 'mcp-health.json');
|
||||
const reconnectScript = path.join(tempDir, 'server-reconnect.js');
|
||||
const markerFile = path.join(tempDir, 'server-name.txt');
|
||||
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
reconnectScript,
|
||||
[
|
||||
"const fs = require('fs');",
|
||||
"fs.writeFileSync(process.argv[2], process.argv[3]);"
|
||||
].join('\n')
|
||||
);
|
||||
|
||||
const result = runHook(
|
||||
{
|
||||
tool_name: 'mcp__foo-bar__messages',
|
||||
tool_input: {},
|
||||
message: 'transport connection reset'
|
||||
},
|
||||
{
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUseFailure',
|
||||
ECC_MCP_HEALTH_STATE_PATH: statePath,
|
||||
ECC_MCP_CONFIG_PATH: path.join(tempDir, 'missing.json'),
|
||||
ECC_MCP_RECONNECT_COMMAND: null,
|
||||
ECC_MCP_RECONNECT_FOO_BAR: `${JSON.stringify(process.execPath)} ${JSON.stringify(reconnectScript)} ${JSON.stringify(markerFile)} {server}`
|
||||
}
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Expected per-server reconnect hook to remain non-blocking');
|
||||
assert.strictEqual(fs.readFileSync(markerFile, 'utf8'), 'foo-bar', 'Expected {server} token expansion');
|
||||
assert.ok(result.stderr.includes('reported transport'), `Expected transport failure log, got: ${result.stderr}`);
|
||||
assert.ok(result.stderr.includes('no config was available'), `Expected missing config follow-up log, got: ${result.stderr}`);
|
||||
} finally {
|
||||
cleanupTempDir(tempDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('treats HTTP 400 probe responses as healthy reachable servers', async () => {
|
||||
const tempDir = createTempDir();
|
||||
const configPath = path.join(tempDir, 'claude.json');
|
||||
|
||||
@@ -16,6 +16,13 @@ const script = path.join(
|
||||
'hooks',
|
||||
'session-activity-tracker.js'
|
||||
);
|
||||
const {
|
||||
buildActivityRow,
|
||||
extractFileEvents,
|
||||
extractFilePaths,
|
||||
summarizeOutput,
|
||||
run,
|
||||
} = require(script);
|
||||
|
||||
function test(name, fn) {
|
||||
try {
|
||||
@@ -52,6 +59,15 @@ function runScript(input, envOverrides = {}, options = {}) {
|
||||
return { code: result.status || 0, stdout: result.stdout || '', stderr: result.stderr || '' };
|
||||
}
|
||||
|
||||
function readMetricRows(homeDir) {
|
||||
const metricsFile = path.join(homeDir, '.claude', 'metrics', 'tool-usage.jsonl');
|
||||
return fs.readFileSync(metricsFile, 'utf8')
|
||||
.trim()
|
||||
.split(/\r?\n/)
|
||||
.filter(Boolean)
|
||||
.map(line => JSON.parse(line));
|
||||
}
|
||||
|
||||
function runTests() {
|
||||
console.log('\n=== Testing session-activity-tracker.js ===\n');
|
||||
|
||||
@@ -405,6 +421,246 @@ function runTests() {
|
||||
fs.rmSync(tmpHome, { recursive: true, force: true });
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('skips non-PostToolUse events and rows without required identifiers', () => {
|
||||
assert.strictEqual(buildActivityRow(
|
||||
{ tool_name: 'Read', tool_input: { file_path: 'README.md' } },
|
||||
{ CLAUDE_HOOK_EVENT_NAME: 'PreToolUse', ECC_SESSION_ID: 'sess' }
|
||||
), null);
|
||||
assert.strictEqual(buildActivityRow(
|
||||
{ tool_name: 'Read', tool_input: { file_path: 'README.md' } },
|
||||
{ CLAUDE_HOOK_EVENT_NAME: 'PostToolUse' }
|
||||
), null);
|
||||
assert.strictEqual(buildActivityRow(
|
||||
{ tool_input: { file_path: 'README.md' } },
|
||||
{ CLAUDE_HOOK_EVENT_NAME: 'PostToolUse', ECC_SESSION_ID: 'sess' }
|
||||
), null);
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('sanitizes nested params, long summaries, and output variants', () => {
|
||||
const longValue = `start ${'x'.repeat(260)} ghp_${'A'.repeat(20)}`;
|
||||
const row = buildActivityRow(
|
||||
{
|
||||
tool_name: 'Lookup',
|
||||
tool_input: {
|
||||
query: longValue,
|
||||
secret: `gho_${'B'.repeat(20)}`,
|
||||
count: 3,
|
||||
enabled: false,
|
||||
omitted: null,
|
||||
nested: { a: { b: { c: { d: 'too deep' } } } },
|
||||
list: [1, true, null, 4],
|
||||
},
|
||||
tool_output: `line one\nline two ${'y'.repeat(260)}`,
|
||||
},
|
||||
{ CLAUDE_HOOK_EVENT_NAME: 'PostToolUse', CLAUDE_SESSION_ID: 'claude-fallback' }
|
||||
);
|
||||
|
||||
assert.strictEqual(row.session_id, 'claude-fallback');
|
||||
assert.strictEqual(row.file_paths.length, 0);
|
||||
assert.ok(row.input_summary.endsWith('...'), 'Expected long shallow summary to be truncated');
|
||||
assert.ok(!row.input_summary.includes('ghp_'), 'Expected GitHub token redaction in input summary');
|
||||
assert.ok(row.output_summary.endsWith('...'), 'Expected long output summary to be truncated');
|
||||
assert.ok(!row.output_summary.includes('\n'), 'Expected output summary to normalize whitespace');
|
||||
|
||||
const params = JSON.parse(row.input_params_json);
|
||||
assert.strictEqual(params.count, 3);
|
||||
assert.strictEqual(params.enabled, false);
|
||||
assert.strictEqual(params.omitted, null);
|
||||
assert.strictEqual(params.secret, '<REDACTED>');
|
||||
assert.strictEqual(params.nested.a.b.c, '[Truncated]');
|
||||
assert.deepStrictEqual(params.list.slice(0, 3), [1, true, null]);
|
||||
assert.strictEqual(params.list[3], 4);
|
||||
assert.ok(params.query.endsWith('...'), 'Expected long param value to be truncated');
|
||||
|
||||
assert.strictEqual(summarizeOutput(null), '');
|
||||
assert.strictEqual(summarizeOutput(undefined), '');
|
||||
assert.strictEqual(summarizeOutput('hello\nworld'), 'hello world');
|
||||
assert.strictEqual(summarizeOutput({ ok: true }), '{"ok":true}');
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('extracts file paths from nested arrays while filtering duplicates and remote URIs', () => {
|
||||
const paths = extractFilePaths({
|
||||
file_paths: [
|
||||
'src/a.js',
|
||||
'src/a.js',
|
||||
'https://example.com/file.js',
|
||||
'',
|
||||
{ file_path: 'src/b.js' },
|
||||
],
|
||||
nested: {
|
||||
source_path: 'app://connector/item',
|
||||
deep: [
|
||||
{ new_file_path: 'src/c.js' },
|
||||
{ old_file_path: 'plugin://plugin/item' },
|
||||
42,
|
||||
],
|
||||
},
|
||||
ignored: 'not-a-path-field',
|
||||
});
|
||||
|
||||
assert.deepStrictEqual(paths, ['src/a.js', 'src/b.js', 'src/c.js']);
|
||||
assert.deepStrictEqual(extractFilePaths(null), []);
|
||||
assert.deepStrictEqual(extractFilePaths('src/not-collected.js'), []);
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('extracts file event previews for create delete and one-sided edits', () => {
|
||||
const events = extractFileEvents('Write', {
|
||||
files: [
|
||||
{
|
||||
file_path: 'src/new.ts',
|
||||
content: 'first line\nsecond line',
|
||||
},
|
||||
{
|
||||
file_path: 'src/new.ts',
|
||||
content: 'first line\nsecond line',
|
||||
},
|
||||
{
|
||||
file_path: 'https://example.com/remote.ts',
|
||||
content: 'ignored',
|
||||
},
|
||||
],
|
||||
});
|
||||
assert.deepStrictEqual(events, [
|
||||
{
|
||||
path: 'src/new.ts',
|
||||
action: 'create',
|
||||
diff_preview: '+ first line second line',
|
||||
patch_preview: '+ first line second line',
|
||||
},
|
||||
]);
|
||||
|
||||
assert.deepStrictEqual(extractFileEvents('Remove', {
|
||||
file_path: 'src/old.ts',
|
||||
content: 'legacy line',
|
||||
}), [
|
||||
{
|
||||
path: 'src/old.ts',
|
||||
action: 'delete',
|
||||
patch_preview: '- legacy line',
|
||||
},
|
||||
]);
|
||||
|
||||
assert.deepStrictEqual(extractFileEvents('Edit', {
|
||||
edits: [
|
||||
{ file_path: 'src/before.ts', old_string: 'legacy', new_string: '' },
|
||||
{ file_path: 'src/after.ts', old_string: '', new_string: 'modern' },
|
||||
{ file_path: 'src/no-preview.ts', old_string: '', new_string: '' },
|
||||
],
|
||||
}), [
|
||||
{
|
||||
path: 'src/before.ts',
|
||||
action: 'modify',
|
||||
diff_preview: 'legacy ->',
|
||||
patch_preview: '@@\n- legacy',
|
||||
},
|
||||
{
|
||||
path: 'src/after.ts',
|
||||
action: 'modify',
|
||||
diff_preview: '-> modern',
|
||||
patch_preview: '@@\n+ modern',
|
||||
},
|
||||
{ path: 'src/no-preview.ts', action: 'modify' },
|
||||
]);
|
||||
|
||||
assert.deepStrictEqual(extractFileEvents('Rename', {
|
||||
old_file_path: 'src/old-name.ts',
|
||||
new_file_path: 'src/new-name.ts',
|
||||
}), [
|
||||
{ path: 'src/old-name.ts', action: 'move' },
|
||||
{ path: 'src/new-name.ts', action: 'move' },
|
||||
]);
|
||||
|
||||
assert.deepStrictEqual(extractFileEvents('Read', null), []);
|
||||
assert.deepStrictEqual(extractFileEvents('Touch', { file_path: 'src/touched.ts' }), [
|
||||
{ path: 'src/touched.ts', action: 'touch' },
|
||||
]);
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('records creation previews unchanged when running outside a git repository', () => {
|
||||
const tmpHome = makeTempDir();
|
||||
const tmpCwd = makeTempDir();
|
||||
|
||||
const input = {
|
||||
tool_name: 'Write',
|
||||
tool_input: {
|
||||
file_path: 'created.txt',
|
||||
content: 'alpha\nbeta',
|
||||
},
|
||||
tool_output: 17,
|
||||
};
|
||||
const result = runScript(input, {
|
||||
...withTempHome(tmpHome),
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
|
||||
ECC_SESSION_ID: 'ecc-session-non-git-create',
|
||||
}, {
|
||||
cwd: tmpCwd,
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0);
|
||||
const [row] = readMetricRows(tmpHome);
|
||||
assert.strictEqual(row.output_summary, '17');
|
||||
assert.deepStrictEqual(row.file_events, [
|
||||
{
|
||||
path: 'created.txt',
|
||||
action: 'create',
|
||||
diff_preview: '+ alpha beta',
|
||||
patch_preview: '+ alpha beta',
|
||||
},
|
||||
]);
|
||||
|
||||
fs.rmSync(tmpHome, { recursive: true, force: true });
|
||||
fs.rmSync(tmpCwd, { recursive: true, force: true });
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('preserves absolute paths outside the repo without git enrichment', () => {
|
||||
const tmpHome = makeTempDir();
|
||||
const outsideDir = makeTempDir();
|
||||
const outsideFile = path.join(outsideDir, 'outside.txt');
|
||||
fs.writeFileSync(outsideFile, 'outside', 'utf8');
|
||||
|
||||
const input = {
|
||||
tool_name: 'Read',
|
||||
tool_input: {
|
||||
file_path: outsideFile,
|
||||
},
|
||||
tool_output: 'read outside',
|
||||
};
|
||||
const result = runScript(input, {
|
||||
...withTempHome(tmpHome),
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
|
||||
ECC_SESSION_ID: 'ecc-session-absolute-outside',
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0);
|
||||
const [row] = readMetricRows(tmpHome);
|
||||
assert.deepStrictEqual(row.file_paths, [outsideFile]);
|
||||
assert.deepStrictEqual(row.file_events, [
|
||||
{ path: outsideFile, action: 'read' },
|
||||
]);
|
||||
|
||||
fs.rmSync(tmpHome, { recursive: true, force: true });
|
||||
fs.rmSync(outsideDir, { recursive: true, force: true });
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
(test('passes empty stdin through without creating metrics', () => {
|
||||
const tmpHome = makeTempDir();
|
||||
const result = runScript('', {
|
||||
...withTempHome(tmpHome),
|
||||
CLAUDE_HOOK_EVENT_NAME: 'PostToolUse',
|
||||
ECC_SESSION_ID: 'sess-empty',
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.strictEqual(result.stdout, '');
|
||||
assert.strictEqual(run(''), '');
|
||||
assert.strictEqual(
|
||||
fs.existsSync(path.join(tmpHome, '.claude', 'metrics', 'tool-usage.jsonl')),
|
||||
false
|
||||
);
|
||||
|
||||
fs.rmSync(tmpHome, { recursive: true, force: true });
|
||||
}) ? passed++ : failed++);
|
||||
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ const path = require('path');
|
||||
const {
|
||||
buildDoctorReport,
|
||||
discoverInstalledStates,
|
||||
normalizeTargets,
|
||||
repairInstalledStates,
|
||||
uninstallInstalledStates,
|
||||
} = require('../../scripts/lib/install-lifecycle');
|
||||
@@ -52,12 +53,79 @@ function writeState(filePath, options) {
|
||||
return state;
|
||||
}
|
||||
|
||||
// Build a canonical cursor-project install-state options object for tests.
// `overrides` may replace the target/state paths wholesale, and its
// `request`, `resolution`, and `source` objects are shallow-merged over the
// defaults; `operations` replaces the default empty list.
function createCursorStateOptions(projectRoot, overrides = {}) {
  const targetRoot = overrides.targetRoot || path.join(projectRoot, '.cursor');
  const installStatePath =
    overrides.installStatePath || path.join(targetRoot, 'ecc-install-state.json');

  // Shallow-merge each override section over its default shape.
  const request = Object.assign(
    {
      profile: null,
      modules: [],
      includeComponents: [],
      excludeComponents: [],
      legacyLanguages: ['typescript'],
      legacyMode: true,
    },
    overrides.request || {}
  );
  const resolution = Object.assign(
    {
      selectedModules: ['legacy-cursor-install'],
      skippedModules: [],
    },
    overrides.resolution || {}
  );
  const source = Object.assign(
    {
      repoVersion: CURRENT_PACKAGE_VERSION,
      repoCommit: 'abc123',
      manifestVersion: CURRENT_MANIFEST_VERSION,
    },
    overrides.source || {}
  );

  return {
    adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
    targetRoot,
    installStatePath,
    request,
    resolution,
    operations: overrides.operations || [],
    source,
  };
}
|
||||
|
||||
// Persist a cursor-project install-state to disk for a test fixture and
// return the resolved paths together with the exact state object written.
function writeCursorState(projectRoot, overrides = {}) {
  const state = createCursorStateOptions(projectRoot, overrides);
  const { targetRoot, installStatePath } = state;
  writeState(installStatePath, state);
  return { targetRoot, installStatePath, state };
}
|
||||
|
||||
// Build a managed install-state operation record with test-friendly defaults.
// The operation `kind` doubles as the default `strategy`; any field may be
// replaced through `overrides`.
function managedOperation(kind, destinationPath, overrides = {}) {
  const defaults = {
    kind,
    moduleId: 'test-module',
    sourceRelativePath: 'rules/common/coding-style.md',
    destinationPath,
    strategy: kind,
    ownership: 'managed',
    scaffoldOnly: false,
  };
  return Object.assign(defaults, overrides);
}
|
||||
|
||||
function runTests() {
|
||||
console.log('\n=== Testing install-lifecycle.js ===\n');
|
||||
|
||||
let passed = 0;
|
||||
let failed = 0;
|
||||
|
||||
if (test('normalizes default targets and dedupes adapter aliases', () => {
|
||||
const defaultTargets = normalizeTargets();
|
||||
|
||||
assert.ok(defaultTargets.includes('claude'));
|
||||
assert.ok(defaultTargets.includes('cursor'));
|
||||
assert.ok(defaultTargets.includes('codex'));
|
||||
assert.deepStrictEqual(
|
||||
normalizeTargets(['cursor-project', 'cursor', 'claude-home', 'claude']),
|
||||
['cursor', 'claude']
|
||||
);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('discovers installed states for multiple targets in the current context', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
@@ -127,6 +195,42 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('discovers missing and invalid install-state records', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
let records = discoverInstalledStates({
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(records.length, 1);
|
||||
assert.strictEqual(records[0].exists, false);
|
||||
assert.strictEqual(records[0].state, null);
|
||||
assert.strictEqual(records[0].error, null);
|
||||
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const statePath = path.join(targetRoot, 'ecc-install-state.json');
|
||||
fs.mkdirSync(targetRoot, { recursive: true });
|
||||
fs.writeFileSync(statePath, '{not-json', 'utf8');
|
||||
|
||||
records = discoverInstalledStates({
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(records[0].exists, true);
|
||||
assert.strictEqual(records[0].state, null);
|
||||
assert.ok(records[0].error.includes('Failed to read install-state'));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor reports missing managed files as an error', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
@@ -184,6 +288,189 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor reports target mismatches, missing sources, unverified operations, and version drift', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const actualTargetRoot = path.join(projectRoot, '.cursor');
|
||||
const actualStatePath = path.join(actualTargetRoot, 'ecc-install-state.json');
|
||||
const recordedTargetRoot = path.join(projectRoot, '.old-cursor');
|
||||
const recordedStatePath = path.join(recordedTargetRoot, 'state.json');
|
||||
const copyDestination = path.join(actualTargetRoot, 'rules', 'missing-source.md');
|
||||
const customDestination = path.join(actualTargetRoot, 'custom.txt');
|
||||
|
||||
fs.mkdirSync(path.dirname(copyDestination), { recursive: true });
|
||||
fs.writeFileSync(copyDestination, 'managed copy\n');
|
||||
fs.writeFileSync(customDestination, 'custom\n');
|
||||
|
||||
writeState(actualStatePath, createCursorStateOptions(projectRoot, {
|
||||
targetRoot: recordedTargetRoot,
|
||||
installStatePath: recordedStatePath,
|
||||
request: {
|
||||
profile: 'missing-profile',
|
||||
legacyLanguages: [],
|
||||
legacyMode: false,
|
||||
},
|
||||
resolution: {
|
||||
selectedModules: [],
|
||||
skippedModules: [],
|
||||
},
|
||||
source: {
|
||||
repoVersion: '0.0.1',
|
||||
manifestVersion: CURRENT_MANIFEST_VERSION + 100,
|
||||
},
|
||||
operations: [
|
||||
managedOperation('copy-file', copyDestination, {
|
||||
sourceRelativePath: 'missing/source.md',
|
||||
strategy: 'copy-file',
|
||||
}),
|
||||
managedOperation('custom-kind', customDestination),
|
||||
],
|
||||
}));
|
||||
|
||||
const report = buildDoctorReport({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
const codes = report.results[0].issues.map(issue => issue.code);
|
||||
|
||||
assert.strictEqual(report.results[0].status, 'error');
|
||||
assert.ok(codes.includes('missing-target-root'));
|
||||
assert.ok(codes.includes('target-root-mismatch'));
|
||||
assert.ok(codes.includes('install-state-path-mismatch'));
|
||||
assert.ok(codes.includes('missing-source-files'));
|
||||
assert.ok(codes.includes('unverified-managed-operations'));
|
||||
assert.ok(codes.includes('manifest-version-mismatch'));
|
||||
assert.ok(codes.includes('repo-version-mismatch'));
|
||||
assert.ok(codes.includes('resolution-unavailable'));
|
||||
assert.strictEqual(report.summary.checkedCount, 1);
|
||||
assert.ok(report.summary.errorCount >= 3);
|
||||
assert.ok(report.summary.warningCount >= 4);
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor verifies render-template and merge-json operations by content', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const templatePath = path.join(targetRoot, 'generated.txt');
|
||||
const jsonPath = path.join(targetRoot, 'settings.json');
|
||||
fs.mkdirSync(targetRoot, { recursive: true });
|
||||
fs.writeFileSync(templatePath, 'generated\n');
|
||||
fs.writeFileSync(jsonPath, JSON.stringify({
|
||||
keep: true,
|
||||
nested: {
|
||||
managed: true,
|
||||
extra: true,
|
||||
},
|
||||
}, null, 2));
|
||||
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('render-template', templatePath, {
|
||||
renderedContent: 'generated\n',
|
||||
}),
|
||||
managedOperation('merge-json', jsonPath, {
|
||||
mergePayload: {
|
||||
nested: {
|
||||
managed: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const report = buildDoctorReport({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(report.results[0].status, 'ok');
|
||||
assert.strictEqual(report.results[0].issues.length, 0);
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor classifies remove, unverified template/json, and invalid JSON operation health', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const templatePath = path.join(targetRoot, 'template.txt');
|
||||
const missingPayloadJsonPath = path.join(targetRoot, 'missing-payload.json');
|
||||
const invalidJsonPath = path.join(targetRoot, 'invalid.json');
|
||||
const removedPath = path.join(targetRoot, 'already-removed.txt');
|
||||
fs.mkdirSync(targetRoot, { recursive: true });
|
||||
fs.writeFileSync(templatePath, 'generated\n');
|
||||
fs.writeFileSync(missingPayloadJsonPath, '{"managed":true}\n');
|
||||
fs.writeFileSync(invalidJsonPath, '{not-json', 'utf8');
|
||||
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('remove', removedPath),
|
||||
managedOperation('render-template', templatePath),
|
||||
managedOperation('merge-json', missingPayloadJsonPath),
|
||||
managedOperation('merge-json', invalidJsonPath, {
|
||||
mergePayload: { managed: true },
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const report = buildDoctorReport({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
const codes = report.results[0].issues.map(issue => issue.code);
|
||||
|
||||
assert.strictEqual(report.results[0].status, 'warning');
|
||||
assert.ok(codes.includes('unverified-managed-operations'));
|
||||
assert.ok(codes.includes('drifted-managed-files'));
|
||||
assert.ok(!report.results[0].issues.some(issue => issue.code === 'missing-managed-files'));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor reports invalid install-state files as errors', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const statePath = path.join(projectRoot, '.cursor', 'ecc-install-state.json');
|
||||
fs.mkdirSync(path.dirname(statePath), { recursive: true });
|
||||
fs.writeFileSync(statePath, '{"schemaVersion":"wrong"}\n');
|
||||
|
||||
const report = buildDoctorReport({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(report.results[0].status, 'error');
|
||||
assert.ok(report.results[0].issues.some(issue => issue.code === 'invalid-install-state'));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor reports a healthy legacy install when managed files are present', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
@@ -244,6 +531,201 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('repair dry-run reports planned copy repairs without writing files', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const destinationPath = path.join(targetRoot, 'rules', 'coding-style.md');
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', destinationPath, {
|
||||
sourceRelativePath: 'rules/common/coding-style.md',
|
||||
strategy: 'copy-file',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
dryRun: true,
|
||||
});
|
||||
|
||||
assert.strictEqual(result.dryRun, true);
|
||||
assert.strictEqual(result.results[0].status, 'planned');
|
||||
assert.deepStrictEqual(result.results[0].plannedRepairs, [destinationPath]);
|
||||
assert.ok(!fs.existsSync(destinationPath));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('repair copies missing managed files from recorded source paths', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const destinationPath = path.join(targetRoot, 'rules', 'coding-style.md');
|
||||
const sourcePath = path.join(REPO_ROOT, 'rules', 'common', 'coding-style.md');
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', destinationPath, {
|
||||
sourceRelativePath: 'rules/common/coding-style.md',
|
||||
strategy: 'copy-file',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(result.results[0].status, 'repaired');
|
||||
assert.ok(fs.readFileSync(destinationPath).equals(fs.readFileSync(sourcePath)));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('repair reports invalid states, missing sources, unsupported operations, and no-op refreshes', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const invalidProjectRoot = createTempDir('install-lifecycle-invalid-');
|
||||
const missingSourceProjectRoot = createTempDir('install-lifecycle-missing-source-');
|
||||
const unsupportedProjectRoot = createTempDir('install-lifecycle-unsupported-');
|
||||
const okProjectRoot = createTempDir('install-lifecycle-ok-');
|
||||
|
||||
try {
|
||||
const invalidStatePath = path.join(invalidProjectRoot, '.cursor', 'ecc-install-state.json');
|
||||
fs.mkdirSync(path.dirname(invalidStatePath), { recursive: true });
|
||||
fs.writeFileSync(invalidStatePath, '{"schemaVersion":"wrong"}\n');
|
||||
|
||||
let result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot: invalidProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'error');
|
||||
assert.ok(result.results[0].error.includes('Invalid install-state'));
|
||||
|
||||
const missingDestination = path.join(missingSourceProjectRoot, '.cursor', 'rules', 'missing.md');
|
||||
fs.mkdirSync(path.dirname(missingDestination), { recursive: true });
|
||||
fs.writeFileSync(missingDestination, 'managed\n');
|
||||
writeCursorState(missingSourceProjectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', missingDestination, {
|
||||
sourceRelativePath: 'missing/source.md',
|
||||
strategy: 'copy-file',
|
||||
}),
|
||||
],
|
||||
});
|
||||
result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot: missingSourceProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'error');
|
||||
assert.ok(result.results[0].error.includes('Missing source file(s)'));
|
||||
|
||||
const unsupportedDestination = path.join(unsupportedProjectRoot, '.cursor', 'custom.txt');
|
||||
writeCursorState(unsupportedProjectRoot, {
|
||||
operations: [
|
||||
managedOperation('custom-kind', unsupportedDestination),
|
||||
],
|
||||
});
|
||||
result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot: unsupportedProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'error');
|
||||
assert.ok(result.results[0].error.includes('Unsupported repair operation kind'));
|
||||
|
||||
writeCursorState(okProjectRoot, { operations: [] });
|
||||
result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot: okProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'ok');
|
||||
assert.strictEqual(result.results[0].stateRefreshed, true);
|
||||
assert.strictEqual(result.summary.errorCount, 0);
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(invalidProjectRoot);
|
||||
cleanup(missingSourceProjectRoot);
|
||||
cleanup(unsupportedProjectRoot);
|
||||
cleanup(okProjectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('repair dry-run reports ok when no managed operations need changes', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
writeCursorState(projectRoot, { operations: [] });
|
||||
|
||||
const result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
dryRun: true,
|
||||
});
|
||||
|
||||
assert.strictEqual(result.results[0].status, 'ok');
|
||||
assert.strictEqual(result.results[0].stateRefreshed, true);
|
||||
assert.deepStrictEqual(result.results[0].plannedRepairs, []);
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('repair surfaces missing source errors from execution when destination is absent', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const destinationPath = path.join(projectRoot, '.cursor', 'rules', 'missing.md');
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', destinationPath, {
|
||||
sourceRelativePath: 'missing/source.md',
|
||||
strategy: 'copy-file',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
const result = repairInstalledStates({
|
||||
repoRoot: REPO_ROOT,
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(result.results[0].status, 'error');
|
||||
assert.ok(result.results[0].error.includes('Missing source file for repair'));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('doctor reports drifted managed files as a warning', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
@@ -731,6 +1213,394 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('uninstall dry-run reports deduped managed removals without deleting files', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const destinationPath = path.join(targetRoot, 'rules', 'coding-style.md');
|
||||
fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
|
||||
fs.writeFileSync(destinationPath, 'managed\n');
|
||||
const { installStatePath } = writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', destinationPath, { strategy: 'copy-file' }),
|
||||
managedOperation('copy-file', destinationPath, { strategy: 'copy-file' }),
|
||||
],
|
||||
});
|
||||
|
||||
const result = uninstallInstalledStates({
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
dryRun: true,
|
||||
});
|
||||
|
||||
assert.strictEqual(result.dryRun, true);
|
||||
assert.strictEqual(result.results[0].status, 'planned');
|
||||
assert.deepStrictEqual(result.results[0].plannedRemovals, [
|
||||
destinationPath,
|
||||
installStatePath,
|
||||
]);
|
||||
assert.ok(fs.existsSync(destinationPath));
|
||||
assert.ok(fs.existsSync(installStatePath));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('uninstall reports invalid install states as errors', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const statePath = path.join(projectRoot, '.cursor', 'ecc-install-state.json');
|
||||
fs.mkdirSync(path.dirname(statePath), { recursive: true });
|
||||
fs.writeFileSync(statePath, '{not-json', 'utf8');
|
||||
|
||||
const result = uninstallInstalledStates({
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(result.results[0].status, 'error');
|
||||
assert.ok(result.results[0].error.includes('Failed to read install-state'));
|
||||
assert.strictEqual(result.summary.errorCount, 1);
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('uninstall removes copied files and cleans empty parent directories', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const projectRoot = createTempDir('install-lifecycle-project-');
|
||||
|
||||
try {
|
||||
const targetRoot = path.join(projectRoot, '.cursor');
|
||||
const destinationPath = path.join(targetRoot, 'rules', 'nested', 'managed.md');
|
||||
fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
|
||||
fs.writeFileSync(destinationPath, 'managed\n');
|
||||
writeCursorState(projectRoot, {
|
||||
operations: [
|
||||
managedOperation('copy-file', destinationPath, { strategy: 'copy-file' }),
|
||||
],
|
||||
});
|
||||
|
||||
const result = uninstallInstalledStates({
|
||||
homeDir,
|
||||
projectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
|
||||
assert.strictEqual(result.results[0].status, 'uninstalled');
|
||||
assert.ok(result.results[0].removedPaths.includes(destinationPath));
|
||||
assert.ok(!fs.existsSync(destinationPath));
|
||||
assert.ok(!fs.existsSync(path.dirname(destinationPath)));
|
||||
assert.ok(fs.existsSync(targetRoot));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('uninstall handles merge-json subset removal and full-file deletion', () => {
|
||||
const homeDir = createTempDir('install-lifecycle-home-');
|
||||
const partialProjectRoot = createTempDir('install-lifecycle-partial-');
|
||||
const fullProjectRoot = createTempDir('install-lifecycle-full-');
|
||||
|
||||
try {
|
||||
let targetRoot = path.join(partialProjectRoot, '.cursor');
|
||||
let destinationPath = path.join(targetRoot, 'settings.json');
|
||||
fs.mkdirSync(targetRoot, { recursive: true });
|
||||
fs.writeFileSync(destinationPath, JSON.stringify({
|
||||
keep: true,
|
||||
managed: true,
|
||||
nested: {
|
||||
keep: true,
|
||||
remove: true,
|
||||
},
|
||||
list: ['a', 'b'],
|
||||
}, null, 2));
|
||||
writeCursorState(partialProjectRoot, {
|
||||
operations: [
|
||||
managedOperation('merge-json', destinationPath, {
|
||||
mergePayload: {
|
||||
managed: true,
|
||||
nested: { remove: true },
|
||||
list: ['a', 'b'],
|
||||
},
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
let result = uninstallInstalledStates({
|
||||
homeDir,
|
||||
projectRoot: partialProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'uninstalled');
|
||||
assert.deepStrictEqual(JSON.parse(fs.readFileSync(destinationPath, 'utf8')), {
|
||||
keep: true,
|
||||
nested: {
|
||||
keep: true,
|
||||
},
|
||||
});
|
||||
|
||||
targetRoot = path.join(fullProjectRoot, '.cursor');
|
||||
destinationPath = path.join(targetRoot, 'settings.json');
|
||||
fs.mkdirSync(targetRoot, { recursive: true });
|
||||
fs.writeFileSync(destinationPath, JSON.stringify({ managed: true }, null, 2));
|
||||
writeCursorState(fullProjectRoot, {
|
||||
operations: [
|
||||
managedOperation('merge-json', destinationPath, {
|
||||
mergePayload: { managed: true },
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
result = uninstallInstalledStates({
|
||||
homeDir,
|
||||
projectRoot: fullProjectRoot,
|
||||
targets: ['cursor'],
|
||||
});
|
||||
assert.strictEqual(result.results[0].status, 'uninstalled');
|
||||
assert.ok(!fs.existsSync(destinationPath));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(partialProjectRoot);
|
||||
cleanup(fullProjectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Verifies uninstall's merge-json reversal across awkward destination shapes:
// primitive JSON roots, payloads whose keys are absent, nested key deletion,
// array/primitive roots (file removed outright), a destination that never
// existed, and a recorded previousJson snapshot that must be restored.
if (test('uninstall handles merge-json edge shapes and absent destinations', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  // One isolated project root per case so state files cannot interfere.
  const projects = [
    createTempDir('install-lifecycle-current-primitive-'),
    createTempDir('install-lifecycle-missing-key-'),
    createTempDir('install-lifecycle-nested-delete-'),
    createTempDir('install-lifecycle-array-root-'),
    createTempDir('install-lifecycle-primitive-root-'),
    createTempDir('install-lifecycle-absent-dest-'),
    createTempDir('install-lifecycle-previous-json-'),
  ];

  try {
    // Each case seeds `initial` at the destination (unless `absent`), records
    // a merge-json operation carrying `payload`, then uninstalls and expects
    // either file removal (`removed`/`absent`) or the surviving `expected`
    // JSON value.
    const cases = [
      {
        // Destination root is a JSON primitive string; it must survive.
        projectRoot: projects[0],
        initial: '"plain"',
        payload: { managed: true },
        expected: 'plain',
      },
      {
        // Payload key never existed in the destination; content preserved.
        projectRoot: projects[1],
        initial: { keep: true },
        payload: { missing: true },
        expected: { keep: true },
      },
      {
        // Nested payload keys are deleted; unrelated siblings stay intact.
        projectRoot: projects[2],
        initial: { keep: true, nested: { remove: true } },
        payload: { nested: { remove: true } },
        expected: { keep: true },
      },
      {
        // Array root fully matching the payload: the file is removed.
        projectRoot: projects[3],
        initial: ['a', 'b'],
        payload: ['a', 'b'],
        removed: true,
      },
      {
        // Primitive root matching the payload: the file is removed.
        projectRoot: projects[4],
        initial: true,
        payload: true,
        removed: true,
      },
      {
        // Destination never existed; uninstall must still succeed cleanly.
        projectRoot: projects[5],
        payload: { managed: true },
        absent: true,
      },
      {
        // A recorded previousJson snapshot is restored verbatim.
        projectRoot: projects[6],
        initial: { generated: true },
        payload: { generated: true },
        previousJson: { restored: true },
        expected: { restored: true },
      },
    ];

    for (const testCase of cases) {
      const targetRoot = path.join(testCase.projectRoot, '.cursor');
      const destinationPath = path.join(targetRoot, 'settings.json');
      fs.mkdirSync(targetRoot, { recursive: true });
      if (!testCase.absent) {
        // String initials are raw JSON text; objects are serialized.
        fs.writeFileSync(
          destinationPath,
          typeof testCase.initial === 'string'
            ? `${testCase.initial}\n`
            : JSON.stringify(testCase.initial, null, 2)
        );
      }
      writeCursorState(testCase.projectRoot, {
        operations: [
          managedOperation('merge-json', destinationPath, {
            mergePayload: testCase.payload,
            previousJson: testCase.previousJson,
          }),
        ],
      });

      const result = uninstallInstalledStates({
        homeDir,
        projectRoot: testCase.projectRoot,
        targets: ['cursor'],
      });

      assert.strictEqual(result.results[0].status, 'uninstalled');
      if (testCase.removed || testCase.absent) {
        assert.ok(!fs.existsSync(destinationPath));
      } else {
        assert.deepStrictEqual(JSON.parse(fs.readFileSync(destinationPath, 'utf8')), testCase.expected);
      }
    }
  } finally {
    cleanup(homeDir);
    for (const projectRoot of projects) {
      cleanup(projectRoot);
    }
  }
})) passed++; else failed++;
|
||||
|
||||
// Verifies that uninstall deletes files generated by render-template
// operations (including the now-empty parent directory) and that a recorded
// `remove` operation with no backup is a no-op: the path stays absent.
if (test('uninstall removes generated render-template files and no-backup remove operations are no-ops', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  const projectRoot = createTempDir('install-lifecycle-project-');

  try {
    const targetRoot = path.join(projectRoot, '.cursor');
    const templatePath = path.join(targetRoot, 'generated', 'plugin.json');
    const removedPath = path.join(targetRoot, 'already-removed.txt');
    fs.mkdirSync(path.dirname(templatePath), { recursive: true });
    fs.writeFileSync(templatePath, '{"generated":true}\n');

    writeCursorState(projectRoot, {
      operations: [
        managedOperation('render-template', templatePath, {
          renderedContent: '{"generated":true}\n',
        }),
        // No backup recorded and the file was never created on disk.
        managedOperation('remove', removedPath),
      ],
    });

    const result = uninstallInstalledStates({
      homeDir,
      projectRoot,
      targets: ['cursor'],
    });

    assert.strictEqual(result.results[0].status, 'uninstalled');
    assert.ok(result.results[0].removedPaths.includes(templatePath));
    assert.ok(!fs.existsSync(templatePath));
    // The generated file's parent directory is pruned once it is empty.
    assert.ok(!fs.existsSync(path.dirname(templatePath)));
    // Fix: actually assert the no-backup remove operation stayed a no-op —
    // the test's name promised this but nothing checked removedPath before.
    assert.ok(!fs.existsSync(removedPath));
  } finally {
    cleanup(homeDir);
    cleanup(projectRoot);
  }
})) passed++; else failed++;
|
||||
|
||||
// Verifies uninstall restores recorded previousJson snapshots: a templated
// file is rewritten back to its prior JSON, and a removed file is recreated
// from its snapshot rather than left deleted.
if (test('uninstall restores previous JSON snapshots for template and remove operations', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  const projectRoot = createTempDir('install-lifecycle-project-');

  try {
    const targetRoot = path.join(projectRoot, '.cursor');
    const templatePath = path.join(targetRoot, 'plugin.json');
    const removedPath = path.join(targetRoot, 'legacy.json');
    fs.mkdirSync(targetRoot, { recursive: true });
    fs.writeFileSync(templatePath, '{"generated":true}\n');

    writeCursorState(projectRoot, {
      operations: [
        // render-template that snapshotted the pre-install JSON.
        managedOperation('render-template', templatePath, {
          previousJson: { existing: true },
          renderedContent: '{"generated":true}\n',
        }),
        // remove with a snapshot: uninstall should recreate the file.
        managedOperation('remove', removedPath, {
          previousJson: { restored: true },
        }),
      ],
    });

    const result = uninstallInstalledStates({
      homeDir,
      projectRoot,
      targets: ['cursor'],
    });

    assert.strictEqual(result.results[0].status, 'uninstalled');
    assert.deepStrictEqual(JSON.parse(fs.readFileSync(templatePath, 'utf8')), {
      existing: true,
    });
    assert.deepStrictEqual(JSON.parse(fs.readFileSync(removedPath, 'utf8')), {
      restored: true,
    });
  } finally {
    cleanup(homeDir);
    cleanup(projectRoot);
  }
})) passed++; else failed++;
|
||||
|
||||
// Verifies uninstall surfaces recoverable problems as per-target errors
// instead of throwing: an unknown operation kind and a merge-json operation
// recorded without its payload both yield status 'error' with a descriptive
// message.
if (test('uninstall reports unsupported operations and missing merge payloads as errors', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  const unsupportedProjectRoot = createTempDir('install-lifecycle-unsupported-');
  const missingPayloadProjectRoot = createTempDir('install-lifecycle-missing-payload-');

  try {
    // Case 1: an operation kind the uninstaller does not recognize.
    let targetRoot = path.join(unsupportedProjectRoot, '.cursor');
    let destinationPath = path.join(targetRoot, 'custom.txt');
    fs.mkdirSync(targetRoot, { recursive: true });
    fs.writeFileSync(destinationPath, 'custom\n');
    writeCursorState(unsupportedProjectRoot, {
      operations: [
        managedOperation('custom-kind', destinationPath),
      ],
    });

    let result = uninstallInstalledStates({
      homeDir,
      projectRoot: unsupportedProjectRoot,
      targets: ['cursor'],
    });
    assert.strictEqual(result.results[0].status, 'error');
    assert.ok(result.results[0].error.includes('Unsupported uninstall operation kind'));

    // Case 2: a merge-json operation recorded without a mergePayload.
    targetRoot = path.join(missingPayloadProjectRoot, '.cursor');
    destinationPath = path.join(targetRoot, 'settings.json');
    fs.mkdirSync(targetRoot, { recursive: true });
    fs.writeFileSync(destinationPath, '{"managed":true}\n');
    writeCursorState(missingPayloadProjectRoot, {
      operations: [
        managedOperation('merge-json', destinationPath),
      ],
    });

    result = uninstallInstalledStates({
      homeDir,
      projectRoot: missingPayloadProjectRoot,
      targets: ['cursor'],
    });
    assert.strictEqual(result.results[0].status, 'error');
    assert.ok(result.results[0].error.includes('Missing merge payload for uninstall'));
  } finally {
    cleanup(homeDir);
    cleanup(unsupportedProjectRoot);
    cleanup(missingPayloadProjectRoot);
  }
})) passed++; else failed++;
|
||||
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
}
|
||||
|
||||
@@ -6,8 +6,13 @@ const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
// Canonical-session helpers under test. NOTE(review): the scraped diff lost
// its +/- markers here, leaving a malformed destructuring (a duplicated
// `persistCanonicalSnapshot` entry with a missing comma). This is the
// syntactically valid, deduplicated form exposing every helper the tests
// below reference.
const {
  SESSION_SCHEMA_VERSION,
  buildAggregates,
  getFallbackSessionRecordingPath,
  normalizeClaudeHistorySession,
  normalizeDmuxSnapshot,
  persistCanonicalSnapshot,
  validateCanonicalSnapshot
} = require('../../scripts/lib/session-adapters/canonical-session');
|
||||
const { createClaudeHistoryAdapter } = require('../../scripts/lib/session-adapters/claude-history');
|
||||
const { createDmuxTmuxAdapter } = require('../../scripts/lib/session-adapters/dmux-tmux');
|
||||
@@ -55,6 +60,75 @@ function withHome(homeDir, fn) {
|
||||
}
|
||||
}
|
||||
|
||||
// Builds a valid canonical session snapshot fixture, then applies `overrides`
// with section-aware handling:
// - `session` / `sourceTarget` are shallow-merged into the defaults,
// - `workers` replaces the array outright and recomputes aggregates,
// - `aggregates` is shallow-merged last so it can override the recomputation,
// - any other top-level key (schemaVersion, adapterId, ...) is copied as-is.
function canonicalSnapshot(overrides = {}) {
  const snapshot = {
    schemaVersion: SESSION_SCHEMA_VERSION,
    adapterId: 'test-adapter',
    session: {
      id: 'session-1',
      kind: 'test',
      state: 'active',
      repoRoot: null,
      sourceTarget: {
        type: 'session',
        value: 'session-1'
      }
    },
    workers: [{
      id: 'worker-1',
      label: 'Worker 1',
      state: 'running',
      health: 'healthy',
      branch: null,
      worktree: null,
      runtime: {
        kind: 'test-runtime',
        command: null,
        pid: null,
        active: true,
        dead: false
      },
      intent: {
        objective: 'Test objective',
        seedPaths: []
      },
      outputs: {
        summary: [],
        validation: [],
        remainingRisks: []
      },
      artifacts: {}
    }]
  };

  // Aggregates always start consistent with the default worker list.
  snapshot.aggregates = buildAggregates(snapshot.workers);

  if (overrides.session) {
    snapshot.session = { ...snapshot.session, ...overrides.session };
  }
  if (overrides.sourceTarget) {
    snapshot.session.sourceTarget = {
      ...snapshot.session.sourceTarget,
      ...overrides.sourceTarget
    };
  }
  // hasOwnProperty so `workers: undefined`/`null` still replaces the default;
  // non-array overrides yield aggregates for an empty worker list.
  if (Object.prototype.hasOwnProperty.call(overrides, 'workers')) {
    snapshot.workers = overrides.workers;
    snapshot.aggregates = buildAggregates(Array.isArray(overrides.workers) ? overrides.workers : []);
  }
  if (overrides.aggregates) {
    snapshot.aggregates = { ...snapshot.aggregates, ...overrides.aggregates };
  }

  // Remaining top-level overrides copy straight onto the snapshot.
  for (const [key, value] of Object.entries(overrides)) {
    if (!['session', 'sourceTarget', 'workers', 'aggregates'].includes(key)) {
      snapshot[key] = value;
    }
  }

  return snapshot;
}
|
||||
|
||||
test('dmux adapter normalizes orchestration snapshots into canonical form', () => {
|
||||
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));
|
||||
|
||||
@@ -509,6 +583,324 @@ test('adapter registry lists adapter metadata and target types', () => {
|
||||
);
|
||||
});
|
||||
|
||||
// Table-driven negative tests for validateCanonicalSnapshot: each entry pairs
// an invalid snapshot with a regex the thrown message must match. Snapshots
// are built from the valid canonicalSnapshot() fixture with one field broken.
test('canonical snapshot validation rejects malformed required fields', () => {
  const invalidCases = [
    [null, /must be an object/],
    [canonicalSnapshot({ schemaVersion: 'ecc.session.v0' }), /Unsupported canonical session schema version/],
    [canonicalSnapshot({ adapterId: '' }), /adapterId/],
    [canonicalSnapshot({ session: { id: '' } }), /session.id/],
    [canonicalSnapshot({ session: { repoRoot: 42 } }), /session.repoRoot/],
    [canonicalSnapshot({ sourceTarget: { type: '' } }), /session.sourceTarget.type/],
    // Aggregates are hand-built to stay consistent, so only the non-object
    // worker entry can trip validation here.
    [(() => {
      const snapshot = canonicalSnapshot();
      snapshot.workers = [null];
      snapshot.aggregates = { workerCount: 1, states: { unknown: 1 }, healths: { unknown: 1 } };
      return snapshot;
    })(), /workers\[0\] to be an object/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        branch: 7
      }]
    }), /workers\[0\].branch/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        runtime: {
          ...canonicalSnapshot().workers[0].runtime,
          command: 123
        }
      }]
    }), /workers\[0\].runtime.command/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        runtime: {
          ...canonicalSnapshot().workers[0].runtime,
          active: 'yes'
        }
      }]
    }), /workers\[0\].runtime.active/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        intent: {
          objective: 'ok',
          seedPaths: ['README.md', 123]
        }
      }]
    }), /workers\[0\].intent.seedPaths/],
    [canonicalSnapshot({
      workers: [{
        ...canonicalSnapshot().workers[0],
        outputs: {
          summary: [],
          validation: 'nope',
          remainingRisks: []
        }
      }]
    }), /workers\[0\].outputs.validation/],
    [canonicalSnapshot({ aggregates: { workerCount: 99 } }), /aggregates.workerCount to match/],
    [canonicalSnapshot({ aggregates: { states: [] } }), /aggregates.states to be an object/],
    [canonicalSnapshot({ aggregates: { states: { running: -1 } } }), /aggregates.states.running/],
    [canonicalSnapshot({ aggregates: { healths: null } }), /aggregates.healths to be an object/]
  ];

  for (const [snapshot, pattern] of invalidCases) {
    assert.throws(() => validateCanonicalSnapshot(snapshot), pattern);
  }
});
|
||||
|
||||
// Test fixture factory for a single dmux worker record. `status` entries
// overlay the running-state defaults; `overrides.task` / `.handoff` /
// `.files` overlay their sections; an explicitly supplied `overrides.pane`
// (even null/undefined) replaces the default live pane.
function dmuxWorker(workerSlug, status = {}, overrides = {}) {
  const workerDir = `/tmp/${workerSlug}`;

  const workerStatus = Object.assign(
    {
      state: 'running',
      updated: new Date().toISOString(),
      branch: null,
      worktree: null
    },
    status
  );

  const task = Object.assign(
    { objective: `${workerSlug} objective`, seedPaths: ['README.md'] },
    overrides.task || {}
  );

  const handoff = Object.assign(
    { summary: ['summary'], validation: ['validation'], remainingRisks: ['risk'] },
    overrides.handoff || {}
  );

  const files = Object.assign(
    {
      status: `/tmp/${workerSlug}/status.md`,
      task: `/tmp/${workerSlug}/task.md`,
      handoff: `/tmp/${workerSlug}/handoff.md`
    },
    overrides.files || {}
  );

  // Distinguish "pane not supplied" from "pane explicitly set to null".
  const hasPaneOverride = Object.prototype.hasOwnProperty.call(overrides, 'pane');
  const pane = hasPaneOverride
    ? overrides.pane
    : {
      currentCommand: 'codex',
      pid: 123,
      active: true,
      dead: false
    };

  return { workerSlug, workerDir, status: workerStatus, task, handoff, files, pane };
}
|
||||
|
||||
// Test fixture factory for an inactive, empty dmux orchestration snapshot;
// `overrides` shallow-merge over the defaults (later keys win).
function dmuxSnapshot(overrides = {}) {
  const defaults = {
    sessionName: 'edge-session',
    repoRoot: '/tmp/repo',
    sessionActive: false,
    workerStates: {},
    workerCount: 0,
    workers: []
  };
  return Object.assign({}, defaults, overrides);
}
|
||||
|
||||
// Drives normalizeDmuxSnapshot through its session-state outcomes: no
// workers => 'missing', a failed worker => 'failed', running-but-stale
// workers => 'idle', all-done workers => 'completed'; also checks per-worker
// health/runtime derivation for dead panes and malformed metadata fields.
test('dmux normalization covers missing failed idle and stale worker states', () => {
  const sourceTarget = { type: 'session', value: 'edge-session' };

  // Empty snapshot: no workers at all.
  const missing = normalizeDmuxSnapshot(dmuxSnapshot(), sourceTarget);
  assert.strictEqual(missing.session.state, 'missing');
  assert.strictEqual(missing.aggregates.workerCount, 0);

  // One failed worker without a pane: the session fails, the worker reads as
  // degraded, and the absent pane yields inactive-but-not-dead runtime flags.
  const failed = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: { failed: 1 },
    workerCount: 1,
    workers: [
      dmuxWorker('failure', { state: 'failed' }, { pane: null })
    ]
  }), sourceTarget);
  assert.strictEqual(failed.session.state, 'failed');
  assert.strictEqual(failed.workers[0].health, 'degraded');
  assert.strictEqual(failed.workers[0].runtime.active, false);
  assert.strictEqual(failed.workers[0].runtime.dead, false);

  // Mixed degraded workers: missing/ancient status timestamps read as stale,
  // a dead pane as degraded, and malformed task/handoff fields fall back to
  // empty defaults with health 'unknown'.
  const idle = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: { running: 1, queued: 1 },
    workerCount: 2,
    workers: [
      dmuxWorker('missing-update', { state: 'running', updated: undefined }),
      dmuxWorker('stale-update', { state: 'active', updated: '2001-01-01T00:00:00Z' }),
      dmuxWorker('dead-pane', { state: 'running' }, { pane: { dead: true, active: false } }),
      dmuxWorker('mystery', { state: 'queued' }, {
        task: { seedPaths: 'not-array' },
        handoff: { summary: 'not-array', validation: null, remainingRisks: undefined },
        pane: null
      })
    ]
  }), sourceTarget);

  assert.strictEqual(idle.session.state, 'idle');
  assert.deepStrictEqual(
    idle.workers.map(worker => worker.health),
    ['stale', 'stale', 'degraded', 'unknown']
  );
  assert.deepStrictEqual(idle.workers[3].intent.seedPaths, []);
  assert.deepStrictEqual(idle.workers[3].outputs.summary, []);

  // All workers finished; a null workerStates map must not break aggregation.
  const completed = normalizeDmuxSnapshot(dmuxSnapshot({
    workerStates: null,
    workerCount: 2,
    workers: [
      dmuxWorker('done-a', { state: 'done' }),
      dmuxWorker('done-b', { state: 'success' })
    ]
  }), sourceTarget);
  assert.strictEqual(completed.session.state, 'completed');
  assert.deepStrictEqual(completed.workers.map(worker => worker.health), ['healthy', 'healthy']);
});
|
||||
|
||||
// Verifies normalizeClaudeHistorySession's fallbacks: a 'no-id' shortId makes
// the session/worker ids derive from the filename stem, malformed or empty
// metadata collapses to safe empty defaults, and when only a sessionPath is
// present the id derives from its basename while `context` is whitespace-split
// into seed paths.
test('claude history normalization falls back to filename ids and empty metadata defaults', () => {
  const snapshot = normalizeClaudeHistorySession({
    shortId: 'no-id',
    filename: '2026-03-13-no-id-session.tmp',
    sessionPath: '/tmp/2026-03-13-no-id-session.tmp',
    metadata: {
      title: '',
      completed: 'not-array',
      inProgress: ['Resume from filename fallback'],
      context: '',
      notes: ''
    }
  }, {
    type: 'claude-history',
    value: 'latest'
  });

  // Ids drop the file extension; the label keeps the full filename.
  assert.strictEqual(snapshot.session.id, '2026-03-13-no-id-session');
  assert.strictEqual(snapshot.workers[0].id, '2026-03-13-no-id-session');
  assert.strictEqual(snapshot.workers[0].label, '2026-03-13-no-id-session.tmp');
  // Objective is taken from the first inProgress entry.
  assert.strictEqual(snapshot.workers[0].intent.objective, 'Resume from filename fallback');
  assert.deepStrictEqual(snapshot.workers[0].intent.seedPaths, []);
  assert.deepStrictEqual(snapshot.workers[0].outputs.summary, []);
  assert.deepStrictEqual(snapshot.workers[0].outputs.remainingRisks, []);

  // No shortId/filename at all: fall back to the sessionPath basename.
  const pathOnly = normalizeClaudeHistorySession({
    sessionPath: '/tmp/path-only-session.tmp',
    metadata: {
      title: 'Path Only',
      inProgress: ['Continue work'],
      context: ' README.md \n\n scripts/ecc.js ',
      notes: 'No risks'
    }
  }, {
    type: 'claude-history',
    value: '/tmp/path-only-session.tmp'
  });

  assert.strictEqual(pathOnly.session.id, 'path-only-session');
  assert.strictEqual(pathOnly.workers[0].intent.objective, 'Continue work');
  assert.deepStrictEqual(pathOnly.workers[0].intent.seedPaths, ['README.md', 'scripts/ecc.js']);
  assert.deepStrictEqual(pathOnly.workers[0].outputs.remainingRisks, ['No risks']);
});
|
||||
|
||||
// Verifies the JSON-file fallback recorder: recording paths derive from
// sanitized adapter/session ids, the directory comes from
// ECC_SESSION_RECORDING_DIR, a corrupt existing recording is overwritten, and
// repeated persists append history only when the snapshot actually changed.
test('fallback recordings sanitize paths, use env dirs, and preserve changed history', () => {
  const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-env-'));
  const previousRecordingDir = process.env.ECC_SESSION_RECORDING_DIR;

  try {
    process.env.ECC_SESSION_RECORDING_DIR = recordingDir;
    const first = canonicalSnapshot({
      adapterId: 'adapter with spaces',
      session: { id: 'session id/with:chars' }
    });
    // Unsafe path characters in ids are replaced with underscores.
    const recordingPath = getFallbackSessionRecordingPath(first);
    assert.ok(recordingPath.includes(`${path.sep}adapter_with_spaces${path.sep}`));
    assert.ok(recordingPath.endsWith(`${path.sep}session_id_with_chars.json`));

    // Pre-seed the recording with invalid JSON; persist must recover.
    fs.mkdirSync(path.dirname(recordingPath), { recursive: true });
    fs.writeFileSync(recordingPath, '{not json', 'utf8');

    // A null state-store loader forces the json-file backend.
    const firstPersistence = persistCanonicalSnapshot(first, {
      loadStateStoreImpl: () => null
    });
    const changed = canonicalSnapshot({
      adapterId: 'adapter with spaces',
      session: { id: 'session id/with:chars', state: 'idle' }
    });
    // Persist the changed snapshot twice: only one history entry should be
    // added (the history length below stays 2, not 3).
    persistCanonicalSnapshot(changed, { loadStateStoreImpl: () => null });
    persistCanonicalSnapshot(changed, { loadStateStoreImpl: () => null });

    const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
    assert.strictEqual(firstPersistence.backend, 'json-file');
    assert.strictEqual(firstPersistence.path, recordingPath);
    assert.strictEqual(persisted.schemaVersion, 'ecc.session.recording.v1');
    assert.strictEqual(persisted.latest.session.state, 'idle');
    assert.strictEqual(persisted.history.length, 2);
    assert.strictEqual(persisted.history[0].snapshot.session.state, 'active');
    assert.strictEqual(persisted.history[1].snapshot.session.state, 'idle');
    assert.strictEqual(persisted.createdAt, persisted.history[0].recordedAt);
  } finally {
    // Restore the env var exactly as found, then drop the temp directory.
    if (typeof previousRecordingDir === 'string') {
      process.env.ECC_SESSION_RECORDING_DIR = previousRecordingDir;
    } else {
      delete process.env.ECC_SESSION_RECORDING_DIR;
    }
    fs.rmSync(recordingDir, { recursive: true, force: true });
  }
});
|
||||
|
||||
// Verifies persistCanonicalSnapshot's backend selection: persist:false skips
// entirely; an injected state store is used via either its top-level
// recordCanonicalSessionSnapshot or nested sessions.recordSessionSnapshot
// writer; and a store exposing no usable writer — or a loader that throws
// MODULE_NOT_FOUND — falls back to the json-file backend.
test('persistence supports skip mode, writer variants, and missing state-store fallback', () => {
  const snapshot = canonicalSnapshot();
  const skipped = persistCanonicalSnapshot(snapshot, { persist: false });
  assert.deepStrictEqual(skipped, {
    backend: 'skipped',
    path: null,
    recordedAt: null
  });

  // Store with a top-level writer: invoked once, with session metadata.
  const topLevelStore = {
    calls: [],
    recordCanonicalSessionSnapshot(snapshotArg, metadata) {
      this.calls.push({ snapshot: snapshotArg, metadata });
    }
  };
  const stateStoreResult = persistCanonicalSnapshot(snapshot, { stateStore: topLevelStore });
  assert.strictEqual(stateStoreResult.backend, 'state-store');
  assert.strictEqual(topLevelStore.calls.length, 1);
  assert.strictEqual(topLevelStore.calls[0].metadata.sessionId, 'session-1');

  // Store exposing only the nested sessions-level writer.
  const nestedStore = {
    sessions: {
      calls: [],
      recordSessionSnapshot(snapshotArg, metadata) {
        this.calls.push({ snapshot: snapshotArg, metadata });
      }
    }
  };
  persistCanonicalSnapshot(snapshot, { stateStore: nestedStore });
  assert.strictEqual(nestedStore.sessions.calls.length, 1);

  const noWriterDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-no-writer-'));
  const missingModuleDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-missing-module-'));
  try {
    // Store without any snapshot writer: fall back to the json-file backend.
    const noWriter = persistCanonicalSnapshot(snapshot, {
      recordingDir: noWriterDir,
      stateStore: { createStateStore() {} }
    });
    assert.strictEqual(noWriter.backend, 'json-file');

    // MODULE_NOT_FOUND from the loader is tolerated and also falls back.
    const missingModule = new Error("Cannot find module '../state-store'");
    missingModule.code = 'MODULE_NOT_FOUND';
    const fallback = persistCanonicalSnapshot(snapshot, {
      recordingDir: missingModuleDir,
      loadStateStoreImpl() {
        throw missingModule;
      }
    });
    assert.strictEqual(fallback.backend, 'json-file');
  } finally {
    fs.rmSync(noWriterDir, { recursive: true, force: true });
    fs.rmSync(missingModuleDir, { recursive: true, force: true });
  }
});
|
||||
|
||||
test('persistence only falls back when the state-store module is missing', () => {
|
||||
const snapshot = {
|
||||
schemaVersion: 'ecc.session.v1',
|
||||
|
||||
@@ -19,7 +19,6 @@ const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const repoRoot = path.resolve(__dirname, '..');
|
||||
const repoRootWithSep = `${repoRoot}${path.sep}`;
|
||||
const packageJsonPath = path.join(repoRoot, 'package.json');
|
||||
const packageLockPath = path.join(repoRoot, 'package-lock.json');
|
||||
const rootAgentsPath = path.join(repoRoot, 'AGENTS.md');
|
||||
@@ -70,16 +69,6 @@ function loadJsonObject(filePath, label) {
|
||||
return parsed;
|
||||
}
|
||||
|
||||
// Asserts that `relativePath` stays inside the repository when joined to its
// root: it must not be absolute (in native or POSIX form) and must not
// traverse upward out of the repo. Backslashes are normalized to forward
// slashes first so Windows-style input is checked consistently.
// Throws an AssertionError (via assert.ok) carrying `label` in the message.
function assertSafeRepoRelativePath(relativePath, label) {
  const normalized = path.posix.normalize(relativePath.replace(/\\/g, '/'));

  assert.ok(!path.isAbsolute(relativePath), `${label} must not be absolute: ${relativePath}`);
  // Also reject POSIX-absolute results that survive normalization
  // (e.g. a path given as "\\foo" on a non-Windows host).
  assert.ok(!path.posix.isAbsolute(normalized), `${label} must not be absolute: ${relativePath}`);
  // Fix: a bare ".." (or any path normalizing to exactly "..") previously
  // slipped past the startsWith/includes checks yet escapes the repo root.
  assert.ok(
    normalized !== '..' && !normalized.startsWith('../') && !normalized.includes('/../'),
    `${label} must not traverse directories: ${relativePath}`,
  );
}
|
||||
|
||||
function collectMarkdownFiles(rootPath) {
|
||||
if (!fs.existsSync(rootPath)) {
|
||||
return [];
|
||||
|
||||
Reference in New Issue
Block a user