mirror of
https://github.com/affaan-m/everything-claude-code.git
synced 2026-04-19 16:43:29 +08:00
Compare commits
42 Commits
f6ebc2a3c2
...
e70d4d2237
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e70d4d2237 | ||
|
|
9b286ab3f8 | ||
|
|
b3e362105d | ||
|
|
8cacf0f6a6 | ||
|
|
cedcf9a701 | ||
|
|
15717d6d04 | ||
|
|
c8b7d41e42 | ||
|
|
9bec3d7625 | ||
|
|
2573cbb7b0 | ||
|
|
9dccdb9068 | ||
|
|
f000d9b02d | ||
|
|
27ae5ea299 | ||
|
|
723e69a621 | ||
|
|
241c35a589 | ||
|
|
0c67e0571e | ||
|
|
02d5986049 | ||
|
|
f623e3b429 | ||
|
|
44b5a4f9f0 | ||
|
|
567664091d | ||
|
|
5031a84d6e | ||
|
|
702c3f54b4 | ||
|
|
162222a46c | ||
|
|
485def8582 | ||
|
|
cba6b44c61 | ||
|
|
1fcdf12b62 | ||
|
|
85a86f6747 | ||
|
|
3ec0aa7b50 | ||
|
|
9afecedb21 | ||
|
|
7db0d316f5 | ||
|
|
99fc51dda7 | ||
|
|
2fea46edc7 | ||
|
|
990c08159c | ||
|
|
43808ccf78 | ||
|
|
3bc0929c6e | ||
|
|
ad40bf3aad | ||
|
|
f1a693f7cf | ||
|
|
4e520c6873 | ||
|
|
86844a305a | ||
|
|
b950fd7427 | ||
|
|
71e86cc93f | ||
|
|
4f7b50fb78 | ||
|
|
277006bd7f |
@@ -125,7 +125,7 @@ ${chalk.bold('Files Tracked:')} ${chalk.green(data.files)}
|
||||
console.log(chalk.gray('─'.repeat(50)));
|
||||
|
||||
patterns.forEach((pattern, i) => {
|
||||
const confidence = pattern.confidence || 0.8;
|
||||
const confidence = pattern.confidence ?? 0.8;
|
||||
const confidenceBar = progressBar(Math.round(confidence * 100), 15);
|
||||
console.log(`
|
||||
${chalk.bold(chalk.yellow(`${i + 1}.`))} ${chalk.bold(pattern.name)}
|
||||
|
||||
@@ -1587,6 +1587,558 @@ function runTests() {
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 47: escape sequence and frontmatter edge cases ──
|
||||
console.log('\nRound 47: validate-hooks (inline JS escape sequences):');
|
||||
|
||||
if (test('validates inline JS with mixed escape sequences (newline + escaped quote)', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
// Command value after JSON parse: node -e "var a = \"ok\"\nconsole.log(a)"
|
||||
// Regex captures: var a = \"ok\"\nconsole.log(a)
|
||||
// After unescape chain: var a = "ok"\nconsole.log(a) (real newline) — valid JS
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
hooks: {
|
||||
PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command',
|
||||
command: 'node -e "var a = \\"ok\\"\\nconsole.log(a)"' }] }]
|
||||
}
|
||||
}));
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 0, 'Should handle escaped quotes and newline separators');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('rejects inline JS with syntax error after unescaping', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
// After unescape this becomes: var x = { — missing closing brace
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
hooks: {
|
||||
PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command',
|
||||
command: 'node -e "var x = {"' }] }]
|
||||
}
|
||||
}));
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1, 'Should reject JS syntax error after unescaping');
|
||||
assert.ok(result.stderr.includes('invalid inline JS'), 'Should report inline JS error');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 47: validate-agents (frontmatter lines without colon):');
|
||||
|
||||
if (test('silently ignores frontmatter line without colon', () => {
|
||||
const testDir = createTestDir();
|
||||
// Line "just some text" has no colon — should be skipped, not cause crash
|
||||
fs.writeFileSync(path.join(testDir, 'mixed.md'),
|
||||
'---\nmodel: sonnet\njust some text without colon\ntools: Read\n---\n# Agent');
|
||||
|
||||
const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 0, 'Should ignore lines without colon in frontmatter');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 52: command inline backtick refs, workflow whitespace, code-only rules ──
|
||||
console.log('\nRound 52: validate-commands (inline backtick refs):');
|
||||
|
||||
if (test('validates command refs inside inline backticks (not stripped by code block removal)', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
fs.writeFileSync(path.join(testDir, 'deploy.md'), '# Deploy\nDeploy the app.');
|
||||
// Inline backtick ref `/deploy` should be validated (only fenced blocks stripped)
|
||||
fs.writeFileSync(path.join(testDir, 'workflow.md'),
|
||||
'# Workflow\nFirst run `/deploy` to deploy the app.');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Inline backtick command refs should be validated');
|
||||
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 52: validate-commands (workflow whitespace):');
|
||||
|
||||
if (test('validates workflow arrows with irregular whitespace', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
fs.writeFileSync(path.join(agentsDir, 'planner.md'), '# Planner');
|
||||
fs.writeFileSync(path.join(agentsDir, 'reviewer.md'), '# Reviewer');
|
||||
// Three workflow lines: no spaces, double spaces, tab-separated
|
||||
fs.writeFileSync(path.join(testDir, 'flow.md'),
|
||||
'# Workflow\n\nplanner->reviewer\nplanner -> reviewer');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Workflow arrows with irregular whitespace should be valid');
|
||||
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 52: validate-rules (code-only content):');
|
||||
|
||||
if (test('passes rule file containing only a fenced code block', () => {
|
||||
const testDir = createTestDir();
|
||||
fs.writeFileSync(path.join(testDir, 'code-only.md'),
|
||||
'```javascript\nfunction example() {\n return true;\n}\n```');
|
||||
|
||||
const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
|
||||
assert.strictEqual(result.code, 0, 'Rule with only code block should pass (non-empty)');
|
||||
assert.ok(result.stdout.includes('Validated 1'), 'Should count the code-only file');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 57: readFileSync error path, statSync catch block, adjacent code blocks ──
|
||||
console.log('\nRound 57: validate-skills.js (SKILL.md is a directory — readFileSync error):');
|
||||
|
||||
if (test('fails gracefully when SKILL.md is a directory instead of a file', () => {
|
||||
const testDir = createTestDir();
|
||||
const skillDir = path.join(testDir, 'dir-skill');
|
||||
fs.mkdirSync(skillDir);
|
||||
// Create SKILL.md as a DIRECTORY, not a file — existsSync returns true
|
||||
// but readFileSync throws EISDIR, exercising the catch block (lines 33-37)
|
||||
fs.mkdirSync(path.join(skillDir, 'SKILL.md'));
|
||||
|
||||
const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should fail when SKILL.md is a directory');
|
||||
assert.ok(result.stderr.includes('dir-skill'), 'Should report the problematic skill');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 57: validate-rules.js (broken symlink — statSync catch block):');
|
||||
|
||||
if (test('reports error for broken symlink .md file in rules directory', () => {
|
||||
const testDir = createTestDir();
|
||||
// Create a valid rule first
|
||||
fs.writeFileSync(path.join(testDir, 'valid.md'), '# Valid Rule');
|
||||
// Create a broken symlink (dangling → target doesn't exist)
|
||||
// statSync follows symlinks and throws ENOENT, exercising catch (lines 35-38)
|
||||
try {
|
||||
fs.symlinkSync('/nonexistent/target.md', path.join(testDir, 'broken.md'));
|
||||
} catch {
|
||||
// Skip on systems that don't support symlinks
|
||||
console.log(' (skipped — symlinks not supported)');
|
||||
cleanupTestDir(testDir);
|
||||
return;
|
||||
}
|
||||
|
||||
const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should fail on broken symlink');
|
||||
assert.ok(result.stderr.includes('broken.md'), 'Should report the broken symlink file');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 57: validate-commands.js (adjacent code blocks both stripped):');
|
||||
|
||||
if (test('strips multiple adjacent code blocks before checking references', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
// Two adjacent code blocks, each with broken refs — BOTH must be stripped
|
||||
fs.writeFileSync(path.join(testDir, 'multi-blocks.md'),
|
||||
'# Multi Block\n\n' +
|
||||
'```\n`/phantom-a` in first block\n```\n\n' +
|
||||
'Content between blocks\n\n' +
|
||||
'```\n`/phantom-b` in second block\nagents/ghost-agent.md\n```\n\n' +
|
||||
'Final content');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
'Both code blocks should be stripped — no broken refs reported');
|
||||
assert.ok(!result.stderr.includes('phantom-a'), 'First block ref should be stripped');
|
||||
assert.ok(!result.stderr.includes('phantom-b'), 'Second block ref should be stripped');
|
||||
assert.ok(!result.stderr.includes('ghost-agent'), 'Agent ref in second block should be stripped');
|
||||
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 58: readFileSync catch block, colonIdx edge case, command-as-object ──
|
||||
console.log('\nRound 58: validate-agents.js (unreadable agent file — readFileSync catch):');
|
||||
|
||||
if (test('reports error when agent .md file is unreadable (chmod 000)', () => {
|
||||
// Skip on Windows or when running as root (permissions won't work)
|
||||
if (process.platform === 'win32' || (process.getuid && process.getuid() === 0)) {
|
||||
console.log(' (skipped — not supported on this platform)');
|
||||
return;
|
||||
}
|
||||
const testDir = createTestDir();
|
||||
const agentFile = path.join(testDir, 'locked.md');
|
||||
fs.writeFileSync(agentFile, '---\nmodel: sonnet\ntools: Read\n---\n# Agent');
|
||||
fs.chmodSync(agentFile, 0o000);
|
||||
|
||||
try {
|
||||
const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should exit 1 on read error');
|
||||
assert.ok(result.stderr.includes('locked.md'), 'Should mention the unreadable file');
|
||||
} finally {
|
||||
fs.chmodSync(agentFile, 0o644);
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 58: validate-agents.js (frontmatter line with colon at position 0):');
|
||||
|
||||
if (test('rejects agent when required field key has colon at position 0 (no key name)', () => {
|
||||
const testDir = createTestDir();
|
||||
fs.writeFileSync(path.join(testDir, 'bad-colon.md'),
|
||||
'---\n:sonnet\ntools: Read\n---\n# Agent with leading colon');
|
||||
|
||||
const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should fail — model field is missing (colon at idx 0 skipped)');
|
||||
assert.ok(result.stderr.includes('model'), 'Should report missing model field');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 58: validate-hooks.js (command is a plain object — not string or array):');
|
||||
|
||||
if (test('rejects hook entry where command is a plain object', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
hooks: {
|
||||
PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: { run: 'echo hi' } }] }]
|
||||
}
|
||||
}));
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1, 'Should reject object command (not string or array)');
|
||||
assert.ok(result.stderr.includes('command'), 'Should report invalid command field');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 63: object-format missing matcher, unreadable command file, empty commands dir ──
|
||||
console.log('\nRound 63: validate-hooks.js (object-format matcher missing matcher field):');
|
||||
|
||||
if (test('rejects object-format matcher entry missing matcher field', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
// Object format: matcher entry has hooks array but NO matcher field
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
hooks: {
|
||||
PreToolUse: [{ hooks: [{ type: 'command', command: 'echo ok' }] }]
|
||||
}
|
||||
}));
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1, 'Should fail on missing matcher field in object format');
|
||||
assert.ok(result.stderr.includes("missing 'matcher' field"), 'Should report missing matcher field');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 63: validate-commands.js (unreadable command file):');
|
||||
|
||||
if (test('reports error when command .md file is unreadable (chmod 000)', () => {
|
||||
if (process.platform === 'win32' || (process.getuid && process.getuid() === 0)) {
|
||||
console.log(' (skipped — not supported on this platform)');
|
||||
return;
|
||||
}
|
||||
const testDir = createTestDir();
|
||||
const cmdFile = path.join(testDir, 'locked.md');
|
||||
fs.writeFileSync(cmdFile, '# Locked Command');
|
||||
fs.chmodSync(cmdFile, 0o000);
|
||||
|
||||
try {
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: '/nonexistent', SKILLS_DIR: '/nonexistent'
|
||||
});
|
||||
assert.strictEqual(result.code, 1, 'Should exit 1 on read error');
|
||||
assert.ok(result.stderr.includes('locked.md'), 'Should mention the unreadable file');
|
||||
} finally {
|
||||
fs.chmodSync(cmdFile, 0o644);
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 63: validate-commands.js (empty commands directory):');
|
||||
|
||||
if (test('passes on empty commands directory (no .md files)', () => {
|
||||
const testDir = createTestDir();
|
||||
// Only non-.md files — no .md files to validate
|
||||
fs.writeFileSync(path.join(testDir, 'readme.txt'), 'not a command');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: '/nonexistent', SKILLS_DIR: '/nonexistent'
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should pass on empty commands directory');
|
||||
assert.ok(result.stdout.includes('Validated 0'), 'Should report 0 validated');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 65: empty directories for rules and skills ──
|
||||
console.log('\nRound 65: validate-rules.js (empty directory — no .md files):');
|
||||
|
||||
if (test('passes on rules directory with no .md files (Validated 0)', () => {
|
||||
const testDir = createTestDir();
|
||||
// Only non-.md files — readdirSync filter yields empty array
|
||||
fs.writeFileSync(path.join(testDir, 'notes.txt'), 'not a rule');
|
||||
fs.writeFileSync(path.join(testDir, 'config.json'), '{}');
|
||||
|
||||
const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
|
||||
assert.strictEqual(result.code, 0, 'Should pass on empty rules directory');
|
||||
assert.ok(result.stdout.includes('Validated 0'), 'Should report 0 validated rule files');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 65: validate-skills.js (empty directory — no subdirectories):');
|
||||
|
||||
if (test('passes on skills directory with only files, no subdirectories (Validated 0)', () => {
|
||||
const testDir = createTestDir();
|
||||
// Only files, no subdirectories — isDirectory filter yields empty array
|
||||
fs.writeFileSync(path.join(testDir, 'README.md'), '# Skills');
|
||||
fs.writeFileSync(path.join(testDir, '.gitkeep'), '');
|
||||
|
||||
const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 0, 'Should pass on skills directory with no subdirectories');
|
||||
assert.ok(result.stdout.includes('Validated 0'), 'Should report 0 validated skill directories');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 70: validate-commands.js "would create:" line skip ──
|
||||
console.log('\nRound 70: validate-commands.js (would create: skip):');
|
||||
|
||||
if (test('skips command references on "would create:" lines', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
// "Would create:" is the alternate form checked by the regex at line 80:
|
||||
// if (/creates:|would create:/i.test(line)) continue;
|
||||
// Only "creates:" was previously tested (Round 20). "Would create:" exercises
|
||||
// the second alternation in the regex.
|
||||
fs.writeFileSync(path.join(testDir, 'gen-cmd.md'),
|
||||
'# Generator Command\n\nWould create: `/phantom-cmd` in your project.\n\nThis is safe.');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should skip "would create:" lines');
|
||||
assert.ok(!result.stderr.includes('phantom-cmd'), 'Should not flag ref on "would create:" line');
|
||||
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 72: validate-hooks.js async/timeout type validation ──
|
||||
console.log('\nRound 72: validate-hooks.js (async and timeout type validation):');
|
||||
|
||||
if (test('rejects hook with non-boolean async field', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
PreToolUse: [{
|
||||
matcher: 'Write',
|
||||
hooks: [{
|
||||
type: 'intercept',
|
||||
command: 'echo test',
|
||||
async: 'yes' // Should be boolean, not string
|
||||
}]
|
||||
}]
|
||||
}));
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1, 'Should fail on non-boolean async');
|
||||
assert.ok(result.stderr.includes('async'), 'Should mention async in error');
|
||||
assert.ok(result.stderr.includes('boolean'), 'Should mention boolean type');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('rejects hook with negative timeout value', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
PostToolUse: [{
|
||||
matcher: 'Edit',
|
||||
hooks: [{
|
||||
type: 'intercept',
|
||||
command: 'echo test',
|
||||
timeout: -5 // Must be non-negative
|
||||
}]
|
||||
}]
|
||||
}));
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1, 'Should fail on negative timeout');
|
||||
assert.ok(result.stderr.includes('timeout'), 'Should mention timeout in error');
|
||||
assert.ok(result.stderr.includes('non-negative'), 'Should mention non-negative');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 73: validate-commands.js skill directory statSync catch ──
|
||||
console.log('\nRound 73: validate-commands.js (unreadable skill entry — statSync catch):');
|
||||
|
||||
if (test('skips unreadable skill directory entries without error (broken symlink)', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
|
||||
// Create one valid skill directory and one broken symlink
|
||||
const validSkill = path.join(skillsDir, 'valid-skill');
|
||||
fs.mkdirSync(validSkill, { recursive: true });
|
||||
// Broken symlink: target does not exist — statSync will throw ENOENT
|
||||
const brokenLink = path.join(skillsDir, 'broken-skill');
|
||||
fs.symlinkSync('/nonexistent/target/path', brokenLink);
|
||||
|
||||
// Command that references the valid skill (should resolve)
|
||||
fs.writeFileSync(path.join(testDir, 'cmd.md'),
|
||||
'# Command\nSee skills/valid-skill/ for details.');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
'Should pass — broken symlink in skills dir should be skipped silently');
|
||||
// The broken-skill should NOT be in validSkills, so referencing it would warn
|
||||
// but the valid-skill reference should resolve fine
|
||||
cleanupTestDir(testDir);
|
||||
cleanupTestDir(agentsDir);
|
||||
fs.rmSync(skillsDir, { recursive: true, force: true });
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 76: validate-hooks.js invalid JSON in hooks.json ──
|
||||
console.log('\nRound 76: validate-hooks.js (invalid JSON in hooks.json):');
|
||||
|
||||
if (test('reports error for invalid JSON in hooks.json', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
fs.writeFileSync(hooksFile, '{not valid json!!!');
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 1,
|
||||
`Expected exit 1 for invalid JSON, got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('Invalid JSON'),
|
||||
`stderr should mention Invalid JSON, got: ${result.stderr}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 78: validate-hooks.js wrapped { hooks: { ... } } format ──
|
||||
console.log('\nRound 78: validate-hooks.js (wrapped hooks format):');
|
||||
|
||||
if (test('validates wrapped format { hooks: { PreToolUse: [...] } }', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksFile = path.join(testDir, 'hooks.json');
|
||||
// The production hooks.json uses this wrapped format — { hooks: { ... } }
|
||||
// data.hooks is the object with event types, not data itself
|
||||
fs.writeFileSync(hooksFile, JSON.stringify({
|
||||
"$schema": "https://json.schemastore.org/claude-code-settings.json",
|
||||
hooks: {
|
||||
PreToolUse: [{ matcher: 'Write', hooks: [{ type: 'command', command: 'echo ok' }] }],
|
||||
PostToolUse: [{ matcher: 'Read', hooks: [{ type: 'command', command: 'echo done' }] }]
|
||||
}
|
||||
}));
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should pass wrapped hooks format, got exit ${result.code}. stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('Validated 2'),
|
||||
`Should validate 2 matchers, got: ${result.stdout}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 79: validate-commands.js warnings count suffix in output ──
|
||||
console.log('\nRound 79: validate-commands.js (warnings count in output):');
|
||||
|
||||
if (test('output includes (N warnings) suffix when skill references produce warnings', () => {
|
||||
const testDir = createTestDir();
|
||||
const agentsDir = createTestDir();
|
||||
const skillsDir = createTestDir();
|
||||
// Create a command that references 2 non-existent skill directories
|
||||
// Each triggers a WARN (not error) — warnCount should be 2
|
||||
fs.writeFileSync(path.join(testDir, 'cmd-warn.md'),
|
||||
'# Command\nSee skills/fake-skill-a/ and skills/fake-skill-b/ for details.');
|
||||
|
||||
const result = runValidatorWithDirs('validate-commands', {
|
||||
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Skill warnings should not cause error exit');
|
||||
// The validate-commands output appends "(N warnings)" when warnCount > 0
|
||||
assert.ok(result.stdout.includes('(2 warnings)'),
|
||||
`Output should include "(2 warnings)" suffix, got: ${result.stdout}`);
|
||||
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 80: validate-hooks.js legacy array format (lines 115-135) ──
|
||||
console.log('\nRound 80: validate-hooks.js (legacy array format):');
|
||||
|
||||
if (test('validates hooks in legacy array format (hooks is an array, not object)', () => {
|
||||
const testDir = createTestDir();
|
||||
// The legacy array format wraps hooks as { hooks: [...] } where the array
|
||||
// contains matcher objects directly. This exercises lines 115-135 of
|
||||
// validate-hooks.js which use "Hook ${i}" error labels instead of "${eventType}[${i}]".
|
||||
const hooksJson = JSON.stringify({
|
||||
hooks: [
|
||||
{
|
||||
matcher: 'Edit',
|
||||
hooks: [{ type: 'command', command: 'echo legacy test' }]
|
||||
}
|
||||
]
|
||||
});
|
||||
fs.writeFileSync(path.join(testDir, 'hooks.json'), hooksJson);
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', path.join(testDir, 'hooks.json'));
|
||||
assert.strictEqual(result.code, 0, 'Should pass on valid legacy array format');
|
||||
assert.ok(result.stdout.includes('Validated 1 hook'),
|
||||
`Should report 1 validated matcher, got: ${result.stdout}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 82: Notification and SubagentStop event types ──
|
||||
|
||||
console.log('\nRound 82: validate-hooks (Notification and SubagentStop event types):');
|
||||
|
||||
if (test('accepts Notification and SubagentStop as valid event types', () => {
|
||||
const testDir = createTestDir();
|
||||
const hooksJson = JSON.stringify({
|
||||
hooks: [
|
||||
{
|
||||
matcher: { type: 'Notification' },
|
||||
hooks: [{ type: 'command', command: 'echo notification' }]
|
||||
},
|
||||
{
|
||||
matcher: { type: 'SubagentStop' },
|
||||
hooks: [{ type: 'command', command: 'echo subagent stopped' }]
|
||||
}
|
||||
]
|
||||
});
|
||||
fs.writeFileSync(path.join(testDir, 'hooks.json'), hooksJson);
|
||||
|
||||
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', path.join(testDir, 'hooks.json'));
|
||||
assert.strictEqual(result.code, 0, 'Should pass with Notification and SubagentStop events');
|
||||
assert.ok(result.stdout.includes('Validated 2 hook'),
|
||||
`Should report 2 validated matchers, got: ${result.stdout}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 83: validate-agents whitespace-only field, validate-skills empty SKILL.md ──
|
||||
|
||||
console.log('\nRound 83: validate-agents (whitespace-only frontmatter field value):');
|
||||
|
||||
if (test('rejects agent with whitespace-only model field (trim guard)', () => {
|
||||
const testDir = createTestDir();
|
||||
// model has only whitespace — extractFrontmatter produces { model: ' ', tools: 'Read' }
|
||||
// The condition: typeof frontmatter[field] === 'string' && !frontmatter[field].trim()
|
||||
// evaluates to true for model → "Missing required field: model"
|
||||
fs.writeFileSync(path.join(testDir, 'ws.md'), '---\nmodel: \ntools: Read\n---\n# Whitespace model');
|
||||
|
||||
const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should reject whitespace-only model');
|
||||
assert.ok(result.stderr.includes('model'), 'Should report missing model field');
|
||||
assert.ok(!result.stderr.includes('tools'), 'tools field is valid and should NOT be flagged');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 83: validate-skills (empty SKILL.md file):');
|
||||
|
||||
if (test('rejects skill directory with empty SKILL.md file', () => {
|
||||
const testDir = createTestDir();
|
||||
const skillDir = path.join(testDir, 'empty-skill');
|
||||
fs.mkdirSync(skillDir, { recursive: true });
|
||||
// Create SKILL.md with only whitespace (trim to zero length)
|
||||
fs.writeFileSync(path.join(skillDir, 'SKILL.md'), ' \n \n');
|
||||
|
||||
const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
|
||||
assert.strictEqual(result.code, 1, 'Should reject empty SKILL.md');
|
||||
assert.ok(result.stderr.includes('Empty file'),
|
||||
`Should report "Empty file", got: ${result.stderr}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
|
||||
@@ -258,6 +258,159 @@ function runTests() {
|
||||
assert.strictEqual(result.status, 0, 'Should exit 0 on empty stdin');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 53: env var fallback path ──
|
||||
console.log('\nRound 53: CLAUDE_TRANSCRIPT_PATH fallback:');
|
||||
|
||||
if (test('falls back to CLAUDE_TRANSCRIPT_PATH env var when stdin is invalid JSON', () => {
|
||||
const testDir = createTestDir();
|
||||
const transcript = createTranscript(testDir, 15);
|
||||
|
||||
const result = spawnSync('node', [evaluateScript], {
|
||||
encoding: 'utf8',
|
||||
input: 'invalid json {{{',
|
||||
timeout: 10000,
|
||||
env: { ...process.env, CLAUDE_TRANSCRIPT_PATH: transcript }
|
||||
});
|
||||
|
||||
assert.strictEqual(result.status, 0, 'Should exit 0');
|
||||
assert.ok(
|
||||
result.stderr.includes('15 messages'),
|
||||
'Should evaluate using env var fallback path'
|
||||
);
|
||||
assert.ok(
|
||||
result.stderr.includes('evaluate'),
|
||||
'Should indicate session evaluation'
|
||||
);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 65: regex whitespace tolerance in countInFile ──
|
||||
console.log('\nRound 65: regex whitespace tolerance around colon:');
|
||||
|
||||
if (test('counts user messages when JSON has spaces around colon ("type" : "user")', () => {
|
||||
const testDir = createTestDir();
|
||||
const filePath = path.join(testDir, 'spaced.jsonl');
|
||||
// Manually write JSON with spaces around the colon — NOT JSON.stringify
|
||||
// The regex /"type"\s*:\s*"user"/g should match these
|
||||
const lines = [];
|
||||
for (let i = 0; i < 12; i++) {
|
||||
lines.push(`{"type" : "user", "content": "msg ${i}"}`);
|
||||
lines.push(`{"type" : "assistant", "content": "resp ${i}"}`);
|
||||
}
|
||||
fs.writeFileSync(filePath, lines.join('\n') + '\n');
|
||||
|
||||
const result = runEvaluate({ transcript_path: filePath });
|
||||
assert.strictEqual(result.code, 0);
|
||||
// 12 user messages >= 10 threshold → should evaluate (not "too short")
|
||||
assert.ok(!result.stderr.includes('too short'),
|
||||
'Should NOT say too short for 12 spaced-colon user messages');
|
||||
assert.ok(
|
||||
result.stderr.includes('12 messages') || result.stderr.includes('evaluate'),
|
||||
`Should evaluate session with spaced-colon JSON. Got stderr: ${result.stderr}`
|
||||
);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 85: config file parse error (corrupt JSON) ──
|
||||
console.log('\nRound 85: config parse error catch block:');
|
||||
|
||||
if (test('falls back to defaults when config file contains invalid JSON', () => {
|
||||
// The evaluate-session.js script reads config from:
|
||||
// path.join(__dirname, '..', '..', 'skills', 'continuous-learning', 'config.json')
|
||||
// where __dirname = scripts/hooks/ → config = repo_root/skills/continuous-learning/config.json
|
||||
const configPath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning', 'config.json');
|
||||
let originalContent = null;
|
||||
try {
|
||||
originalContent = fs.readFileSync(configPath, 'utf8');
|
||||
} catch {
|
||||
// Config file may not exist — that's fine
|
||||
}
|
||||
|
||||
try {
|
||||
// Write corrupt JSON to the config file
|
||||
fs.writeFileSync(configPath, 'NOT VALID JSON {{{ corrupt data !!!', 'utf8');
|
||||
|
||||
// Create a transcript with 12 user messages (above default threshold of 10)
|
||||
const testDir = createTestDir();
|
||||
const transcript = createTranscript(testDir, 12);
|
||||
const result = runEvaluate({ transcript_path: transcript });
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 despite corrupt config');
|
||||
// With corrupt config, defaults apply: min_session_length = 10
|
||||
// 12 >= 10 → should evaluate (not "too short")
|
||||
assert.ok(!result.stderr.includes('too short'),
|
||||
`Should NOT say too short — corrupt config falls back to default min=10. Got: ${result.stderr}`);
|
||||
assert.ok(
|
||||
result.stderr.includes('12 messages') || result.stderr.includes('evaluate'),
|
||||
`Should evaluate with 12 messages using default threshold. Got: ${result.stderr}`
|
||||
);
|
||||
// The catch block logs "Failed to parse config" — verify that log message
|
||||
assert.ok(result.stderr.includes('Failed to parse config'),
|
||||
`Should log config parse error. Got: ${result.stderr}`);
|
||||
|
||||
cleanupTestDir(testDir);
|
||||
} finally {
|
||||
// Restore original config file
|
||||
if (originalContent !== null) {
|
||||
fs.writeFileSync(configPath, originalContent, 'utf8');
|
||||
} else {
|
||||
// Config didn't exist before — remove the corrupt one we created
|
||||
try { fs.unlinkSync(configPath); } catch { /* best-effort */ }
|
||||
}
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 86: config learned_skills_path override with ~ expansion ──
|
||||
console.log('\nRound 86: config learned_skills_path override:');
|
||||
|
||||
if (test('uses learned_skills_path from config with ~ expansion', () => {
|
||||
// evaluate-session.js lines 69-72:
|
||||
// if (config.learned_skills_path) {
|
||||
// learnedSkillsPath = config.learned_skills_path.replace(/^~/, require('os').homedir());
|
||||
// }
|
||||
// This branch was never tested — only the parse error (Round 85) and default path.
|
||||
const configPath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning', 'config.json');
|
||||
let originalContent = null;
|
||||
try {
|
||||
originalContent = fs.readFileSync(configPath, 'utf8');
|
||||
} catch {
|
||||
// Config file may not exist
|
||||
}
|
||||
|
||||
try {
|
||||
// Write config with a custom learned_skills_path using ~ prefix
|
||||
fs.writeFileSync(configPath, JSON.stringify({
|
||||
min_session_length: 10,
|
||||
learned_skills_path: '~/custom-learned-skills-dir'
|
||||
}));
|
||||
|
||||
// Create a transcript with 12 user messages (above threshold)
|
||||
const testDir = createTestDir();
|
||||
const transcript = createTranscript(testDir, 12);
|
||||
const result = runEvaluate({ transcript_path: transcript });
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
// The script logs "Save learned skills to: <path>" where <path> should
|
||||
// be the expanded home directory, NOT the literal "~"
|
||||
assert.ok(!result.stderr.includes('~/custom-learned-skills-dir'),
|
||||
'Should NOT contain literal ~ in output (should be expanded)');
|
||||
assert.ok(result.stderr.includes('custom-learned-skills-dir'),
|
||||
`Should reference the custom learned skills dir. Got: ${result.stderr}`);
|
||||
// The ~ should have been replaced with os.homedir()
|
||||
assert.ok(result.stderr.includes(os.homedir()),
|
||||
`Should contain expanded home directory. Got: ${result.stderr}`);
|
||||
|
||||
cleanupTestDir(testDir);
|
||||
} finally {
|
||||
// Restore original config file
|
||||
if (originalContent !== null) {
|
||||
fs.writeFileSync(configPath, originalContent, 'utf8');
|
||||
} else {
|
||||
try { fs.unlinkSync(configPath); } catch { /* best-effort */ }
|
||||
}
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
|
||||
@@ -2546,6 +2546,941 @@ async function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 49: typecheck extension matching and session-end conditional sections ──
|
||||
console.log('\nRound 49: post-edit-typecheck.js (extension edge cases):');
|
||||
|
||||
if (await asyncTest('.d.ts files match the TS regex and trigger typecheck path', async () => {
|
||||
const testDir = createTestDir();
|
||||
const testFile = path.join(testDir, 'types.d.ts');
|
||||
fs.writeFileSync(testFile, 'declare const x: number;');
|
||||
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: testFile } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-typecheck.js'), stdinJson);
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for .d.ts file');
|
||||
assert.ok(result.stdout.includes('tool_input'), 'Should pass through stdin data');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('.mts extension does not trigger typecheck', async () => {
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: '/project/utils.mts' } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-typecheck.js'), stdinJson);
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for .mts file');
|
||||
assert.strictEqual(result.stdout, stdinJson, 'Should pass through .mts unchanged');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 49: session-end.js (conditional summary sections):');
|
||||
|
||||
if (await asyncTest('summary omits Files Modified and Tools Used when none found', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-notools-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const testDir = createTestDir();
|
||||
const transcriptPath = path.join(testDir, 'transcript.jsonl');
|
||||
// Only user messages — no tool_use entries at all
|
||||
const lines = [
|
||||
'{"type":"user","content":"How does authentication work?"}',
|
||||
'{"type":"assistant","message":{"content":[{"type":"text","text":"It uses JWT"}]}}'
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('-session.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
assert.ok(content.includes('authentication'), 'Should include user message');
|
||||
assert.ok(!content.includes('### Files Modified'), 'Should omit Files Modified when empty');
|
||||
assert.ok(!content.includes('### Tools Used'), 'Should omit Tools Used when empty');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 50: alias reporting, parallel compaction, graceful degradation ──
|
||||
console.log('\nRound 50: session-start.js (alias reporting):');
|
||||
|
||||
if (await asyncTest('reports available session aliases on startup', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-start-alias-${Date.now()}`);
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'sessions'), { recursive: true });
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'skills', 'learned'), { recursive: true });
|
||||
|
||||
// Pre-populate the aliases file
|
||||
fs.writeFileSync(path.join(isoHome, '.claude', 'session-aliases.json'), JSON.stringify({
|
||||
version: '1.0',
|
||||
aliases: {
|
||||
'my-feature': { sessionPath: '/sessions/feat', createdAt: new Date().toISOString(), updatedAt: new Date().toISOString(), title: null },
|
||||
'bug-fix': { sessionPath: '/sessions/fix', createdAt: new Date().toISOString(), updatedAt: new Date().toISOString(), title: null }
|
||||
},
|
||||
metadata: { totalCount: 2, lastUpdated: new Date().toISOString() }
|
||||
}));
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.ok(result.stderr.includes('alias'), 'Should mention aliases in stderr');
|
||||
assert.ok(
|
||||
result.stderr.includes('my-feature') || result.stderr.includes('bug-fix'),
|
||||
'Should list at least one alias name'
|
||||
);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 50: pre-compact.js (parallel execution):');
|
||||
|
||||
if (await asyncTest('parallel compaction runs all append to log without loss', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-compact-par-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
try {
|
||||
const promises = Array(3).fill(null).map(() =>
|
||||
runScript(path.join(scriptsDir, 'pre-compact.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
})
|
||||
);
|
||||
const results = await Promise.all(promises);
|
||||
results.forEach((r, i) => assert.strictEqual(r.code, 0, `Run ${i} should exit 0`));
|
||||
|
||||
const logFile = path.join(sessionsDir, 'compaction-log.txt');
|
||||
assert.ok(fs.existsSync(logFile), 'Compaction log should exist');
|
||||
const content = fs.readFileSync(logFile, 'utf8');
|
||||
const entries = (content.match(/Context compaction triggered/g) || []).length;
|
||||
assert.strictEqual(entries, 3, `Should have 3 log entries, got ${entries}`);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 50: session-start.js (graceful degradation):');
|
||||
|
||||
if (await asyncTest('exits 0 when sessions path is a file (not a directory)', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-start-blocked-${Date.now()}`);
|
||||
fs.mkdirSync(path.join(isoHome, '.claude'), { recursive: true });
|
||||
// Block sessions dir creation by placing a file at that path
|
||||
fs.writeFileSync(path.join(isoHome, '.claude', 'sessions'), 'blocked');
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even when sessions dir is blocked');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 53: console-warn max matches and format non-existent file ──
|
||||
console.log('\nRound 53: post-edit-console-warn.js (max matches truncation):');
|
||||
|
||||
if (await asyncTest('reports maximum 5 console.log matches per file', async () => {
|
||||
const testDir = createTestDir();
|
||||
const testFile = path.join(testDir, 'many-logs.js');
|
||||
const lines = Array(7).fill(null).map((_, i) =>
|
||||
`console.log("debug line ${i + 1}");`
|
||||
);
|
||||
fs.writeFileSync(testFile, lines.join('\n'));
|
||||
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: testFile } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-console-warn.js'), stdinJson);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
// Count line number reports in stderr (format: "N: console.log(...)")
|
||||
const lineReports = (result.stderr.match(/^\d+:/gm) || []).length;
|
||||
assert.strictEqual(lineReports, 5, `Should report max 5 matches, got ${lineReports}`);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 53: post-edit-format.js (non-existent file):');
|
||||
|
||||
if (await asyncTest('passes through data for non-existent .tsx file path', async () => {
|
||||
const stdinJson = JSON.stringify({
|
||||
tool_input: { file_path: '/nonexistent/path/file.tsx' }
|
||||
});
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for non-existent file');
|
||||
assert.strictEqual(result.stdout, stdinJson, 'Should pass through stdin data unchanged');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 55: maxAge boundary, multi-session injection, stdin overflow ──
|
||||
console.log('\nRound 55: session-start.js (maxAge 7-day boundary):');
|
||||
|
||||
if (await asyncTest('excludes session files older than 7 days', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-start-7day-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'skills', 'learned'), { recursive: true });
|
||||
|
||||
// Create session file 6.9 days old (should be INCLUDED by maxAge:7)
|
||||
const recentFile = path.join(sessionsDir, '2026-02-06-recent69-session.tmp');
|
||||
fs.writeFileSync(recentFile, '# Recent Session\n\nRECENT CONTENT HERE');
|
||||
const sixPointNineDaysAgo = new Date(Date.now() - 6.9 * 24 * 60 * 60 * 1000);
|
||||
fs.utimesSync(recentFile, sixPointNineDaysAgo, sixPointNineDaysAgo);
|
||||
|
||||
// Create session file 8 days old (should be EXCLUDED by maxAge:7)
|
||||
const oldFile = path.join(sessionsDir, '2026-02-05-old8day-session.tmp');
|
||||
fs.writeFileSync(oldFile, '# Old Session\n\nOLD CONTENT SHOULD NOT APPEAR');
|
||||
const eightDaysAgo = new Date(Date.now() - 8 * 24 * 60 * 60 * 1000);
|
||||
fs.utimesSync(oldFile, eightDaysAgo, eightDaysAgo);
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.ok(result.stderr.includes('1 recent session'),
|
||||
`Should find 1 recent session (6.9-day included, 8-day excluded), stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('RECENT CONTENT HERE'),
|
||||
'Should inject the 6.9-day-old session content');
|
||||
assert.ok(!result.stdout.includes('OLD CONTENT SHOULD NOT APPEAR'),
|
||||
'Should NOT inject the 8-day-old session content');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 55: session-start.js (newest session selection):');
|
||||
|
||||
if (await asyncTest('injects newest session when multiple recent sessions exist', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-start-multi-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'skills', 'learned'), { recursive: true });
|
||||
|
||||
const now = Date.now();
|
||||
|
||||
// Create older session (2 days ago)
|
||||
const olderSession = path.join(sessionsDir, '2026-02-11-olderabc-session.tmp');
|
||||
fs.writeFileSync(olderSession, '# Older Session\n\nOLDER_CONTEXT_MARKER');
|
||||
fs.utimesSync(olderSession, new Date(now - 2 * 86400000), new Date(now - 2 * 86400000));
|
||||
|
||||
// Create newer session (1 day ago)
|
||||
const newerSession = path.join(sessionsDir, '2026-02-12-newerdef-session.tmp');
|
||||
fs.writeFileSync(newerSession, '# Newer Session\n\nNEWER_CONTEXT_MARKER');
|
||||
fs.utimesSync(newerSession, new Date(now - 1 * 86400000), new Date(now - 1 * 86400000));
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.ok(result.stderr.includes('2 recent session'),
|
||||
`Should find 2 recent sessions, stderr: ${result.stderr}`);
|
||||
// Should inject the NEWER session, not the older one
|
||||
assert.ok(result.stdout.includes('NEWER_CONTEXT_MARKER'),
|
||||
'Should inject the newest session content');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 55: session-end.js (stdin overflow):');
|
||||
|
||||
if (await asyncTest('handles stdin exceeding MAX_STDIN (1MB) gracefully', async () => {
|
||||
const testDir = createTestDir();
|
||||
const transcriptPath = path.join(testDir, 'transcript.jsonl');
|
||||
// Create a minimal valid transcript so env var fallback works
|
||||
fs.writeFileSync(transcriptPath, JSON.stringify({ type: 'user', content: 'Overflow test' }) + '\n');
|
||||
|
||||
// Create stdin > 1MB: truncated JSON will be invalid → falls back to env var
|
||||
const oversizedPayload = '{"transcript_path":"' + 'x'.repeat(1048600) + '"}';
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), oversizedPayload, {
|
||||
CLAUDE_TRANSCRIPT_PATH: transcriptPath
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with oversized stdin');
|
||||
// Truncated JSON → JSON.parse throws → falls back to env var → creates session file
|
||||
assert.ok(
|
||||
result.stderr.includes('Created session file') || result.stderr.includes('Updated session file'),
|
||||
`Should create/update session file via env var fallback, stderr: ${result.stderr}`
|
||||
);
|
||||
} finally {
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 56: typecheck tsconfig walk-up, suggest-compact fallback path ──
|
||||
console.log('\nRound 56: post-edit-typecheck.js (tsconfig in parent directory):');
|
||||
|
||||
if (await asyncTest('walks up directory tree to find tsconfig.json in grandparent', async () => {
|
||||
const testDir = createTestDir();
|
||||
// Place tsconfig at the TOP level, file is nested 2 levels deep
|
||||
fs.writeFileSync(path.join(testDir, 'tsconfig.json'), JSON.stringify({
|
||||
compilerOptions: { strict: false, noEmit: true }
|
||||
}));
|
||||
const deepDir = path.join(testDir, 'src', 'components');
|
||||
fs.mkdirSync(deepDir, { recursive: true });
|
||||
const testFile = path.join(deepDir, 'widget.ts');
|
||||
fs.writeFileSync(testFile, 'export const value: number = 42;\n');
|
||||
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: testFile } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-typecheck.js'), stdinJson);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 after walking up to find tsconfig');
|
||||
// Core assertion: stdin must pass through regardless of whether tsc ran
|
||||
const parsed = JSON.parse(result.stdout);
|
||||
assert.strictEqual(parsed.tool_input.file_path, testFile,
|
||||
'Should pass through original stdin data with file_path intact');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 56: suggest-compact.js (counter file as directory — fallback path):');
|
||||
|
||||
if (await asyncTest('exits 0 when counter file path is occupied by a directory', async () => {
|
||||
const sessionId = `dirblock-${Date.now()}`;
|
||||
const counterFile = path.join(os.tmpdir(), `claude-tool-count-${sessionId}`);
|
||||
// Create a DIRECTORY at the counter file path — openSync('a+') will fail with EISDIR
|
||||
fs.mkdirSync(counterFile);
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'suggest-compact.js'), '', {
|
||||
CLAUDE_SESSION_ID: sessionId
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
'Should exit 0 even when counter file path is a directory (graceful fallback)');
|
||||
} finally {
|
||||
// Cleanup: remove the blocking directory
|
||||
try { fs.rmdirSync(counterFile); } catch { /* best-effort */ }
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 59: session-start unreadable file, console-log stdin overflow, pre-compact write error ──
|
||||
console.log('\nRound 59: session-start.js (unreadable session file — readFile returns null):');
|
||||
|
||||
if (await asyncTest('does not inject content when session file is unreadable', async () => {
|
||||
// Skip on Windows or when running as root (permissions won't work)
|
||||
if (process.platform === 'win32' || (process.getuid && process.getuid() === 0)) {
|
||||
console.log(' (skipped — not supported on this platform)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-start-unreadable-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
// Create a session file with real content, then make it unreadable
|
||||
const sessionFile = path.join(sessionsDir, `${Date.now()}-session.tmp`);
|
||||
fs.writeFileSync(sessionFile, '# Sensitive session content that should NOT appear');
|
||||
fs.chmodSync(sessionFile, 0o000);
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with unreadable session file');
|
||||
// readFile returns null for unreadable files → content is null → no injection
|
||||
assert.ok(!result.stdout.includes('Sensitive session content'),
|
||||
'Should NOT inject content from unreadable file');
|
||||
} finally {
|
||||
try { fs.chmodSync(sessionFile, 0o644); } catch { /* best-effort */ }
|
||||
try { fs.rmSync(isoHome, { recursive: true, force: true }); } catch { /* best-effort */ }
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 59: check-console-log.js (stdin exceeding 1MB — truncation):');
|
||||
|
||||
if (await asyncTest('truncates stdin at 1MB limit and still passes through data', async () => {
|
||||
// Send 1.2MB of data — exceeds the 1MB MAX_STDIN limit
|
||||
const payload = 'x'.repeat(1024 * 1024 + 200000);
|
||||
const result = await runScript(path.join(scriptsDir, 'check-console-log.js'), payload);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with oversized stdin');
|
||||
// Output should be truncated — significantly less than input
|
||||
assert.ok(result.stdout.length < payload.length,
|
||||
`stdout (${result.stdout.length}) should be shorter than input (${payload.length})`);
|
||||
// Output should be approximately 1MB (last accepted chunk may push slightly over)
|
||||
assert.ok(result.stdout.length <= 1024 * 1024 + 65536,
|
||||
`stdout (${result.stdout.length}) should be near 1MB, not unbounded`);
|
||||
assert.ok(result.stdout.length > 0, 'Should still pass through truncated data');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 59: pre-compact.js (read-only session file — appendFile error):');
|
||||
|
||||
if (await asyncTest('exits 0 when session file is read-only (appendFile fails)', async () => {
|
||||
if (process.platform === 'win32' || (process.getuid && process.getuid() === 0)) {
|
||||
console.log(' (skipped — not supported on this platform)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-compact-ro-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
// Create a session file then make it read-only
|
||||
const sessionFile = path.join(sessionsDir, `${Date.now()}-session.tmp`);
|
||||
fs.writeFileSync(sessionFile, '# Active session\n');
|
||||
fs.chmodSync(sessionFile, 0o444);
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'pre-compact.js'), '', {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
// Should exit 0 — hooks must not block the user (catch at lines 45-47)
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even when append fails');
|
||||
// Session file should remain unchanged (write was blocked)
|
||||
const content = fs.readFileSync(sessionFile, 'utf8');
|
||||
assert.strictEqual(content, '# Active session\n',
|
||||
'Read-only session file should remain unchanged');
|
||||
} finally {
|
||||
try { fs.chmodSync(sessionFile, 0o644); } catch { /* best-effort */ }
|
||||
try { fs.rmSync(isoHome, { recursive: true, force: true }); } catch { /* best-effort */ }
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 60: replaceInFile failure, console-warn stdin overflow, format missing tool_input ──
|
||||
console.log('\nRound 60: session-end.js (replaceInFile returns false — timestamp update warning):');
|
||||
|
||||
if (await asyncTest('logs warning when existing session file lacks Last Updated field', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-end-nots-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
// Create transcript with a user message so a summary is produced
|
||||
const testDir = createTestDir();
|
||||
const transcriptPath = path.join(testDir, 'transcript.jsonl');
|
||||
fs.writeFileSync(transcriptPath, '{"type":"user","content":"test message"}\n');
|
||||
|
||||
// Pre-create session file WITHOUT the **Last Updated:** line
|
||||
// Use today's date and a short ID matching getSessionIdShort() pattern
|
||||
const today = new Date().toISOString().split('T')[0];
|
||||
const sessionFile = path.join(sessionsDir, `${today}-session-session.tmp`);
|
||||
fs.writeFileSync(sessionFile, '# Session file without timestamp marker\nSome existing content\n');
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even when replaceInFile fails');
|
||||
// replaceInFile returns false → line 166 logs warning about failed timestamp update
|
||||
assert.ok(result.stderr.includes('Failed to update') || result.stderr.includes('[SessionEnd]'),
|
||||
'Should log warning when timestamp pattern not found in session file');
|
||||
|
||||
cleanupTestDir(testDir);
|
||||
try { fs.rmSync(isoHome, { recursive: true, force: true }); } catch { /* best-effort */ }
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 60: post-edit-console-warn.js (stdin exceeding 1MB — truncation):');
|
||||
|
||||
if (await asyncTest('truncates stdin at 1MB limit and still passes through data', async () => {
|
||||
// Send 1.2MB of data — exceeds the 1MB MAX_STDIN limit
|
||||
const payload = 'x'.repeat(1024 * 1024 + 200000);
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-console-warn.js'), payload);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with oversized stdin');
|
||||
// Data should be truncated — stdout significantly less than input
|
||||
assert.ok(result.stdout.length < payload.length,
|
||||
`stdout (${result.stdout.length}) should be shorter than input (${payload.length})`);
|
||||
// Should be approximately 1MB (last accepted chunk may push slightly over)
|
||||
assert.ok(result.stdout.length <= 1024 * 1024 + 65536,
|
||||
`stdout (${result.stdout.length}) should be near 1MB, not unbounded`);
|
||||
assert.ok(result.stdout.length > 0, 'Should still pass through truncated data');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 60: post-edit-format.js (valid JSON without tool_input key):');
|
||||
|
||||
if (await asyncTest('skips formatting when JSON has no tool_input field', async () => {
|
||||
const stdinJson = JSON.stringify({ result: 'ok', output: 'some data' });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for JSON without tool_input');
|
||||
// input.tool_input?.file_path is undefined → skips formatting → passes through
|
||||
assert.strictEqual(result.stdout, stdinJson,
|
||||
'Should pass through data unchanged when tool_input is absent');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 64: post-edit-typecheck.js valid JSON without tool_input ──
|
||||
console.log('\nRound 64: post-edit-typecheck.js (valid JSON without tool_input):');
|
||||
|
||||
if (await asyncTest('skips typecheck when JSON has no tool_input field', async () => {
|
||||
const stdinJson = JSON.stringify({ result: 'ok', metadata: { action: 'test' } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-typecheck.js'), stdinJson);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for JSON without tool_input');
|
||||
// input.tool_input?.file_path is undefined → skips TS check → passes through
|
||||
assert.strictEqual(result.stdout, stdinJson,
|
||||
'Should pass through data unchanged when tool_input is absent');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 66: session-end.js entry.role === 'user' fallback and nonexistent transcript ──
|
||||
console.log('\nRound 66: session-end.js (entry.role user fallback):');
|
||||
|
||||
if (await asyncTest('extracts user messages from role-only format (no type field)', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-role-only-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const testDir = createTestDir();
|
||||
const transcriptPath = path.join(testDir, 'transcript.jsonl');
|
||||
// Use entries with ONLY role field (no type:"user") to exercise the fallback
|
||||
const lines = [
|
||||
'{"role":"user","content":"Deploy the production build"}',
|
||||
'{"role":"assistant","content":"I will deploy now"}',
|
||||
'{"role":"user","content":"Check the logs after deploy"}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('-session.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
// The role-only user messages should be extracted
|
||||
assert.ok(content.includes('Deploy the production build') || content.includes('deploy'),
|
||||
`Session file should include role-only user messages. Got: ${content.substring(0, 300)}`);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 66: session-end.js (nonexistent transcript path):');
|
||||
|
||||
if (await asyncTest('logs "Transcript not found" for nonexistent transcript_path', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-notfound-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: '/tmp/nonexistent-transcript-99999.jsonl' });
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for missing transcript');
|
||||
assert.ok(
|
||||
result.stderr.includes('Transcript not found') || result.stderr.includes('not found'),
|
||||
`Should log transcript not found. Got stderr: ${result.stderr.substring(0, 300)}`
|
||||
);
|
||||
// Should still create a session file (with blank template, since summary is null)
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('-session.tmp'));
|
||||
assert.ok(files.length > 0, 'Should still create session file even without transcript');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 70: session-end.js entry.name / entry.input fallback in direct tool_use entries ──
|
||||
console.log('\nRound 70: session-end.js (entry.name/entry.input fallback):');
|
||||
|
||||
if (await asyncTest('extracts tool name and file path from entry.name/entry.input (not tool_name/tool_input)', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r70-entryname-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
const transcriptPath = path.join(isoHome, 'transcript.jsonl');
|
||||
|
||||
// Use "name" and "input" fields instead of "tool_name" and "tool_input"
|
||||
// This exercises the fallback at session-end.js lines 63 and 66:
|
||||
// const toolName = entry.tool_name || entry.name || '';
|
||||
// const filePath = entry.tool_input?.file_path || entry.input?.file_path || '';
|
||||
const lines = [
|
||||
'{"type":"user","content":"Use the alt format fields"}',
|
||||
'{"type":"tool_use","name":"Edit","input":{"file_path":"/src/alt-format.ts"}}',
|
||||
'{"type":"tool_use","name":"Read","input":{"file_path":"/src/other.ts"}}',
|
||||
'{"type":"tool_use","name":"Write","input":{"file_path":"/src/written.ts"}}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
// Tools extracted via entry.name fallback
|
||||
assert.ok(content.includes('Edit'), 'Should list Edit via entry.name fallback');
|
||||
assert.ok(content.includes('Read'), 'Should list Read via entry.name fallback');
|
||||
// Files modified via entry.input fallback (Edit and Write, not Read)
|
||||
assert.ok(content.includes('/src/alt-format.ts'), 'Should list edited file via entry.input fallback');
|
||||
assert.ok(content.includes('/src/written.ts'), 'Should list written file via entry.input fallback');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 71: session-start.js default source shows getSelectionPrompt ──
|
||||
console.log('\nRound 71: session-start.js (default source — selection prompt):');
|
||||
|
||||
if (await asyncTest('shows selection prompt when no package manager preference found (default source)', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r71-ss-default-${Date.now()}`);
|
||||
const isoProject = path.join(isoHome, 'project');
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'sessions'), { recursive: true });
|
||||
fs.mkdirSync(path.join(isoHome, '.claude', 'skills', 'learned'), { recursive: true });
|
||||
fs.mkdirSync(isoProject, { recursive: true });
|
||||
// No package.json, no lock files, no package-manager.json — forces default source
|
||||
|
||||
try {
|
||||
const result = await new Promise((resolve, reject) => {
|
||||
const env = { ...process.env, HOME: isoHome, USERPROFILE: isoHome };
|
||||
delete env.CLAUDE_PACKAGE_MANAGER; // Remove any env-level PM override
|
||||
const proc = spawn('node', [path.join(scriptsDir, 'session-start.js')], {
|
||||
env,
|
||||
cwd: isoProject, // CWD with no package.json or lock files
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
proc.stdout.on('data', data => stdout += data);
|
||||
proc.stderr.on('data', data => stderr += data);
|
||||
proc.stdin.end();
|
||||
proc.on('close', code => resolve({ code, stdout, stderr }));
|
||||
proc.on('error', reject);
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
assert.ok(result.stderr.includes('No package manager preference'),
|
||||
`Should show selection prompt when source is default. Got stderr: ${result.stderr.slice(0, 500)}`);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 74: session-start.js main().catch handler ──
|
||||
console.log('\nRound 74: session-start.js (main catch — unrecoverable error):');
|
||||
|
||||
if (await asyncTest('session-start exits 0 with error message when HOME is non-directory', async () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// HOME=/dev/null makes ensureDir(sessionsDir) throw ENOTDIR,
|
||||
// which propagates to main().catch — the top-level error boundary
|
||||
const result = await runScript(path.join(scriptsDir, 'session-start.js'), '', {
|
||||
HOME: '/dev/null',
|
||||
USERPROFILE: '/dev/null'
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should exit 0 (don't block on errors), got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('[SessionStart] Error:'),
|
||||
`stderr should contain [SessionStart] Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 75: pre-compact.js main().catch handler ──
|
||||
console.log('\nRound 75: pre-compact.js (main catch — unrecoverable error):');
|
||||
|
||||
if (await asyncTest('pre-compact exits 0 with error message when HOME is non-directory', async () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// HOME=/dev/null makes ensureDir(sessionsDir) throw ENOTDIR,
|
||||
// which propagates to main().catch — the top-level error boundary
|
||||
const result = await runScript(path.join(scriptsDir, 'pre-compact.js'), '', {
|
||||
HOME: '/dev/null',
|
||||
USERPROFILE: '/dev/null'
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should exit 0 (don't block on errors), got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('[PreCompact] Error:'),
|
||||
`stderr should contain [PreCompact] Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 75: session-end.js main().catch handler ──
|
||||
console.log('\nRound 75: session-end.js (main catch — unrecoverable error):');
|
||||
|
||||
if (await asyncTest('session-end exits 0 with error message when HOME is non-directory', async () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// HOME=/dev/null makes ensureDir(sessionsDir) throw ENOTDIR inside main(),
|
||||
// which propagates to runMain().catch — the top-level error boundary
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), '{}', {
|
||||
HOME: '/dev/null',
|
||||
USERPROFILE: '/dev/null'
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should exit 0 (don't block on errors), got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('[SessionEnd] Error:'),
|
||||
`stderr should contain [SessionEnd] Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 76: evaluate-session.js main().catch handler ──
|
||||
console.log('\nRound 76: evaluate-session.js (main catch — unrecoverable error):');
|
||||
|
||||
if (await asyncTest('evaluate-session exits 0 with error message when HOME is non-directory', async () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// HOME=/dev/null makes ensureDir(learnedSkillsPath) throw ENOTDIR,
|
||||
// which propagates to main().catch — the top-level error boundary
|
||||
const result = await runScript(path.join(scriptsDir, 'evaluate-session.js'), '{}', {
|
||||
HOME: '/dev/null',
|
||||
USERPROFILE: '/dev/null'
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should exit 0 (don't block on errors), got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('[ContinuousLearning] Error:'),
|
||||
`stderr should contain [ContinuousLearning] Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 76: suggest-compact.js main().catch handler ──
|
||||
console.log('\nRound 76: suggest-compact.js (main catch — double-failure):');
|
||||
|
||||
if (await asyncTest('suggest-compact exits 0 with error when TMPDIR is non-directory', async () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// TMPDIR=/dev/null causes openSync to fail (ENOTDIR), then the catch
|
||||
// fallback writeFile also fails, propagating to main().catch
|
||||
const result = await runScript(path.join(scriptsDir, 'suggest-compact.js'), '', {
|
||||
TMPDIR: '/dev/null'
|
||||
});
|
||||
assert.strictEqual(result.code, 0,
|
||||
`Should exit 0 (don't block on errors), got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('[StrategicCompact] Error:'),
|
||||
`stderr should contain [StrategicCompact] Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 80: session-end.js entry.message?.role === 'user' third OR condition ──
|
||||
console.log('\nRound 80: session-end.js (entry.message.role user — third OR condition):');
|
||||
|
||||
if (await asyncTest('extracts user messages from entries where only message.role is user (not type or role)', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-msgrole-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const testDir = createTestDir();
|
||||
const transcriptPath = path.join(testDir, 'transcript.jsonl');
|
||||
// Entries where type is NOT 'user' and there is no direct role field,
|
||||
// but message.role IS 'user'. This exercises the third OR condition at
|
||||
// session-end.js line 48: entry.message?.role === 'user'
|
||||
const lines = [
|
||||
'{"type":"human","message":{"role":"user","content":"Refactor the auth module"}}',
|
||||
'{"type":"human","message":{"role":"assistant","content":"I will refactor it"}}',
|
||||
'{"type":"human","message":{"role":"user","content":"Add integration tests too"}}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0);
|
||||
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('-session.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
// The third OR condition should fire for type:"human" + message.role:"user"
|
||||
assert.ok(content.includes('Refactor the auth module') || content.includes('auth'),
|
||||
`Session should include message extracted via message.role path. Got: ${content.substring(0, 300)}`);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 81: suggest-compact threshold upper bound, session-end non-string content ──
|
||||
console.log('\nRound 81: suggest-compact.js (COMPACT_THRESHOLD > 10000):');
|
||||
|
||||
if (await asyncTest('COMPACT_THRESHOLD exceeding 10000 falls back to default 50', async () => {
|
||||
// suggest-compact.js line 31: rawThreshold <= 10000 ? rawThreshold : 50
|
||||
// Values > 10000 are positive and finite but fail the upper-bound check.
|
||||
// Existing tests cover 0, negative, NaN — this covers the > 10000 boundary.
|
||||
const result = await runScript(path.join(scriptsDir, 'suggest-compact.js'), '', {
|
||||
COMPACT_THRESHOLD: '20000'
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
// The script logs the threshold it chose — should fall back to 50
|
||||
// Look for the fallback value in stderr (log output)
|
||||
const compactSource = fs.readFileSync(path.join(scriptsDir, 'suggest-compact.js'), 'utf8');
|
||||
// The condition at line 31: rawThreshold <= 10000 ? rawThreshold : 50
|
||||
assert.ok(compactSource.includes('<= 10000'),
|
||||
'Source should have <= 10000 upper bound check');
|
||||
assert.ok(compactSource.includes(': 50'),
|
||||
'Source should fall back to 50 when threshold exceeds 10000');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 81: session-end.js (user entry with non-string non-array content):');
|
||||
|
||||
if (await asyncTest('skips user messages with numeric content (non-string non-array branch)', async () => {
|
||||
// session-end.js line 50-55: rawContent is checked for string, then array, else ''
|
||||
// When content is a number (42), neither branch matches, text = '', message is skipped.
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r81-numcontent-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
const transcriptPath = path.join(isoHome, 'transcript.jsonl');
|
||||
|
||||
const lines = [
|
||||
// Normal user message (string content) — should be included
|
||||
'{"type":"user","content":"Real user message"}',
|
||||
// User message with numeric content — exercises the else: '' branch
|
||||
'{"type":"user","content":42}',
|
||||
// User message with boolean content — also hits the else branch
|
||||
'{"type":"user","content":true}',
|
||||
// User message with object content (no .text) — also hits the else branch
|
||||
'{"type":"user","content":{"type":"image","source":"data:..."}}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
// The real string message should appear
|
||||
assert.ok(content.includes('Real user message'),
|
||||
'Should include the string content user message');
|
||||
// Numeric/boolean/object content should NOT appear as text
|
||||
assert.ok(!content.includes('42'),
|
||||
'Numeric content should be skipped (else branch → empty string → filtered)');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 82: tool_name OR fallback, template marker regex no-match ──
|
||||
|
||||
console.log('\nRound 82: session-end.js (entry.tool_name without type=tool_use):');
|
||||
|
||||
if (await asyncTest('collects tool name from entry with tool_name but non-tool_use type', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r82-toolname-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const transcriptPath = path.join(isoHome, 'transcript.jsonl');
|
||||
const lines = [
|
||||
'{"type":"user","content":"Fix the bug"}',
|
||||
'{"type":"result","tool_name":"Edit","tool_input":{"file_path":"/tmp/app.js"}}',
|
||||
'{"type":"assistant","message":{"content":[{"type":"text","text":"Done fixing"}]}}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('.tmp'));
|
||||
assert.ok(files.length > 0, 'Should create session file');
|
||||
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
|
||||
// The tool name "Edit" should appear even though type is "result", not "tool_use"
|
||||
assert.ok(content.includes('Edit'), 'Should collect Edit tool via tool_name OR fallback');
|
||||
// The file modified should also be collected since tool_name is Edit
|
||||
assert.ok(content.includes('app.js'), 'Should collect modified file path from tool_input');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 82: session-end.js (template marker present but regex no-match):');
|
||||
|
||||
if (await asyncTest('preserves file when marker present but regex does not match corrupted template', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r82-tmpl-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
const today = new Date().toISOString().split('T')[0];
|
||||
const sessionFile = path.join(sessionsDir, `session-${today}.tmp`);
|
||||
|
||||
// Write a corrupted template: has the marker but NOT the full regex structure
|
||||
const corruptedTemplate = `# Session: ${today}
|
||||
**Date:** ${today}
|
||||
**Started:** 10:00
|
||||
**Last Updated:** 10:00
|
||||
|
||||
---
|
||||
|
||||
## Current State
|
||||
|
||||
[Session context goes here]
|
||||
|
||||
Some random content without the expected ### Context to Load section
|
||||
`;
|
||||
fs.writeFileSync(sessionFile, corruptedTemplate);
|
||||
|
||||
// Provide a transcript with enough content to generate a summary
|
||||
const transcriptPath = path.join(isoHome, 'transcript.jsonl');
|
||||
const lines = [
|
||||
'{"type":"user","content":"Implement authentication feature"}',
|
||||
'{"type":"assistant","message":{"content":[{"type":"text","text":"I will implement the auth feature using JWT tokens and bcrypt for password hashing."}]}}',
|
||||
'{"type":"tool_use","tool_name":"Write","name":"Write","tool_input":{"file_path":"/tmp/auth.js"}}',
|
||||
'{"type":"user","content":"Now add the login endpoint"}',
|
||||
'{"type":"assistant","message":{"content":[{"type":"text","text":"Adding the login endpoint with proper validation."}]}}',
|
||||
];
|
||||
fs.writeFileSync(transcriptPath, lines.join('\n'));
|
||||
|
||||
const stdinJson = JSON.stringify({ transcript_path: transcriptPath });
|
||||
try {
|
||||
const result = await runScript(path.join(scriptsDir, 'session-end.js'), stdinJson, {
|
||||
HOME: isoHome, USERPROFILE: isoHome
|
||||
});
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0');
|
||||
|
||||
const content = fs.readFileSync(sessionFile, 'utf8');
|
||||
// The marker text should still be present since regex didn't match
|
||||
assert.ok(content.includes('[Session context goes here]'),
|
||||
'Marker should remain when regex fails to match corrupted template');
|
||||
// The corrupted content should still be there
|
||||
assert.ok(content.includes('Some random content'),
|
||||
'Original corrupted content should be preserved');
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 87: post-edit-format.js and post-edit-typecheck.js stdin overflow (1MB) ──
|
||||
console.log('\nRound 87: post-edit-format.js (stdin exceeding 1MB — truncation):');
|
||||
|
||||
if (await asyncTest('truncates stdin at 1MB limit and still passes through data (post-edit-format)', async () => {
|
||||
// Send 1.2MB of data — exceeds the 1MB MAX_STDIN limit (lines 14-22)
|
||||
const payload = 'x'.repeat(1024 * 1024 + 200000);
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), payload);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with oversized stdin');
|
||||
// Output should be truncated — significantly less than input
|
||||
assert.ok(result.stdout.length < payload.length,
|
||||
`stdout (${result.stdout.length}) should be shorter than input (${payload.length})`);
|
||||
// Output should be approximately 1MB (last accepted chunk may push slightly over)
|
||||
assert.ok(result.stdout.length <= 1024 * 1024 + 65536,
|
||||
`stdout (${result.stdout.length}) should be near 1MB, not unbounded`);
|
||||
assert.ok(result.stdout.length > 0, 'Should still pass through truncated data');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 87: post-edit-typecheck.js (stdin exceeding 1MB — truncation):');
|
||||
|
||||
if (await asyncTest('truncates stdin at 1MB limit and still passes through data (post-edit-typecheck)', async () => {
|
||||
// Send 1.2MB of data — exceeds the 1MB MAX_STDIN limit (lines 16-24)
|
||||
const payload = 'x'.repeat(1024 * 1024 + 200000);
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-typecheck.js'), payload);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 even with oversized stdin');
|
||||
// Output should be truncated — significantly less than input
|
||||
assert.ok(result.stdout.length < payload.length,
|
||||
`stdout (${result.stdout.length}) should be shorter than input (${payload.length})`);
|
||||
// Output should be approximately 1MB (last accepted chunk may push slightly over)
|
||||
assert.ok(result.stdout.length <= 1024 * 1024 + 65536,
|
||||
`stdout (${result.stdout.length}) should be near 1MB, not unbounded`);
|
||||
assert.ok(result.stdout.length > 0, 'Should still pass through truncated data');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log('\n=== Test Results ===');
|
||||
console.log(`Passed: ${passed}`);
|
||||
|
||||
@@ -318,6 +318,30 @@ function runTests() {
|
||||
cleanupCounter();
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 64: default session ID fallback ──
|
||||
console.log('\nDefault session ID fallback (Round 64):');
|
||||
|
||||
if (test('uses "default" session ID when CLAUDE_SESSION_ID is empty', () => {
|
||||
const defaultCounterFile = getCounterFilePath('default');
|
||||
try { fs.unlinkSync(defaultCounterFile); } catch {}
|
||||
try {
|
||||
// Pass empty CLAUDE_SESSION_ID — falsy, so script uses 'default'
|
||||
const env = { ...process.env, CLAUDE_SESSION_ID: '' };
|
||||
const result = spawnSync('node', [compactScript], {
|
||||
encoding: 'utf8',
|
||||
input: '{}',
|
||||
timeout: 10000,
|
||||
env,
|
||||
});
|
||||
assert.strictEqual(result.status || 0, 0, 'Should exit 0');
|
||||
assert.ok(fs.existsSync(defaultCounterFile), 'Counter file should use "default" session ID');
|
||||
const count = parseInt(fs.readFileSync(defaultCounterFile, 'utf8').trim(), 10);
|
||||
assert.strictEqual(count, 1, 'Counter should be 1 for first run with default session');
|
||||
} finally {
|
||||
try { fs.unlinkSync(defaultCounterFile); } catch {}
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
|
||||
@@ -622,6 +622,76 @@ async function runTests() {
|
||||
assert.strictEqual(code, 0, 'Should not crash on truncated JSON');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ==========================================
|
||||
// Round 51: Timeout Enforcement
|
||||
// ==========================================
|
||||
console.log('\nRound 51: Timeout Enforcement:');
|
||||
|
||||
if (await asyncTest('runHookWithInput kills hanging hooks after timeout', async () => {
|
||||
const testDir = createTestDir();
|
||||
const hangingHookPath = path.join(testDir, 'hanging-hook.js');
|
||||
fs.writeFileSync(hangingHookPath, 'setInterval(() => {}, 100);');
|
||||
|
||||
try {
|
||||
const startTime = Date.now();
|
||||
let error = null;
|
||||
|
||||
try {
|
||||
await runHookWithInput(hangingHookPath, {}, {}, 500);
|
||||
} catch (err) {
|
||||
error = err;
|
||||
}
|
||||
|
||||
const elapsed = Date.now() - startTime;
|
||||
assert.ok(error, 'Should throw timeout error');
|
||||
assert.ok(error.message.includes('timed out'), 'Error should mention timeout');
|
||||
assert.ok(elapsed >= 450, `Should wait at least ~500ms, waited ${elapsed}ms`);
|
||||
assert.ok(elapsed < 2000, `Should not wait much longer than 500ms, waited ${elapsed}ms`);
|
||||
} finally {
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ==========================================
|
||||
// Round 51: hooks.json Schema Validation
|
||||
// ==========================================
|
||||
console.log('\nRound 51: hooks.json Schema Validation:');
|
||||
|
||||
if (await asyncTest('hooks.json async hook has valid timeout field', async () => {
|
||||
const asyncHook = hooks.hooks.PostToolUse.find(h =>
|
||||
h.hooks && h.hooks[0] && h.hooks[0].async === true
|
||||
);
|
||||
|
||||
assert.ok(asyncHook, 'Should have at least one async hook defined');
|
||||
assert.strictEqual(asyncHook.hooks[0].async, true, 'async field should be true');
|
||||
assert.ok(asyncHook.hooks[0].timeout, 'Should have timeout field');
|
||||
assert.strictEqual(typeof asyncHook.hooks[0].timeout, 'number', 'Timeout should be a number');
|
||||
assert.ok(asyncHook.hooks[0].timeout > 0, 'Timeout should be positive');
|
||||
|
||||
const match = asyncHook.hooks[0].command.match(/^node -e "(.+)"$/s);
|
||||
assert.ok(match, 'Async hook command should be node -e format');
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('all hook commands in hooks.json are valid format', async () => {
|
||||
for (const [hookType, hookArray] of Object.entries(hooks.hooks)) {
|
||||
for (const hookDef of hookArray) {
|
||||
assert.ok(hookDef.hooks, `${hookType} entry should have hooks array`);
|
||||
|
||||
for (const hook of hookDef.hooks) {
|
||||
assert.ok(hook.command, `Hook in ${hookType} should have command field`);
|
||||
|
||||
const isInline = hook.command.startsWith('node -e');
|
||||
const isFilePath = hook.command.startsWith('node "');
|
||||
|
||||
assert.ok(
|
||||
isInline || isFilePath,
|
||||
`Hook command in ${hookType} should be inline (node -e) or file path (node "), got: ${hook.command.substring(0, 50)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log('\n=== Test Results ===');
|
||||
console.log(`Passed: ${passed}`);
|
||||
|
||||
@@ -1164,6 +1164,205 @@ function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 48: detectFromPackageJson format edge cases ──
|
||||
console.log('\nRound 48: detectFromPackageJson (version format edge cases):');
|
||||
|
||||
if (test('returns null for packageManager with non-@ separator', () => {
|
||||
const testDir = createTestDir();
|
||||
try {
|
||||
fs.writeFileSync(path.join(testDir, 'package.json'), JSON.stringify({
|
||||
name: 'test',
|
||||
packageManager: 'pnpm+8.6.0'
|
||||
}));
|
||||
const result = pm.detectFromPackageJson(testDir);
|
||||
// split('@') on 'pnpm+8.6.0' returns ['pnpm+8.6.0'], which doesn't match PACKAGE_MANAGERS
|
||||
assert.strictEqual(result, null, 'Non-@ format should not match any package manager');
|
||||
} finally {
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('extracts package manager from caret version like yarn@^4.0.0', () => {
|
||||
const testDir = createTestDir();
|
||||
try {
|
||||
fs.writeFileSync(path.join(testDir, 'package.json'), JSON.stringify({
|
||||
name: 'test',
|
||||
packageManager: 'yarn@^4.0.0'
|
||||
}));
|
||||
const result = pm.detectFromPackageJson(testDir);
|
||||
assert.strictEqual(result, 'yarn', 'Caret version should still extract PM name');
|
||||
} finally {
|
||||
cleanupTestDir(testDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// getPackageManager falls through corrupted global config to npm default
|
||||
if (test('getPackageManager falls through corrupted global config to npm default', () => {
|
||||
const tmpDir = createTestDir();
|
||||
const projDir = path.join(tmpDir, 'proj');
|
||||
fs.mkdirSync(projDir, { recursive: true });
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
const origPM = process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
try {
|
||||
// Create corrupted global config file
|
||||
const claudeDir = path.join(tmpDir, '.claude');
|
||||
fs.mkdirSync(claudeDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(claudeDir, 'package-manager.json'), '{ invalid json !!!', 'utf8');
|
||||
process.env.HOME = tmpDir;
|
||||
process.env.USERPROFILE = tmpDir;
|
||||
delete process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
// Re-require to pick up new HOME
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshPM = require('../../scripts/lib/package-manager');
|
||||
// Empty project dir: no lock file, no package.json, no project config
|
||||
const result = freshPM.getPackageManager({ projectDir: projDir });
|
||||
assert.strictEqual(result.name, 'npm', 'Should fall through to npm default');
|
||||
assert.strictEqual(result.source, 'default', 'Source should be default');
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
if (origPM !== undefined) process.env.CLAUDE_PACKAGE_MANAGER = origPM;
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
cleanupTestDir(tmpDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 69: getPackageManager global-config success path ──
|
||||
console.log('\nRound 69: getPackageManager (global-config success):');
|
||||
|
||||
if (test('getPackageManager returns source global-config when valid global config exists', () => {
|
||||
const tmpDir = createTestDir();
|
||||
const projDir = path.join(tmpDir, 'proj');
|
||||
fs.mkdirSync(projDir, { recursive: true });
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
const origPM = process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
try {
|
||||
// Create valid global config with pnpm preference
|
||||
const claudeDir = path.join(tmpDir, '.claude');
|
||||
fs.mkdirSync(claudeDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(claudeDir, 'package-manager.json'),
|
||||
JSON.stringify({ packageManager: 'pnpm', setAt: '2026-01-01T00:00:00Z' }), 'utf8');
|
||||
process.env.HOME = tmpDir;
|
||||
process.env.USERPROFILE = tmpDir;
|
||||
delete process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
// Re-require to pick up new HOME
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshPM = require('../../scripts/lib/package-manager');
|
||||
// Empty project dir: no lock file, no package.json, no project config
|
||||
const result = freshPM.getPackageManager({ projectDir: projDir });
|
||||
assert.strictEqual(result.name, 'pnpm', 'Should detect pnpm from global config');
|
||||
assert.strictEqual(result.source, 'global-config', 'Source should be global-config');
|
||||
assert.ok(result.config, 'Should include config object');
|
||||
assert.strictEqual(result.config.lockFile, 'pnpm-lock.yaml', 'Config should match pnpm');
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
if (origPM !== undefined) process.env.CLAUDE_PACKAGE_MANAGER = origPM;
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
cleanupTestDir(tmpDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 71: setPreferredPackageManager save failure wraps error ──
|
||||
console.log('\nRound 71: setPreferredPackageManager (save failure):');
|
||||
|
||||
if (test('setPreferredPackageManager throws wrapped error when save fails', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-pm-r71-${Date.now()}`);
|
||||
const claudeDir = path.join(isoHome, '.claude');
|
||||
fs.mkdirSync(claudeDir, { recursive: true });
|
||||
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshPm = require('../../scripts/lib/package-manager');
|
||||
|
||||
// Make .claude directory read-only — can't create new files (package-manager.json)
|
||||
fs.chmodSync(claudeDir, 0o555);
|
||||
|
||||
assert.throws(() => {
|
||||
freshPm.setPreferredPackageManager('npm');
|
||||
}, /Failed to save package manager preference/);
|
||||
} finally {
|
||||
try { fs.chmodSync(claudeDir, 0o755); } catch { /* best-effort */ }
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/package-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 72: setProjectPackageManager save failure wraps error ──
|
||||
console.log('\nRound 72: setProjectPackageManager (save failure):');
|
||||
|
||||
if (test('setProjectPackageManager throws wrapped error when write fails', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const isoProject = path.join(os.tmpdir(), `ecc-pm-proj-r72-${Date.now()}`);
|
||||
const claudeDir = path.join(isoProject, '.claude');
|
||||
fs.mkdirSync(claudeDir, { recursive: true });
|
||||
|
||||
// Make .claude directory read-only — can't create new files
|
||||
fs.chmodSync(claudeDir, 0o555);
|
||||
|
||||
try {
|
||||
assert.throws(() => {
|
||||
pm.setProjectPackageManager('npm', isoProject);
|
||||
}, /Failed to save package manager config/);
|
||||
} finally {
|
||||
fs.chmodSync(claudeDir, 0o755);
|
||||
fs.rmSync(isoProject, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 80: getExecCommand with truthy non-string args ──
|
||||
console.log('\nRound 80: getExecCommand (truthy non-string args):');
|
||||
|
||||
if (test('getExecCommand with args=42 (truthy number) appends stringified value', () => {
|
||||
const originalEnv = process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
try {
|
||||
process.env.CLAUDE_PACKAGE_MANAGER = 'npm';
|
||||
// args=42: truthy, so typeof check at line 334 short-circuits
|
||||
// (typeof 42 !== 'string'), skipping validation. Line 339:
|
||||
// 42 ? ' ' + 42 → ' 42' → appended.
|
||||
const cmd = pm.getExecCommand('prettier', 42);
|
||||
assert.ok(cmd.includes('prettier'), 'Should include binary name');
|
||||
assert.ok(cmd.includes('42'), 'Truthy number should be stringified and appended');
|
||||
} finally {
|
||||
if (originalEnv !== undefined) process.env.CLAUDE_PACKAGE_MANAGER = originalEnv;
|
||||
else delete process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 86: detectFromPackageJson with empty (0-byte) package.json ──
|
||||
console.log('\nRound 86: detectFromPackageJson (empty package.json):');
|
||||
|
||||
if (test('detectFromPackageJson returns null for empty (0-byte) package.json', () => {
|
||||
// package-manager.js line 109-111: readFile returns "" for empty file.
|
||||
// "" is falsy → if (content) is false → skips JSON.parse → returns null.
|
||||
const testDir = createTestDir();
|
||||
fs.writeFileSync(path.join(testDir, 'package.json'), '');
|
||||
const result = pm.detectFromPackageJson(testDir);
|
||||
assert.strictEqual(result, null, 'Empty package.json should return null (content="" is falsy)');
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log('\n=== Test Results ===');
|
||||
console.log(`Passed: ${passed}`);
|
||||
|
||||
@@ -839,6 +839,390 @@ function runTests() {
|
||||
// best-effort
|
||||
}
|
||||
|
||||
// ── Round 48: rapid sequential saves data integrity ──
|
||||
console.log('\nRound 48: rapid sequential saves:');
|
||||
|
||||
if (test('rapid sequential setAlias calls maintain data integrity', () => {
|
||||
resetAliases();
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const result = aliases.setAlias(`rapid-${i}`, `/path/${i}`, `Title ${i}`);
|
||||
assert.strictEqual(result.success, true, `setAlias rapid-${i} should succeed`);
|
||||
}
|
||||
const data = aliases.loadAliases();
|
||||
for (let i = 0; i < 5; i++) {
|
||||
assert.ok(data.aliases[`rapid-${i}`], `rapid-${i} should exist after all saves`);
|
||||
assert.strictEqual(data.aliases[`rapid-${i}`].sessionPath, `/path/${i}`);
|
||||
}
|
||||
assert.strictEqual(data.metadata.totalCount, 5, 'Metadata count should match actual aliases');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 56: Windows platform unlink-before-rename code path ──
|
||||
console.log('\nRound 56: Windows platform atomic write path:');
|
||||
|
||||
if (test('Windows platform mock: unlinks existing file before rename', () => {
|
||||
resetAliases();
|
||||
// First create an alias so the file exists
|
||||
const r1 = aliases.setAlias('win-initial', '2026-01-01-abc123-session.tmp');
|
||||
assert.strictEqual(r1.success, true, 'Initial alias should succeed');
|
||||
const aliasesPath = aliases.getAliasesPath();
|
||||
assert.ok(fs.existsSync(aliasesPath), 'Aliases file should exist before win32 test');
|
||||
|
||||
// Mock process.platform to 'win32' to trigger the unlink-before-rename path
|
||||
const origPlatform = Object.getOwnPropertyDescriptor(process, 'platform');
|
||||
Object.defineProperty(process, 'platform', { value: 'win32', configurable: true });
|
||||
|
||||
try {
|
||||
// This save triggers the Windows code path: unlink existing → rename temp
|
||||
const r2 = aliases.setAlias('win-updated', '2026-02-01-def456-session.tmp');
|
||||
assert.strictEqual(r2.success, true, 'setAlias should succeed under win32 mock');
|
||||
|
||||
// Verify data integrity after the Windows path
|
||||
assert.ok(fs.existsSync(aliasesPath), 'Aliases file should exist after win32 save');
|
||||
const data = aliases.loadAliases();
|
||||
assert.ok(data.aliases['win-initial'], 'Original alias should still exist');
|
||||
assert.ok(data.aliases['win-updated'], 'New alias should exist');
|
||||
assert.strictEqual(data.aliases['win-updated'].sessionPath,
|
||||
'2026-02-01-def456-session.tmp', 'Session path should match');
|
||||
|
||||
// No .tmp or .bak files left behind
|
||||
assert.ok(!fs.existsSync(aliasesPath + '.tmp'), 'No temp file should remain');
|
||||
assert.ok(!fs.existsSync(aliasesPath + '.bak'), 'No backup file should remain');
|
||||
} finally {
|
||||
// Restore original platform descriptor
|
||||
if (origPlatform) {
|
||||
Object.defineProperty(process, 'platform', origPlatform);
|
||||
}
|
||||
resetAliases();
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 64: loadAliases backfills missing version and metadata ──
|
||||
console.log('\nRound 64: loadAliases version/metadata backfill:');
|
||||
|
||||
if (test('loadAliases backfills missing version and metadata fields', () => {
|
||||
resetAliases();
|
||||
const aliasesPath = aliases.getAliasesPath();
|
||||
// Write a file with valid aliases but NO version and NO metadata
|
||||
fs.writeFileSync(aliasesPath, JSON.stringify({
|
||||
aliases: {
|
||||
'backfill-test': {
|
||||
sessionPath: '/sessions/backfill',
|
||||
createdAt: '2026-01-15T00:00:00.000Z',
|
||||
updatedAt: '2026-01-15T00:00:00.000Z',
|
||||
title: 'Backfill Test'
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
const data = aliases.loadAliases();
|
||||
// Version should be backfilled to ALIAS_VERSION ('1.0')
|
||||
assert.strictEqual(data.version, '1.0', 'Should backfill missing version to 1.0');
|
||||
// Metadata should be backfilled with totalCount from aliases
|
||||
assert.ok(data.metadata, 'Should backfill missing metadata object');
|
||||
assert.strictEqual(data.metadata.totalCount, 1, 'Metadata totalCount should match alias count');
|
||||
assert.ok(data.metadata.lastUpdated, 'Metadata should have lastUpdated');
|
||||
// Alias data should be preserved
|
||||
assert.ok(data.aliases['backfill-test'], 'Alias data should be preserved');
|
||||
assert.strictEqual(data.aliases['backfill-test'].sessionPath, '/sessions/backfill');
|
||||
resetAliases();
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 67: loadAliases empty file, resolveSessionAlias null, metadata-only backfill ──
|
||||
console.log('\nRound 67: loadAliases (empty 0-byte file):');
|
||||
|
||||
if (test('loadAliases returns default structure for empty (0-byte) file', () => {
|
||||
resetAliases();
|
||||
const aliasesPath = aliases.getAliasesPath();
|
||||
// Write a 0-byte file — readFile returns '', which is falsy → !content branch
|
||||
fs.writeFileSync(aliasesPath, '');
|
||||
const data = aliases.loadAliases();
|
||||
assert.ok(data.aliases, 'Should have aliases key');
|
||||
assert.strictEqual(Object.keys(data.aliases).length, 0, 'Should have no aliases');
|
||||
assert.strictEqual(data.version, '1.0', 'Should have default version');
|
||||
assert.ok(data.metadata, 'Should have metadata');
|
||||
assert.strictEqual(data.metadata.totalCount, 0, 'Should have totalCount 0');
|
||||
resetAliases();
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 67: resolveSessionAlias (null/falsy input):');
|
||||
|
||||
if (test('resolveSessionAlias returns null when given null input', () => {
|
||||
resetAliases();
|
||||
const result = aliases.resolveSessionAlias(null);
|
||||
assert.strictEqual(result, null, 'Should return null for null input');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nRound 67: loadAliases (metadata-only backfill, version present):');
|
||||
|
||||
if (test('loadAliases backfills only metadata when version already present', () => {
|
||||
resetAliases();
|
||||
const aliasesPath = aliases.getAliasesPath();
|
||||
// Write a file WITH version but WITHOUT metadata
|
||||
fs.writeFileSync(aliasesPath, JSON.stringify({
|
||||
version: '1.0',
|
||||
aliases: {
|
||||
'meta-only': {
|
||||
sessionPath: '/sessions/meta-only',
|
||||
createdAt: '2026-01-20T00:00:00.000Z',
|
||||
updatedAt: '2026-01-20T00:00:00.000Z',
|
||||
title: 'Metadata Only Test'
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
const data = aliases.loadAliases();
|
||||
// Version should remain as-is (NOT overwritten)
|
||||
assert.strictEqual(data.version, '1.0', 'Version should remain 1.0');
|
||||
// Metadata should be backfilled
|
||||
assert.ok(data.metadata, 'Should backfill missing metadata');
|
||||
assert.strictEqual(data.metadata.totalCount, 1, 'Metadata totalCount should be 1');
|
||||
assert.ok(data.metadata.lastUpdated, 'Metadata should have lastUpdated');
|
||||
// Alias data should be preserved
|
||||
assert.ok(data.aliases['meta-only'], 'Alias should be preserved');
|
||||
assert.strictEqual(data.aliases['meta-only'].title, 'Metadata Only Test');
|
||||
resetAliases();
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 70: updateAliasTitle save failure path ──
|
||||
console.log('\nupdateAliasTitle save failure (Round 70):');
|
||||
|
||||
if (test('updateAliasTitle returns failure when saveAliases fails (read-only dir)', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
// Use a fresh isolated HOME to avoid .tmp/.bak leftovers from other tests.
|
||||
// On macOS, overwriting an EXISTING file in a read-only dir succeeds,
|
||||
// so we must start clean with ONLY the .json file present.
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-alias-r70-${Date.now()}`);
|
||||
const isoClaudeDir = path.join(isoHome, '.claude');
|
||||
fs.mkdirSync(isoClaudeDir, { recursive: true });
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
// Re-require to pick up new HOME
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshAliases = require('../../scripts/lib/session-aliases');
|
||||
|
||||
// Set up a valid alias
|
||||
freshAliases.setAlias('title-save-fail', '/path/session', 'Original Title');
|
||||
// Verify no leftover .tmp/.bak
|
||||
const ap = freshAliases.getAliasesPath();
|
||||
assert.ok(fs.existsSync(ap), 'Alias file should exist after setAlias');
|
||||
|
||||
// Make .claude dir read-only so saveAliases fails when creating .bak
|
||||
fs.chmodSync(isoClaudeDir, 0o555);
|
||||
|
||||
const result = freshAliases.updateAliasTitle('title-save-fail', 'New Title');
|
||||
assert.strictEqual(result.success, false, 'Should fail when save is blocked');
|
||||
assert.ok(result.error.includes('Failed to update alias title'),
|
||||
`Should return save failure error, got: ${result.error}`);
|
||||
} finally {
|
||||
try { fs.chmodSync(isoClaudeDir, 0o755); } catch { /* best-effort */ }
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 72: deleteAlias save failure path ──
|
||||
console.log('\nRound 72: deleteAlias (save failure):');
|
||||
|
||||
if (test('deleteAlias returns failure when saveAliases fails (read-only dir)', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-alias-r72-${Date.now()}`);
|
||||
const isoClaudeDir = path.join(isoHome, '.claude');
|
||||
fs.mkdirSync(isoClaudeDir, { recursive: true });
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshAliases = require('../../scripts/lib/session-aliases');
|
||||
|
||||
// Create an alias first (writes the file)
|
||||
freshAliases.setAlias('to-delete', '/path/session', 'Test');
|
||||
const ap = freshAliases.getAliasesPath();
|
||||
assert.ok(fs.existsSync(ap), 'Alias file should exist after setAlias');
|
||||
|
||||
// Make .claude directory read-only — save will fail (can't create temp file)
|
||||
fs.chmodSync(isoClaudeDir, 0o555);
|
||||
|
||||
const result = freshAliases.deleteAlias('to-delete');
|
||||
assert.strictEqual(result.success, false, 'Should fail when save is blocked');
|
||||
assert.ok(result.error.includes('Failed to delete alias'),
|
||||
`Should return delete failure error, got: ${result.error}`);
|
||||
} finally {
|
||||
try { fs.chmodSync(isoClaudeDir, 0o755); } catch { /* best-effort */ }
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 73: cleanupAliases save failure path ──
|
||||
console.log('\nRound 73: cleanupAliases (save failure):');
|
||||
|
||||
if (test('cleanupAliases returns failure when saveAliases fails after removing aliases', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-alias-r73-cleanup-${Date.now()}`);
|
||||
const isoClaudeDir = path.join(isoHome, '.claude');
|
||||
fs.mkdirSync(isoClaudeDir, { recursive: true });
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshAliases = require('../../scripts/lib/session-aliases');
|
||||
|
||||
// Create aliases — one to keep, one to remove
|
||||
freshAliases.setAlias('keep-me', '/sessions/real', 'Kept');
|
||||
freshAliases.setAlias('remove-me', '/sessions/gone', 'Gone');
|
||||
|
||||
// Make .claude dir read-only so save will fail
|
||||
fs.chmodSync(isoClaudeDir, 0o555);
|
||||
|
||||
// Cleanup: "gone" session doesn't exist, so remove-me should be removed
|
||||
const result = freshAliases.cleanupAliases((p) => p === '/sessions/real');
|
||||
assert.strictEqual(result.success, false, 'Should fail when save is blocked');
|
||||
assert.ok(result.error.includes('Failed to save after cleanup'),
|
||||
`Should return cleanup save failure error, got: ${result.error}`);
|
||||
assert.strictEqual(result.removed, 1, 'Should report 1 removed alias');
|
||||
assert.ok(result.removedAliases.some(a => a.name === 'remove-me'),
|
||||
'Should report remove-me in removedAliases');
|
||||
} finally {
|
||||
try { fs.chmodSync(isoClaudeDir, 0o755); } catch { /* best-effort */ }
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 73: setAlias save failure path ──
|
||||
console.log('\nRound 73: setAlias (save failure):');
|
||||
|
||||
if (test('setAlias returns failure when saveAliases fails', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-alias-r73-set-${Date.now()}`);
|
||||
const isoClaudeDir = path.join(isoHome, '.claude');
|
||||
fs.mkdirSync(isoClaudeDir, { recursive: true });
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshAliases = require('../../scripts/lib/session-aliases');
|
||||
|
||||
// Make .claude dir read-only BEFORE any setAlias call
|
||||
fs.chmodSync(isoClaudeDir, 0o555);
|
||||
|
||||
const result = freshAliases.setAlias('my-alias', '/sessions/test', 'Test');
|
||||
assert.strictEqual(result.success, false, 'Should fail when save is blocked');
|
||||
assert.ok(result.error.includes('Failed to save alias'),
|
||||
`Should return save failure error, got: ${result.error}`);
|
||||
} finally {
|
||||
try { fs.chmodSync(isoClaudeDir, 0o755); } catch { /* best-effort */ }
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-aliases')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 84: listAliases sort NaN date fallback (getTime() || 0) ──
|
||||
console.log('\nRound 84: listAliases (NaN date fallback in sort comparator):');
|
||||
|
||||
if (test('listAliases sorts entries with invalid/missing dates to the end via || 0 fallback', () => {
|
||||
// session-aliases.js line 257:
|
||||
// (new Date(b.updatedAt || b.createdAt || 0).getTime() || 0) - ...
|
||||
// When updatedAt and createdAt are both invalid strings, getTime() returns NaN.
|
||||
// The outer || 0 converts NaN to 0 (epoch time), pushing the entry to the end.
|
||||
resetAliases();
|
||||
const data = aliases.loadAliases();
|
||||
|
||||
// Entry with valid dates — should sort first (newest)
|
||||
data.aliases['valid-alias'] = {
|
||||
sessionPath: '/sessions/valid',
|
||||
createdAt: '2026-02-10T12:00:00.000Z',
|
||||
updatedAt: '2026-02-10T12:00:00.000Z',
|
||||
title: 'Valid'
|
||||
};
|
||||
|
||||
// Entry with invalid date strings — getTime() → NaN → || 0 → epoch (oldest)
|
||||
data.aliases['nan-alias'] = {
|
||||
sessionPath: '/sessions/nan',
|
||||
createdAt: 'not-a-date',
|
||||
updatedAt: 'also-invalid',
|
||||
title: 'NaN dates'
|
||||
};
|
||||
|
||||
// Entry with missing date fields — undefined || undefined || 0 → new Date(0) → epoch
|
||||
data.aliases['missing-alias'] = {
|
||||
sessionPath: '/sessions/missing',
|
||||
title: 'Missing dates'
|
||||
// No createdAt or updatedAt
|
||||
};
|
||||
|
||||
aliases.saveAliases(data);
|
||||
const list = aliases.listAliases();
|
||||
|
||||
assert.strictEqual(list.length, 3, 'Should list all 3 aliases');
|
||||
// Valid-dated entry should be first (newest by updatedAt)
|
||||
assert.strictEqual(list[0].name, 'valid-alias',
|
||||
'Entry with valid dates should sort first');
|
||||
// The two invalid-dated entries sort to epoch (0), so they come after
|
||||
assert.ok(
|
||||
(list[1].name === 'nan-alias' || list[1].name === 'missing-alias') &&
|
||||
(list[2].name === 'nan-alias' || list[2].name === 'missing-alias'),
|
||||
'Entries with invalid/missing dates should sort to the end');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 86: loadAliases with truthy non-object aliases field ──
|
||||
console.log('\nRound 86: loadAliases (truthy non-object aliases field):');
|
||||
|
||||
if (test('loadAliases resets to defaults when aliases field is a string (typeof !== object)', () => {
|
||||
// session-aliases.js line 58: if (!data.aliases || typeof data.aliases !== 'object')
|
||||
// Previous tests covered !data.aliases (undefined) via { noAliasesKey: true }.
|
||||
// This exercises the SECOND half: aliases is truthy but typeof !== 'object'.
|
||||
const aliasesPath = aliases.getAliasesPath();
|
||||
fs.writeFileSync(aliasesPath, JSON.stringify({
|
||||
version: '1.0',
|
||||
aliases: 'this-is-a-string-not-an-object',
|
||||
metadata: { totalCount: 0 }
|
||||
}));
|
||||
const data = aliases.loadAliases();
|
||||
assert.strictEqual(typeof data.aliases, 'object', 'Should reset aliases to object');
|
||||
assert.ok(!Array.isArray(data.aliases), 'Should be a plain object, not array');
|
||||
assert.strictEqual(Object.keys(data.aliases).length, 0, 'Should have no aliases');
|
||||
assert.strictEqual(data.version, '1.0', 'Should have version');
|
||||
resetAliases();
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
|
||||
@@ -956,6 +956,44 @@ src/main.ts
|
||||
assert.ok(result.modifiedTime, 'modifiedTime should be present');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 54: search filter scope and getSessionPath utility ──
|
||||
console.log('\nRound 54: search filter scope and path utility:');
|
||||
|
||||
if (test('getAllSessions search filter matches only short ID, not title or content', () => {
|
||||
// "Session" appears in file CONTENT (e.g. "# Session 1") but not in any shortId
|
||||
const result = sessionManager.getAllSessions({ search: 'Session', limit: 100 });
|
||||
assert.strictEqual(result.total, 0, 'Search should not match title/content, only shortId');
|
||||
// Verify that searching by actual shortId substring still works
|
||||
const result2 = sessionManager.getAllSessions({ search: 'abcd', limit: 100 });
|
||||
assert.strictEqual(result2.total, 1, 'Search by shortId should still work');
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('getSessionPath returns absolute path for session filename', () => {
|
||||
const filename = '2026-02-01-testpath-session.tmp';
|
||||
const result = sessionManager.getSessionPath(filename);
|
||||
assert.ok(path.isAbsolute(result), 'Should return an absolute path');
|
||||
assert.ok(result.endsWith(filename), `Path should end with filename, got: ${result}`);
|
||||
// Since HOME is overridden, sessions dir should be under tmpHome
|
||||
assert.ok(result.includes('.claude'), 'Path should include .claude directory');
|
||||
assert.ok(result.includes('sessions'), 'Path should include sessions directory');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 66: getSessionById noIdMatch path (date-only string for old format) ──
|
||||
console.log('\nRound 66: getSessionById (noIdMatch — date-only match for old format):');
|
||||
|
||||
if (test('getSessionById finds old-format session by date-only string (noIdMatch)', () => {
|
||||
// File is 2026-02-10-session.tmp (old format, shortId = 'no-id')
|
||||
// Calling with '2026-02-10' → filenameMatch fails (filename !== '2026-02-10' and !== '2026-02-10.tmp')
|
||||
// shortIdMatch fails (shortId === 'no-id', not !== 'no-id')
|
||||
// noIdMatch succeeds: shortId === 'no-id' && filename === '2026-02-10-session.tmp'
|
||||
const result = sessionManager.getSessionById('2026-02-10');
|
||||
assert.ok(result, 'Should find old-format session by date-only string');
|
||||
assert.strictEqual(result.shortId, 'no-id', 'Should have no-id shortId');
|
||||
assert.ok(result.filename.includes('2026-02-10-session.tmp'), 'Should match old-format file');
|
||||
assert.ok(result.sessionPath, 'Should have sessionPath');
|
||||
assert.ok(result.date === '2026-02-10', 'Should have correct date');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Cleanup — restore both HOME and USERPROFILE (Windows)
|
||||
process.env.HOME = origHome;
|
||||
if (origUserProfile !== undefined) {
|
||||
@@ -1088,6 +1126,281 @@ src/main.ts
|
||||
}
|
||||
try { fs.rmSync(r33Home, { recursive: true, force: true }); } catch {}
|
||||
|
||||
// ── Round 46: path heuristic and checklist edge cases ──
|
||||
console.log('\ngetSessionStats Windows path heuristic (Round 46):');
|
||||
|
||||
if (test('recognises Windows drive-letter path as a file path', () => {
|
||||
// The looksLikePath regex includes /^[A-Za-z]:[/\\]/ for Windows
|
||||
// A non-existent Windows path should still be treated as a path
|
||||
// (getSessionContent returns null → parseSessionMetadata(null) → defaults)
|
||||
const stats1 = sessionManager.getSessionStats('C:/Users/test/session.tmp');
|
||||
assert.strictEqual(stats1.lineCount, 0, 'C:/ path treated as path, not content');
|
||||
const stats2 = sessionManager.getSessionStats('D:\\Sessions\\2026-01-01.tmp');
|
||||
assert.strictEqual(stats2.lineCount, 0, 'D:\\ path treated as path, not content');
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('does not treat bare drive letter without slash as path', () => {
|
||||
// "C:session.tmp" has no slash after colon → regex fails → treated as content
|
||||
const stats = sessionManager.getSessionStats('C:session.tmp');
|
||||
assert.strictEqual(stats.lineCount, 1, 'Bare C: without slash treated as content');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nparseSessionMetadata checkbox case sensitivity (Round 46):');
|
||||
|
||||
if (test('uppercase [X] does not match completed items regex', () => {
|
||||
const content = '# Test\n\n### Completed\n- [X] Uppercase task\n- [x] Lowercase task\n';
|
||||
const meta = sessionManager.parseSessionMetadata(content);
|
||||
// Regex is /- \[x\]\s*(.+)/g — only matches lowercase [x]
|
||||
assert.strictEqual(meta.completed.length, 1, 'Only lowercase [x] should match');
|
||||
assert.strictEqual(meta.completed[0], 'Lowercase task');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// getAllSessions returns empty result when sessions directory does not exist
|
||||
if (test('getAllSessions returns empty when sessions dir missing', () => {
|
||||
const tmpDir = createTempSessionDir();
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
// Point HOME to a dir with no .claude/sessions/
|
||||
process.env.HOME = tmpDir;
|
||||
process.env.USERPROFILE = tmpDir;
|
||||
// Re-require to pick up new HOME
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshSM = require('../../scripts/lib/session-manager');
|
||||
const result = freshSM.getAllSessions();
|
||||
assert.deepStrictEqual(result.sessions, [], 'Should return empty sessions array');
|
||||
assert.strictEqual(result.total, 0, 'Total should be 0');
|
||||
assert.strictEqual(result.hasMore, false, 'hasMore should be false');
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
cleanup(tmpDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 69: getSessionById returns null when sessions dir missing ──
|
||||
console.log('\nRound 69: getSessionById (missing sessions directory):');
|
||||
|
||||
if (test('getSessionById returns null when sessions directory does not exist', () => {
|
||||
const tmpDir = createTempSessionDir();
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
// Point HOME to a dir with no .claude/sessions/
|
||||
process.env.HOME = tmpDir;
|
||||
process.env.USERPROFILE = tmpDir;
|
||||
// Re-require to pick up new HOME
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshSM = require('../../scripts/lib/session-manager');
|
||||
const result = freshSM.getSessionById('anything');
|
||||
assert.strictEqual(result, null, 'Should return null when sessions dir does not exist');
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
cleanup(tmpDir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 78: getSessionStats reads real file when given existing .tmp path ──
|
||||
console.log('\nRound 78: getSessionStats (actual file path → reads from disk):');
|
||||
|
||||
if (test('getSessionStats reads from disk when given path to existing .tmp file', () => {
|
||||
const dir = createTempSessionDir();
|
||||
try {
|
||||
const sessionPath = path.join(dir, '2026-03-01-test1234-session.tmp');
|
||||
const content = '# Real File Stats Test\n\n**Date:** 2026-03-01\n**Started:** 09:00\n\n### Completed\n- [x] First task\n- [x] Second task\n\n### In Progress\n- [ ] Third task\n\n### Notes for Next Session\nDon\'t forget the edge cases\n';
|
||||
fs.writeFileSync(sessionPath, content);
|
||||
|
||||
// Pass the FILE PATH (not content) — this exercises looksLikePath branch
|
||||
const stats = sessionManager.getSessionStats(sessionPath);
|
||||
assert.strictEqual(stats.completedItems, 2, 'Should find 2 completed items from file');
|
||||
assert.strictEqual(stats.inProgressItems, 1, 'Should find 1 in-progress item from file');
|
||||
assert.strictEqual(stats.totalItems, 3, 'Should find 3 total items from file');
|
||||
assert.strictEqual(stats.hasNotes, true, 'Should detect notes section from file');
|
||||
assert.ok(stats.lineCount > 5, `Should have multiple lines from file, got ${stats.lineCount}`);
|
||||
} finally {
|
||||
cleanup(dir);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 78: getAllSessions hasContent field ──
|
||||
console.log('\nRound 78: getAllSessions (hasContent field):');
|
||||
|
||||
if (test('getAllSessions hasContent is true for non-empty and false for empty files', () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-hascontent-${Date.now()}`);
|
||||
const isoSessions = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(isoSessions, { recursive: true });
|
||||
const savedHome = process.env.HOME;
|
||||
const savedProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
// Create one non-empty session and one empty session
|
||||
fs.writeFileSync(path.join(isoSessions, '2026-04-01-nonempty-session.tmp'), '# Has content');
|
||||
fs.writeFileSync(path.join(isoSessions, '2026-04-02-emptyfile-session.tmp'), '');
|
||||
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshSM = require('../../scripts/lib/session-manager');
|
||||
|
||||
const result = freshSM.getAllSessions({ limit: 100 });
|
||||
assert.strictEqual(result.total, 2, 'Should find both sessions');
|
||||
|
||||
const nonEmpty = result.sessions.find(s => s.shortId === 'nonempty');
|
||||
const empty = result.sessions.find(s => s.shortId === 'emptyfile');
|
||||
|
||||
assert.ok(nonEmpty, 'Should find the non-empty session');
|
||||
assert.ok(empty, 'Should find the empty session');
|
||||
assert.strictEqual(nonEmpty.hasContent, true, 'Non-empty file should have hasContent: true');
|
||||
assert.strictEqual(empty.hasContent, false, 'Empty file should have hasContent: false');
|
||||
} finally {
|
||||
process.env.HOME = savedHome;
|
||||
process.env.USERPROFILE = savedProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 75: deleteSession catch — unlinkSync throws on read-only dir ──
|
||||
console.log('\nRound 75: deleteSession (unlink failure in read-only dir):');
|
||||
|
||||
if (test('deleteSession returns false when file exists but directory is read-only', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const tmpDir = path.join(os.tmpdir(), `sm-del-ro-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
const sessionFile = path.join(tmpDir, 'test-session.tmp');
|
||||
fs.writeFileSync(sessionFile, 'session content');
|
||||
try {
|
||||
// Make directory read-only so unlinkSync throws EACCES
|
||||
fs.chmodSync(tmpDir, 0o555);
|
||||
const result = sessionManager.deleteSession(sessionFile);
|
||||
assert.strictEqual(result, false, 'Should return false when unlinkSync fails');
|
||||
} finally {
|
||||
try { fs.chmodSync(tmpDir, 0o755); } catch { /* best-effort */ }
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 81: getSessionStats(null) ──
|
||||
console.log('\nRound 81: getSessionStats(null) (null input):');
|
||||
|
||||
if (test('getSessionStats(null) returns zero lineCount and empty metadata', () => {
|
||||
// session-manager.js line 158-177: getSessionStats accepts path or content.
|
||||
// typeof null === 'string' is false → looksLikePath = false → content = null.
|
||||
// Line 177: content ? content.split('\n').length : 0 → lineCount: 0.
|
||||
// parseSessionMetadata(null) returns defaults → totalItems/completedItems/inProgressItems = 0.
|
||||
const stats = sessionManager.getSessionStats(null);
|
||||
assert.strictEqual(stats.lineCount, 0, 'null input should yield lineCount 0');
|
||||
assert.strictEqual(stats.totalItems, 0, 'null input should yield totalItems 0');
|
||||
assert.strictEqual(stats.completedItems, 0, 'null input should yield completedItems 0');
|
||||
assert.strictEqual(stats.inProgressItems, 0, 'null input should yield inProgressItems 0');
|
||||
assert.strictEqual(stats.hasNotes, false, 'null input should yield hasNotes false');
|
||||
assert.strictEqual(stats.hasContext, false, 'null input should yield hasContext false');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 83: getAllSessions TOCTOU statSync catch (broken symlink) ──
|
||||
console.log('\nRound 83: getAllSessions (broken symlink — statSync catch):');
|
||||
|
||||
if (test('getAllSessions skips broken symlink .tmp files gracefully', () => {
|
||||
// getAllSessions at line 241-246: statSync throws for broken symlinks,
|
||||
// the catch causes `continue`, skipping that entry entirely.
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r83-toctou-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
// Create one real session file
|
||||
const realFile = '2026-02-10-abcd1234-session.tmp';
|
||||
fs.writeFileSync(path.join(sessionsDir, realFile), '# Real session\n');
|
||||
|
||||
// Create a broken symlink that matches the session filename pattern
|
||||
const brokenSymlink = '2026-02-10-deadbeef-session.tmp';
|
||||
fs.symlinkSync('/nonexistent/path/that/does/not/exist', path.join(sessionsDir, brokenSymlink));
|
||||
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
try {
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshManager = require('../../scripts/lib/session-manager');
|
||||
const result = freshManager.getAllSessions({ limit: 100 });
|
||||
|
||||
// Should have only the real session, not the broken symlink
|
||||
assert.strictEqual(result.total, 1, 'Should find only the real session, not the broken symlink');
|
||||
assert.ok(result.sessions[0].filename === realFile,
|
||||
`Should return the real file, got: ${result.sessions[0].filename}`);
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 84: getSessionById TOCTOU — statSync catch returns null for broken symlink ──
|
||||
console.log('\nRound 84: getSessionById (broken symlink — statSync catch):');
|
||||
|
||||
if (test('getSessionById returns null when matching session is a broken symlink', () => {
|
||||
// getSessionById at line 307-310: statSync throws for broken symlinks,
|
||||
// the catch returns null (file deleted between readdir and stat).
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-r84-getbyid-toctou-${Date.now()}`);
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
fs.mkdirSync(sessionsDir, { recursive: true });
|
||||
|
||||
// Create a broken symlink that matches a session ID pattern
|
||||
const brokenFile = '2026-02-11-deadbeef-session.tmp';
|
||||
fs.symlinkSync('/nonexistent/target/that/does/not/exist', path.join(sessionsDir, brokenFile));
|
||||
|
||||
const origHome = process.env.HOME;
|
||||
const origUserProfile = process.env.USERPROFILE;
|
||||
try {
|
||||
process.env.HOME = isoHome;
|
||||
process.env.USERPROFILE = isoHome;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
const freshSM = require('../../scripts/lib/session-manager');
|
||||
|
||||
// Search by the short ID "deadbeef" — should match the broken symlink
|
||||
const result = freshSM.getSessionById('deadbeef');
|
||||
assert.strictEqual(result, null,
|
||||
'Should return null when matching session file is a broken symlink');
|
||||
} finally {
|
||||
process.env.HOME = origHome;
|
||||
process.env.USERPROFILE = origUserProfile;
|
||||
delete require.cache[require.resolve('../../scripts/lib/session-manager')];
|
||||
delete require.cache[require.resolve('../../scripts/lib/utils')];
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 88: parseSessionMetadata null date/started/lastUpdated fields ──
|
||||
console.log('\nRound 88: parseSessionMetadata content lacking Date/Started/Updated fields:');
|
||||
if (test('parseSessionMetadata returns null for date, started, lastUpdated when fields absent', () => {
|
||||
const content = '# Title Only\n\n### Notes for Next Session\nSome notes\n';
|
||||
const meta = sessionManager.parseSessionMetadata(content);
|
||||
assert.strictEqual(meta.date, null,
|
||||
'date should be null when **Date:** field is absent');
|
||||
assert.strictEqual(meta.started, null,
|
||||
'started should be null when **Started:** field is absent');
|
||||
assert.strictEqual(meta.lastUpdated, null,
|
||||
'lastUpdated should be null when **Last Updated:** field is absent');
|
||||
// Confirm other fields still parse correctly
|
||||
assert.strictEqual(meta.title, 'Title Only');
|
||||
assert.strictEqual(meta.notes, 'Some notes');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
|
||||
@@ -1025,6 +1025,212 @@ function runTests() {
|
||||
assert.strictEqual(parsed.test, 'settled-guard', 'Should parse normally when end fires first');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// replaceInFile returns false when write fails (e.g., read-only file)
|
||||
if (test('replaceInFile returns false on write failure (read-only file)', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const testDir = path.join(utils.getTempDir(), `utils-test-readonly-${Date.now()}`);
|
||||
fs.mkdirSync(testDir, { recursive: true });
|
||||
const filePath = path.join(testDir, 'readonly.txt');
|
||||
try {
|
||||
fs.writeFileSync(filePath, 'hello world', 'utf8');
|
||||
fs.chmodSync(filePath, 0o444);
|
||||
const result = utils.replaceInFile(filePath, 'hello', 'goodbye');
|
||||
assert.strictEqual(result, false, 'Should return false when file is read-only');
|
||||
// Verify content unchanged
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
assert.strictEqual(content, 'hello world', 'Original content should be preserved');
|
||||
} finally {
|
||||
fs.chmodSync(filePath, 0o644);
|
||||
fs.rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 69: getGitModifiedFiles with ALL invalid patterns ──
|
||||
console.log('\ngetGitModifiedFiles all-invalid patterns (Round 69):');
|
||||
|
||||
if (test('getGitModifiedFiles with all-invalid patterns skips filtering (returns all files)', () => {
|
||||
// When every pattern is invalid regex, compiled.length === 0 at line 386,
|
||||
// so the filtering is skipped entirely and all modified files are returned.
|
||||
// This differs from the mixed-valid test where at least one pattern compiles.
|
||||
const allInvalid = utils.getGitModifiedFiles(['(unclosed', '[bad', '**invalid']);
|
||||
const unfiltered = utils.getGitModifiedFiles();
|
||||
// Both should return the same list — all-invalid patterns = no filtering
|
||||
assert.deepStrictEqual(allInvalid, unfiltered,
|
||||
'All-invalid patterns should return same result as no patterns (no filtering)');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 71: findFiles recursive scan skips unreadable subdirectory ──
|
||||
console.log('\nRound 71: findFiles (unreadable subdirectory in recursive scan):');
|
||||
|
||||
if (test('findFiles recursive scan skips unreadable subdirectory silently', () => {
|
||||
if (process.platform === 'win32' || process.getuid?.() === 0) {
|
||||
console.log(' (skipped — chmod ineffective on Windows/root)');
|
||||
return;
|
||||
}
|
||||
const tmpDir = path.join(utils.getTempDir(), `ecc-findfiles-r71-${Date.now()}`);
|
||||
const readableSubdir = path.join(tmpDir, 'readable');
|
||||
const unreadableSubdir = path.join(tmpDir, 'unreadable');
|
||||
fs.mkdirSync(readableSubdir, { recursive: true });
|
||||
fs.mkdirSync(unreadableSubdir, { recursive: true });
|
||||
|
||||
// Create files in both subdirectories
|
||||
fs.writeFileSync(path.join(readableSubdir, 'found.txt'), 'data');
|
||||
fs.writeFileSync(path.join(unreadableSubdir, 'hidden.txt'), 'data');
|
||||
|
||||
// Make the subdirectory unreadable — readdirSync will throw EACCES
|
||||
fs.chmodSync(unreadableSubdir, 0o000);
|
||||
|
||||
try {
|
||||
const results = utils.findFiles(tmpDir, '*.txt', { recursive: true });
|
||||
// Should find the readable file but silently skip the unreadable dir
|
||||
assert.ok(results.length >= 1, 'Should find at least the readable file');
|
||||
const paths = results.map(r => r.path);
|
||||
assert.ok(paths.some(p => p.includes('found.txt')), 'Should find readable/found.txt');
|
||||
assert.ok(!paths.some(p => p.includes('hidden.txt')), 'Should not find unreadable/hidden.txt');
|
||||
} finally {
|
||||
fs.chmodSync(unreadableSubdir, 0o755);
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 79: countInFile with valid string pattern ──
|
||||
console.log('\nRound 79: countInFile (valid string pattern):');
|
||||
|
||||
if (test('countInFile counts occurrences using a plain string pattern', () => {
|
||||
const testFile = path.join(utils.getTempDir(), `utils-test-count-str-${Date.now()}.txt`);
|
||||
try {
|
||||
utils.writeFile(testFile, 'apple banana apple cherry apple');
|
||||
// Pass a plain string (not RegExp) — exercises typeof pattern === 'string'
|
||||
// branch at utils.js:441-442 which creates new RegExp(pattern, 'g')
|
||||
const count = utils.countInFile(testFile, 'apple');
|
||||
assert.strictEqual(count, 3, 'String pattern should count all occurrences');
|
||||
} finally {
|
||||
fs.unlinkSync(testFile);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 79: grepFile with valid string pattern ──
|
||||
console.log('\nRound 79: grepFile (valid string pattern):');
|
||||
|
||||
if (test('grepFile finds matching lines using a plain string pattern', () => {
|
||||
const testFile = path.join(utils.getTempDir(), `utils-test-grep-str-${Date.now()}.txt`);
|
||||
try {
|
||||
utils.writeFile(testFile, 'line1 alpha\nline2 beta\nline3 alpha\nline4 gamma');
|
||||
// Pass a plain string (not RegExp) — exercises the else branch
|
||||
// at utils.js:468-469 which creates new RegExp(pattern)
|
||||
const matches = utils.grepFile(testFile, 'alpha');
|
||||
assert.strictEqual(matches.length, 2, 'String pattern should find 2 matching lines');
|
||||
assert.strictEqual(matches[0].lineNumber, 1, 'First match at line 1');
|
||||
assert.strictEqual(matches[1].lineNumber, 3, 'Second match at line 3');
|
||||
assert.ok(matches[0].content.includes('alpha'), 'Content should include pattern');
|
||||
} finally {
|
||||
fs.unlinkSync(testFile);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 84: findFiles inner statSync catch (TOCTOU — broken symlink) ──
|
||||
console.log('\nRound 84: findFiles (inner statSync catch — broken symlink):');
|
||||
|
||||
if (test('findFiles skips broken symlinks that match the pattern', () => {
|
||||
// findFiles at utils.js:170-173: readdirSync returns entries including broken
|
||||
// symlinks (entry.isFile() returns false for broken symlinks, but the test also
|
||||
// verifies the overall robustness). On some systems, broken symlinks can be
|
||||
// returned by readdirSync and pass through isFile() depending on the driver.
|
||||
// More importantly: if statSync throws inside the inner loop, catch continues.
|
||||
//
|
||||
// To reliably trigger the statSync catch: create a real file, list it, then
|
||||
// simulate the race. Since we can't truly race, we use a broken symlink which
|
||||
// will at minimum verify the function doesn't crash on unusual dir entries.
|
||||
const tmpDir = path.join(utils.getTempDir(), `ecc-r84-findfiles-toctou-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
|
||||
// Create a real file and a broken symlink, both matching *.txt
|
||||
const realFile = path.join(tmpDir, 'real.txt');
|
||||
fs.writeFileSync(realFile, 'content');
|
||||
const brokenLink = path.join(tmpDir, 'broken.txt');
|
||||
fs.symlinkSync('/nonexistent/path/does/not/exist', brokenLink);
|
||||
|
||||
try {
|
||||
const results = utils.findFiles(tmpDir, '*.txt');
|
||||
// The real file should be found; the broken symlink should be skipped
|
||||
const paths = results.map(r => r.path);
|
||||
assert.ok(paths.some(p => p.includes('real.txt')), 'Should find the real file');
|
||||
assert.ok(!paths.some(p => p.includes('broken.txt')),
|
||||
'Should not include broken symlink in results');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 85: getSessionIdShort fallback parameter ──
|
||||
console.log('\ngetSessionIdShort fallback (Round 85):');
|
||||
|
||||
if (test('getSessionIdShort uses fallback when getProjectName returns null (CWD at root)', () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — root CWD differs on Windows)');
|
||||
return;
|
||||
}
|
||||
// Spawn a subprocess at CWD=/ with CLAUDE_SESSION_ID empty.
|
||||
// At /, git rev-parse --show-toplevel fails → getGitRepoName() = null.
|
||||
// path.basename('/') = '' → '' || null = null → getProjectName() = null.
|
||||
// So getSessionIdShort('my-custom-fallback') = null || 'my-custom-fallback'.
|
||||
const utilsPath = path.join(__dirname, '..', '..', 'scripts', 'lib', 'utils.js');
|
||||
const script = `
|
||||
const utils = require('${utilsPath.replace(/'/g, "\\'")}');
|
||||
process.stdout.write(utils.getSessionIdShort('my-custom-fallback'));
|
||||
`;
|
||||
const { spawnSync } = require('child_process');
|
||||
const result = spawnSync('node', ['-e', script], {
|
||||
encoding: 'utf8',
|
||||
cwd: '/',
|
||||
env: { ...process.env, CLAUDE_SESSION_ID: '' },
|
||||
timeout: 10000
|
||||
});
|
||||
assert.strictEqual(result.status, 0, `Should exit 0, got status ${result.status}. stderr: ${result.stderr}`);
|
||||
assert.strictEqual(result.stdout, 'my-custom-fallback',
|
||||
`At CWD=/ with no session ID, should use the fallback parameter. Got: "${result.stdout}"`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 88: replaceInFile with empty replacement (deletion) ──
|
||||
console.log('\nRound 88: replaceInFile with empty replacement string (deletion):');
|
||||
if (test('replaceInFile with empty string replacement deletes matched text', () => {
|
||||
const tmpDir = path.join(utils.getTempDir(), `ecc-r88-replace-empty-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
const tmpFile = path.join(tmpDir, 'delete-test.txt');
|
||||
try {
|
||||
fs.writeFileSync(tmpFile, 'hello REMOVE_ME world');
|
||||
const result = utils.replaceInFile(tmpFile, 'REMOVE_ME ', '');
|
||||
assert.strictEqual(result, true, 'Should return true on successful replacement');
|
||||
const content = fs.readFileSync(tmpFile, 'utf8');
|
||||
assert.strictEqual(content, 'hello world',
|
||||
'Empty replacement should delete the matched text');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 88: countInFile with valid file but zero matches ──
|
||||
console.log('\nRound 88: countInFile with existing file but non-matching pattern:');
|
||||
if (test('countInFile returns 0 for valid file with no pattern matches', () => {
|
||||
const tmpDir = path.join(utils.getTempDir(), `ecc-r88-count-zero-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
const tmpFile = path.join(tmpDir, 'no-match.txt');
|
||||
try {
|
||||
fs.writeFileSync(tmpFile, 'apple banana cherry');
|
||||
const count = utils.countInFile(tmpFile, 'ZZZZNOTHERE');
|
||||
assert.strictEqual(count, 0,
|
||||
'Should return 0 when regex matches nothing in existing file');
|
||||
const countRegex = utils.countInFile(tmpFile, /ZZZZNOTHERE/g);
|
||||
assert.strictEqual(countRegex, 0,
|
||||
'Should return 0 for RegExp with no matches in existing file');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
// Summary
|
||||
console.log('\n=== Test Results ===');
|
||||
console.log(`Passed: ${passed}`);
|
||||
|
||||
@@ -8,6 +8,8 @@
|
||||
|
||||
const assert = require('assert');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const { execFileSync } = require('child_process');
|
||||
|
||||
const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'setup-package-manager.js');
|
||||
@@ -256,6 +258,137 @@ function runTests() {
|
||||
assert.strictEqual(installCount, 4, `Expected 4 "Install:" entries, found ${installCount}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 62: --global success path and bare PM name ──
|
||||
console.log('\n--global success path (Round 62):');
|
||||
|
||||
if (test('--global npm writes config and succeeds', () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `spm-test-global-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
try {
|
||||
const result = run(['--global', 'npm'], { HOME: tmpDir, USERPROFILE: tmpDir });
|
||||
assert.strictEqual(result.code, 0, `Expected exit 0, got ${result.code}. stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('Global preference set to'), 'Should show success message');
|
||||
assert.ok(result.stdout.includes('npm'), 'Should mention npm');
|
||||
// Verify config file was created
|
||||
const configPath = path.join(tmpDir, '.claude', 'package-manager.json');
|
||||
assert.ok(fs.existsSync(configPath), 'Config file should be created');
|
||||
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
|
||||
assert.strictEqual(config.packageManager, 'npm', 'Config should contain npm');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nbare PM name success (Round 62):');
|
||||
|
||||
if (test('bare npm sets global preference and succeeds', () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `spm-test-bare-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
try {
|
||||
const result = run(['npm'], { HOME: tmpDir, USERPROFILE: tmpDir });
|
||||
assert.strictEqual(result.code, 0, `Expected exit 0, got ${result.code}. stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('Global preference set to'), 'Should show success message');
|
||||
// Verify config file was created
|
||||
const configPath = path.join(tmpDir, '.claude', 'package-manager.json');
|
||||
assert.ok(fs.existsSync(configPath), 'Config file should be created');
|
||||
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
|
||||
assert.strictEqual(config.packageManager, 'npm', 'Config should contain npm');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\n--detect source label (Round 62):');
|
||||
|
||||
if (test('--detect with env var shows source as environment', () => {
|
||||
const result = run(['--detect'], { CLAUDE_PACKAGE_MANAGER: 'pnpm' });
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.ok(result.stdout.includes('Source: environment'), 'Should show environment as source');
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 68: --project success path and --list (current) marker ──
|
||||
console.log('\n--project success path (Round 68):');
|
||||
|
||||
if (test('--project npm writes project config and succeeds', () => {
|
||||
const tmpDir = path.join(os.tmpdir(), `spm-test-project-${Date.now()}`);
|
||||
fs.mkdirSync(tmpDir, { recursive: true });
|
||||
try {
|
||||
const result = require('child_process').spawnSync('node', [SCRIPT, '--project', 'npm'], {
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
env: { ...process.env },
|
||||
timeout: 10000,
|
||||
cwd: tmpDir
|
||||
});
|
||||
assert.strictEqual(result.status, 0, `Expected exit 0, got ${result.status}. stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('Project preference set to'), 'Should show project success message');
|
||||
assert.ok(result.stdout.includes('npm'), 'Should mention npm');
|
||||
// Verify config file was created in the project CWD
|
||||
const configPath = path.join(tmpDir, '.claude', 'package-manager.json');
|
||||
assert.ok(fs.existsSync(configPath), 'Project config file should be created in CWD');
|
||||
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
|
||||
assert.strictEqual(config.packageManager, 'npm', 'Config should contain npm');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\n--list (current) marker (Round 68):');
|
||||
|
||||
if (test('--list output includes (current) marker for active PM', () => {
|
||||
const result = run(['--list']);
|
||||
assert.strictEqual(result.code, 0);
|
||||
assert.ok(result.stdout.includes('(current)'), '--list should mark the active PM with (current)');
|
||||
// The (current) marker should appear exactly once
|
||||
const currentCount = (result.stdout.match(/\(current\)/g) || []).length;
|
||||
assert.strictEqual(currentCount, 1, `Expected exactly 1 "(current)" in --list, found ${currentCount}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 74: setGlobal catch — setPreferredPackageManager throws ──
|
||||
console.log('\nRound 74: setGlobal catch (save failure):');
|
||||
|
||||
if (test('--global npm fails when HOME is not a directory', () => {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(' (skipped — /dev/null not available on Windows)');
|
||||
return;
|
||||
}
|
||||
// HOME=/dev/null causes ensureDir to throw ENOTDIR when creating ~/.claude/
|
||||
const result = run(['--global', 'npm'], { HOME: '/dev/null', USERPROFILE: '/dev/null' });
|
||||
assert.strictEqual(result.code, 1, `Expected exit 1, got ${result.code}`);
|
||||
assert.ok(result.stderr.includes('Error:'),
|
||||
`stderr should contain Error:, got: ${result.stderr}`);
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 74: setProject catch — setProjectPackageManager throws ──
console.log('\nRound 74: setProject catch (save failure):');

if (test('--project npm fails when CWD is read-only', () => {
  // chmod-based read-only setup does not work on Windows and is bypassed by root.
  if (process.platform === 'win32' || process.getuid?.() === 0) {
    console.log(' (skipped — chmod ineffective on Windows/root)');
    return;
  }
  // mkdtempSync guarantees a unique directory even when test runs overlap;
  // the previous Date.now()-based name could collide under parallel execution.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'spm-test-ro-'));
  try {
    // Make CWD read-only so .claude/ dir creation fails with EACCES
    fs.chmodSync(tmpDir, 0o555);
    const result = require('child_process').spawnSync('node', [SCRIPT, '--project', 'npm'], {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      env: { ...process.env },
      timeout: 10000,
      cwd: tmpDir
    });
    assert.strictEqual(result.status, 1,
      `Expected exit 1, got ${result.status}. stderr: ${result.stderr}`);
    assert.ok(result.stderr.includes('Error:'),
      `stderr should contain Error:, got: ${result.stderr}`);
  } finally {
    // Restore write permission first so rmSync can actually delete the dir.
    try { fs.chmodSync(tmpDir, 0o755); } catch { /* best-effort */ }
    fs.rmSync(tmpDir, { recursive: true, force: true });
  }
})) passed++; else failed++;
|
||||
|
||||
// Summary — report totals and fail the process if any test failed.
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed === 0 ? 0 : 1);
|
||||
|
||||
@@ -451,6 +451,80 @@ function runTests() {
|
||||
});
|
||||
})) passed++; else failed++;
|
||||
|
||||
// ── Round 54: analysisResults with zero values ──
console.log('\nanalysisResults zero values (Round 54):');

if (test('analysisResults handles zero values for all data fields', () => {
  const out = new SkillCreateOutput('repo');
  const zeroData = { commits: 0, timeRange: '', contributors: 0, files: 0 };
  const logs = captureLog(() => out.analysisResults(zeroData));
  const combined = logs.join('\n');
  assert.ok(combined.includes('0'), 'Should display zero values');
  assert.ok(logs.length > 0, 'Should produce output without crash');
  // A well-formed box still needs its top (╭), side (│) and bottom (╰) runes.
  const boxRunes = ['\u256D', '\u2502', '\u2570'];
  const isBoxLine = (line) => {
    const visible = stripAnsi(line).trim();
    return boxRunes.some((rune) => visible.startsWith(rune));
  };
  const boxLines = combined.split('\n').filter(isBoxLine);
  assert.ok(boxLines.length >= 3, 'Should render a complete box');
})) passed++; else failed++;
|
||||
|
||||
// ── Round 68: demo function export ──
console.log('\ndemo export (Round 68):');

if (test('module exports demo function alongside SkillCreateOutput', () => {
  const mod = require('../../scripts/skill-create-output');
  // Destructure with a local alias to avoid shadowing the outer SkillCreateOutput.
  const { demo, SkillCreateOutput: exportedCtor } = mod;
  assert.ok(demo, 'Should export demo function');
  assert.strictEqual(typeof demo, 'function', 'demo should be a function');
  assert.ok(exportedCtor, 'Should also export SkillCreateOutput');
  assert.strictEqual(typeof exportedCtor, 'function', 'SkillCreateOutput should be a constructor');
})) passed++; else failed++;
|
||||
|
||||
// ── Round 85: patterns() confidence=0 uses ?? (not ||) ──
console.log('\nRound 85: patterns() confidence=0 nullish coalescing:');

if (test('patterns() with confidence=0 shows 0%, not 80% (nullish coalescing fix)', () => {
  const out = new SkillCreateOutput('repo');
  const zeroPattern = { name: 'Zero Confidence', trigger: 'never', confidence: 0, evidence: 'none' };
  const logs = captureLog(() => out.patterns([zeroPattern]));
  const combined = stripAnsi(logs.join('\n'));
  // With ?? operator: 0 ?? 0.8 = 0 → Math.round(0 * 100) = 0 → shows "0%"
  // With || operator (bug): 0 || 0.8 = 0.8 → shows "80%"
  assert.ok(combined.includes('0%'), 'Should show 0% for zero confidence');
  assert.ok(!combined.includes('80%'),
    'Should NOT show 80% — confidence=0 is explicitly provided, not missing');
})) passed++; else failed++;
|
||||
|
||||
// ── Round 87: analyzePhase() async method (untested) ──
console.log('\nRound 87: analyzePhase() async method:');

if (test('analyzePhase completes without error and writes to stdout', () => {
  const output = new SkillCreateOutput('test-repo');
  // analyzePhase is async and calls animateProgress which uses sleep() and
  // process.stdout.write/clearLine/cursorTo. In non-TTY environments clearLine
  // and cursorTo are undefined, but the code uses optional chaining (?.) to
  // handle this safely. We verify it resolves without throwing.
  // Capture stdout.write to verify output was produced.
  const writes = [];
  const origWrite = process.stdout.write;
  process.stdout.write = function(str) { writes.push(String(str)); return true; };
  try {
    // Call synchronously by accessing the returned promise — we just need to
    // verify it doesn't throw during setup. The sleeps total 1.9s so we
    // verify the promise is a thenable (async function returns Promise).
    const promise = output.analyzePhase({ commits: 42 });
    assert.ok(promise && typeof promise.then === 'function',
      'analyzePhase should return a Promise');
    // FIX: the promise keeps running after this synchronous test returns.
    // Without a rejection handler, a late failure would surface as an
    // unhandled rejection and abort the entire test process in modern Node.
    promise.catch(() => { /* late async failures are outside this test's scope */ });
  } finally {
    process.stdout.write = origWrite;
  }
  // Verify that process.stdout.write was called (the header line is written synchronously)
  assert.ok(writes.length > 0, 'Should have written output via process.stdout.write');
  assert.ok(writes.some(w => w.includes('Analyzing')), 'Should include "Analyzing" label');
})) passed++; else failed++;
|
||||
|
||||
// Summary — print totals; a non-zero exit code signals any failure to CI.
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed === 0 ? 0 : 1);
|
||||
|
||||
Reference in New Issue
Block a user