mirror of
https://github.com/affaan-m/everything-claude-code.git
synced 2026-03-30 13:43:26 +08:00
fix: harden observer hooks and test discovery (#513)
This commit is contained in:
@@ -232,7 +232,9 @@
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "node \"${CLAUDE_PLUGIN_ROOT}/scripts/hooks/run-with-flags.js\" \"session:end:marker\" \"scripts/hooks/session-end-marker.js\" \"minimal,standard,strict\""
|
||||
"command": "node \"${CLAUDE_PLUGIN_ROOT}/scripts/hooks/run-with-flags.js\" \"session:end:marker\" \"scripts/hooks/session-end-marker.js\" \"minimal,standard,strict\"",
|
||||
"async": true,
|
||||
"timeout": 10
|
||||
}
|
||||
],
|
||||
"description": "Session end lifecycle marker (non-blocking)"
|
||||
|
||||
@@ -1,83 +1,208 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Catalog agents, commands, and skills from the repo.
|
||||
* Outputs JSON with counts and lists for CI/docs sync.
|
||||
* Verify repo catalog counts against README.md and AGENTS.md.
|
||||
*
|
||||
* Usage: node scripts/ci/catalog.js [--json|--md]
|
||||
* Default: --json to stdout
|
||||
* Usage:
|
||||
* node scripts/ci/catalog.js
|
||||
* node scripts/ci/catalog.js --json
|
||||
* node scripts/ci/catalog.js --md
|
||||
* node scripts/ci/catalog.js --text
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const ROOT = path.join(__dirname, '../..');
|
||||
const AGENTS_DIR = path.join(ROOT, 'agents');
|
||||
const COMMANDS_DIR = path.join(ROOT, 'commands');
|
||||
const SKILLS_DIR = path.join(ROOT, 'skills');
|
||||
const README_PATH = path.join(ROOT, 'README.md');
|
||||
const AGENTS_PATH = path.join(ROOT, 'AGENTS.md');
|
||||
|
||||
function listAgents() {
|
||||
if (!fs.existsSync(AGENTS_DIR)) return [];
|
||||
try {
|
||||
return fs.readdirSync(AGENTS_DIR)
|
||||
.filter(f => f.endsWith('.md'))
|
||||
.map(f => f.slice(0, -3))
|
||||
.sort();
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read agents directory (${AGENTS_DIR}): ${error.message}`);
|
||||
}
|
||||
const OUTPUT_MODE = process.argv.includes('--md')
|
||||
? 'md'
|
||||
: process.argv.includes('--text')
|
||||
? 'text'
|
||||
: 'json';
|
||||
|
||||
/**
 * Convert an OS-specific relative path into forward-slash form so catalog
 * output is identical across platforms.
 */
function normalizePathSegments(relativePath) {
  const segments = relativePath.split(path.sep);
  return segments.join('/');
}
|
||||
|
||||
function listCommands() {
|
||||
if (!fs.existsSync(COMMANDS_DIR)) return [];
|
||||
try {
|
||||
return fs.readdirSync(COMMANDS_DIR)
|
||||
.filter(f => f.endsWith('.md'))
|
||||
.map(f => f.slice(0, -3))
|
||||
.sort();
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read commands directory (${COMMANDS_DIR}): ${error.message}`);
|
||||
/**
 * List entries in ROOT/relativeDir that satisfy `matcher`, returned as
 * sorted, forward-slash relative paths. Returns [] when the directory is
 * absent. Read failures are rethrown with a descriptive, directory-scoped
 * message for CI logs, matching the convention used by readFileOrThrow.
 *
 * @param {string} relativeDir - directory relative to the repo root
 * @param {(entry: import('fs').Dirent) => boolean} matcher - entry predicate
 * @returns {string[]} sorted relative paths (repo-root based)
 */
function listMatchingFiles(relativeDir, matcher) {
  const directory = path.join(ROOT, relativeDir);
  if (!fs.existsSync(directory)) {
    return [];
  }

  let entries;
  try {
    entries = fs.readdirSync(directory, { withFileTypes: true });
  } catch (error) {
    // Surface which directory failed; a bare ENOENT/EACCES is hard to act on in CI.
    throw new Error(`Failed to read ${relativeDir} directory (${directory}): ${error.message}`);
  }

  return entries
    .filter(entry => matcher(entry))
    .map(entry => normalizePathSegments(path.join(relativeDir, entry.name)))
    .sort();
}
|
||||
|
||||
function listSkills() {
|
||||
if (!fs.existsSync(SKILLS_DIR)) return [];
|
||||
try {
|
||||
const entries = fs.readdirSync(SKILLS_DIR, { withFileTypes: true });
|
||||
return entries
|
||||
.filter(e => e.isDirectory() && fs.existsSync(path.join(SKILLS_DIR, e.name, 'SKILL.md')))
|
||||
.map(e => e.name)
|
||||
.sort();
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to read skills directory (${SKILLS_DIR}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
// Build the repository catalog: agent/command markdown files plus skill
// directories containing a SKILL.md, each with a count, the file list, and
// the glob documented for that category ("agents/*.md", "commands/*.md",
// "skills/*/SKILL.md").
function buildCatalog() {
  const agents = listMatchingFiles('agents', entry => entry.isFile() && entry.name.endsWith('.md'));
  const commands = listMatchingFiles('commands', entry => entry.isFile() && entry.name.endsWith('.md'));
  // A skill only counts when its directory actually contains a SKILL.md.
  const skills = listMatchingFiles('skills', entry => entry.isDirectory() && fs.existsSync(path.join(ROOT, 'skills', entry.name, 'SKILL.md')))
    .map(skillDir => `${skillDir}/SKILL.md`);

  return {
    agents: { count: agents.length, files: agents, glob: 'agents/*.md' },
    commands: { count: commands.length, files: commands, glob: 'commands/*.md' },
    skills: { count: skills.length, files: skills, glob: 'skills/*/SKILL.md' }
  };
}
|
||||
|
||||
const format = process.argv[2] === '--md' ? 'md' : 'json';
|
||||
if (format === 'md') {
|
||||
console.log('# ECC Catalog (generated)\n');
|
||||
console.log(`- **Agents:** ${catalog.agents.count}`);
|
||||
console.log(`- **Commands:** ${catalog.commands.count}`);
|
||||
console.log(`- **Skills:** ${catalog.skills.count}\n`);
|
||||
console.log('## Agents\n');
|
||||
catalog.agents.list.forEach(a => { console.log(`- ${a}`); });
|
||||
console.log('\n## Commands\n');
|
||||
catalog.commands.list.forEach(c => { console.log(`- ${c}`); });
|
||||
console.log('\n## Skills\n');
|
||||
catalog.skills.list.forEach(s => { console.log(`- ${s}`); });
|
||||
} else {
|
||||
console.log(JSON.stringify(catalog, null, 2));
|
||||
/**
 * Read a file as UTF-8, rethrowing any failure with a short message scoped
 * to the file's basename.
 */
function readFileOrThrow(filePath) {
  let content;
  try {
    content = fs.readFileSync(filePath, 'utf8');
  } catch (error) {
    throw new Error(`Failed to read ${path.basename(filePath)}: ${error.message}`);
  }
  return content;
}
|
||||
|
||||
run();
|
||||
/**
 * Extract the catalog counts documented in README.md: three figures from the
 * quick-start summary sentence and three from the comparison table. Throws
 * when the summary sentence or any table row is missing.
 *
 * @param {string} readmeContent - full README.md text
 * @returns {Array<{category: string, mode: string, expected: number, source: string}>}
 */
function parseReadmeExpectations(readmeContent) {
  const summary = readmeContent.match(/access to\s+(\d+)\s+agents,\s+(\d+)\s+skills,\s+and\s+(\d+)\s+commands/i);
  if (!summary) {
    throw new Error('README.md is missing the quick-start catalog summary');
  }

  const quickStartSource = 'README.md quick-start summary';
  const expectations = [
    { category: 'agents', mode: 'exact', expected: Number(summary[1]), source: quickStartSource },
    { category: 'skills', mode: 'exact', expected: Number(summary[2]), source: quickStartSource },
    { category: 'commands', mode: 'exact', expected: Number(summary[3]), source: quickStartSource }
  ];

  const tableRows = [
    { category: 'agents', regex: /\|\s*Agents\s*\|\s*✅\s*(\d+)\s+agents\s*\|/i, source: 'README.md comparison table' },
    { category: 'commands', regex: /\|\s*Commands\s*\|\s*✅\s*(\d+)\s+commands\s*\|/i, source: 'README.md comparison table' },
    { category: 'skills', regex: /\|\s*Skills\s*\|\s*✅\s*(\d+)\s+skills\s*\|/i, source: 'README.md comparison table' }
  ];

  for (const row of tableRows) {
    const match = readmeContent.match(row.regex);
    if (!match) {
      throw new Error(`${row.source} is missing the ${row.category} row`);
    }
    expectations.push({
      category: row.category,
      mode: 'exact',
      expected: Number(match[1]),
      source: `${row.source} (${row.category})`
    });
  }

  return expectations;
}
|
||||
|
||||
/**
 * Extract the catalog counts documented in AGENTS.md's summary line. A
 * trailing '+' on the skills figure means "at least this many" (minimum
 * mode); the other figures are exact. Throws when the summary is missing.
 */
function parseAgentsDocExpectations(agentsContent) {
  const summary = agentsContent.match(/providing\s+(\d+)\s+specialized agents,\s+(\d+)(\+)?\s+skills,\s+(\d+)\s+commands/i);
  if (!summary) {
    throw new Error('AGENTS.md is missing the catalog summary line');
  }

  const source = 'AGENTS.md summary';
  const skillsMode = summary[3] ? 'minimum' : 'exact';

  return [
    { category: 'agents', mode: 'exact', expected: Number(summary[1]), source },
    { category: 'skills', mode: skillsMode, expected: Number(summary[2]), source },
    { category: 'commands', mode: 'exact', expected: Number(summary[4]), source }
  ];
}
|
||||
|
||||
/**
 * Compare each documented expectation against the actual catalog count.
 * 'minimum' expectations pass when actual >= expected; all others require
 * exact equality. Returns the expectations augmented with `actual` and `ok`.
 */
function evaluateExpectations(catalog, expectations) {
  const checks = [];
  for (const expectation of expectations) {
    const actual = catalog[expectation.category].count;
    const satisfied = expectation.mode === 'minimum'
      ? actual >= expectation.expected
      : actual === expectation.expected;
    checks.push({ ...expectation, actual, ok: satisfied });
  }
  return checks;
}
|
||||
|
||||
/**
 * One-line, human-readable summary of a documented-vs-actual count check.
 */
function formatExpectation(expectation) {
  const { source, category, mode, expected, actual } = expectation;
  const comparator = mode === 'minimum' ? '>=' : '=';
  return `${source}: ${category} documented ${comparator} ${expected}, actual ${actual}`;
}
|
||||
|
||||
/**
 * Print catalog counts and any documentation mismatches as plain text.
 * Counts and the all-clear go to stdout; mismatch details go to stderr so
 * CI logs highlight them.
 */
function renderText(result) {
  const { catalog, checks } = result;

  console.log('Catalog counts:');
  console.log(`- agents: ${catalog.agents.count}`);
  console.log(`- commands: ${catalog.commands.count}`);
  console.log(`- skills: ${catalog.skills.count}`);
  console.log('');

  const mismatches = checks.filter(check => !check.ok);
  if (mismatches.length === 0) {
    console.log('Documentation counts match the repository catalog.');
    return;
  }

  console.error('Documentation count mismatches found:');
  mismatches.forEach(mismatch => {
    console.error(`- ${formatExpectation(mismatch)}`);
  });
}
|
||||
|
||||
/**
 * Print the verification result as Markdown: a counts table (category,
 * count, glob pattern) plus a "Mismatches" section when documentation
 * disagrees with the repository catalog.
 */
function renderMarkdown(result) {
  const { catalog, checks } = result;
  const mismatches = checks.filter(check => !check.ok);

  console.log('# ECC Catalog Verification\n');
  console.log('| Category | Count | Pattern |');
  console.log('| --- | ---: | --- |');
  console.log(`| Agents | ${catalog.agents.count} | \`${catalog.agents.glob}\` |`);
  console.log(`| Commands | ${catalog.commands.count} | \`${catalog.commands.glob}\` |`);
  console.log(`| Skills | ${catalog.skills.count} | \`${catalog.skills.glob}\` |`);
  console.log('');

  if (mismatches.length === 0) {
    console.log('Documentation counts match the repository catalog.');
    return;
  }

  console.log('## Mismatches\n');
  mismatches.forEach(mismatch => {
    console.log(`- ${formatExpectation(mismatch)}`);
  });
}
|
||||
|
||||
/**
 * Entry point: build the repo catalog, parse the documented counts from
 * README.md and AGENTS.md, compare them, emit the result in the selected
 * output mode (json/md/text), and exit non-zero on any mismatch.
 */
function main() {
  const catalog = buildCatalog();
  // README is read first, then AGENTS.md, so read errors surface in that order.
  const expectations = [
    ...parseReadmeExpectations(readFileOrThrow(README_PATH)),
    ...parseAgentsDocExpectations(readFileOrThrow(AGENTS_PATH))
  ];
  const checks = evaluateExpectations(catalog, expectations);
  const result = { catalog, checks };

  switch (OUTPUT_MODE) {
    case 'json':
      console.log(JSON.stringify(result, null, 2));
      break;
    case 'md':
      renderMarkdown(result);
      break;
    default:
      renderText(result);
  }

  const hasMismatch = checks.some(check => !check.ok);
  if (hasMismatch) {
    process.exit(1);
  }
}
|
||||
|
||||
// Top-level driver: surface any thrown error as a single ERROR line and a
// non-zero exit code so CI fails visibly instead of printing a stack trace.
try {
  main();
} catch (error) {
  console.error(`ERROR: ${error.message}`);
  process.exit(1);
}
|
||||
|
||||
@@ -76,6 +76,10 @@ fi
|
||||
# ─────────────────────────────────────────────
|
||||
# Lightweight config and automated session guards
|
||||
# ─────────────────────────────────────────────
|
||||
#
|
||||
# IMPORTANT: keep these guards above detect-project.sh.
|
||||
# Sourcing detect-project.sh creates project-scoped directories and updates
|
||||
# projects.json, so automated sessions must return before that point.
|
||||
|
||||
CONFIG_DIR="${HOME}/.claude/homunculus"
|
||||
|
||||
|
||||
@@ -147,6 +147,71 @@ function withPrependedPath(binDir, env = {}) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Assert that a hook run left no project-detection artifacts under homeDir:
 * neither ~/.claude/homunculus/projects.json nor any project subdirectory.
 */
function assertNoProjectDetectionSideEffects(homeDir, testName) {
  const baseDir = path.join(homeDir, '.claude', 'homunculus');
  const registryFile = path.join(baseDir, 'projects.json');
  const projectsRoot = path.join(baseDir, 'projects');

  assert.ok(!fs.existsSync(registryFile), `${testName} should not create projects.json`);

  let directoryCount = 0;
  if (fs.existsSync(projectsRoot)) {
    for (const entry of fs.readdirSync(projectsRoot)) {
      if (fs.statSync(path.join(projectsRoot, entry)).isDirectory()) {
        directoryCount += 1;
      }
    }
  }
  assert.strictEqual(directoryCount, 0, `${testName} should not create project directories`);
}
|
||||
|
||||
/**
 * Drive observe.sh with a synthetic PostToolUse payload for one "should
 * skip" scenario and verify it exits 0 without creating project-detection
 * artifacts in the sandboxed HOME.
 *
 * @param {{name: string, env: object, payload?: object, cwdSuffix?: string}} testCase
 */
async function assertObserveSkipBeforeProjectDetection(testCase) {
  const observePath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'hooks', 'observe.sh');
  const homeDir = createTestDir();
  const projectDir = createTestDir();

  try {
    const cwd = testCase.cwdSuffix ? path.join(projectDir, testCase.cwdSuffix) : projectDir;
    fs.mkdirSync(cwd, { recursive: true });

    const basePayload = {
      tool_name: 'Bash',
      tool_input: { command: 'echo hello' },
      tool_response: 'ok',
      session_id: `session-${testCase.name.replace(/[^a-z0-9]+/gi, '-')}`,
      cwd
    };
    const payload = JSON.stringify({ ...basePayload, ...(testCase.payload || {}) });

    // USERPROFILE mirrors HOME so the script resolves the same sandbox on Windows.
    const env = { HOME: homeDir, USERPROFILE: homeDir, ...testCase.env };
    const result = await runShellScript(observePath, ['post'], payload, env, projectDir);

    assert.strictEqual(result.code, 0, `${testCase.name} should exit successfully, stderr: ${result.stderr}`);
    assertNoProjectDetectionSideEffects(homeDir, testCase.name);
  } finally {
    cleanupTestDir(homeDir);
    cleanupTestDir(projectDir);
  }
}
|
||||
|
||||
/**
 * Copy run-all.js into tempRoot with its `testsDir` constant rewired to
 * tempRoot/tests, execute the copy with node, and return its exit code and
 * captured output. Lets tests exercise the runner against a synthetic tree.
 */
function runPatchedRunAll(tempRoot) {
  const wrapperPath = path.join(tempRoot, 'run-all-wrapper.js');
  const tempTestsDir = path.join(tempRoot, 'tests');

  const originalSource = fs.readFileSync(path.join(__dirname, '..', 'run-all.js'), 'utf8');
  const patchedSource = originalSource.replace(
    'const testsDir = __dirname;',
    `const testsDir = ${JSON.stringify(tempTestsDir)};`
  );
  fs.writeFileSync(wrapperPath, patchedSource);

  const result = spawnSync('node', [wrapperPath], {
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe'],
    timeout: 15000,
  });

  // spawnSync yields status null on signal/timeout; report that as failure (1).
  return {
    code: result.status ?? 1,
    stdout: result.stdout || '',
    stderr: result.stderr || '',
  };
}
|
||||
|
||||
// Test suite
|
||||
async function runTests() {
|
||||
console.log('\n=== Testing Hook Scripts ===\n');
|
||||
@@ -389,22 +454,28 @@ async function runTests() {
|
||||
|
||||
if (
|
||||
await asyncTest('includes session ID in filename', async () => {
|
||||
const isoHome = path.join(os.tmpdir(), `ecc-session-id-${Date.now()}`);
|
||||
const testSessionId = 'test-session-abc12345';
|
||||
const expectedShortId = 'abc12345'; // Last 8 chars
|
||||
|
||||
// Run with custom session ID
|
||||
try {
|
||||
await runScript(path.join(scriptsDir, 'session-end.js'), '', {
|
||||
HOME: isoHome,
|
||||
USERPROFILE: isoHome,
|
||||
CLAUDE_SESSION_ID: testSessionId
|
||||
});
|
||||
|
||||
// Check if session file was created with session ID
|
||||
// Use local time to match the script's getDateString() function
|
||||
const sessionsDir = path.join(os.homedir(), '.claude', 'sessions');
|
||||
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
|
||||
const now = new Date();
|
||||
const today = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}-${String(now.getDate()).padStart(2, '0')}`;
|
||||
const sessionFile = path.join(sessionsDir, `${today}-${expectedShortId}-session.tmp`);
|
||||
|
||||
assert.ok(fs.existsSync(sessionFile), `Session file should exist: ${sessionFile}`);
|
||||
} finally {
|
||||
fs.rmSync(isoHome, { recursive: true, force: true });
|
||||
}
|
||||
})
|
||||
)
|
||||
passed++;
|
||||
@@ -1660,6 +1731,21 @@ async function runTests() {
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
if (
|
||||
test('SessionEnd marker hook is async and cleanup-safe', () => {
|
||||
const hooksPath = path.join(__dirname, '..', '..', 'hooks', 'hooks.json');
|
||||
const hooks = JSON.parse(fs.readFileSync(hooksPath, 'utf8'));
|
||||
const sessionEndHooks = hooks.hooks.SessionEnd.flatMap(entry => entry.hooks);
|
||||
const markerHook = sessionEndHooks.find(hook => hook.command.includes('session-end-marker.js'));
|
||||
|
||||
assert.ok(markerHook, 'SessionEnd should invoke session-end-marker.js');
|
||||
assert.strictEqual(markerHook.async, true, 'SessionEnd marker hook should run async during cleanup');
|
||||
assert.ok(Number.isInteger(markerHook.timeout) && markerHook.timeout > 0, 'SessionEnd marker hook should define a timeout');
|
||||
})
|
||||
)
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
if (
|
||||
test('all hook commands use node or approved shell wrappers', () => {
|
||||
const hooksPath = path.join(__dirname, '..', '..', 'hooks', 'hooks.json');
|
||||
@@ -2292,75 +2378,44 @@ async function runTests() {
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('observe.sh skips automated sessions before project detection side effects', async () => {
|
||||
const observePath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'hooks', 'observe.sh');
|
||||
const cases = [
|
||||
{
|
||||
if (await asyncTest('observe.sh skips non-cli entrypoints before project detection side effects', async () => {
|
||||
await assertObserveSkipBeforeProjectDetection({
|
||||
name: 'non-cli entrypoint',
|
||||
env: { CLAUDE_CODE_ENTRYPOINT: 'mcp' }
|
||||
},
|
||||
{
|
||||
});
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('observe.sh skips minimal hook profile before project detection side effects', async () => {
|
||||
await assertObserveSkipBeforeProjectDetection({
|
||||
name: 'minimal hook profile',
|
||||
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_HOOK_PROFILE: 'minimal' }
|
||||
},
|
||||
{
|
||||
});
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('observe.sh skips cooperative skip env before project detection side effects', async () => {
|
||||
await assertObserveSkipBeforeProjectDetection({
|
||||
name: 'cooperative skip env',
|
||||
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_SKIP_OBSERVE: '1' }
|
||||
},
|
||||
{
|
||||
});
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('observe.sh skips subagent payloads before project detection side effects', async () => {
|
||||
await assertObserveSkipBeforeProjectDetection({
|
||||
name: 'subagent payload',
|
||||
env: { CLAUDE_CODE_ENTRYPOINT: 'cli' },
|
||||
payload: { agent_id: 'agent-123' }
|
||||
},
|
||||
{
|
||||
});
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('observe.sh skips configured observer-session paths before project detection side effects', async () => {
|
||||
await assertObserveSkipBeforeProjectDetection({
|
||||
name: 'cwd skip path',
|
||||
env: {
|
||||
CLAUDE_CODE_ENTRYPOINT: 'cli',
|
||||
ECC_OBSERVE_SKIP_PATHS: ' observer-sessions , .claude-mem '
|
||||
},
|
||||
cwdSuffix: path.join('observer-sessions', 'worker')
|
||||
}
|
||||
];
|
||||
|
||||
for (const testCase of cases) {
|
||||
const homeDir = createTestDir();
|
||||
const projectDir = createTestDir();
|
||||
|
||||
try {
|
||||
const cwd = testCase.cwdSuffix ? path.join(projectDir, testCase.cwdSuffix) : projectDir;
|
||||
fs.mkdirSync(cwd, { recursive: true });
|
||||
|
||||
const payload = JSON.stringify({
|
||||
tool_name: 'Bash',
|
||||
tool_input: { command: 'echo hello' },
|
||||
tool_response: 'ok',
|
||||
session_id: `session-${testCase.name.replace(/[^a-z0-9]+/gi, '-')}`,
|
||||
cwd,
|
||||
...(testCase.payload || {})
|
||||
});
|
||||
|
||||
const result = await runShellScript(observePath, ['post'], payload, {
|
||||
HOME: homeDir,
|
||||
...testCase.env
|
||||
}, projectDir);
|
||||
|
||||
assert.strictEqual(result.code, 0, `${testCase.name} should exit successfully, stderr: ${result.stderr}`);
|
||||
|
||||
const homunculusDir = path.join(homeDir, '.claude', 'homunculus');
|
||||
const registryPath = path.join(homunculusDir, 'projects.json');
|
||||
const projectsDir = path.join(homunculusDir, 'projects');
|
||||
|
||||
assert.ok(!fs.existsSync(registryPath), `${testCase.name} should not create projects.json`);
|
||||
|
||||
const projectEntries = fs.existsSync(projectsDir)
|
||||
? fs.readdirSync(projectsDir).filter(entry => fs.statSync(path.join(projectsDir, entry)).isDirectory())
|
||||
: [];
|
||||
assert.strictEqual(projectEntries.length, 0, `${testCase.name} should not create project directories`);
|
||||
} finally {
|
||||
cleanupTestDir(homeDir);
|
||||
cleanupTestDir(projectDir);
|
||||
}
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('matches .tsx extension for type checking', async () => {
|
||||
@@ -3320,6 +3375,32 @@ async function runTests() {
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
if (
|
||||
await asyncTest('test runner discovers nested tests via tests/**/*.test.js glob', async () => {
|
||||
const testRoot = createTestDir();
|
||||
const testsDir = path.join(testRoot, 'tests');
|
||||
const nestedDir = path.join(testsDir, 'nested');
|
||||
fs.mkdirSync(nestedDir, { recursive: true });
|
||||
|
||||
fs.writeFileSync(path.join(testsDir, 'top.test.js'), "console.log('Passed: 1\\nFailed: 0');\n");
|
||||
fs.writeFileSync(path.join(nestedDir, 'deep.test.js'), "console.log('Passed: 2\\nFailed: 0');\n");
|
||||
fs.writeFileSync(path.join(nestedDir, 'ignore.js'), "console.log('Passed: 999\\nFailed: 999');\n");
|
||||
|
||||
try {
|
||||
const result = runPatchedRunAll(testRoot);
|
||||
assert.strictEqual(result.code, 0, `run-all wrapper should succeed, stderr: ${result.stderr}`);
|
||||
assert.ok(result.stdout.includes('Running top.test.js'), 'Should run the top-level test');
|
||||
assert.ok(result.stdout.includes('Running nested/deep.test.js'), 'Should run nested .test.js files');
|
||||
assert.ok(!result.stdout.includes('ignore.js'), 'Should ignore non-.test.js files');
|
||||
assert.ok(result.stdout.includes('Total Tests: 3'), `Should aggregate nested test totals, got: ${result.stdout}`);
|
||||
} finally {
|
||||
cleanupTestDir(testRoot);
|
||||
}
|
||||
})
|
||||
)
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
// ── Round 32: post-edit-typecheck special characters & check-console-log ──
|
||||
console.log('\nRound 32: post-edit-typecheck (special character paths):');
|
||||
|
||||
|
||||
@@ -10,25 +10,40 @@ const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
const testsDir = __dirname;
|
||||
const repoRoot = path.resolve(testsDir, '..');
|
||||
const TEST_GLOB = 'tests/**/*.test.js';
|
||||
|
||||
/**
|
||||
* Discover all *.test.js files under testsDir (relative paths for stable output order).
|
||||
*/
|
||||
function discoverTestFiles(dir, baseDir = dir, acc = []) {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
for (const e of entries) {
|
||||
const full = path.join(dir, e.name);
|
||||
const rel = path.relative(baseDir, full);
|
||||
if (e.isDirectory()) {
|
||||
discoverTestFiles(full, baseDir, acc);
|
||||
} else if (e.isFile() && e.name.endsWith('.test.js')) {
|
||||
acc.push(rel);
|
||||
// Check whether a repo-relative path matches TEST_GLOB ("tests/**/*.test.js").
// Normalizes OS separators first, prefers the built-in path.matchesGlob
// (Node >= 22), and falls back to an equivalent regular expression on older
// runtimes so discovery behaves the same everywhere.
function matchesTestGlob(relativePath) {
  const normalized = relativePath.split(path.sep).join('/');
  if (typeof path.matchesGlob === 'function') {
    return path.matchesGlob(normalized, TEST_GLOB);
  }
  // Fallback: "tests/" + any (possibly empty) directory chain + "*.test.js".
  return /^tests\/(?:.+\/)?[^/]+\.test\.js$/.test(normalized);
}
|
||||
|
||||
const testFiles = discoverTestFiles(testsDir);
|
||||
/**
 * Recursively collect every regular file beneath `dir`, returning absolute
 * paths. `acc` is threaded through recursive calls and returned.
 */
function walkFiles(dir, acc = []) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      walkFiles(fullPath, acc);
    } else if (entry.isFile()) {
      acc.push(fullPath);
    }
  }
  return acc;
}
|
||||
|
||||
// Discover every test file under testsDir whose repo-relative path matches
// the tests glob, returned as sorted paths relative to testsDir so the run
// order is stable across platforms.
function discoverTestFiles() {
  const repoRelative = walkFiles(testsDir).map(fullPath => path.relative(repoRoot, fullPath));
  return repoRelative
    .filter(matchesTestGlob)
    // Map back to testsDir-relative form, which is what the runner prints/executes.
    .map(relPath => path.relative(testsDir, path.join(repoRoot, relPath)))
    .sort();
}
|
||||
|
||||
const testFiles = discoverTestFiles();
|
||||
|
||||
const BOX_W = 58; // inner width between ║ delimiters
|
||||
const boxLine = s => `║${s.padEnd(BOX_W)}║`;
|
||||
@@ -38,6 +53,11 @@ console.log(boxLine(' Everything Claude Code - Test Suite'));
|
||||
console.log('╚' + '═'.repeat(BOX_W) + '╝');
|
||||
console.log();
|
||||
|
||||
// Fail the run loudly when discovery finds nothing: an empty match set means
// the glob or directory layout is broken, and a silent green run would hide it.
if (testFiles.length === 0) {
  console.log(`✗ No test files matched ${TEST_GLOB}`);
  process.exit(1);
}
|
||||
|
||||
let totalPassed = 0;
|
||||
let totalFailed = 0;
|
||||
let totalTests = 0;
|
||||
|
||||
Reference in New Issue
Block a user