fix: audit consumer projects from cwd

This commit is contained in:
Affaan Mustafa
2026-03-29 21:38:21 -04:00
parent dd675d4258
commit 1b4597a3d6
4 changed files with 323 additions and 53 deletions

View File

@@ -4,22 +4,23 @@ Run a deterministic repository harness audit and return a prioritized scorecard.
## Usage ## Usage
`/harness-audit [scope] [--format text|json]` `/harness-audit [scope] [--format text|json] [--root path]`
- `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents` - `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents`
- `--format`: output style (`text` default, `json` for automation) - `--format`: output style (`text` default, `json` for automation)
- `--root`: audit a specific path instead of the current working directory
## Deterministic Engine ## Deterministic Engine
Always run: Always run:
```bash ```bash
node scripts/harness-audit.js <scope> --format <text|json> node scripts/harness-audit.js <scope> --format <text|json> [--root <path>]
``` ```
This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points. This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points.
Rubric version: `2026-03-16`. Rubric version: `2026-03-30`.
The script computes 7 fixed categories (`0-10` normalized each): The script computes 7 fixed categories (`0-10` normalized each):
@@ -32,6 +33,7 @@ The script computes 7 fixed categories (`0-10` normalized each):
7. Cost Efficiency 7. Cost Efficiency
Scores are derived from explicit file/rule checks and are reproducible for the same commit. Scores are derived from explicit file/rule checks and are reproducible for the same commit.
The script audits the current working directory by default and auto-detects whether the target is the ECC repo itself or a consumer project using ECC.
## Output Contract ## Output Contract

View File

@@ -4,22 +4,23 @@ Run a deterministic repository harness audit and return a prioritized scorecard.
## Usage ## Usage
`/harness-audit [scope] [--format text|json]` `/harness-audit [scope] [--format text|json] [--root path]`
- `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents` - `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents`
- `--format`: output style (`text` default, `json` for automation) - `--format`: output style (`text` default, `json` for automation)
- `--root`: audit a specific path instead of the current working directory
## Deterministic Engine ## Deterministic Engine
Always run: Always run:
```bash ```bash
node scripts/harness-audit.js <scope> --format <text|json> node scripts/harness-audit.js <scope> --format <text|json> [--root <path>]
``` ```
This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points. This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points.
Rubric version: `2026-03-16`. Rubric version: `2026-03-30`.
The script computes 7 fixed categories (`0-10` normalized each): The script computes 7 fixed categories (`0-10` normalized each):
@@ -32,6 +33,7 @@ The script computes 7 fixed categories (`0-10` normalized each):
7. Cost Efficiency 7. Cost Efficiency
Scores are derived from explicit file/rule checks and are reproducible for the same commit. Scores are derived from explicit file/rule checks and are reproducible for the same commit.
The script audits the current working directory by default and auto-detects whether the target is the ECC repo itself or a consumer project using ECC.
## Output Contract ## Output Contract

View File

@@ -3,8 +3,6 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const REPO_ROOT = path.join(__dirname, '..');
const CATEGORIES = [ const CATEGORIES = [
'Tool Coverage', 'Tool Coverage',
'Context Efficiency', 'Context Efficiency',
@@ -29,6 +27,7 @@ function parseArgs(argv) {
scope: 'repo', scope: 'repo',
format: 'text', format: 'text',
help: false, help: false,
root: path.resolve(process.env.AUDIT_ROOT || process.cwd()),
}; };
for (let index = 0; index < args.length; index += 1) { for (let index = 0; index < args.length; index += 1) {
@@ -51,6 +50,12 @@ function parseArgs(argv) {
continue; continue;
} }
if (arg === '--root') {
parsed.root = path.resolve(args[index + 1] || process.cwd());
index += 1;
continue;
}
if (arg.startsWith('--format=')) { if (arg.startsWith('--format=')) {
parsed.format = arg.split('=')[1].toLowerCase(); parsed.format = arg.split('=')[1].toLowerCase();
continue; continue;
@@ -61,6 +66,11 @@ function parseArgs(argv) {
continue; continue;
} }
if (arg.startsWith('--root=')) {
parsed.root = path.resolve(arg.slice('--root='.length));
continue;
}
if (arg.startsWith('-')) { if (arg.startsWith('-')) {
throw new Error(`Unknown argument: ${arg}`); throw new Error(`Unknown argument: ${arg}`);
} }
@@ -75,16 +85,16 @@ function parseArgs(argv) {
return parsed; return parsed;
} }
function fileExists(relativePath) { function fileExists(rootDir, relativePath) {
return fs.existsSync(path.join(REPO_ROOT, relativePath)); return fs.existsSync(path.join(rootDir, relativePath));
} }
function readText(relativePath) { function readText(rootDir, relativePath) {
return fs.readFileSync(path.join(REPO_ROOT, relativePath), 'utf8'); return fs.readFileSync(path.join(rootDir, relativePath), 'utf8');
} }
function countFiles(relativeDir, extension) { function countFiles(rootDir, relativeDir, extension) {
const dirPath = path.join(REPO_ROOT, relativeDir); const dirPath = path.join(rootDir, relativeDir);
if (!fs.existsSync(dirPath)) { if (!fs.existsSync(dirPath)) {
return 0; return 0;
} }
@@ -109,19 +119,90 @@ function countFiles(relativeDir, extension) {
return count; return count;
} }
function safeRead(relativePath) { function safeRead(rootDir, relativePath) {
try { try {
return readText(relativePath); return readText(rootDir, relativePath);
} catch (_error) { } catch (_error) {
return ''; return '';
} }
} }
function getChecks() { function safeParseJson(text) {
const packageJson = JSON.parse(readText('package.json')); if (!text || !text.trim()) {
const commandPrimary = safeRead('commands/harness-audit.md').trim(); return null;
const commandParity = safeRead('.opencode/commands/harness-audit.md').trim(); }
const hooksJson = safeRead('hooks/hooks.json');
try {
return JSON.parse(text);
} catch (_error) {
return null;
}
}
/**
 * Report whether any regular file beneath rootDir/relativeDir (searched
 * recursively) has a name ending in one of the given extensions.
 * @param {string} rootDir - base directory of the audit target
 * @param {string} relativeDir - directory to search, relative to rootDir
 * @param {string|string[]} extensions - one suffix or a list of suffixes
 * @returns {boolean} true on the first matching file, false otherwise
 */
function hasFileWithExtension(rootDir, relativeDir, extensions) {
  const wanted = Array.isArray(extensions) ? extensions : [extensions];
  const matchesSuffix = (name) => wanted.some((suffix) => name.endsWith(suffix));
  // Depth-first recursive walk; short-circuits as soon as a match is found.
  const walk = (dir) => {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      if (entry.isDirectory()) {
        if (walk(path.join(dir, entry.name))) {
          return true;
        }
      } else if (matchesSuffix(entry.name)) {
        return true;
      }
    }
    return false;
  };
  const startDir = path.join(rootDir, relativeDir);
  return fs.existsSync(startDir) && walk(startDir);
}
/**
 * Decide whether the audit target is the ECC repository itself ('repo') or a
 * downstream project consuming ECC ('consumer').
 * Matches either the package name or the full set of repo layout markers.
 * @param {string} rootDir - directory being audited
 * @returns {'repo'|'consumer'}
 */
function detectTargetMode(rootDir) {
  const manifest = safeParseJson(safeRead(rootDir, 'package.json'));
  if (manifest?.name === 'everything-claude-code') {
    return 'repo';
  }
  // All four markers must be present for the layout-based repo detection.
  const repoMarkers = [
    'scripts/harness-audit.js',
    '.claude-plugin/plugin.json',
    'agents',
    'skills',
  ];
  const looksLikeRepo = repoMarkers.every((marker) => fileExists(rootDir, marker));
  return looksLikeRepo ? 'repo' : 'consumer';
}
/**
 * Locate an installed Everything Claude Code plugin manifest.
 * Project-local installs under rootDir/.claude/plugins take precedence over a
 * user-level install under the home directory.
 * @param {string} rootDir - project root being audited
 * @returns {string|null} absolute path of the first plugin.json found, or null
 */
function findPluginInstall(rootDir) {
  // HOME is typically unset on stock Windows shells; fall back to USERPROFILE
  // so user-level installs are still detected there.
  const homeDir = process.env.HOME || process.env.USERPROFILE || '';
  const pluginSegments = ['.claude', 'plugins', 'everything-claude-code'];
  const candidates = [
    path.join(rootDir, ...pluginSegments, '.claude-plugin', 'plugin.json'),
    path.join(rootDir, ...pluginSegments, 'plugin.json'),
    homeDir && path.join(homeDir, ...pluginSegments, '.claude-plugin', 'plugin.json'),
    homeDir && path.join(homeDir, ...pluginSegments, 'plugin.json'),
  ].filter(Boolean);
  return candidates.find((candidate) => fs.existsSync(candidate)) || null;
}
function getRepoChecks(rootDir) {
const packageJson = JSON.parse(readText(rootDir, 'package.json'));
const commandPrimary = safeRead(rootDir, 'commands/harness-audit.md').trim();
const commandParity = safeRead(rootDir, '.opencode/commands/harness-audit.md').trim();
const hooksJson = safeRead(rootDir, 'hooks/hooks.json');
return [ return [
{ {
@@ -131,7 +212,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'hooks/hooks.json', path: 'hooks/hooks.json',
description: 'Hook configuration file exists', description: 'Hook configuration file exists',
pass: fileExists('hooks/hooks.json'), pass: fileExists(rootDir, 'hooks/hooks.json'),
fix: 'Create hooks/hooks.json and define baseline hook events.', fix: 'Create hooks/hooks.json and define baseline hook events.',
}, },
{ {
@@ -141,7 +222,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'scripts/hooks/', path: 'scripts/hooks/',
description: 'At least 8 hook implementation scripts exist', description: 'At least 8 hook implementation scripts exist',
pass: countFiles('scripts/hooks', '.js') >= 8, pass: countFiles(rootDir, 'scripts/hooks', '.js') >= 8,
fix: 'Add missing hook implementations in scripts/hooks/.', fix: 'Add missing hook implementations in scripts/hooks/.',
}, },
{ {
@@ -151,7 +232,7 @@ function getChecks() {
scopes: ['repo', 'agents'], scopes: ['repo', 'agents'],
path: 'agents/', path: 'agents/',
description: 'At least 10 agent definitions exist', description: 'At least 10 agent definitions exist',
pass: countFiles('agents', '.md') >= 10, pass: countFiles(rootDir, 'agents', '.md') >= 10,
fix: 'Add or restore agent definitions under agents/.', fix: 'Add or restore agent definitions under agents/.',
}, },
{ {
@@ -161,7 +242,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/', path: 'skills/',
description: 'At least 20 skill definitions exist', description: 'At least 20 skill definitions exist',
pass: countFiles('skills', 'SKILL.md') >= 20, pass: countFiles(rootDir, 'skills', 'SKILL.md') >= 20,
fix: 'Add missing skill directories with SKILL.md definitions.', fix: 'Add missing skill directories with SKILL.md definitions.',
}, },
{ {
@@ -181,7 +262,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/strategic-compact/SKILL.md', path: 'skills/strategic-compact/SKILL.md',
description: 'Strategic compaction guidance is present', description: 'Strategic compaction guidance is present',
pass: fileExists('skills/strategic-compact/SKILL.md'), pass: fileExists(rootDir, 'skills/strategic-compact/SKILL.md'),
fix: 'Add strategic context compaction guidance at skills/strategic-compact/SKILL.md.', fix: 'Add strategic context compaction guidance at skills/strategic-compact/SKILL.md.',
}, },
{ {
@@ -191,7 +272,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'scripts/hooks/suggest-compact.js', path: 'scripts/hooks/suggest-compact.js',
description: 'Suggest-compact automation hook exists', description: 'Suggest-compact automation hook exists',
pass: fileExists('scripts/hooks/suggest-compact.js'), pass: fileExists(rootDir, 'scripts/hooks/suggest-compact.js'),
fix: 'Implement scripts/hooks/suggest-compact.js for context pressure hints.', fix: 'Implement scripts/hooks/suggest-compact.js for context pressure hints.',
}, },
{ {
@@ -201,7 +282,7 @@ function getChecks() {
scopes: ['repo', 'commands'], scopes: ['repo', 'commands'],
path: 'commands/model-route.md', path: 'commands/model-route.md',
description: 'Model routing command exists', description: 'Model routing command exists',
pass: fileExists('commands/model-route.md'), pass: fileExists(rootDir, 'commands/model-route.md'),
fix: 'Add model-route command guidance in commands/model-route.md.', fix: 'Add model-route command guidance in commands/model-route.md.',
}, },
{ {
@@ -211,7 +292,7 @@ function getChecks() {
scopes: ['repo'], scopes: ['repo'],
path: 'docs/token-optimization.md', path: 'docs/token-optimization.md',
description: 'Token optimization documentation exists', description: 'Token optimization documentation exists',
pass: fileExists('docs/token-optimization.md'), pass: fileExists(rootDir, 'docs/token-optimization.md'),
fix: 'Add docs/token-optimization.md with concrete context-cost controls.', fix: 'Add docs/token-optimization.md with concrete context-cost controls.',
}, },
{ {
@@ -221,7 +302,7 @@ function getChecks() {
scopes: ['repo'], scopes: ['repo'],
path: 'tests/run-all.js', path: 'tests/run-all.js',
description: 'Central test runner exists', description: 'Central test runner exists',
pass: fileExists('tests/run-all.js'), pass: fileExists(rootDir, 'tests/run-all.js'),
fix: 'Add tests/run-all.js to enforce complete suite execution.', fix: 'Add tests/run-all.js to enforce complete suite execution.',
}, },
{ {
@@ -241,7 +322,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'tests/hooks/hooks.test.js', path: 'tests/hooks/hooks.test.js',
description: 'Hook coverage test file exists', description: 'Hook coverage test file exists',
pass: fileExists('tests/hooks/hooks.test.js'), pass: fileExists(rootDir, 'tests/hooks/hooks.test.js'),
fix: 'Add tests/hooks/hooks.test.js for hook behavior validation.', fix: 'Add tests/hooks/hooks.test.js for hook behavior validation.',
}, },
{ {
@@ -251,7 +332,7 @@ function getChecks() {
scopes: ['repo'], scopes: ['repo'],
path: 'scripts/doctor.js', path: 'scripts/doctor.js',
description: 'Installation drift doctor script exists', description: 'Installation drift doctor script exists',
pass: fileExists('scripts/doctor.js'), pass: fileExists(rootDir, 'scripts/doctor.js'),
fix: 'Add scripts/doctor.js for install-state integrity checks.', fix: 'Add scripts/doctor.js for install-state integrity checks.',
}, },
{ {
@@ -261,7 +342,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'hooks/memory-persistence/', path: 'hooks/memory-persistence/',
description: 'Memory persistence hooks directory exists', description: 'Memory persistence hooks directory exists',
pass: fileExists('hooks/memory-persistence'), pass: fileExists(rootDir, 'hooks/memory-persistence'),
fix: 'Add hooks/memory-persistence with lifecycle hook definitions.', fix: 'Add hooks/memory-persistence with lifecycle hook definitions.',
}, },
{ {
@@ -271,7 +352,7 @@ function getChecks() {
scopes: ['repo', 'hooks'], scopes: ['repo', 'hooks'],
path: 'scripts/hooks/session-start.js', path: 'scripts/hooks/session-start.js',
description: 'Session start/end persistence scripts exist', description: 'Session start/end persistence scripts exist',
pass: fileExists('scripts/hooks/session-start.js') && fileExists('scripts/hooks/session-end.js'), pass: fileExists(rootDir, 'scripts/hooks/session-start.js') && fileExists(rootDir, 'scripts/hooks/session-end.js'),
fix: 'Implement scripts/hooks/session-start.js and scripts/hooks/session-end.js.', fix: 'Implement scripts/hooks/session-start.js and scripts/hooks/session-end.js.',
}, },
{ {
@@ -281,7 +362,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/continuous-learning-v2/SKILL.md', path: 'skills/continuous-learning-v2/SKILL.md',
description: 'Continuous learning v2 skill exists', description: 'Continuous learning v2 skill exists',
pass: fileExists('skills/continuous-learning-v2/SKILL.md'), pass: fileExists(rootDir, 'skills/continuous-learning-v2/SKILL.md'),
fix: 'Add skills/continuous-learning-v2/SKILL.md for memory evolution flow.', fix: 'Add skills/continuous-learning-v2/SKILL.md for memory evolution flow.',
}, },
{ {
@@ -291,7 +372,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/eval-harness/SKILL.md', path: 'skills/eval-harness/SKILL.md',
description: 'Eval harness skill exists', description: 'Eval harness skill exists',
pass: fileExists('skills/eval-harness/SKILL.md'), pass: fileExists(rootDir, 'skills/eval-harness/SKILL.md'),
fix: 'Add skills/eval-harness/SKILL.md for pass/fail regression evaluation.', fix: 'Add skills/eval-harness/SKILL.md for pass/fail regression evaluation.',
}, },
{ {
@@ -301,7 +382,7 @@ function getChecks() {
scopes: ['repo', 'commands'], scopes: ['repo', 'commands'],
path: 'commands/eval.md', path: 'commands/eval.md',
description: 'Eval and verification commands exist', description: 'Eval and verification commands exist',
pass: fileExists('commands/eval.md') && fileExists('commands/verify.md') && fileExists('commands/checkpoint.md'), pass: fileExists(rootDir, 'commands/eval.md') && fileExists(rootDir, 'commands/verify.md') && fileExists(rootDir, 'commands/checkpoint.md'),
fix: 'Add eval/checkpoint/verify commands to standardize verification loops.', fix: 'Add eval/checkpoint/verify commands to standardize verification loops.',
}, },
{ {
@@ -311,7 +392,7 @@ function getChecks() {
scopes: ['repo'], scopes: ['repo'],
path: 'tests/', path: 'tests/',
description: 'At least 10 test files exist', description: 'At least 10 test files exist',
pass: countFiles('tests', '.test.js') >= 10, pass: countFiles(rootDir, 'tests', '.test.js') >= 10,
fix: 'Increase automated test coverage across scripts/hooks/lib.', fix: 'Increase automated test coverage across scripts/hooks/lib.',
}, },
{ {
@@ -321,7 +402,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/security-review/SKILL.md', path: 'skills/security-review/SKILL.md',
description: 'Security review skill exists', description: 'Security review skill exists',
pass: fileExists('skills/security-review/SKILL.md'), pass: fileExists(rootDir, 'skills/security-review/SKILL.md'),
fix: 'Add skills/security-review/SKILL.md for security checklist coverage.', fix: 'Add skills/security-review/SKILL.md for security checklist coverage.',
}, },
{ {
@@ -331,7 +412,7 @@ function getChecks() {
scopes: ['repo', 'agents'], scopes: ['repo', 'agents'],
path: 'agents/security-reviewer.md', path: 'agents/security-reviewer.md',
description: 'Security reviewer agent exists', description: 'Security reviewer agent exists',
pass: fileExists('agents/security-reviewer.md'), pass: fileExists(rootDir, 'agents/security-reviewer.md'),
fix: 'Add agents/security-reviewer.md for delegated security audits.', fix: 'Add agents/security-reviewer.md for delegated security audits.',
}, },
{ {
@@ -351,7 +432,7 @@ function getChecks() {
scopes: ['repo', 'commands'], scopes: ['repo', 'commands'],
path: 'commands/security-scan.md', path: 'commands/security-scan.md',
description: 'Security scan command exists', description: 'Security scan command exists',
pass: fileExists('commands/security-scan.md'), pass: fileExists(rootDir, 'commands/security-scan.md'),
fix: 'Add commands/security-scan.md with scan and remediation workflow.', fix: 'Add commands/security-scan.md with scan and remediation workflow.',
}, },
{ {
@@ -361,7 +442,7 @@ function getChecks() {
scopes: ['repo', 'skills'], scopes: ['repo', 'skills'],
path: 'skills/cost-aware-llm-pipeline/SKILL.md', path: 'skills/cost-aware-llm-pipeline/SKILL.md',
description: 'Cost-aware LLM skill exists', description: 'Cost-aware LLM skill exists',
pass: fileExists('skills/cost-aware-llm-pipeline/SKILL.md'), pass: fileExists(rootDir, 'skills/cost-aware-llm-pipeline/SKILL.md'),
fix: 'Add skills/cost-aware-llm-pipeline/SKILL.md for budget-aware routing.', fix: 'Add skills/cost-aware-llm-pipeline/SKILL.md for budget-aware routing.',
}, },
{ {
@@ -371,7 +452,7 @@ function getChecks() {
scopes: ['repo'], scopes: ['repo'],
path: 'docs/token-optimization.md', path: 'docs/token-optimization.md',
description: 'Cost optimization documentation exists', description: 'Cost optimization documentation exists',
pass: fileExists('docs/token-optimization.md'), pass: fileExists(rootDir, 'docs/token-optimization.md'),
fix: 'Create docs/token-optimization.md with target settings and tradeoffs.', fix: 'Create docs/token-optimization.md with target settings and tradeoffs.',
}, },
{ {
@@ -381,12 +462,136 @@ function getChecks() {
scopes: ['repo', 'commands'], scopes: ['repo', 'commands'],
path: 'commands/model-route.md', path: 'commands/model-route.md',
description: 'Model route command exists for complexity-aware routing', description: 'Model route command exists for complexity-aware routing',
pass: fileExists('commands/model-route.md'), pass: fileExists(rootDir, 'commands/model-route.md'),
fix: 'Add commands/model-route.md and route policies for cheap-default execution.', fix: 'Add commands/model-route.md and route policies for cheap-default execution.',
}, },
]; ];
} }
/**
 * Build the deterministic check list used when auditing a consumer project
 * (a repo that uses ECC rather than the ECC repo itself).
 * All predicates are evaluated eagerly against rootDir at call time; each
 * check carries the category, point weight, applicable scopes, and a fix hint
 * consumed by the report builder.
 * @param {string} rootDir - project root being audited
 * @returns {Array<object>} check descriptors with precomputed pass booleans
 */
function getConsumerChecks(rootDir) {
  // safeRead/safeParseJson return ''/null on missing files, so every
  // predicate below must tolerate an absent file gracefully.
  const packageJson = safeParseJson(safeRead(rootDir, 'package.json'));
  const gitignore = safeRead(rootDir, '.gitignore');
  const projectHooks = safeRead(rootDir, '.claude/settings.json');
  const pluginInstall = findPluginInstall(rootDir);
  return [
    {
      id: 'consumer-plugin-install',
      category: 'Tool Coverage',
      points: 4,
      scopes: ['repo'],
      path: '~/.claude/plugins/everything-claude-code/',
      description: 'Everything Claude Code is installed for the active user or project',
      pass: Boolean(pluginInstall),
      fix: 'Install the ECC plugin for this user or project before auditing project-specific harness quality.',
    },
    {
      // Any one project-local customization (agents, skills, commands,
      // settings, or hooks) is enough to pass.
      id: 'consumer-project-overrides',
      category: 'Tool Coverage',
      points: 3,
      scopes: ['repo', 'hooks', 'skills', 'commands', 'agents'],
      path: '.claude/',
      description: 'Project-specific harness overrides exist under .claude/',
      pass: countFiles(rootDir, '.claude/agents', '.md') > 0 ||
        countFiles(rootDir, '.claude/skills', 'SKILL.md') > 0 ||
        countFiles(rootDir, '.claude/commands', '.md') > 0 ||
        fileExists(rootDir, '.claude/settings.json') ||
        fileExists(rootDir, '.claude/hooks.json'),
      fix: 'Add project-local .claude hooks, commands, skills, or settings that tailor ECC to this repo.',
    },
    {
      id: 'consumer-instructions',
      category: 'Context Efficiency',
      points: 3,
      scopes: ['repo'],
      path: 'AGENTS.md',
      description: 'The project has explicit agent or instruction context',
      pass: fileExists(rootDir, 'AGENTS.md') || fileExists(rootDir, 'CLAUDE.md') || fileExists(rootDir, '.claude/CLAUDE.md'),
      fix: 'Add AGENTS.md or CLAUDE.md so the harness has project-specific instructions.',
    },
    {
      id: 'consumer-project-config',
      category: 'Context Efficiency',
      points: 2,
      scopes: ['repo', 'hooks'],
      path: '.mcp.json',
      description: 'The project declares local MCP or Claude settings',
      pass: fileExists(rootDir, '.mcp.json') || fileExists(rootDir, '.claude/settings.json') || fileExists(rootDir, '.claude/settings.local.json'),
      fix: 'Add .mcp.json or .claude/settings.json so project-local tool configuration is explicit.',
    },
    {
      id: 'consumer-test-suite',
      category: 'Quality Gates',
      points: 4,
      scopes: ['repo'],
      path: 'tests/',
      description: 'The project has an automated test entrypoint',
      // NOTE(review): hasFileWithExtension(rootDir, '.', ...) recursively walks
      // the whole project tree, including node_modules if present — consider
      // excluding vendored directories if this proves slow on large projects.
      pass: typeof packageJson?.scripts?.test === 'string' || countFiles(rootDir, 'tests', '.test.js') > 0 || hasFileWithExtension(rootDir, '.', ['.spec.js', '.spec.ts', '.test.ts']),
      fix: 'Add a test script or checked-in tests so harness recommendations can be verified automatically.',
    },
    {
      id: 'consumer-ci-workflow',
      category: 'Quality Gates',
      points: 3,
      scopes: ['repo'],
      path: '.github/workflows/',
      description: 'The project has CI workflows checked in',
      pass: hasFileWithExtension(rootDir, '.github/workflows', ['.yml', '.yaml']),
      fix: 'Add at least one CI workflow so harness and test checks run outside local development.',
    },
    {
      id: 'consumer-memory-notes',
      category: 'Memory Persistence',
      points: 2,
      scopes: ['repo'],
      path: '.claude/memory.md',
      description: 'Project memory or durable notes are checked in',
      pass: fileExists(rootDir, '.claude/memory.md') || countFiles(rootDir, 'docs/adr', '.md') > 0,
      fix: 'Add durable project memory such as .claude/memory.md or ADRs under docs/adr/.',
    },
    {
      id: 'consumer-eval-coverage',
      category: 'Eval Coverage',
      points: 2,
      scopes: ['repo'],
      path: 'evals/',
      description: 'The project has evals or multiple automated tests',
      // NOTE(review): the null extension here depends on how countFiles treats
      // a non-string suffix — if it does name.endsWith(null) this never counts
      // anything; confirm countFiles supports "count all files" for null.
      pass: countFiles(rootDir, 'evals', null) > 0 || countFiles(rootDir, 'tests', '.test.js') >= 3,
      fix: 'Add eval fixtures or at least a few focused automated tests for critical flows.',
    },
    {
      id: 'consumer-security-policy',
      category: 'Security Guardrails',
      points: 2,
      scopes: ['repo'],
      path: 'SECURITY.md',
      description: 'The project exposes a security policy or automated dependency scanning',
      pass: fileExists(rootDir, 'SECURITY.md') || fileExists(rootDir, '.github/dependabot.yml') || fileExists(rootDir, '.github/codeql.yml'),
      fix: 'Add SECURITY.md or dependency/code scanning configuration to document the project security posture.',
    },
    {
      id: 'consumer-secret-hygiene',
      category: 'Security Guardrails',
      points: 2,
      scopes: ['repo'],
      path: '.gitignore',
      description: 'The project ignores common secret env files',
      // Substring match: any ".env" occurrence in .gitignore counts.
      pass: gitignore.includes('.env'),
      fix: 'Ignore .env-style files in .gitignore so secrets do not land in the repo.',
    },
    {
      id: 'consumer-hook-guardrails',
      category: 'Security Guardrails',
      points: 2,
      scopes: ['repo', 'hooks'],
      path: '.claude/settings.json',
      description: 'Project-local hook settings reference tool/prompt guardrails',
      // Raw-text search of settings.json; a dedicated hooks.json also passes.
      pass: projectHooks.includes('PreToolUse') || projectHooks.includes('beforeSubmitPrompt') || fileExists(rootDir, '.claude/hooks.json'),
      fix: 'Add project-local hook settings or hook definitions for prompt/tool guardrails.',
    },
  ];
}
function summarizeCategoryScores(checks) { function summarizeCategoryScores(checks) {
const scores = {}; const scores = {};
for (const category of CATEGORIES) { for (const category of CATEGORIES) {
@@ -407,8 +612,11 @@ function summarizeCategoryScores(checks) {
return scores; return scores;
} }
function buildReport(scope) { function buildReport(scope, options = {}) {
const checks = getChecks().filter(check => check.scopes.includes(scope)); const rootDir = path.resolve(options.rootDir || process.cwd());
const targetMode = options.targetMode || detectTargetMode(rootDir);
const checks = (targetMode === 'repo' ? getRepoChecks(rootDir) : getConsumerChecks(rootDir))
.filter(check => check.scopes.includes(scope));
const categoryScores = summarizeCategoryScores(checks); const categoryScores = summarizeCategoryScores(checks);
const maxScore = checks.reduce((sum, check) => sum + check.points, 0); const maxScore = checks.reduce((sum, check) => sum + check.points, 0);
const overallScore = checks const overallScore = checks
@@ -428,8 +636,10 @@ function buildReport(scope) {
return { return {
scope, scope,
root_dir: rootDir,
target_mode: targetMode,
deterministic: true, deterministic: true,
rubric_version: '2026-03-16', rubric_version: '2026-03-30',
overall_score: overallScore, overall_score: overallScore,
max_score: maxScore, max_score: maxScore,
categories: categoryScores, categories: categoryScores,
@@ -446,7 +656,8 @@ function buildReport(scope) {
} }
function printText(report) { function printText(report) {
console.log(`Harness Audit (${report.scope}): ${report.overall_score}/${report.max_score}`); console.log(`Harness Audit (${report.scope}, ${report.target_mode}): ${report.overall_score}/${report.max_score}`);
console.log(`Root: ${report.root_dir}`);
console.log(''); console.log('');
for (const category of CATEGORIES) { for (const category of CATEGORIES) {
@@ -474,8 +685,10 @@ function printText(report) {
function showHelp(exitCode = 0) { function showHelp(exitCode = 0) {
console.log(` console.log(`
Usage: node scripts/harness-audit.js [scope] [--scope <repo|hooks|skills|commands|agents>] [--format <text|json>] Usage: node scripts/harness-audit.js [scope] [--scope <repo|hooks|skills|commands|agents>] [--format <text|json>]
[--root <path>]
Deterministic harness audit based on explicit file/rule checks. Deterministic harness audit based on explicit file/rule checks.
Audits the current working directory by default and auto-detects ECC repo mode vs consumer-project mode.
`); `);
process.exit(exitCode); process.exit(exitCode);
} }
@@ -489,7 +702,7 @@ function main() {
return; return;
} }
const report = buildReport(args.scope); const report = buildReport(args.scope, { rootDir: args.root });
if (args.format === 'json') { if (args.format === 'json') {
console.log(JSON.stringify(report, null, 2)); console.log(JSON.stringify(report, null, 2));

View File

@@ -3,14 +3,28 @@
*/ */
const assert = require('assert'); const assert = require('assert');
const fs = require('fs');
const os = require('os');
const path = require('path'); const path = require('path');
const { execFileSync } = require('child_process'); const { execFileSync } = require('child_process');
const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'harness-audit.js'); const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'harness-audit.js');
function run(args = []) { function createTempDir(prefix) {
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
}
// Remove a temporary directory tree; force:true makes this a no-op when the
// path is already gone, so repeated calls are safe.
function cleanup(tempDir) {
  fs.rmSync(tempDir, { force: true, recursive: true });
}
function run(args = [], options = {}) {
const stdout = execFileSync('node', [SCRIPT, ...args], { const stdout = execFileSync('node', [SCRIPT, ...args], {
cwd: path.join(__dirname, '..', '..'), cwd: options.cwd || path.join(__dirname, '..', '..'),
env: {
...process.env,
HOME: options.homeDir || process.env.HOME,
},
encoding: 'utf8', encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe'], stdio: ['pipe', 'pipe', 'pipe'],
timeout: 10000, timeout: 10000,
@@ -48,7 +62,8 @@ function runTests() {
const parsed = JSON.parse(run(['repo', '--format', 'json'])); const parsed = JSON.parse(run(['repo', '--format', 'json']));
assert.strictEqual(parsed.deterministic, true); assert.strictEqual(parsed.deterministic, true);
assert.strictEqual(parsed.rubric_version, '2026-03-16'); assert.strictEqual(parsed.rubric_version, '2026-03-30');
assert.strictEqual(parsed.target_mode, 'repo');
assert.ok(parsed.overall_score >= 0); assert.ok(parsed.overall_score >= 0);
assert.ok(parsed.max_score > 0); assert.ok(parsed.max_score > 0);
assert.ok(parsed.overall_score <= parsed.max_score); assert.ok(parsed.overall_score <= parsed.max_score);
@@ -75,10 +90,48 @@ function runTests() {
if (test('text format includes summary header', () => { if (test('text format includes summary header', () => {
const output = run(['repo']); const output = run(['repo']);
assert.ok(output.includes('Harness Audit (repo):')); assert.ok(output.includes('Harness Audit (repo, repo):'));
assert.ok(output.includes('Top 3 Actions:') || output.includes('Checks:')); assert.ok(output.includes('Top 3 Actions:') || output.includes('Checks:'));
})) passed++; else failed++; })) passed++; else failed++;
if (test('audits consumer projects from cwd instead of the ECC repo root', () => {
const homeDir = createTempDir('harness-audit-home-');
const projectRoot = createTempDir('harness-audit-project-');
try {
fs.mkdirSync(path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin'), { recursive: true });
fs.writeFileSync(
path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin', 'plugin.json'),
JSON.stringify({ name: 'everything-claude-code' }, null, 2)
);
fs.mkdirSync(path.join(projectRoot, '.github', 'workflows'), { recursive: true });
fs.mkdirSync(path.join(projectRoot, 'tests'), { recursive: true });
fs.mkdirSync(path.join(projectRoot, '.claude'), { recursive: true });
fs.writeFileSync(path.join(projectRoot, 'AGENTS.md'), '# Project instructions\n');
fs.writeFileSync(path.join(projectRoot, '.mcp.json'), JSON.stringify({ mcpServers: {} }, null, 2));
fs.writeFileSync(path.join(projectRoot, '.gitignore'), 'node_modules\n.env\n');
fs.writeFileSync(path.join(projectRoot, '.github', 'workflows', 'ci.yml'), 'name: ci\n');
fs.writeFileSync(path.join(projectRoot, 'tests', 'app.test.js'), 'test placeholder\n');
fs.writeFileSync(path.join(projectRoot, '.claude', 'settings.json'), JSON.stringify({ hooks: ['PreToolUse'] }, null, 2));
fs.writeFileSync(
path.join(projectRoot, 'package.json'),
JSON.stringify({ name: 'consumer-project', scripts: { test: 'node tests/app.test.js' } }, null, 2)
);
const parsed = JSON.parse(run(['repo', '--format', 'json'], { cwd: projectRoot, homeDir }));
assert.strictEqual(parsed.target_mode, 'consumer');
assert.strictEqual(parsed.root_dir, fs.realpathSync(projectRoot));
assert.ok(parsed.overall_score > 0, 'Consumer project should receive non-zero score when harness signals exist');
assert.ok(parsed.checks.some(check => check.id === 'consumer-plugin-install' && check.pass));
assert.ok(parsed.checks.every(check => !check.path.startsWith('agents/') && !check.path.startsWith('skills/')));
} finally {
cleanup(homeDir);
cleanup(projectRoot);
}
})) passed++; else failed++;
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`); console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0); process.exit(failed > 0 ? 1 : 0);
} }