mirror of
https://github.com/affaan-m/everything-claude-code.git
synced 2026-03-30 21:53:28 +08:00
Compare commits
6 Commits
fix/deskto
...
fix/codex-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d8c940f2e7 | ||
|
|
a4d4b1d756 | ||
|
|
c90566f9be | ||
|
|
b9a01d3c32 | ||
|
|
fab80c99b7 | ||
|
|
8846210ca2 |
@@ -4,22 +4,23 @@ Run a deterministic repository harness audit and return a prioritized scorecard.
|
||||
|
||||
## Usage
|
||||
|
||||
`/harness-audit [scope] [--format text|json]`
|
||||
`/harness-audit [scope] [--format text|json] [--root path]`
|
||||
|
||||
- `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents`
|
||||
- `--format`: output style (`text` default, `json` for automation)
|
||||
- `--root`: audit a specific path instead of the current working directory
|
||||
|
||||
## Deterministic Engine
|
||||
|
||||
Always run:
|
||||
|
||||
```bash
|
||||
node scripts/harness-audit.js <scope> --format <text|json>
|
||||
node scripts/harness-audit.js <scope> --format <text|json> [--root <path>]
|
||||
```
|
||||
|
||||
This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points.
|
||||
|
||||
Rubric version: `2026-03-16`.
|
||||
Rubric version: `2026-03-30`.
|
||||
|
||||
The script computes 7 fixed categories (`0-10` normalized each):
|
||||
|
||||
@@ -32,6 +33,7 @@ The script computes 7 fixed categories (`0-10` normalized each):
|
||||
7. Cost Efficiency
|
||||
|
||||
Scores are derived from explicit file/rule checks and are reproducible for the same commit.
|
||||
The script audits the current working directory by default and auto-detects whether the target is the ECC repo itself or a consumer project using ECC.
|
||||
|
||||
## Output Contract
|
||||
|
||||
|
||||
@@ -39,6 +39,40 @@ ensure_manifest_entry() {
|
||||
fi
|
||||
}
|
||||
|
||||
manifest_has_entry() {
|
||||
local manifest="$1"
|
||||
local entry="$2"
|
||||
|
||||
[ -f "$manifest" ] && grep -Fqx "$entry" "$manifest"
|
||||
}
|
||||
|
||||
copy_managed_file() {
|
||||
local source_path="$1"
|
||||
local target_path="$2"
|
||||
local manifest="$3"
|
||||
local manifest_entry="$4"
|
||||
local make_executable="${5:-0}"
|
||||
|
||||
local already_managed=0
|
||||
if manifest_has_entry "$manifest" "$manifest_entry"; then
|
||||
already_managed=1
|
||||
fi
|
||||
|
||||
if [ -f "$target_path" ]; then
|
||||
if [ "$already_managed" -eq 1 ]; then
|
||||
ensure_manifest_entry "$manifest" "$manifest_entry"
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
cp "$source_path" "$target_path"
|
||||
if [ "$make_executable" -eq 1 ]; then
|
||||
chmod +x "$target_path"
|
||||
fi
|
||||
ensure_manifest_entry "$manifest" "$manifest_entry"
|
||||
return 0
|
||||
}
|
||||
|
||||
# Install function
|
||||
do_install() {
|
||||
local target_dir="$PWD"
|
||||
@@ -95,12 +129,8 @@ do_install() {
|
||||
[ -f "$f" ] || continue
|
||||
local_name=$(basename "$f")
|
||||
target_path="$trae_full_path/commands/$local_name"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$f" "$target_path"
|
||||
ensure_manifest_entry "$MANIFEST" "commands/$local_name"
|
||||
if copy_managed_file "$f" "$target_path" "$MANIFEST" "commands/$local_name"; then
|
||||
commands=$((commands + 1))
|
||||
else
|
||||
ensure_manifest_entry "$MANIFEST" "commands/$local_name"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
@@ -111,12 +141,8 @@ do_install() {
|
||||
[ -f "$f" ] || continue
|
||||
local_name=$(basename "$f")
|
||||
target_path="$trae_full_path/agents/$local_name"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$f" "$target_path"
|
||||
ensure_manifest_entry "$MANIFEST" "agents/$local_name"
|
||||
if copy_managed_file "$f" "$target_path" "$MANIFEST" "agents/$local_name"; then
|
||||
agents=$((agents + 1))
|
||||
else
|
||||
ensure_manifest_entry "$MANIFEST" "agents/$local_name"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
@@ -134,11 +160,9 @@ do_install() {
|
||||
target_path="$target_skill_dir/$relative_path"
|
||||
|
||||
mkdir -p "$(dirname "$target_path")"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$source_file" "$target_path"
|
||||
if copy_managed_file "$source_file" "$target_path" "$MANIFEST" "skills/$skill_name/$relative_path"; then
|
||||
skill_copied=1
|
||||
fi
|
||||
ensure_manifest_entry "$MANIFEST" "skills/$skill_name/$relative_path"
|
||||
done < <(find "$d" -type f | sort)
|
||||
|
||||
if [ "$skill_copied" -eq 1 ]; then
|
||||
@@ -154,11 +178,9 @@ do_install() {
|
||||
target_path="$trae_full_path/rules/$relative_path"
|
||||
|
||||
mkdir -p "$(dirname "$target_path")"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$rule_file" "$target_path"
|
||||
if copy_managed_file "$rule_file" "$target_path" "$MANIFEST" "rules/$relative_path"; then
|
||||
rules=$((rules + 1))
|
||||
fi
|
||||
ensure_manifest_entry "$MANIFEST" "rules/$relative_path"
|
||||
done < <(find "$REPO_ROOT/rules" -type f | sort)
|
||||
fi
|
||||
|
||||
@@ -167,12 +189,8 @@ do_install() {
|
||||
if [ -f "$readme_file" ]; then
|
||||
local_name=$(basename "$readme_file")
|
||||
target_path="$trae_full_path/$local_name"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$readme_file" "$target_path"
|
||||
ensure_manifest_entry "$MANIFEST" "$local_name"
|
||||
if copy_managed_file "$readme_file" "$target_path" "$MANIFEST" "$local_name"; then
|
||||
other=$((other + 1))
|
||||
else
|
||||
ensure_manifest_entry "$MANIFEST" "$local_name"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
@@ -182,13 +200,8 @@ do_install() {
|
||||
if [ -f "$script_file" ]; then
|
||||
local_name=$(basename "$script_file")
|
||||
target_path="$trae_full_path/$local_name"
|
||||
if [ ! -f "$target_path" ]; then
|
||||
cp "$script_file" "$target_path"
|
||||
chmod +x "$target_path"
|
||||
ensure_manifest_entry "$MANIFEST" "$local_name"
|
||||
if copy_managed_file "$script_file" "$target_path" "$MANIFEST" "$local_name" 1; then
|
||||
other=$((other + 1))
|
||||
else
|
||||
ensure_manifest_entry "$MANIFEST" "$local_name"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
@@ -4,22 +4,23 @@ Run a deterministic repository harness audit and return a prioritized scorecard.
|
||||
|
||||
## Usage
|
||||
|
||||
`/harness-audit [scope] [--format text|json]`
|
||||
`/harness-audit [scope] [--format text|json] [--root path]`
|
||||
|
||||
- `scope` (optional): `repo` (default), `hooks`, `skills`, `commands`, `agents`
|
||||
- `--format`: output style (`text` default, `json` for automation)
|
||||
- `--root`: audit a specific path instead of the current working directory
|
||||
|
||||
## Deterministic Engine
|
||||
|
||||
Always run:
|
||||
|
||||
```bash
|
||||
node scripts/harness-audit.js <scope> --format <text|json>
|
||||
node scripts/harness-audit.js <scope> --format <text|json> [--root <path>]
|
||||
```
|
||||
|
||||
This script is the source of truth for scoring and checks. Do not invent additional dimensions or ad-hoc points.
|
||||
|
||||
Rubric version: `2026-03-16`.
|
||||
Rubric version: `2026-03-30`.
|
||||
|
||||
The script computes 7 fixed categories (`0-10` normalized each):
|
||||
|
||||
@@ -32,6 +33,7 @@ The script computes 7 fixed categories (`0-10` normalized each):
|
||||
7. Cost Efficiency
|
||||
|
||||
Scores are derived from explicit file/rule checks and are reproducible for the same commit.
|
||||
The script audits the current working directory by default and auto-detects whether the target is the ECC repo itself or a consumer project using ECC.
|
||||
|
||||
## Output Contract
|
||||
|
||||
|
||||
@@ -75,17 +75,17 @@ origin: auto-extracted
|
||||
|
||||
**Guideline dimensions** (informing the verdict, not scored):
|
||||
|
||||
- **Specificity & Actionability**: Contains code examples or commands that are immediately usable
|
||||
- **Scope Fit**: Name, trigger conditions, and content are aligned and focused on a single pattern
|
||||
- **Uniqueness**: Provides value not covered by existing skills (informed by checklist results)
|
||||
- **Reusability**: Realistic trigger scenarios exist in future sessions
|
||||
- **Specificity & Actionability**: Contains code examples or commands that are immediately usable
|
||||
- **Scope Fit**: Name, trigger conditions, and content are aligned and focused on a single pattern
|
||||
- **Uniqueness**: Provides value not covered by existing skills (informed by checklist results)
|
||||
- **Reusability**: Realistic trigger scenarios exist in future sessions
|
||||
|
||||
6. **Verdict-specific confirmation flow**
|
||||
|
||||
- **Improve then Save**: Present the required improvements + revised draft + updated checklist/verdict after one re-evaluation; if the revised verdict is **Save**, save after user confirmation, otherwise follow the new verdict
|
||||
- **Save**: Present save path + checklist results + 1-line verdict rationale + full draft → save after user confirmation
|
||||
- **Absorb into [X]**: Present target path + additions (diff format) + checklist results + verdict rationale → append after user confirmation
|
||||
- **Drop**: Show checklist results + reasoning only (no confirmation needed)
|
||||
- **Improve then Save**: Present the required improvements + revised draft + updated checklist/verdict after one re-evaluation; if the revised verdict is **Save**, save after user confirmation, otherwise follow the new verdict
|
||||
- **Save**: Present save path + checklist results + 1-line verdict rationale + full draft → save after user confirmation
|
||||
- **Absorb into [X]**: Present target path + additions (diff format) + checklist results + verdict rationale → append after user confirmation
|
||||
- **Drop**: Show checklist results + reasoning only (no confirmation needed)
|
||||
|
||||
7. Save / Absorb to the determined location
|
||||
|
||||
|
||||
@@ -203,17 +203,17 @@ Synthesize both analyses, generate **Step-by-step Implementation Plan**:
|
||||
2. Save plan to `.claude/plan/<feature-name>.md` (extract feature name from requirement, e.g., `user-auth`, `payment-module`)
|
||||
3. Output prompt in **bold text** (MUST use actual saved file path):
|
||||
|
||||
---
|
||||
---
|
||||
**Plan generated and saved to `.claude/plan/actual-feature-name.md`**
|
||||
|
||||
**Please review the plan above. You can:**
|
||||
- **Modify plan**: Tell me what needs adjustment, I'll update the plan
|
||||
- **Execute plan**: Copy the following command to a new session
|
||||
- **Modify plan**: Tell me what needs adjustment, I'll update the plan
|
||||
- **Execute plan**: Copy the following command to a new session
|
||||
|
||||
```
|
||||
/ccg:execute .claude/plan/actual-feature-name.md
|
||||
```
|
||||
---
|
||||
```
|
||||
/ccg:execute .claude/plan/actual-feature-name.md
|
||||
```
|
||||
---
|
||||
|
||||
**NOTE**: The `actual-feature-name.md` above MUST be replaced with the actual saved filename!
|
||||
|
||||
|
||||
@@ -310,7 +310,7 @@
|
||||
"timeout": 10
|
||||
}
|
||||
],
|
||||
"description": "Send desktop notification (macOS/WSL) with task summary when Claude responds"
|
||||
"description": "Send macOS desktop notification with task summary when Claude responds"
|
||||
}
|
||||
],
|
||||
"SessionEnd": [
|
||||
|
||||
11
install.sh
11
install.sh
@@ -20,4 +20,13 @@ if [ ! -d "$SCRIPT_DIR/node_modules" ]; then
|
||||
(cd "$SCRIPT_DIR" && npm install --no-audit --no-fund --loglevel=error)
|
||||
fi
|
||||
|
||||
exec node "$SCRIPT_DIR/scripts/install-apply.js" "$@"
|
||||
# On MSYS2/Git Bash, convert the POSIX path to a Windows path so Node.js
|
||||
# (a native Windows binary) receives a valid path instead of a doubled one
|
||||
# like G:\g\projects\... that results from Git Bash's auto path conversion.
|
||||
if command -v cygpath &>/dev/null; then
|
||||
NODE_SCRIPT="$(cygpath -w "$SCRIPT_DIR/scripts/install-apply.js")"
|
||||
else
|
||||
NODE_SCRIPT="$SCRIPT_DIR/scripts/install-apply.js"
|
||||
fi
|
||||
|
||||
exec node "$NODE_SCRIPT" "$@"
|
||||
|
||||
@@ -50,9 +50,7 @@ const writeModeSkip = new Set([
|
||||
path.normalize('tests/scripts/check-unicode-safety.test.js'),
|
||||
]);
|
||||
|
||||
const dangerousInvisibleRe =
|
||||
/[\u200B-\u200D\u2060\uFEFF\u202A-\u202E\u2066-\u2069\uFE00-\uFE0F\u{E0100}-\u{E01EF}]/gu;
|
||||
const emojiRe = /[\p{Extended_Pictographic}\p{Regional_Indicator}]/gu;
|
||||
const emojiRe = /(?:\p{Extended_Pictographic}|\p{Regional_Indicator})/gu;
|
||||
const allowedSymbolCodePoints = new Set([
|
||||
0x00A9,
|
||||
0x00AE,
|
||||
@@ -106,9 +104,31 @@ function isAllowedEmojiLikeSymbol(char) {
|
||||
return allowedSymbolCodePoints.has(char.codePointAt(0));
|
||||
}
|
||||
|
||||
function isDangerousInvisibleCodePoint(codePoint) {
|
||||
return (
|
||||
(codePoint >= 0x200B && codePoint <= 0x200D) ||
|
||||
codePoint === 0x2060 ||
|
||||
codePoint === 0xFEFF ||
|
||||
(codePoint >= 0x202A && codePoint <= 0x202E) ||
|
||||
(codePoint >= 0x2066 && codePoint <= 0x2069) ||
|
||||
(codePoint >= 0xFE00 && codePoint <= 0xFE0F) ||
|
||||
(codePoint >= 0xE0100 && codePoint <= 0xE01EF)
|
||||
);
|
||||
}
|
||||
|
||||
function stripDangerousInvisibleChars(text) {
|
||||
let next = '';
|
||||
for (const char of text) {
|
||||
if (!isDangerousInvisibleCodePoint(char.codePointAt(0))) {
|
||||
next += char;
|
||||
}
|
||||
}
|
||||
return next;
|
||||
}
|
||||
|
||||
function sanitizeText(text) {
|
||||
let next = text;
|
||||
next = next.replace(dangerousInvisibleRe, '');
|
||||
next = stripDangerousInvisibleChars(next);
|
||||
|
||||
for (const [pattern, replacement] of targetedReplacements) {
|
||||
next = next.replace(pattern, replacement);
|
||||
@@ -146,6 +166,28 @@ function collectMatches(text, regex, kind) {
|
||||
return matches;
|
||||
}
|
||||
|
||||
function collectDangerousInvisibleMatches(text) {
|
||||
const matches = [];
|
||||
let index = 0;
|
||||
|
||||
for (const char of text) {
|
||||
const codePoint = char.codePointAt(0);
|
||||
if (isDangerousInvisibleCodePoint(codePoint)) {
|
||||
const { line, column } = lineAndColumn(text, index);
|
||||
matches.push({
|
||||
kind: 'dangerous-invisible',
|
||||
char,
|
||||
codePoint: `U+${codePoint.toString(16).toUpperCase()}`,
|
||||
line,
|
||||
column,
|
||||
});
|
||||
}
|
||||
index += char.length;
|
||||
}
|
||||
|
||||
return matches;
|
||||
}
|
||||
|
||||
const changedFiles = [];
|
||||
const violations = [];
|
||||
|
||||
@@ -172,7 +214,7 @@ for (const filePath of listFiles(repoRoot)) {
|
||||
}
|
||||
|
||||
const fileViolations = [
|
||||
...collectMatches(text, dangerousInvisibleRe, 'dangerous-invisible'),
|
||||
...collectDangerousInvisibleMatches(text),
|
||||
...collectMatches(text, emojiRe, 'emoji'),
|
||||
];
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const REPO_ROOT = path.join(__dirname, '..');
|
||||
|
||||
const CATEGORIES = [
|
||||
'Tool Coverage',
|
||||
'Context Efficiency',
|
||||
@@ -29,6 +27,7 @@ function parseArgs(argv) {
|
||||
scope: 'repo',
|
||||
format: 'text',
|
||||
help: false,
|
||||
root: path.resolve(process.env.AUDIT_ROOT || process.cwd()),
|
||||
};
|
||||
|
||||
for (let index = 0; index < args.length; index += 1) {
|
||||
@@ -51,6 +50,12 @@ function parseArgs(argv) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg === '--root') {
|
||||
parsed.root = path.resolve(args[index + 1] || process.cwd());
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--format=')) {
|
||||
parsed.format = arg.split('=')[1].toLowerCase();
|
||||
continue;
|
||||
@@ -61,6 +66,11 @@ function parseArgs(argv) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--root=')) {
|
||||
parsed.root = path.resolve(arg.slice('--root='.length));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('-')) {
|
||||
throw new Error(`Unknown argument: ${arg}`);
|
||||
}
|
||||
@@ -75,16 +85,16 @@ function parseArgs(argv) {
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function fileExists(relativePath) {
|
||||
return fs.existsSync(path.join(REPO_ROOT, relativePath));
|
||||
function fileExists(rootDir, relativePath) {
|
||||
return fs.existsSync(path.join(rootDir, relativePath));
|
||||
}
|
||||
|
||||
function readText(relativePath) {
|
||||
return fs.readFileSync(path.join(REPO_ROOT, relativePath), 'utf8');
|
||||
function readText(rootDir, relativePath) {
|
||||
return fs.readFileSync(path.join(rootDir, relativePath), 'utf8');
|
||||
}
|
||||
|
||||
function countFiles(relativeDir, extension) {
|
||||
const dirPath = path.join(REPO_ROOT, relativeDir);
|
||||
function countFiles(rootDir, relativeDir, extension) {
|
||||
const dirPath = path.join(rootDir, relativeDir);
|
||||
if (!fs.existsSync(dirPath)) {
|
||||
return 0;
|
||||
}
|
||||
@@ -109,19 +119,90 @@ function countFiles(relativeDir, extension) {
|
||||
return count;
|
||||
}
|
||||
|
||||
function safeRead(relativePath) {
|
||||
function safeRead(rootDir, relativePath) {
|
||||
try {
|
||||
return readText(relativePath);
|
||||
return readText(rootDir, relativePath);
|
||||
} catch (_error) {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
function getChecks() {
|
||||
const packageJson = JSON.parse(readText('package.json'));
|
||||
const commandPrimary = safeRead('commands/harness-audit.md').trim();
|
||||
const commandParity = safeRead('.opencode/commands/harness-audit.md').trim();
|
||||
const hooksJson = safeRead('hooks/hooks.json');
|
||||
function safeParseJson(text) {
|
||||
if (!text || !text.trim()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
} catch (_error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function hasFileWithExtension(rootDir, relativeDir, extensions) {
|
||||
const dirPath = path.join(rootDir, relativeDir);
|
||||
if (!fs.existsSync(dirPath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const allowed = Array.isArray(extensions) ? extensions : [extensions];
|
||||
const stack = [dirPath];
|
||||
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop();
|
||||
const entries = fs.readdirSync(current, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const nextPath = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
stack.push(nextPath);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (allowed.some((extension) => entry.name.endsWith(extension))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function detectTargetMode(rootDir) {
|
||||
const packageJson = safeParseJson(safeRead(rootDir, 'package.json'));
|
||||
if (packageJson?.name === 'everything-claude-code') {
|
||||
return 'repo';
|
||||
}
|
||||
|
||||
if (
|
||||
fileExists(rootDir, 'scripts/harness-audit.js') &&
|
||||
fileExists(rootDir, '.claude-plugin/plugin.json') &&
|
||||
fileExists(rootDir, 'agents') &&
|
||||
fileExists(rootDir, 'skills')
|
||||
) {
|
||||
return 'repo';
|
||||
}
|
||||
|
||||
return 'consumer';
|
||||
}
|
||||
|
||||
function findPluginInstall(rootDir) {
|
||||
const homeDir = process.env.HOME || '';
|
||||
const candidates = [
|
||||
path.join(rootDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin', 'plugin.json'),
|
||||
path.join(rootDir, '.claude', 'plugins', 'everything-claude-code', 'plugin.json'),
|
||||
homeDir && path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin', 'plugin.json'),
|
||||
homeDir && path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', 'plugin.json'),
|
||||
].filter(Boolean);
|
||||
|
||||
return candidates.find(candidate => fs.existsSync(candidate)) || null;
|
||||
}
|
||||
|
||||
function getRepoChecks(rootDir) {
|
||||
const packageJson = JSON.parse(readText(rootDir, 'package.json'));
|
||||
const commandPrimary = safeRead(rootDir, 'commands/harness-audit.md').trim();
|
||||
const commandParity = safeRead(rootDir, '.opencode/commands/harness-audit.md').trim();
|
||||
const hooksJson = safeRead(rootDir, 'hooks/hooks.json');
|
||||
|
||||
return [
|
||||
{
|
||||
@@ -131,7 +212,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'hooks/hooks.json',
|
||||
description: 'Hook configuration file exists',
|
||||
pass: fileExists('hooks/hooks.json'),
|
||||
pass: fileExists(rootDir, 'hooks/hooks.json'),
|
||||
fix: 'Create hooks/hooks.json and define baseline hook events.',
|
||||
},
|
||||
{
|
||||
@@ -141,7 +222,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'scripts/hooks/',
|
||||
description: 'At least 8 hook implementation scripts exist',
|
||||
pass: countFiles('scripts/hooks', '.js') >= 8,
|
||||
pass: countFiles(rootDir, 'scripts/hooks', '.js') >= 8,
|
||||
fix: 'Add missing hook implementations in scripts/hooks/.',
|
||||
},
|
||||
{
|
||||
@@ -151,7 +232,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'agents'],
|
||||
path: 'agents/',
|
||||
description: 'At least 10 agent definitions exist',
|
||||
pass: countFiles('agents', '.md') >= 10,
|
||||
pass: countFiles(rootDir, 'agents', '.md') >= 10,
|
||||
fix: 'Add or restore agent definitions under agents/.',
|
||||
},
|
||||
{
|
||||
@@ -161,7 +242,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/',
|
||||
description: 'At least 20 skill definitions exist',
|
||||
pass: countFiles('skills', 'SKILL.md') >= 20,
|
||||
pass: countFiles(rootDir, 'skills', 'SKILL.md') >= 20,
|
||||
fix: 'Add missing skill directories with SKILL.md definitions.',
|
||||
},
|
||||
{
|
||||
@@ -181,7 +262,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/strategic-compact/SKILL.md',
|
||||
description: 'Strategic compaction guidance is present',
|
||||
pass: fileExists('skills/strategic-compact/SKILL.md'),
|
||||
pass: fileExists(rootDir, 'skills/strategic-compact/SKILL.md'),
|
||||
fix: 'Add strategic context compaction guidance at skills/strategic-compact/SKILL.md.',
|
||||
},
|
||||
{
|
||||
@@ -191,7 +272,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'scripts/hooks/suggest-compact.js',
|
||||
description: 'Suggest-compact automation hook exists',
|
||||
pass: fileExists('scripts/hooks/suggest-compact.js'),
|
||||
pass: fileExists(rootDir, 'scripts/hooks/suggest-compact.js'),
|
||||
fix: 'Implement scripts/hooks/suggest-compact.js for context pressure hints.',
|
||||
},
|
||||
{
|
||||
@@ -201,7 +282,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'commands'],
|
||||
path: 'commands/model-route.md',
|
||||
description: 'Model routing command exists',
|
||||
pass: fileExists('commands/model-route.md'),
|
||||
pass: fileExists(rootDir, 'commands/model-route.md'),
|
||||
fix: 'Add model-route command guidance in commands/model-route.md.',
|
||||
},
|
||||
{
|
||||
@@ -211,7 +292,7 @@ function getChecks() {
|
||||
scopes: ['repo'],
|
||||
path: 'docs/token-optimization.md',
|
||||
description: 'Token optimization documentation exists',
|
||||
pass: fileExists('docs/token-optimization.md'),
|
||||
pass: fileExists(rootDir, 'docs/token-optimization.md'),
|
||||
fix: 'Add docs/token-optimization.md with concrete context-cost controls.',
|
||||
},
|
||||
{
|
||||
@@ -221,7 +302,7 @@ function getChecks() {
|
||||
scopes: ['repo'],
|
||||
path: 'tests/run-all.js',
|
||||
description: 'Central test runner exists',
|
||||
pass: fileExists('tests/run-all.js'),
|
||||
pass: fileExists(rootDir, 'tests/run-all.js'),
|
||||
fix: 'Add tests/run-all.js to enforce complete suite execution.',
|
||||
},
|
||||
{
|
||||
@@ -241,7 +322,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'tests/hooks/hooks.test.js',
|
||||
description: 'Hook coverage test file exists',
|
||||
pass: fileExists('tests/hooks/hooks.test.js'),
|
||||
pass: fileExists(rootDir, 'tests/hooks/hooks.test.js'),
|
||||
fix: 'Add tests/hooks/hooks.test.js for hook behavior validation.',
|
||||
},
|
||||
{
|
||||
@@ -251,7 +332,7 @@ function getChecks() {
|
||||
scopes: ['repo'],
|
||||
path: 'scripts/doctor.js',
|
||||
description: 'Installation drift doctor script exists',
|
||||
pass: fileExists('scripts/doctor.js'),
|
||||
pass: fileExists(rootDir, 'scripts/doctor.js'),
|
||||
fix: 'Add scripts/doctor.js for install-state integrity checks.',
|
||||
},
|
||||
{
|
||||
@@ -261,7 +342,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'hooks/memory-persistence/',
|
||||
description: 'Memory persistence hooks directory exists',
|
||||
pass: fileExists('hooks/memory-persistence'),
|
||||
pass: fileExists(rootDir, 'hooks/memory-persistence'),
|
||||
fix: 'Add hooks/memory-persistence with lifecycle hook definitions.',
|
||||
},
|
||||
{
|
||||
@@ -271,7 +352,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: 'scripts/hooks/session-start.js',
|
||||
description: 'Session start/end persistence scripts exist',
|
||||
pass: fileExists('scripts/hooks/session-start.js') && fileExists('scripts/hooks/session-end.js'),
|
||||
pass: fileExists(rootDir, 'scripts/hooks/session-start.js') && fileExists(rootDir, 'scripts/hooks/session-end.js'),
|
||||
fix: 'Implement scripts/hooks/session-start.js and scripts/hooks/session-end.js.',
|
||||
},
|
||||
{
|
||||
@@ -281,7 +362,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/continuous-learning-v2/SKILL.md',
|
||||
description: 'Continuous learning v2 skill exists',
|
||||
pass: fileExists('skills/continuous-learning-v2/SKILL.md'),
|
||||
pass: fileExists(rootDir, 'skills/continuous-learning-v2/SKILL.md'),
|
||||
fix: 'Add skills/continuous-learning-v2/SKILL.md for memory evolution flow.',
|
||||
},
|
||||
{
|
||||
@@ -291,7 +372,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/eval-harness/SKILL.md',
|
||||
description: 'Eval harness skill exists',
|
||||
pass: fileExists('skills/eval-harness/SKILL.md'),
|
||||
pass: fileExists(rootDir, 'skills/eval-harness/SKILL.md'),
|
||||
fix: 'Add skills/eval-harness/SKILL.md for pass/fail regression evaluation.',
|
||||
},
|
||||
{
|
||||
@@ -301,7 +382,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'commands'],
|
||||
path: 'commands/eval.md',
|
||||
description: 'Eval and verification commands exist',
|
||||
pass: fileExists('commands/eval.md') && fileExists('commands/verify.md') && fileExists('commands/checkpoint.md'),
|
||||
pass: fileExists(rootDir, 'commands/eval.md') && fileExists(rootDir, 'commands/verify.md') && fileExists(rootDir, 'commands/checkpoint.md'),
|
||||
fix: 'Add eval/checkpoint/verify commands to standardize verification loops.',
|
||||
},
|
||||
{
|
||||
@@ -311,7 +392,7 @@ function getChecks() {
|
||||
scopes: ['repo'],
|
||||
path: 'tests/',
|
||||
description: 'At least 10 test files exist',
|
||||
pass: countFiles('tests', '.test.js') >= 10,
|
||||
pass: countFiles(rootDir, 'tests', '.test.js') >= 10,
|
||||
fix: 'Increase automated test coverage across scripts/hooks/lib.',
|
||||
},
|
||||
{
|
||||
@@ -321,7 +402,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/security-review/SKILL.md',
|
||||
description: 'Security review skill exists',
|
||||
pass: fileExists('skills/security-review/SKILL.md'),
|
||||
pass: fileExists(rootDir, 'skills/security-review/SKILL.md'),
|
||||
fix: 'Add skills/security-review/SKILL.md for security checklist coverage.',
|
||||
},
|
||||
{
|
||||
@@ -331,7 +412,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'agents'],
|
||||
path: 'agents/security-reviewer.md',
|
||||
description: 'Security reviewer agent exists',
|
||||
pass: fileExists('agents/security-reviewer.md'),
|
||||
pass: fileExists(rootDir, 'agents/security-reviewer.md'),
|
||||
fix: 'Add agents/security-reviewer.md for delegated security audits.',
|
||||
},
|
||||
{
|
||||
@@ -351,7 +432,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'commands'],
|
||||
path: 'commands/security-scan.md',
|
||||
description: 'Security scan command exists',
|
||||
pass: fileExists('commands/security-scan.md'),
|
||||
pass: fileExists(rootDir, 'commands/security-scan.md'),
|
||||
fix: 'Add commands/security-scan.md with scan and remediation workflow.',
|
||||
},
|
||||
{
|
||||
@@ -361,7 +442,7 @@ function getChecks() {
|
||||
scopes: ['repo', 'skills'],
|
||||
path: 'skills/cost-aware-llm-pipeline/SKILL.md',
|
||||
description: 'Cost-aware LLM skill exists',
|
||||
pass: fileExists('skills/cost-aware-llm-pipeline/SKILL.md'),
|
||||
pass: fileExists(rootDir, 'skills/cost-aware-llm-pipeline/SKILL.md'),
|
||||
fix: 'Add skills/cost-aware-llm-pipeline/SKILL.md for budget-aware routing.',
|
||||
},
|
||||
{
|
||||
@@ -371,7 +452,7 @@ function getChecks() {
|
||||
scopes: ['repo'],
|
||||
path: 'docs/token-optimization.md',
|
||||
description: 'Cost optimization documentation exists',
|
||||
pass: fileExists('docs/token-optimization.md'),
|
||||
pass: fileExists(rootDir, 'docs/token-optimization.md'),
|
||||
fix: 'Create docs/token-optimization.md with target settings and tradeoffs.',
|
||||
},
|
||||
{
|
||||
@@ -381,12 +462,136 @@ function getChecks() {
|
||||
scopes: ['repo', 'commands'],
|
||||
path: 'commands/model-route.md',
|
||||
description: 'Model route command exists for complexity-aware routing',
|
||||
pass: fileExists('commands/model-route.md'),
|
||||
pass: fileExists(rootDir, 'commands/model-route.md'),
|
||||
fix: 'Add commands/model-route.md and route policies for cheap-default execution.',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
function getConsumerChecks(rootDir) {
|
||||
const packageJson = safeParseJson(safeRead(rootDir, 'package.json'));
|
||||
const gitignore = safeRead(rootDir, '.gitignore');
|
||||
const projectHooks = safeRead(rootDir, '.claude/settings.json');
|
||||
const pluginInstall = findPluginInstall(rootDir);
|
||||
|
||||
return [
|
||||
{
|
||||
id: 'consumer-plugin-install',
|
||||
category: 'Tool Coverage',
|
||||
points: 4,
|
||||
scopes: ['repo'],
|
||||
path: '~/.claude/plugins/everything-claude-code/',
|
||||
description: 'Everything Claude Code is installed for the active user or project',
|
||||
pass: Boolean(pluginInstall),
|
||||
fix: 'Install the ECC plugin for this user or project before auditing project-specific harness quality.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-project-overrides',
|
||||
category: 'Tool Coverage',
|
||||
points: 3,
|
||||
scopes: ['repo', 'hooks', 'skills', 'commands', 'agents'],
|
||||
path: '.claude/',
|
||||
description: 'Project-specific harness overrides exist under .claude/',
|
||||
pass: countFiles(rootDir, '.claude/agents', '.md') > 0 ||
|
||||
countFiles(rootDir, '.claude/skills', 'SKILL.md') > 0 ||
|
||||
countFiles(rootDir, '.claude/commands', '.md') > 0 ||
|
||||
fileExists(rootDir, '.claude/settings.json') ||
|
||||
fileExists(rootDir, '.claude/hooks.json'),
|
||||
fix: 'Add project-local .claude hooks, commands, skills, or settings that tailor ECC to this repo.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-instructions',
|
||||
category: 'Context Efficiency',
|
||||
points: 3,
|
||||
scopes: ['repo'],
|
||||
path: 'AGENTS.md',
|
||||
description: 'The project has explicit agent or instruction context',
|
||||
pass: fileExists(rootDir, 'AGENTS.md') || fileExists(rootDir, 'CLAUDE.md') || fileExists(rootDir, '.claude/CLAUDE.md'),
|
||||
fix: 'Add AGENTS.md or CLAUDE.md so the harness has project-specific instructions.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-project-config',
|
||||
category: 'Context Efficiency',
|
||||
points: 2,
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: '.mcp.json',
|
||||
description: 'The project declares local MCP or Claude settings',
|
||||
pass: fileExists(rootDir, '.mcp.json') || fileExists(rootDir, '.claude/settings.json') || fileExists(rootDir, '.claude/settings.local.json'),
|
||||
fix: 'Add .mcp.json or .claude/settings.json so project-local tool configuration is explicit.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-test-suite',
|
||||
category: 'Quality Gates',
|
||||
points: 4,
|
||||
scopes: ['repo'],
|
||||
path: 'tests/',
|
||||
description: 'The project has an automated test entrypoint',
|
||||
pass: typeof packageJson?.scripts?.test === 'string' || countFiles(rootDir, 'tests', '.test.js') > 0 || hasFileWithExtension(rootDir, '.', ['.spec.js', '.spec.ts', '.test.ts']),
|
||||
fix: 'Add a test script or checked-in tests so harness recommendations can be verified automatically.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-ci-workflow',
|
||||
category: 'Quality Gates',
|
||||
points: 3,
|
||||
scopes: ['repo'],
|
||||
path: '.github/workflows/',
|
||||
description: 'The project has CI workflows checked in',
|
||||
pass: hasFileWithExtension(rootDir, '.github/workflows', ['.yml', '.yaml']),
|
||||
fix: 'Add at least one CI workflow so harness and test checks run outside local development.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-memory-notes',
|
||||
category: 'Memory Persistence',
|
||||
points: 2,
|
||||
scopes: ['repo'],
|
||||
path: '.claude/memory.md',
|
||||
description: 'Project memory or durable notes are checked in',
|
||||
pass: fileExists(rootDir, '.claude/memory.md') || countFiles(rootDir, 'docs/adr', '.md') > 0,
|
||||
fix: 'Add durable project memory such as .claude/memory.md or ADRs under docs/adr/.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-eval-coverage',
|
||||
category: 'Eval Coverage',
|
||||
points: 2,
|
||||
scopes: ['repo'],
|
||||
path: 'evals/',
|
||||
description: 'The project has evals or multiple automated tests',
|
||||
pass: countFiles(rootDir, 'evals', null) > 0 || countFiles(rootDir, 'tests', '.test.js') >= 3,
|
||||
fix: 'Add eval fixtures or at least a few focused automated tests for critical flows.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-security-policy',
|
||||
category: 'Security Guardrails',
|
||||
points: 2,
|
||||
scopes: ['repo'],
|
||||
path: 'SECURITY.md',
|
||||
description: 'The project exposes a security policy or automated dependency scanning',
|
||||
pass: fileExists(rootDir, 'SECURITY.md') || fileExists(rootDir, '.github/dependabot.yml') || fileExists(rootDir, '.github/codeql.yml'),
|
||||
fix: 'Add SECURITY.md or dependency/code scanning configuration to document the project security posture.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-secret-hygiene',
|
||||
category: 'Security Guardrails',
|
||||
points: 2,
|
||||
scopes: ['repo'],
|
||||
path: '.gitignore',
|
||||
description: 'The project ignores common secret env files',
|
||||
pass: gitignore.includes('.env'),
|
||||
fix: 'Ignore .env-style files in .gitignore so secrets do not land in the repo.',
|
||||
},
|
||||
{
|
||||
id: 'consumer-hook-guardrails',
|
||||
category: 'Security Guardrails',
|
||||
points: 2,
|
||||
scopes: ['repo', 'hooks'],
|
||||
path: '.claude/settings.json',
|
||||
description: 'Project-local hook settings reference tool/prompt guardrails',
|
||||
pass: projectHooks.includes('PreToolUse') || projectHooks.includes('beforeSubmitPrompt') || fileExists(rootDir, '.claude/hooks.json'),
|
||||
fix: 'Add project-local hook settings or hook definitions for prompt/tool guardrails.',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
function summarizeCategoryScores(checks) {
|
||||
const scores = {};
|
||||
for (const category of CATEGORIES) {
|
||||
@@ -407,8 +612,11 @@ function summarizeCategoryScores(checks) {
|
||||
return scores;
|
||||
}
|
||||
|
||||
function buildReport(scope) {
|
||||
const checks = getChecks().filter(check => check.scopes.includes(scope));
|
||||
function buildReport(scope, options = {}) {
|
||||
const rootDir = path.resolve(options.rootDir || process.cwd());
|
||||
const targetMode = options.targetMode || detectTargetMode(rootDir);
|
||||
const checks = (targetMode === 'repo' ? getRepoChecks(rootDir) : getConsumerChecks(rootDir))
|
||||
.filter(check => check.scopes.includes(scope));
|
||||
const categoryScores = summarizeCategoryScores(checks);
|
||||
const maxScore = checks.reduce((sum, check) => sum + check.points, 0);
|
||||
const overallScore = checks
|
||||
@@ -428,8 +636,10 @@ function buildReport(scope) {
|
||||
|
||||
return {
|
||||
scope,
|
||||
root_dir: rootDir,
|
||||
target_mode: targetMode,
|
||||
deterministic: true,
|
||||
rubric_version: '2026-03-16',
|
||||
rubric_version: '2026-03-30',
|
||||
overall_score: overallScore,
|
||||
max_score: maxScore,
|
||||
categories: categoryScores,
|
||||
@@ -446,7 +656,8 @@ function buildReport(scope) {
|
||||
}
|
||||
|
||||
function printText(report) {
|
||||
console.log(`Harness Audit (${report.scope}): ${report.overall_score}/${report.max_score}`);
|
||||
console.log(`Harness Audit (${report.scope}, ${report.target_mode}): ${report.overall_score}/${report.max_score}`);
|
||||
console.log(`Root: ${report.root_dir}`);
|
||||
console.log('');
|
||||
|
||||
for (const category of CATEGORIES) {
|
||||
@@ -474,8 +685,10 @@ function printText(report) {
|
||||
function showHelp(exitCode = 0) {
|
||||
console.log(`
|
||||
Usage: node scripts/harness-audit.js [scope] [--scope <repo|hooks|skills|commands|agents>] [--format <text|json>]
|
||||
[--root <path>]
|
||||
|
||||
Deterministic harness audit based on explicit file/rule checks.
|
||||
Audits the current working directory by default and auto-detects ECC repo mode vs consumer-project mode.
|
||||
`);
|
||||
process.exit(exitCode);
|
||||
}
|
||||
@@ -489,7 +702,7 @@ function main() {
|
||||
return;
|
||||
}
|
||||
|
||||
const report = buildReport(args.scope);
|
||||
const report = buildReport(args.scope, { rootDir: args.root });
|
||||
|
||||
if (args.format === 'json') {
|
||||
console.log(JSON.stringify(report, null, 2));
|
||||
|
||||
@@ -3,11 +3,9 @@
|
||||
* Desktop Notification Hook (Stop)
|
||||
*
|
||||
* Sends a native desktop notification with the task summary when Claude
|
||||
* finishes responding. Supports:
|
||||
* - macOS: osascript (native)
|
||||
* - WSL: PowerShell 7 or Windows PowerShell + BurntToast module
|
||||
*
|
||||
* On WSL, if BurntToast is not installed, logs a tip for installation.
|
||||
* finishes responding. Currently supports macOS (osascript); other
|
||||
* platforms exit silently. Windows (PowerShell) and Linux (notify-send)
|
||||
* support is planned.
|
||||
*
|
||||
* Hook ID : stop:desktop-notify
|
||||
* Profiles: standard, strict
|
||||
@@ -21,64 +19,6 @@ const { isMacOS, log } = require('../lib/utils');
|
||||
const TITLE = 'Claude Code';
|
||||
const MAX_BODY_LENGTH = 100;
|
||||
|
||||
/**
|
||||
* Memoized WSL detection at module load (avoids repeated /proc/version reads).
|
||||
*/
|
||||
let isWSL = false;
|
||||
if (process.platform === 'linux') {
|
||||
try {
|
||||
isWSL = require('fs').readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft');
|
||||
} catch {
|
||||
isWSL = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find available PowerShell executable on WSL.
|
||||
* Returns first accessible path, or null if none found.
|
||||
*/
|
||||
function findPowerShell() {
|
||||
if (!isWSL) return null;
|
||||
|
||||
const candidates = [
|
||||
'pwsh.exe', // WSL interop resolves from Windows PATH
|
||||
'powershell.exe', // WSL interop for Windows PowerShell
|
||||
'/mnt/c/Program Files/PowerShell/7/pwsh.exe', // PowerShell 7 (default install)
|
||||
'/mnt/c/Windows/System32/WindowsPowerShell/v1.0/powershell.exe', // Windows PowerShell
|
||||
];
|
||||
|
||||
for (const path of candidates) {
|
||||
try {
|
||||
const result = spawnSync(path, ['-Command', 'exit 0'],
|
||||
{ stdio: ['ignore', 'pipe', 'ignore'], timeout: 1000 });
|
||||
if (result.status === 0) {
|
||||
return path;
|
||||
}
|
||||
} catch {
|
||||
// continue
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a Windows Toast notification via PowerShell BurntToast.
|
||||
* Returns true on success, false on failure.
|
||||
*/
|
||||
function notifyWindows(pwshPath, title, body) {
|
||||
const safeBody = body.replace(/'/g, "''");
|
||||
const safeTitle = title.replace(/'/g, "''");
|
||||
const command = `Import-Module BurntToast; New-BurntToastNotification -Text '${safeTitle}', '${safeBody}'`;
|
||||
const result = spawnSync(pwshPath, ['-Command', command],
|
||||
{ stdio: ['ignore', 'pipe', 'pipe'], timeout: 5000 });
|
||||
if (result.error || result.status !== 0) {
|
||||
const stderr = typeof result.stderr?.toString === 'function' ? result.stderr.toString().trim() : '';
|
||||
log(`[DesktopNotify] BurntToast failed (exit ${result.status}): ${result.error ? result.error.message : stderr}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a short summary from the last assistant message.
|
||||
* Takes the first non-empty line and truncates to MAX_BODY_LENGTH chars.
|
||||
@@ -113,28 +53,20 @@ function notifyMacOS(title, body) {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: future platform support
|
||||
// function notifyWindows(title, body) { ... }
|
||||
// function notifyLinux(title, body) { ... }
|
||||
|
||||
/**
|
||||
* Fast-path entry point for run-with-flags.js (avoids extra process spawn).
|
||||
*/
|
||||
function run(raw) {
|
||||
try {
|
||||
if (!isMacOS) return raw;
|
||||
|
||||
const input = raw.trim() ? JSON.parse(raw) : {};
|
||||
const summary = extractSummary(input.last_assistant_message);
|
||||
|
||||
if (isMacOS) {
|
||||
notifyMacOS(TITLE, summary);
|
||||
} else if (isWSL) {
|
||||
const ps = findPowerShell();
|
||||
if (ps && notifyWindows(ps, TITLE, summary)) {
|
||||
// notification sent successfully
|
||||
} else if (ps) {
|
||||
// PowerShell found but BurntToast not available
|
||||
log('[DesktopNotify] Tip: Install BurntToast module to enable notifications');
|
||||
} else {
|
||||
// No PowerShell found
|
||||
log('[DesktopNotify] Tip: Install BurntToast module in PowerShell for notifications');
|
||||
}
|
||||
}
|
||||
notifyMacOS(TITLE, summary);
|
||||
} catch (err) {
|
||||
log(`[DesktopNotify] Error: ${err.message}`);
|
||||
}
|
||||
|
||||
@@ -460,16 +460,28 @@ fi
|
||||
|
||||
log "Installing global git safety hooks"
|
||||
if [[ "$MODE" == "dry-run" ]]; then
|
||||
"$HOOKS_INSTALLER" --dry-run
|
||||
HOME="$HOME" \
|
||||
CODEX_HOME="$CODEX_HOME" \
|
||||
AGENTS_HOME="${AGENTS_HOME:-$HOME/.agents}" \
|
||||
ECC_GLOBAL_HOOKS_DIR="${ECC_GLOBAL_HOOKS_DIR:-$CODEX_HOME/git-hooks}" \
|
||||
"$HOOKS_INSTALLER" --dry-run
|
||||
else
|
||||
"$HOOKS_INSTALLER"
|
||||
HOME="$HOME" \
|
||||
CODEX_HOME="$CODEX_HOME" \
|
||||
AGENTS_HOME="${AGENTS_HOME:-$HOME/.agents}" \
|
||||
ECC_GLOBAL_HOOKS_DIR="${ECC_GLOBAL_HOOKS_DIR:-$CODEX_HOME/git-hooks}" \
|
||||
"$HOOKS_INSTALLER"
|
||||
fi
|
||||
|
||||
log "Running global regression sanity check"
|
||||
if [[ "$MODE" == "dry-run" ]]; then
|
||||
printf '[dry-run] %s\n' "$SANITY_CHECKER"
|
||||
else
|
||||
"$SANITY_CHECKER"
|
||||
HOME="$HOME" \
|
||||
CODEX_HOME="$CODEX_HOME" \
|
||||
AGENTS_HOME="${AGENTS_HOME:-$HOME/.agents}" \
|
||||
ECC_GLOBAL_HOOKS_DIR="${ECC_GLOBAL_HOOKS_DIR:-$CODEX_HOME/git-hooks}" \
|
||||
"$SANITY_CHECKER"
|
||||
fi
|
||||
|
||||
log "Sync complete"
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
/**
|
||||
* Tests for scripts/hooks/desktop-notify.js
|
||||
*/
|
||||
|
||||
const assert = require('assert');
|
||||
const fs = require('fs');
|
||||
const Module = require('module');
|
||||
const path = require('path');
|
||||
|
||||
const modulePath = path.join(__dirname, '..', '..', 'scripts', 'hooks', 'desktop-notify.js');
|
||||
const moduleSource = fs.readFileSync(modulePath, 'utf8');
|
||||
|
||||
function test(name, fn) {
|
||||
try {
|
||||
fn();
|
||||
console.log(` ✓ ${name}`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.log(` ✗ ${name}`);
|
||||
console.log(` Error: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function loadDesktopNotify({ procVersion = 'Linux version microsoft', spawnImpl, isMacOS = false }) {
|
||||
const logs = [];
|
||||
const mod = new Module(modulePath, module);
|
||||
mod.filename = modulePath;
|
||||
mod.paths = Module._nodeModulePaths(path.dirname(modulePath));
|
||||
|
||||
const originalRequire = mod.require.bind(mod);
|
||||
mod.require = request => {
|
||||
if (request === 'child_process') {
|
||||
return { spawnSync: spawnImpl };
|
||||
}
|
||||
if (request === '../lib/utils') {
|
||||
return {
|
||||
isMacOS,
|
||||
log: message => logs.push(message),
|
||||
};
|
||||
}
|
||||
if (request === 'fs') {
|
||||
return {
|
||||
...fs,
|
||||
readFileSync(target, encoding) {
|
||||
if (target === '/proc/version') {
|
||||
return procVersion;
|
||||
}
|
||||
return fs.readFileSync(target, encoding);
|
||||
}
|
||||
};
|
||||
}
|
||||
return originalRequire(request);
|
||||
};
|
||||
|
||||
const platformDescriptor = Object.getOwnPropertyDescriptor(process, 'platform');
|
||||
Object.defineProperty(process, 'platform', {
|
||||
configurable: true,
|
||||
value: 'linux',
|
||||
});
|
||||
|
||||
try {
|
||||
mod._compile(moduleSource, modulePath);
|
||||
} finally {
|
||||
Object.defineProperty(process, 'platform', platformDescriptor);
|
||||
}
|
||||
|
||||
return { run: mod.exports.run, logs };
|
||||
}
|
||||
|
||||
let passed = 0;
|
||||
let failed = 0;
|
||||
|
||||
if (
|
||||
test('successful WSL toast does not log BurntToast install guidance', () => {
|
||||
const calls = [];
|
||||
const { run, logs } = loadDesktopNotify({
|
||||
spawnImpl(command, args) {
|
||||
calls.push({ command, args });
|
||||
if (calls.length === 1) {
|
||||
return { status: 0, stderr: Buffer.from('') };
|
||||
}
|
||||
return { status: 0, stderr: Buffer.from('') };
|
||||
}
|
||||
});
|
||||
|
||||
const payload = JSON.stringify({ last_assistant_message: 'Build completed successfully' });
|
||||
assert.strictEqual(run(payload), payload);
|
||||
assert.strictEqual(calls.length, 2, 'Expected PowerShell probe and notification send');
|
||||
assert.strictEqual(logs.length, 0, `Expected no warnings, got: ${logs.join('\n')}`);
|
||||
})
|
||||
)
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
if (
|
||||
test('failed WSL toast logs failure and install guidance once', () => {
|
||||
const { run, logs } = loadDesktopNotify({
|
||||
spawnImpl(command, args) {
|
||||
if (args[1] === 'exit 0') {
|
||||
return { status: 0, stderr: Buffer.from('') };
|
||||
}
|
||||
return { status: 1, stderr: Buffer.from('module missing') };
|
||||
}
|
||||
});
|
||||
|
||||
const payload = JSON.stringify({ last_assistant_message: 'Done' });
|
||||
assert.strictEqual(run(payload), payload);
|
||||
assert.ok(logs.some(message => message.includes('BurntToast failed')), 'Expected BurntToast failure log');
|
||||
assert.ok(logs.some(message => message.includes('Install BurntToast module')), 'Expected install tip');
|
||||
})
|
||||
)
|
||||
passed++;
|
||||
else failed++;
|
||||
|
||||
console.log(`\nPassed: ${passed}`);
|
||||
console.log(`Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
@@ -1221,9 +1221,14 @@ async function runTests() {
|
||||
fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
|
||||
fs.writeFileSync(filePath, 'export const value = 1;\n');
|
||||
createCommandShim(binDir, 'npx', logFile);
|
||||
const isolatedHome = path.join(testDir, 'isolated-home');
|
||||
fs.mkdirSync(path.join(isolatedHome, '.claude'), { recursive: true });
|
||||
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson, withPrependedPath(binDir));
|
||||
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson, withPrependedPath(binDir, {
|
||||
HOME: isolatedHome,
|
||||
USERPROFILE: isolatedHome
|
||||
}));
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for config-only repo');
|
||||
const logEntries = readCommandLog(logFile);
|
||||
|
||||
@@ -37,6 +37,33 @@ function cleanupTestDir(testDir) {
|
||||
fs.rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
function withIsolatedHome(fn) {
|
||||
const isolatedHome = fs.mkdtempSync(path.join(os.tmpdir(), 'pm-home-'));
|
||||
const originalHome = process.env.HOME;
|
||||
const originalUserProfile = process.env.USERPROFILE;
|
||||
|
||||
process.env.HOME = isolatedHome;
|
||||
process.env.USERPROFILE = isolatedHome;
|
||||
|
||||
try {
|
||||
return fn(isolatedHome);
|
||||
} finally {
|
||||
if (originalHome !== undefined) {
|
||||
process.env.HOME = originalHome;
|
||||
} else {
|
||||
delete process.env.HOME;
|
||||
}
|
||||
|
||||
if (originalUserProfile !== undefined) {
|
||||
process.env.USERPROFILE = originalUserProfile;
|
||||
} else {
|
||||
delete process.env.USERPROFILE;
|
||||
}
|
||||
|
||||
fs.rmSync(isolatedHome, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
// Test suite
|
||||
function runTests() {
|
||||
console.log('\n=== Testing package-manager.js ===\n');
|
||||
@@ -711,9 +738,11 @@ function runTests() {
|
||||
const originalEnv = process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
try {
|
||||
delete process.env.CLAUDE_PACKAGE_MANAGER;
|
||||
const result = pm.getPackageManager({ projectDir: testDir });
|
||||
assert.strictEqual(result.name, 'npm', 'Should default to npm');
|
||||
assert.strictEqual(result.source, 'default');
|
||||
withIsolatedHome(() => {
|
||||
const result = pm.getPackageManager({ projectDir: testDir });
|
||||
assert.strictEqual(result.name, 'npm', 'Should default to npm');
|
||||
assert.strictEqual(result.source, 'default');
|
||||
});
|
||||
} finally {
|
||||
if (originalEnv !== undefined) {
|
||||
process.env.CLAUDE_PACKAGE_MANAGER = originalEnv;
|
||||
|
||||
@@ -58,6 +58,33 @@ function cleanupTmpDirs() {
|
||||
tmpDirs.length = 0;
|
||||
}
|
||||
|
||||
function withIsolatedHome(fn) {
|
||||
const isolatedHome = fs.mkdtempSync(path.join(os.tmpdir(), 'resolve-fmt-home-'));
|
||||
const originalHome = process.env.HOME;
|
||||
const originalUserProfile = process.env.USERPROFILE;
|
||||
|
||||
process.env.HOME = isolatedHome;
|
||||
process.env.USERPROFILE = isolatedHome;
|
||||
|
||||
try {
|
||||
return fn(isolatedHome);
|
||||
} finally {
|
||||
if (originalHome !== undefined) {
|
||||
process.env.HOME = originalHome;
|
||||
} else {
|
||||
delete process.env.HOME;
|
||||
}
|
||||
|
||||
if (originalUserProfile !== undefined) {
|
||||
process.env.USERPROFILE = originalUserProfile;
|
||||
} else {
|
||||
delete process.env.USERPROFILE;
|
||||
}
|
||||
|
||||
fs.rmSync(isolatedHome, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
function runTests() {
|
||||
console.log('\n=== Testing resolve-formatter.js ===\n');
|
||||
|
||||
@@ -168,10 +195,12 @@ function runTests() {
|
||||
|
||||
run('resolveFormatterBin: falls back to npx for biome', () => {
|
||||
const root = makeTmpDir();
|
||||
const result = resolveFormatterBin(root, 'biome');
|
||||
const expectedBin = process.platform === 'win32' ? 'npx.cmd' : 'npx';
|
||||
assert.strictEqual(result.bin, expectedBin);
|
||||
assert.deepStrictEqual(result.prefix, ['@biomejs/biome']);
|
||||
withIsolatedHome(() => {
|
||||
const result = resolveFormatterBin(root, 'biome');
|
||||
const expectedBin = process.platform === 'win32' ? 'npx.cmd' : 'npx';
|
||||
assert.strictEqual(result.bin, expectedBin);
|
||||
assert.deepStrictEqual(result.prefix, ['@biomejs/biome']);
|
||||
});
|
||||
});
|
||||
|
||||
run('resolveFormatterBin: uses local prettier binary when available', () => {
|
||||
@@ -188,10 +217,12 @@ function runTests() {
|
||||
|
||||
run('resolveFormatterBin: falls back to npx for prettier', () => {
|
||||
const root = makeTmpDir();
|
||||
const result = resolveFormatterBin(root, 'prettier');
|
||||
const expectedBin = process.platform === 'win32' ? 'npx.cmd' : 'npx';
|
||||
assert.strictEqual(result.bin, expectedBin);
|
||||
assert.deepStrictEqual(result.prefix, ['prettier']);
|
||||
withIsolatedHome(() => {
|
||||
const result = resolveFormatterBin(root, 'prettier');
|
||||
const expectedBin = process.platform === 'win32' ? 'npx.cmd' : 'npx';
|
||||
assert.strictEqual(result.bin, expectedBin);
|
||||
assert.deepStrictEqual(result.prefix, ['prettier']);
|
||||
});
|
||||
});
|
||||
|
||||
run('resolveFormatterBin: returns null for unknown formatter', () => {
|
||||
|
||||
@@ -45,6 +45,21 @@ function runBash(scriptPath, args = [], env = {}, cwd = repoRoot) {
|
||||
});
|
||||
}
|
||||
|
||||
function makeHermeticCodexEnv(homeDir, codexDir, extraEnv = {}) {
|
||||
const agentsHome = path.join(homeDir, '.agents');
|
||||
const hooksDir = path.join(codexDir, 'git-hooks');
|
||||
return {
|
||||
HOME: homeDir,
|
||||
USERPROFILE: homeDir,
|
||||
CODEX_HOME: codexDir,
|
||||
AGENTS_HOME: agentsHome,
|
||||
ECC_GLOBAL_HOOKS_DIR: hooksDir,
|
||||
CLAUDE_PACKAGE_MANAGER: 'npm',
|
||||
CLAUDE_CODE_PACKAGE_MANAGER: 'npm',
|
||||
...extraEnv,
|
||||
};
|
||||
}
|
||||
|
||||
let passed = 0;
|
||||
let failed = 0;
|
||||
|
||||
@@ -116,10 +131,12 @@ if (
|
||||
fs.mkdirSync(codexDir, { recursive: true });
|
||||
fs.writeFileSync(configPath, config);
|
||||
|
||||
const syncResult = runBash(syncScript, [], { HOME: homeDir, CODEX_HOME: codexDir });
|
||||
assert.strictEqual(syncResult.status, 0, syncResult.stderr || syncResult.stdout);
|
||||
const syncResult = runBash(syncScript, ['--update-mcp'], makeHermeticCodexEnv(homeDir, codexDir));
|
||||
assert.strictEqual(syncResult.status, 0, `${syncResult.stdout}\n${syncResult.stderr}`);
|
||||
const syncedConfig = fs.readFileSync(configPath, 'utf8');
|
||||
assert.match(syncedConfig, /^\[mcp_servers\.context7\]$/m);
|
||||
|
||||
const checkResult = runBash(checkScript, [], { HOME: homeDir, CODEX_HOME: codexDir });
|
||||
const checkResult = runBash(checkScript, [], makeHermeticCodexEnv(homeDir, codexDir));
|
||||
assert.strictEqual(checkResult.status, 0, checkResult.stderr || checkResult.stdout);
|
||||
assert.match(checkResult.stdout, /MCP section \[mcp_servers\.context7\] or \[mcp_servers\.context7-mcp\] exists/);
|
||||
} finally {
|
||||
|
||||
@@ -3,14 +3,28 @@
|
||||
*/
|
||||
|
||||
const assert = require('assert');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const { execFileSync } = require('child_process');
|
||||
|
||||
const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'harness-audit.js');
|
||||
|
||||
function run(args = []) {
|
||||
function createTempDir(prefix) {
|
||||
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
function cleanup(dirPath) {
|
||||
fs.rmSync(dirPath, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
function run(args = [], options = {}) {
|
||||
const stdout = execFileSync('node', [SCRIPT, ...args], {
|
||||
cwd: path.join(__dirname, '..', '..'),
|
||||
cwd: options.cwd || path.join(__dirname, '..', '..'),
|
||||
env: {
|
||||
...process.env,
|
||||
HOME: options.homeDir || process.env.HOME,
|
||||
},
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: 10000,
|
||||
@@ -48,7 +62,8 @@ function runTests() {
|
||||
const parsed = JSON.parse(run(['repo', '--format', 'json']));
|
||||
|
||||
assert.strictEqual(parsed.deterministic, true);
|
||||
assert.strictEqual(parsed.rubric_version, '2026-03-16');
|
||||
assert.strictEqual(parsed.rubric_version, '2026-03-30');
|
||||
assert.strictEqual(parsed.target_mode, 'repo');
|
||||
assert.ok(parsed.overall_score >= 0);
|
||||
assert.ok(parsed.max_score > 0);
|
||||
assert.ok(parsed.overall_score <= parsed.max_score);
|
||||
@@ -75,10 +90,48 @@ function runTests() {
|
||||
|
||||
if (test('text format includes summary header', () => {
|
||||
const output = run(['repo']);
|
||||
assert.ok(output.includes('Harness Audit (repo):'));
|
||||
assert.ok(output.includes('Harness Audit (repo, repo):'));
|
||||
assert.ok(output.includes('Top 3 Actions:') || output.includes('Checks:'));
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (test('audits consumer projects from cwd instead of the ECC repo root', () => {
|
||||
const homeDir = createTempDir('harness-audit-home-');
|
||||
const projectRoot = createTempDir('harness-audit-project-');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(homeDir, '.claude', 'plugins', 'everything-claude-code', '.claude-plugin', 'plugin.json'),
|
||||
JSON.stringify({ name: 'everything-claude-code' }, null, 2)
|
||||
);
|
||||
|
||||
fs.mkdirSync(path.join(projectRoot, '.github', 'workflows'), { recursive: true });
|
||||
fs.mkdirSync(path.join(projectRoot, 'tests'), { recursive: true });
|
||||
fs.mkdirSync(path.join(projectRoot, '.claude'), { recursive: true });
|
||||
fs.writeFileSync(path.join(projectRoot, 'AGENTS.md'), '# Project instructions\n');
|
||||
fs.writeFileSync(path.join(projectRoot, '.mcp.json'), JSON.stringify({ mcpServers: {} }, null, 2));
|
||||
fs.writeFileSync(path.join(projectRoot, '.gitignore'), 'node_modules\n.env\n');
|
||||
fs.writeFileSync(path.join(projectRoot, '.github', 'workflows', 'ci.yml'), 'name: ci\n');
|
||||
fs.writeFileSync(path.join(projectRoot, 'tests', 'app.test.js'), 'test placeholder\n');
|
||||
fs.writeFileSync(path.join(projectRoot, '.claude', 'settings.json'), JSON.stringify({ hooks: ['PreToolUse'] }, null, 2));
|
||||
fs.writeFileSync(
|
||||
path.join(projectRoot, 'package.json'),
|
||||
JSON.stringify({ name: 'consumer-project', scripts: { test: 'node tests/app.test.js' } }, null, 2)
|
||||
);
|
||||
|
||||
const parsed = JSON.parse(run(['repo', '--format', 'json'], { cwd: projectRoot, homeDir }));
|
||||
|
||||
assert.strictEqual(parsed.target_mode, 'consumer');
|
||||
assert.strictEqual(parsed.root_dir, fs.realpathSync(projectRoot));
|
||||
assert.ok(parsed.overall_score > 0, 'Consumer project should receive non-zero score when harness signals exist');
|
||||
assert.ok(parsed.checks.some(check => check.id === 'consumer-plugin-install' && check.pass));
|
||||
assert.ok(parsed.checks.every(check => !check.path.startsWith('agents/') && !check.path.startsWith('skills/')));
|
||||
} finally {
|
||||
cleanup(homeDir);
|
||||
cleanup(projectRoot);
|
||||
}
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
|
||||
process.exit(failed > 0 ? 1 : 0);
|
||||
}
|
||||
|
||||
179
tests/scripts/trae-install.test.js
Normal file
179
tests/scripts/trae-install.test.js
Normal file
@@ -0,0 +1,179 @@
|
||||
/**
|
||||
* Tests for .trae/install.sh and .trae/uninstall.sh
|
||||
*/
|
||||
|
||||
const assert = require('assert');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const { execFileSync } = require('child_process');
|
||||
|
||||
const REPO_ROOT = path.join(__dirname, '..', '..');
|
||||
const INSTALL_SCRIPT = path.join(REPO_ROOT, '.trae', 'install.sh');
|
||||
const UNINSTALL_SCRIPT = path.join(REPO_ROOT, '.trae', 'uninstall.sh');
|
||||
|
||||
function createTempDir(prefix) {
|
||||
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
function cleanup(dirPath) {
|
||||
fs.rmSync(dirPath, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
function runInstall(options = {}) {
|
||||
return execFileSync('bash', [INSTALL_SCRIPT, ...(options.args || [])], {
|
||||
cwd: options.cwd,
|
||||
env: {
|
||||
...process.env,
|
||||
HOME: options.homeDir || process.env.HOME,
|
||||
},
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: 20000,
|
||||
});
|
||||
}
|
||||
|
||||
function runUninstall(options = {}) {
|
||||
return execFileSync('bash', [UNINSTALL_SCRIPT, ...(options.args || [])], {
|
||||
cwd: options.cwd,
|
||||
env: {
|
||||
...process.env,
|
||||
HOME: options.homeDir || process.env.HOME,
|
||||
},
|
||||
encoding: 'utf8',
|
||||
input: options.input || 'y\n',
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: 20000,
|
||||
});
|
||||
}
|
||||
|
||||
function readManifestLines(projectRoot) {
|
||||
const manifestPath = path.join(projectRoot, '.trae', '.ecc-manifest');
|
||||
return fs.readFileSync(manifestPath, 'utf8')
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
function test(name, fn) {
|
||||
try {
|
||||
fn();
|
||||
console.log(` \u2713 ${name}`);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.log(` \u2717 ${name}`);
|
||||
console.log(` Error: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Entry point for the Trae install/uninstall test suite.
 *
 * Each case provisions throwaway HOME and project directories, drives the
 * real shell scripts via runInstall/runUninstall, and inspects the
 * `.trae/.ecc-manifest` bookkeeping file. Exits the process non-zero when
 * any case fails.
 */
function runTests() {
  console.log('\n=== Testing Trae install/uninstall scripts ===\n');

  let passed = 0;
  let failed = 0;

  // The install/uninstall scripts are bash-only; report a clean skip on Windows.
  if (process.platform === 'win32') {
    console.log('  - skipped on Windows; Trae shell scripts are Unix-only');
    console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
    process.exit(0);
  }

  // A file that existed before install must not be recorded in the manifest,
  // and must survive uninstall with its content untouched.
  if (test('does not claim ownership of preexisting target files', () => {
    const homeDir = createTempDir('trae-home-');
    const projectRoot = createTempDir('trae-project-');

    try {
      const preexistingCommandPath = path.join(projectRoot, '.trae', 'commands', 'e2e.md');
      fs.mkdirSync(path.dirname(preexistingCommandPath), { recursive: true });
      fs.writeFileSync(preexistingCommandPath, 'user owned command\n');

      runInstall({ cwd: projectRoot, homeDir });

      const manifestLines = readManifestLines(projectRoot);
      assert.ok(!manifestLines.includes('commands/e2e.md'), 'Preexisting file should not be recorded in manifest');

      runUninstall({ cwd: projectRoot, homeDir });

      // Uninstall must leave the user-owned file byte-identical.
      assert.strictEqual(fs.readFileSync(preexistingCommandPath, 'utf8'), 'user owned command\n');
    } finally {
      cleanup(homeDir);
      cleanup(projectRoot);
    }
  })) passed++; else failed++;

  // The manifest must cover nested skill files and the whole rules tree,
  // and those files must actually be materialized on disk.
  if (test('records nested skill files and the full rules tree in the manifest', () => {
    const homeDir = createTempDir('trae-home-');
    const projectRoot = createTempDir('trae-project-');

    try {
      runInstall({ cwd: projectRoot, homeDir });

      const manifestLines = readManifestLines(projectRoot);
      assert.ok(manifestLines.includes('skills/skill-comply/pyproject.toml'));
      assert.ok(manifestLines.includes('rules/common/code-review.md'));
      assert.ok(manifestLines.includes('rules/python/coding-style.md'));
      assert.ok(manifestLines.includes('rules/zh/README.md'));

      assert.ok(fs.existsSync(path.join(projectRoot, '.trae', 'skills', 'skill-comply', 'pyproject.toml')));
      assert.ok(fs.existsSync(path.join(projectRoot, '.trae', 'rules', 'python', 'coding-style.md')));
      assert.ok(fs.existsSync(path.join(projectRoot, '.trae', 'rules', 'zh', 'README.md')));
    } finally {
      cleanup(homeDir);
      cleanup(projectRoot);
    }
  })) passed++; else failed++;

  // Reinstalling must restore deleted managed files and keep exactly one
  // manifest entry per file (no duplicate bookkeeping).
  if (test('reinstall preserves managed manifest coverage without duplicate entries', () => {
    const homeDir = createTempDir('trae-home-');
    const projectRoot = createTempDir('trae-project-');

    try {
      runInstall({ cwd: projectRoot, homeDir });

      const managedCommandPath = path.join(projectRoot, '.trae', 'commands', 'e2e.md');
      fs.rmSync(managedCommandPath);

      runInstall({ cwd: projectRoot, homeDir });

      const manifestLines = readManifestLines(projectRoot);
      const entryCount = manifestLines.filter((line) => line === 'commands/e2e.md').length;

      assert.strictEqual(entryCount, 1, 'Managed file should appear once in manifest after reinstall');
      assert.ok(fs.existsSync(managedCommandPath), 'Managed file should be recreated on reinstall');
    } finally {
      cleanup(homeDir);
      cleanup(projectRoot);
    }
  })) passed++; else failed++;

  // Security: a manifest entry that resolves outside the .trae root through a
  // symlink must be skipped by uninstall, leaving the external file intact.
  if (test('uninstall rejects manifest entries that escape the Trae root via symlink traversal', () => {
    const homeDir = createTempDir('trae-home-');
    const projectRoot = createTempDir('trae-project-');
    const externalRoot = createTempDir('trae-outside-');

    try {
      const traeRoot = path.join(projectRoot, '.trae');
      fs.mkdirSync(traeRoot, { recursive: true });

      const outsideSecretPath = path.join(externalRoot, 'secret.txt');
      fs.writeFileSync(outsideSecretPath, 'do not remove\n');
      // escape-link points outside the project; a naive uninstall following it
      // would delete externalRoot/secret.txt.
      fs.symlinkSync(externalRoot, path.join(traeRoot, 'escape-link'));
      fs.writeFileSync(path.join(traeRoot, '.ecc-manifest'), 'escape-link/secret.txt\n.ecc-manifest\n');

      const stdout = runUninstall({ cwd: projectRoot, homeDir });

      assert.ok(stdout.includes('Skipped: escape-link/secret.txt (invalid manifest entry)'));
      assert.strictEqual(fs.readFileSync(outsideSecretPath, 'utf8'), 'do not remove\n');
    } finally {
      cleanup(homeDir);
      cleanup(projectRoot);
      cleanup(externalRoot);
    }
  })) passed++; else failed++;

  console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
  process.exit(failed > 0 ? 1 : 0);
}
|
||||
|
||||
// Kick off the suite immediately when this file is executed directly.
runTests();
|
||||
Reference in New Issue
Block a user