fix: harden hook portability and plugin docs

This commit is contained in:
Affaan Mustafa
2026-03-09 21:07:42 -07:00
committed by Affaan Mustafa
parent 0f416b0b9d
commit 440178d697
11 changed files with 490 additions and 62 deletions

View File

@@ -337,8 +337,10 @@ For PMX, prioritize these E2E tests:
## Related Agents ## Related Agents
This command invokes the `e2e-runner` agent located at: This command invokes the `e2e-runner` agent provided by ECC.
`~/.claude/agents/e2e-runner.md`
For manual installs, the source file lives at:
`agents/e2e-runner.md`
## Quick Commands ## Quick Commands

View File

@@ -109,5 +109,7 @@ After planning:
## Related Agents ## Related Agents
This command invokes the `planner` agent located at: This command invokes the `planner` agent provided by ECC.
`~/.claude/agents/planner.md`
For manual installs, the source file lives at:
`agents/planner.md`

View File

@@ -319,8 +319,10 @@ Never skip the RED phase. Never write code before tests.
## Related Agents ## Related Agents
This command invokes the `tdd-guide` agent located at: This command invokes the `tdd-guide` agent provided by ECC.
`~/.claude/agents/tdd-guide.md`
And can reference the `tdd-workflow` skill at: The related `tdd-workflow` skill is also bundled with ECC.
`~/.claude/skills/tdd-workflow/`
For manual installs, the source files live at:
- `agents/tdd-guide.md`
- `skills/tdd-workflow/SKILL.md`

View File

@@ -13,8 +13,31 @@
const { execFileSync } = require('child_process'); const { execFileSync } = require('child_process');
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const { getPackageManager } = require('../lib/package-manager');
const MAX_STDIN = 1024 * 1024; // 1MB limit const MAX_STDIN = 1024 * 1024; // 1MB limit
// Config filenames that identify a Biome-formatted project.
const BIOME_CONFIGS = ['biome.json', 'biome.jsonc'];
// Prettier config filenames checked when detecting the formatter; mirrors
// Prettier's own config-file search list (rc variants plus prettier.config.*).
const PRETTIER_CONFIGS = [
  '.prettierrc',
  '.prettierrc.json',
  '.prettierrc.json5',
  '.prettierrc.js',
  '.prettierrc.cjs',
  '.prettierrc.mjs',
  '.prettierrc.ts',
  '.prettierrc.cts',
  '.prettierrc.mts',
  '.prettierrc.yml',
  '.prettierrc.yaml',
  '.prettierrc.toml',
  'prettier.config.js',
  'prettier.config.cjs',
  'prettier.config.mjs',
  'prettier.config.ts',
  'prettier.config.cts',
  'prettier.config.mts',
];
// A directory containing any of these files is treated as the project root,
// so formatter detection works even in repos without a package.json.
const PROJECT_ROOT_MARKERS = ['package.json', ...BIOME_CONFIGS, ...PRETTIER_CONFIGS];
let data = ''; let data = '';
process.stdin.setEncoding('utf8'); process.stdin.setEncoding('utf8');
@@ -27,46 +50,66 @@ process.stdin.on('data', chunk => {
function findProjectRoot(startDir) { function findProjectRoot(startDir) {
let dir = startDir; let dir = startDir;
while (dir !== path.dirname(dir)) {
if (fs.existsSync(path.join(dir, 'package.json'))) return dir; while (true) {
dir = path.dirname(dir); if (PROJECT_ROOT_MARKERS.some(marker => fs.existsSync(path.join(dir, marker)))) {
return dir;
}
const parentDir = path.dirname(dir);
if (parentDir === dir) break;
dir = parentDir;
} }
return startDir; return startDir;
} }
function detectFormatter(projectRoot) { function detectFormatter(projectRoot) {
const biomeConfigs = ['biome.json', 'biome.jsonc']; for (const cfg of BIOME_CONFIGS) {
for (const cfg of biomeConfigs) {
if (fs.existsSync(path.join(projectRoot, cfg))) return 'biome'; if (fs.existsSync(path.join(projectRoot, cfg))) return 'biome';
} }
const prettierConfigs = [ for (const cfg of PRETTIER_CONFIGS) {
'.prettierrc',
'.prettierrc.json',
'.prettierrc.js',
'.prettierrc.cjs',
'.prettierrc.mjs',
'.prettierrc.yml',
'.prettierrc.yaml',
'.prettierrc.toml',
'prettier.config.js',
'prettier.config.cjs',
'prettier.config.mjs',
];
for (const cfg of prettierConfigs) {
if (fs.existsSync(path.join(projectRoot, cfg))) return 'prettier'; if (fs.existsSync(path.join(projectRoot, cfg))) return 'prettier';
} }
return null; return null;
} }
function getFormatterCommand(formatter, filePath) { function getRunnerBin(bin) {
const npxBin = process.platform === 'win32' ? 'npx.cmd' : 'npx'; if (process.platform !== 'win32') return bin;
if (bin === 'npx') return 'npx.cmd';
if (bin === 'pnpm') return 'pnpm.cmd';
if (bin === 'yarn') return 'yarn.cmd';
if (bin === 'bunx') return 'bunx.cmd';
return bin;
}
function getFormatterRunner(projectRoot) {
const pm = getPackageManager({ projectDir: projectRoot });
const execCmd = pm?.config?.execCmd || 'npx';
const [bin = 'npx', ...prefix] = execCmd.split(/\s+/).filter(Boolean);
return {
bin: getRunnerBin(bin),
prefix
};
}
function getFormatterCommand(formatter, filePath, projectRoot) {
const runner = getFormatterRunner(projectRoot);
if (formatter === 'biome') { if (formatter === 'biome') {
return { bin: npxBin, args: ['@biomejs/biome', 'format', '--write', filePath] }; return {
bin: runner.bin,
args: [...runner.prefix, '@biomejs/biome', 'format', '--write', filePath]
};
} }
if (formatter === 'prettier') { if (formatter === 'prettier') {
return { bin: npxBin, args: ['prettier', '--write', filePath] }; return {
bin: runner.bin,
args: [...runner.prefix, 'prettier', '--write', filePath]
};
} }
return null; return null;
} }
@@ -80,7 +123,7 @@ process.stdin.on('end', () => {
try { try {
const projectRoot = findProjectRoot(path.dirname(path.resolve(filePath))); const projectRoot = findProjectRoot(path.dirname(path.resolve(filePath)));
const formatter = detectFormatter(projectRoot); const formatter = detectFormatter(projectRoot);
const cmd = getFormatterCommand(formatter, filePath); const cmd = getFormatterCommand(formatter, filePath, projectRoot);
if (cmd) { if (cmd) {
execFileSync(cmd.bin, cmd.args, { execFileSync(cmd.bin, cmd.args, {

View File

@@ -4,6 +4,142 @@
const MAX_STDIN = 1024 * 1024; const MAX_STDIN = 1024 * 1024;
const { splitShellSegments } = require('../lib/shell-split'); const { splitShellSegments } = require('../lib/shell-split');
// Leading command words that can actually launch a dev server. Segments whose
// first real command word is NOT in this set (e.g. `gh`, `git`) merely mention
// dev commands in text and are not blocked.
const DEV_COMMAND_WORDS = new Set([
  'npm',
  'pnpm',
  'yarn',
  'bun',
  'npx',
  'bash',
  'sh',
  'zsh',
  'fish',
  'tmux'
]);
// Wrapper words that may precede the real command; the parser skips over them
// (and remembers the wrapper so its options can be handled).
const SKIPPABLE_PREFIX_WORDS = new Set(['env', 'command', 'builtin', 'exec', 'noglob', 'sudo']);
// Per-wrapper options that consume the FOLLOWING token as their value
// (e.g. `env -u NAME`, `sudo -u user`), so that value is not mistaken for the
// command word. Inline `--opt=value` forms are excluded by the caller.
const PREFIX_OPTION_VALUE_WORDS = {
  env: new Set(['-u', '-C', '-S', '--unset', '--chdir', '--split-string']),
  sudo: new Set([
    '-u',
    '-g',
    '-h',
    '-p',
    '-r',
    '-t',
    '-C',
    '--user',
    '--group',
    '--host',
    '--prompt',
    '--role',
    '--type',
    '--close-from'
  ])
};
// Read the next shell-like token from `input` starting at `startIndex`.
// Single and double quotes group characters; a backslash escapes the next
// character when unquoted or inside double quotes (single quotes are literal).
// Returns { token, end } where `end` is the index just past the token, or
// null when only whitespace remains.
function readToken(input, startIndex) {
  let i = startIndex;

  // Skip the whitespace separating tokens.
  while (i < input.length && /\s/.test(input[i])) i += 1;
  if (i >= input.length) return null;

  let value = '';
  let activeQuote = null;

  while (i < input.length) {
    const current = input[i];

    if (activeQuote !== null) {
      if (current === activeQuote) {
        // Closing quote: drop it and resume unquoted scanning.
        activeQuote = null;
        i += 1;
      } else if (current === '\\' && activeQuote === '"' && i + 1 < input.length) {
        // Backslash escape is honored only inside double quotes.
        value += input[i + 1];
        i += 2;
      } else {
        value += current;
        i += 1;
      }
      continue;
    }

    if (current === '"' || current === "'") {
      activeQuote = current;
      i += 1;
      continue;
    }
    if (/\s/.test(current)) break; // unquoted whitespace ends the token
    if (current === '\\' && i + 1 < input.length) {
      // Unquoted backslash escapes the next character (e.g. `a\ b`).
      value += input[i + 1];
      i += 2;
      continue;
    }
    value += current;
    i += 1;
  }

  return { token: value, end: i };
}
// Decide whether `optionToken` (an option of wrapper command `wrapper`)
// consumes the next token as its value, e.g. `env -u NAME` or `sudo -u user`.
// Options written inline as `--opt=value` carry their value and skip nothing.
function shouldSkipOptionValue(wrapper, optionToken) {
  if (!wrapper || !optionToken) return false;
  if (optionToken.includes('=')) return false;
  const valueTakingOptions = PREFIX_OPTION_VALUE_WORDS[wrapper];
  if (!valueTakingOptions) return false;
  return valueTakingOptions.has(optionToken);
}
// True when `token` looks like a command-line option ("-x", "--flag").
// A lone "-" (commonly meaning stdin) is not treated as an option.
function isOptionToken(token) {
  return token.length > 1 && token[0] === '-';
}
// Determine the first "real" command word in a shell segment, looking past
// VAR=value assignments, wrapper commands (env, sudo, exec, ...), their
// options, and the values those options consume. Returns null when the
// segment contains no command word.
function getLeadingCommandWord(segment) {
  const ENV_ASSIGNMENT = /^[A-Za-z_][A-Za-z0-9_]*=.*/;
  let cursor = 0;
  let wrapper = null;
  let consumeNextAsValue = false;

  while (cursor < segment.length) {
    const next = readToken(segment, cursor);
    if (next === null) return null;
    cursor = next.end;
    const word = next.token;

    if (!word) continue; // e.g. an empty quoted string
    if (consumeNextAsValue) {
      // Previous token was a wrapper option that takes a value (env -u NAME).
      consumeNextAsValue = false;
      continue;
    }
    if (word === '--') {
      // End-of-options marker: stop treating dash tokens as wrapper options.
      wrapper = null;
      continue;
    }
    if (ENV_ASSIGNMENT.test(word)) continue; // leading VAR=value assignment
    if (SKIPPABLE_PREFIX_WORDS.has(word)) {
      wrapper = word;
      continue;
    }
    if (wrapper && isOptionToken(word)) {
      consumeNextAsValue = shouldSkipOptionValue(wrapper, word);
      continue;
    }
    return word;
  }
  return null;
}
let raw = ''; let raw = '';
process.stdin.setEncoding('utf8'); process.stdin.setEncoding('utf8');
process.stdin.on('data', chunk => { process.stdin.on('data', chunk => {
@@ -23,7 +159,13 @@ process.stdin.on('end', () => {
const tmuxLauncher = /^\s*tmux\s+(new|new-session|new-window|split-window)\b/; const tmuxLauncher = /^\s*tmux\s+(new|new-session|new-window|split-window)\b/;
const devPattern = /\b(npm\s+run\s+dev|pnpm(?:\s+run)?\s+dev|yarn\s+dev|bun\s+run\s+dev)\b/; const devPattern = /\b(npm\s+run\s+dev|pnpm(?:\s+run)?\s+dev|yarn\s+dev|bun\s+run\s+dev)\b/;
const hasBlockedDev = segments.some(segment => devPattern.test(segment) && !tmuxLauncher.test(segment)); const hasBlockedDev = segments.some(segment => {
const commandWord = getLeadingCommandWord(segment);
if (!commandWord || !DEV_COMMAND_WORDS.has(commandWord)) {
return false;
}
return devPattern.test(segment) && !tmuxLauncher.test(segment);
});
if (hasBlockedDev) { if (hasBlockedDev) {
console.error('[Hook] BLOCKED: Dev server must run in tmux for log access'); console.error('[Hook] BLOCKED: Dev server must run in tmux for log access');

View File

@@ -4,6 +4,8 @@ set -euo pipefail
HOOK_ID="${1:-}" HOOK_ID="${1:-}"
REL_SCRIPT_PATH="${2:-}" REL_SCRIPT_PATH="${2:-}"
PROFILES_CSV="${3:-standard,strict}" PROFILES_CSV="${3:-standard,strict}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(cd "${SCRIPT_DIR}/../.." && pwd)}"
# Preserve stdin for passthrough or script execution # Preserve stdin for passthrough or script execution
INPUT="$(cat)" INPUT="$(cat)"
@@ -14,13 +16,13 @@ if [[ -z "$HOOK_ID" || -z "$REL_SCRIPT_PATH" ]]; then
fi fi
# Ask Node helper if this hook is enabled # Ask Node helper if this hook is enabled
ENABLED="$(node "${CLAUDE_PLUGIN_ROOT}/scripts/hooks/check-hook-enabled.js" "$HOOK_ID" "$PROFILES_CSV" 2>/dev/null || echo yes)" ENABLED="$(node "${PLUGIN_ROOT}/scripts/hooks/check-hook-enabled.js" "$HOOK_ID" "$PROFILES_CSV" 2>/dev/null || echo yes)"
if [[ "$ENABLED" != "yes" ]]; then if [[ "$ENABLED" != "yes" ]]; then
printf '%s' "$INPUT" printf '%s' "$INPUT"
exit 0 exit 0
fi fi
SCRIPT_PATH="${CLAUDE_PLUGIN_ROOT}/${REL_SCRIPT_PATH}" SCRIPT_PATH="${PLUGIN_ROOT}/${REL_SCRIPT_PATH}"
if [[ ! -f "$SCRIPT_PATH" ]]; then if [[ ! -f "$SCRIPT_PATH" ]]; then
echo "[Hook] Script not found for ${HOOK_ID}: ${SCRIPT_PATH}" >&2 echo "[Hook] Script not found for ${HOOK_ID}: ${SCRIPT_PATH}" >&2
printf '%s' "$INPUT" printf '%s' "$INPUT"

View File

@@ -28,6 +28,7 @@ OBSERVER_LOOP_SCRIPT="${SCRIPT_DIR}/observer-loop.sh"
# Source shared project detection helper # Source shared project detection helper
# This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR # This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR
source "${SKILL_ROOT}/scripts/detect-project.sh" source "${SKILL_ROOT}/scripts/detect-project.sh"
PYTHON_CMD="${CLV2_PYTHON_CMD:-}"
# ───────────────────────────────────────────── # ─────────────────────────────────────────────
# Configuration # Configuration
@@ -46,7 +47,10 @@ OBSERVER_INTERVAL_MINUTES=5
MIN_OBSERVATIONS=20 MIN_OBSERVATIONS=20
OBSERVER_ENABLED=false OBSERVER_ENABLED=false
if [ -f "$CONFIG_FILE" ]; then if [ -f "$CONFIG_FILE" ]; then
_config=$(CLV2_CONFIG="$CONFIG_FILE" python3 -c " if [ -z "$PYTHON_CMD" ]; then
echo "No python interpreter found; using built-in observer defaults." >&2
else
_config=$(CLV2_CONFIG="$CONFIG_FILE" "$PYTHON_CMD" -c "
import json, os import json, os
with open(os.environ['CLV2_CONFIG']) as f: with open(os.environ['CLV2_CONFIG']) as f:
cfg = json.load(f) cfg = json.load(f)
@@ -57,17 +61,18 @@ print(str(obs.get('enabled', False)).lower())
" 2>/dev/null || echo "5 " 2>/dev/null || echo "5
20 20
false") false")
_interval=$(echo "$_config" | sed -n '1p') _interval=$(echo "$_config" | sed -n '1p')
_min_obs=$(echo "$_config" | sed -n '2p') _min_obs=$(echo "$_config" | sed -n '2p')
_enabled=$(echo "$_config" | sed -n '3p') _enabled=$(echo "$_config" | sed -n '3p')
if [ "$_interval" -gt 0 ] 2>/dev/null; then if [ "$_interval" -gt 0 ] 2>/dev/null; then
OBSERVER_INTERVAL_MINUTES="$_interval" OBSERVER_INTERVAL_MINUTES="$_interval"
fi fi
if [ "$_min_obs" -gt 0 ] 2>/dev/null; then if [ "$_min_obs" -gt 0 ] 2>/dev/null; then
MIN_OBSERVATIONS="$_min_obs" MIN_OBSERVATIONS="$_min_obs"
fi fi
if [ "$_enabled" = "true" ]; then if [ "$_enabled" = "true" ]; then
OBSERVER_ENABLED=true OBSERVER_ENABLED=true
fi
fi fi
fi fi
OBSERVER_INTERVAL_SECONDS=$((OBSERVER_INTERVAL_MINUTES * 60)) OBSERVER_INTERVAL_SECONDS=$((OBSERVER_INTERVAL_MINUTES * 60))

View File

@@ -27,13 +27,38 @@ if [ -z "$INPUT_JSON" ]; then
exit 0 exit 0
fi fi
# Resolve a usable Python interpreter for parsing the hook's JSON payload.
# Precedence: a user-supplied CLV2_PYTHON_CMD (only if it resolves on PATH),
# then python3, then python. Prints the chosen command name on stdout and
# returns 0; returns 1 when no interpreter is available.
resolve_python_cmd() {
  if [ -n "${CLV2_PYTHON_CMD:-}" ] && command -v "$CLV2_PYTHON_CMD" >/dev/null 2>&1; then
    printf '%s\n' "$CLV2_PYTHON_CMD"
    return 0
  fi
  if command -v python3 >/dev/null 2>&1; then
    printf '%s\n' python3
    return 0
  fi
  if command -v python >/dev/null 2>&1; then
    printf '%s\n' python
    return 0
  fi
  return 1
}

# Without a Python interpreter the payload cannot be parsed; exit 0 so the
# hook degrades gracefully (observation skipped) instead of blocking the tool.
PYTHON_CMD="$(resolve_python_cmd 2>/dev/null || true)"
if [ -z "$PYTHON_CMD" ]; then
  echo "[observe] No python interpreter found, skipping observation" >&2
  exit 0
fi
# ───────────────────────────────────────────── # ─────────────────────────────────────────────
# Extract cwd from stdin for project detection # Extract cwd from stdin for project detection
# ───────────────────────────────────────────── # ─────────────────────────────────────────────
# Extract cwd from the hook JSON to use for project detection. # Extract cwd from the hook JSON to use for project detection.
# This avoids spawning a separate git subprocess when cwd is available. # This avoids spawning a separate git subprocess when cwd is available.
STDIN_CWD=$(echo "$INPUT_JSON" | python3 -c ' STDIN_CWD=$(echo "$INPUT_JSON" | "$PYTHON_CMD" -c '
import json, sys import json, sys
try: try:
data = json.load(sys.stdin) data = json.load(sys.stdin)
@@ -58,6 +83,7 @@ SKILL_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
# Source shared project detection helper # Source shared project detection helper
# This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR # This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR
source "${SKILL_ROOT}/scripts/detect-project.sh" source "${SKILL_ROOT}/scripts/detect-project.sh"
PYTHON_CMD="${CLV2_PYTHON_CMD:-$PYTHON_CMD}"
# ───────────────────────────────────────────── # ─────────────────────────────────────────────
# Configuration # Configuration
@@ -79,9 +105,9 @@ if [ ! -f "$PURGE_MARKER" ] || [ "$(find "$PURGE_MARKER" -mtime +1 2>/dev/null)"
touch "$PURGE_MARKER" 2>/dev/null || true touch "$PURGE_MARKER" 2>/dev/null || true
fi fi
# Parse using python via stdin pipe (safe for all JSON payloads) # Parse using Python via stdin pipe (safe for all JSON payloads)
# Pass HOOK_PHASE via env var since Claude Code does not include hook type in stdin JSON # Pass HOOK_PHASE via env var since Claude Code does not include hook type in stdin JSON
PARSED=$(echo "$INPUT_JSON" | HOOK_PHASE="$HOOK_PHASE" python3 -c ' PARSED=$(echo "$INPUT_JSON" | HOOK_PHASE="$HOOK_PHASE" "$PYTHON_CMD" -c '
import json import json
import sys import sys
import os import os
@@ -129,13 +155,13 @@ except Exception as e:
') ')
# Check if parsing succeeded # Check if parsing succeeded
PARSED_OK=$(echo "$PARSED" | python3 -c "import json,sys; print(json.load(sys.stdin).get('parsed', False))" 2>/dev/null || echo "False") PARSED_OK=$(echo "$PARSED" | "$PYTHON_CMD" -c "import json,sys; print(json.load(sys.stdin).get('parsed', False))" 2>/dev/null || echo "False")
if [ "$PARSED_OK" != "True" ]; then if [ "$PARSED_OK" != "True" ]; then
# Fallback: log raw input for debugging (scrub secrets before persisting) # Fallback: log raw input for debugging (scrub secrets before persisting)
timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ") timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
export TIMESTAMP="$timestamp" export TIMESTAMP="$timestamp"
echo "$INPUT_JSON" | python3 -c ' echo "$INPUT_JSON" | "$PYTHON_CMD" -c '
import json, sys, os, re import json, sys, os, re
_SECRET_RE = re.compile( _SECRET_RE = re.compile(
@@ -170,7 +196,7 @@ export PROJECT_ID_ENV="$PROJECT_ID"
export PROJECT_NAME_ENV="$PROJECT_NAME" export PROJECT_NAME_ENV="$PROJECT_NAME"
export TIMESTAMP="$timestamp" export TIMESTAMP="$timestamp"
echo "$PARSED" | python3 -c ' echo "$PARSED" | "$PYTHON_CMD" -c '
import json, sys, os, re import json, sys, os, re
parsed = json.load(sys.stdin) parsed = json.load(sys.stdin)

View File

@@ -23,6 +23,28 @@ _CLV2_HOMUNCULUS_DIR="${HOME}/.claude/homunculus"
_CLV2_PROJECTS_DIR="${_CLV2_HOMUNCULUS_DIR}/projects" _CLV2_PROJECTS_DIR="${_CLV2_HOMUNCULUS_DIR}/projects"
_CLV2_REGISTRY_FILE="${_CLV2_HOMUNCULUS_DIR}/projects.json" _CLV2_REGISTRY_FILE="${_CLV2_HOMUNCULUS_DIR}/projects.json"
# Resolve a usable Python interpreter for project hashing and registry JSON.
# Precedence: a user-supplied CLV2_PYTHON_CMD (only if it resolves on PATH),
# then python3, then python. Prints the chosen command name on stdout and
# returns 0; returns 1 when no interpreter is available.
_clv2_resolve_python_cmd() {
  if [ -n "${CLV2_PYTHON_CMD:-}" ] && command -v "$CLV2_PYTHON_CMD" >/dev/null 2>&1; then
    printf '%s\n' "$CLV2_PYTHON_CMD"
    return 0
  fi
  if command -v python3 >/dev/null 2>&1; then
    printf '%s\n' python3
    return 0
  fi
  if command -v python >/dev/null 2>&1; then
    printf '%s\n' python
    return 0
  fi
  return 1
}

# Cache the resolved interpreter ('' when none is available).
_CLV2_PYTHON_CMD="$(_clv2_resolve_python_cmd 2>/dev/null || true)"
# Fix: the resolved value must actually be stored in CLV2_PYTHON_CMD before
# exporting. Previously only _CLV2_PYTHON_CMD was assigned, so scripts that
# source this file (e.g. start-observer.sh reading "${CLV2_PYTHON_CMD:-}")
# saw an empty variable unless the user had set CLV2_PYTHON_CMD themselves.
# The resolver already prefers a valid user-supplied override, so this
# assignment preserves that precedence.
CLV2_PYTHON_CMD="$_CLV2_PYTHON_CMD"
export CLV2_PYTHON_CMD
_clv2_detect_project() { _clv2_detect_project() {
local project_root="" local project_root=""
local project_name="" local project_name=""
@@ -73,10 +95,12 @@ _clv2_detect_project() {
fi fi
local hash_input="${remote_url:-$project_root}" local hash_input="${remote_url:-$project_root}"
# Use SHA256 via python3 (portable across macOS/Linux, no shasum/sha256sum divergence) # Prefer Python for consistent SHA256 behavior across shells/platforms.
project_id=$(printf '%s' "$hash_input" | python3 -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null) if [ -n "$_CLV2_PYTHON_CMD" ]; then
project_id=$(printf '%s' "$hash_input" | "$_CLV2_PYTHON_CMD" -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
fi
# Fallback if python3 failed # Fallback if Python is unavailable or hash generation failed.
if [ -z "$project_id" ]; then if [ -z "$project_id" ]; then
project_id=$(printf '%s' "$hash_input" | shasum -a 256 2>/dev/null | cut -c1-12 || \ project_id=$(printf '%s' "$hash_input" | shasum -a 256 2>/dev/null | cut -c1-12 || \
printf '%s' "$hash_input" | sha256sum 2>/dev/null | cut -c1-12 || \ printf '%s' "$hash_input" | sha256sum 2>/dev/null | cut -c1-12 || \
@@ -85,9 +109,9 @@ _clv2_detect_project() {
# Backward compatibility: if credentials were stripped and the hash changed, # Backward compatibility: if credentials were stripped and the hash changed,
# check if a project dir exists under the legacy hash and reuse it # check if a project dir exists under the legacy hash and reuse it
if [ "$legacy_hash_input" != "$hash_input" ]; then if [ "$legacy_hash_input" != "$hash_input" ] && [ -n "$_CLV2_PYTHON_CMD" ]; then
local legacy_id local legacy_id=""
legacy_id=$(printf '%s' "$legacy_hash_input" | python3 -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null) legacy_id=$(printf '%s' "$legacy_hash_input" | "$_CLV2_PYTHON_CMD" -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
if [ -n "$legacy_id" ] && [ -d "${_CLV2_PROJECTS_DIR}/${legacy_id}" ] && [ ! -d "${_CLV2_PROJECTS_DIR}/${project_id}" ]; then if [ -n "$legacy_id" ] && [ -d "${_CLV2_PROJECTS_DIR}/${legacy_id}" ] && [ ! -d "${_CLV2_PROJECTS_DIR}/${project_id}" ]; then
# Migrate legacy directory to new hash # Migrate legacy directory to new hash
mv "${_CLV2_PROJECTS_DIR}/${legacy_id}" "${_CLV2_PROJECTS_DIR}/${project_id}" 2>/dev/null || project_id="$legacy_id" mv "${_CLV2_PROJECTS_DIR}/${legacy_id}" "${_CLV2_PROJECTS_DIR}/${project_id}" 2>/dev/null || project_id="$legacy_id"
@@ -120,14 +144,18 @@ _clv2_update_project_registry() {
mkdir -p "$(dirname "$_CLV2_REGISTRY_FILE")" mkdir -p "$(dirname "$_CLV2_REGISTRY_FILE")"
if [ -z "$_CLV2_PYTHON_CMD" ]; then
return 0
fi
# Pass values via env vars to avoid shell→python injection. # Pass values via env vars to avoid shell→python injection.
# python3 reads them with os.environ, which is safe for any string content. # Python reads them with os.environ, which is safe for any string content.
_CLV2_REG_PID="$pid" \ _CLV2_REG_PID="$pid" \
_CLV2_REG_PNAME="$pname" \ _CLV2_REG_PNAME="$pname" \
_CLV2_REG_PROOT="$proot" \ _CLV2_REG_PROOT="$proot" \
_CLV2_REG_PREMOTE="$premote" \ _CLV2_REG_PREMOTE="$premote" \
_CLV2_REG_FILE="$_CLV2_REGISTRY_FILE" \ _CLV2_REG_FILE="$_CLV2_REGISTRY_FILE" \
python3 -c ' "$_CLV2_PYTHON_CMD" -c '
import json, os import json, os
from datetime import datetime, timezone from datetime import datetime, timezone

View File

@@ -208,4 +208,4 @@ When retrieving context for this task:
- [The Longform Guide](https://x.com/affaanmustafa/status/2014040193557471352) - Subagent orchestration section - [The Longform Guide](https://x.com/affaanmustafa/status/2014040193557471352) - Subagent orchestration section
- `continuous-learning` skill - For patterns that improve over time - `continuous-learning` skill - For patterns that improve over time
- Agent definitions in `~/.claude/agents/` - Agent definitions bundled with ECC (manual install path: `agents/`)

View File

@@ -75,6 +75,35 @@ function cleanupTestDir(testDir) {
fs.rmSync(testDir, { recursive: true, force: true }); fs.rmSync(testDir, { recursive: true, force: true });
} }
// Materialize a fake executable named `baseName` in `binDir` that appends a
// JSON line ({ bin, args, cwd }) to `logFile` on every invocation, letting
// tests observe which formatter command a hook would have run.
// Returns the path to the platform-appropriate wrapper.
function createCommandShim(binDir, baseName, logFile) {
  fs.mkdirSync(binDir, { recursive: true });

  // Shared Node payload that records the invocation.
  const shimJs = path.join(binDir, `${baseName}-shim.js`);
  const shimSource = [
    'const fs = require(\'fs\');',
    `fs.appendFileSync(${JSON.stringify(logFile)}, JSON.stringify({ bin: ${JSON.stringify(baseName)}, args: process.argv.slice(2), cwd: process.cwd() }) + '\\n');`
  ].join('\n');
  fs.writeFileSync(shimJs, shimSource);

  // Windows command lookup requires a .cmd wrapper.
  if (process.platform === 'win32') {
    const cmdWrapper = path.join(binDir, `${baseName}.cmd`);
    fs.writeFileSync(cmdWrapper, `@echo off\r\nnode "${shimJs}" %*\r\n`);
    return cmdWrapper;
  }

  // POSIX: an executable script with a node shebang.
  const posixWrapper = path.join(binDir, baseName);
  fs.writeFileSync(posixWrapper, `#!/usr/bin/env node\nrequire(${JSON.stringify(shimJs)});\n`);
  fs.chmodSync(posixWrapper, 0o755);
  return posixWrapper;
}
// Parse the JSON-lines log written by a command shim into an array of
// invocation records. A missing file means the shim was never invoked.
function readCommandLog(logFile) {
  if (!fs.existsSync(logFile)) return [];
  const content = fs.readFileSync(logFile, 'utf8');
  const entries = [];
  for (const line of content.split('\n')) {
    if (line.length === 0) continue; // skip blank lines (trailing newline)
    entries.push(JSON.parse(line));
  }
  return entries;
}
// Test suite // Test suite
async function runTests() { async function runTests() {
console.log('\n=== Testing Hook Scripts ===\n'); console.log('\n=== Testing Hook Scripts ===\n');
@@ -701,6 +730,131 @@ async function runTests() {
assert.ok(result.stdout.includes('tool_input'), 'Should pass through original data'); assert.ok(result.stdout.includes('tool_input'), 'Should pass through original data');
})) passed++; else failed++; })) passed++; else failed++;
if (await asyncTest('finds formatter config in parent dirs without package.json', async () => {
const testDir = createTestDir();
const rootDir = path.join(testDir, 'config-only-repo');
const nestedDir = path.join(rootDir, 'src', 'nested');
const filePath = path.join(nestedDir, 'component.ts');
const binDir = path.join(testDir, 'bin');
const logFile = path.join(testDir, 'formatter.log');
fs.mkdirSync(nestedDir, { recursive: true });
fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
fs.writeFileSync(filePath, 'export const value = 1;\n');
createCommandShim(binDir, 'npx', logFile);
const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson, {
PATH: `${binDir}${path.delimiter}${process.env.PATH || ''}`
});
assert.strictEqual(result.code, 0, 'Should exit 0 for config-only repo');
const logEntries = readCommandLog(logFile);
assert.strictEqual(logEntries.length, 1, 'Should invoke formatter once');
assert.strictEqual(
fs.realpathSync(logEntries[0].cwd),
fs.realpathSync(rootDir),
'Should run formatter from config root'
);
assert.deepStrictEqual(
logEntries[0].args,
['prettier', '--write', filePath],
'Should use the formatter on the nested file'
);
cleanupTestDir(testDir);
})) passed++; else failed++;
if (await asyncTest('respects CLAUDE_PACKAGE_MANAGER for formatter fallback runner', async () => {
const testDir = createTestDir();
const rootDir = path.join(testDir, 'pnpm-repo');
const filePath = path.join(rootDir, 'index.ts');
const binDir = path.join(testDir, 'bin');
const logFile = path.join(testDir, 'pnpm.log');
fs.mkdirSync(rootDir, { recursive: true });
fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
fs.writeFileSync(filePath, 'export const value = 1;\n');
createCommandShim(binDir, 'pnpm', logFile);
const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson, {
PATH: `${binDir}${path.delimiter}${process.env.PATH || ''}`,
CLAUDE_PACKAGE_MANAGER: 'pnpm'
});
assert.strictEqual(result.code, 0, 'Should exit 0 when pnpm fallback is used');
const logEntries = readCommandLog(logFile);
assert.strictEqual(logEntries.length, 1, 'Should invoke pnpm fallback runner once');
assert.strictEqual(logEntries[0].bin, 'pnpm', 'Should use pnpm runner');
assert.deepStrictEqual(
logEntries[0].args,
['dlx', 'prettier', '--write', filePath],
'Should use pnpm dlx for fallback formatter execution'
);
cleanupTestDir(testDir);
})) passed++; else failed++;
if (await asyncTest('respects project package-manager config for formatter fallback runner', async () => {
const testDir = createTestDir();
const rootDir = path.join(testDir, 'bun-repo');
const filePath = path.join(rootDir, 'index.ts');
const binDir = path.join(testDir, 'bin');
const logFile = path.join(testDir, 'bun.log');
fs.mkdirSync(path.join(rootDir, '.claude'), { recursive: true });
fs.writeFileSync(path.join(rootDir, '.claude', 'package-manager.json'), JSON.stringify({ packageManager: 'bun' }));
fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
fs.writeFileSync(filePath, 'export const value = 1;\n');
createCommandShim(binDir, 'bunx', logFile);
const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
const result = await runScript(path.join(scriptsDir, 'post-edit-format.js'), stdinJson, {
PATH: `${binDir}${path.delimiter}${process.env.PATH || ''}`
});
assert.strictEqual(result.code, 0, 'Should exit 0 when project config selects bun');
const logEntries = readCommandLog(logFile);
assert.strictEqual(logEntries.length, 1, 'Should invoke bunx fallback runner once');
assert.strictEqual(logEntries[0].bin, 'bunx', 'Should use bunx runner');
assert.deepStrictEqual(
logEntries[0].args,
['prettier', '--write', filePath],
'Should use bunx for fallback formatter execution'
);
cleanupTestDir(testDir);
})) passed++; else failed++;
console.log('\npre-bash-dev-server-block.js:');
if (await asyncTest('allows non-dev commands whose heredoc text mentions npm run dev', async () => {
const command = [
'gh pr create --title "fix: docs" --body "$(cat <<\'EOF\'',
'## Test plan',
'- run npm run dev to verify the site starts',
'EOF',
')"'
].join('\n');
const stdinJson = JSON.stringify({ tool_input: { command } });
const result = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), stdinJson);
assert.strictEqual(result.code, 0, 'Non-dev commands should pass through');
assert.strictEqual(result.stdout, stdinJson, 'Should preserve original input');
assert.ok(!result.stderr.includes('BLOCKED'), 'Should not emit a block message');
})) passed++; else failed++;
if (await asyncTest('blocks bare npm run dev outside tmux on non-Windows platforms', async () => {
const stdinJson = JSON.stringify({ tool_input: { command: 'npm run dev' } });
const result = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), stdinJson);
if (process.platform === 'win32') {
assert.strictEqual(result.code, 0, 'Windows path should pass through');
assert.strictEqual(result.stdout, stdinJson, 'Windows path should preserve original input');
} else {
assert.strictEqual(result.code, 2, 'Unix path should block bare dev servers');
assert.ok(result.stderr.includes('BLOCKED'), 'Should explain why the command was blocked');
}
})) passed++; else failed++;
// post-edit-typecheck.js tests // post-edit-typecheck.js tests
console.log('\npost-edit-typecheck.js:'); console.log('\npost-edit-typecheck.js:');
@@ -1516,6 +1670,28 @@ async function runTests() {
assert.ok(typecheckSource.includes('npx.cmd'), 'Should use npx.cmd for Windows cross-platform safety'); assert.ok(typecheckSource.includes('npx.cmd'), 'Should use npx.cmd for Windows cross-platform safety');
})) passed++; else failed++; })) passed++; else failed++;
console.log('\nShell wrapper portability:');
if (test('run-with-flags-shell resolves plugin root when CLAUDE_PLUGIN_ROOT is unset', () => {
const wrapperSource = fs.readFileSync(path.join(scriptsDir, 'run-with-flags-shell.sh'), 'utf8');
assert.ok(
wrapperSource.includes('PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-'),
'Shell wrapper should derive PLUGIN_ROOT from its own script path'
);
})) passed++; else failed++;
if (test('continuous-learning shell scripts use resolved Python command instead of hardcoded python3 invocations', () => {
const observeSource = fs.readFileSync(path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'hooks', 'observe.sh'), 'utf8');
const startObserverSource = fs.readFileSync(path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'agents', 'start-observer.sh'), 'utf8');
const detectProjectSource = fs.readFileSync(path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'scripts', 'detect-project.sh'), 'utf8');
assert.ok(!/python3\s+-c/.test(observeSource), 'observe.sh should not invoke python3 directly');
assert.ok(!/python3\s+-c/.test(startObserverSource), 'start-observer.sh should not invoke python3 directly');
assert.ok(observeSource.includes('PYTHON_CMD'), 'observe.sh should resolve Python dynamically');
assert.ok(startObserverSource.includes('CLV2_PYTHON_CMD'), 'start-observer.sh should reuse detected Python command');
assert.ok(detectProjectSource.includes('_clv2_resolve_python_cmd'), 'detect-project.sh should provide shared Python resolution');
})) passed++; else failed++;
if (await asyncTest('matches .tsx extension for type checking', async () => { if (await asyncTest('matches .tsx extension for type checking', async () => {
const testDir = createTestDir(); const testDir = createTestDir();
const testFile = path.join(testDir, 'component.tsx'); const testFile = path.join(testDir, 'component.tsx');