mirror of
https://github.com/affaan-m/everything-claude-code.git
synced 2026-04-12 20:53:34 +08:00
Compare commits
4 Commits
0f416b0b9d
...
af51fcacb7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
af51fcacb7 | ||
|
|
1c5e07ff77 | ||
|
|
d66bd6439b | ||
|
|
440178d697 |
@@ -297,6 +297,15 @@ Then in your `opencode.json`:
|
||||
}
|
||||
```
|
||||
|
||||
This only loads the published ECC OpenCode plugin module (hooks/events and exported plugin tools).
|
||||
It does **not** automatically inject ECC's full `agent`, `command`, or `instructions` config into your project.
|
||||
|
||||
If you want the full ECC OpenCode workflow surface, use the repository's bundled `.opencode/opencode.json` as your base config or copy these pieces into your project:
|
||||
- `.opencode/commands/`
|
||||
- `.opencode/prompts/`
|
||||
- `.opencode/instructions/INSTRUCTIONS.md`
|
||||
- the `agent` and `command` sections from `.opencode/opencode.json`
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Configuration Not Loading
|
||||
@@ -322,6 +331,7 @@ Then in your `opencode.json`:
|
||||
1. Verify the command is defined in `opencode.json` or as `.md` file in `.opencode/commands/`
|
||||
2. Check the referenced agent exists
|
||||
3. Ensure the template uses `$ARGUMENTS` for user input
|
||||
4. If you installed only `plugin: ["ecc-universal"]`, note that npm plugin install does not auto-add ECC commands or agents to your project config
|
||||
|
||||
## Best Practices
|
||||
|
||||
|
||||
@@ -32,7 +32,16 @@ Add to your `opencode.json`:
|
||||
"plugin": ["ecc-universal"]
|
||||
}
|
||||
```
|
||||
After installation, the `ecc-install` CLI becomes available:
|
||||
|
||||
This loads the ECC OpenCode plugin module from npm:
|
||||
- hook/event integrations
|
||||
- bundled custom tools exported by the plugin
|
||||
|
||||
It does **not** auto-register the full ECC command/agent/instruction catalog in your project config. For the full OpenCode setup, either:
|
||||
- run OpenCode inside this repository, or
|
||||
- copy the relevant `.opencode/commands/`, `.opencode/prompts/`, `.opencode/instructions/`, and the `instructions`, `agent`, and `command` config entries into your own project
|
||||
|
||||
After installation, the `ecc-install` CLI is also available:
|
||||
|
||||
```bash
|
||||
npx ecc-install typescript
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
/**
|
||||
* Everything Claude Code (ECC) Plugin for OpenCode
|
||||
*
|
||||
* This package provides a complete OpenCode plugin with:
|
||||
* - 13 specialized agents (planner, architect, code-reviewer, etc.)
|
||||
* - 31 commands (/plan, /tdd, /code-review, etc.)
|
||||
* This package provides the published ECC OpenCode plugin module:
|
||||
* - Plugin hooks (auto-format, TypeScript check, console.log warning, env injection, etc.)
|
||||
* - Custom tools (run-tests, check-coverage, security-audit, format-code, lint-check, git-summary)
|
||||
* - 37 skills (coding-standards, security-review, tdd-workflow, etc.)
|
||||
* - Bundled reference config/assets for the wider ECC OpenCode setup
|
||||
*
|
||||
* Usage:
|
||||
*
|
||||
@@ -22,6 +20,10 @@
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* That enables the published plugin module only. For ECC commands, agents,
|
||||
* prompts, and instructions, use this repository's `.opencode/opencode.json`
|
||||
* as a base or copy the bundled `.opencode/` assets into your project.
|
||||
*
|
||||
* Option 2: Clone and use directly
|
||||
* ```bash
|
||||
* git clone https://github.com/affaan-m/everything-claude-code
|
||||
@@ -51,6 +53,7 @@ export const metadata = {
|
||||
agents: 13,
|
||||
commands: 31,
|
||||
skills: 37,
|
||||
configAssets: true,
|
||||
hookEvents: [
|
||||
"file.edited",
|
||||
"tool.execute.before",
|
||||
|
||||
@@ -1050,6 +1050,13 @@ Then add to your `opencode.json`:
|
||||
}
|
||||
```
|
||||
|
||||
That npm plugin entry enables ECC's published OpenCode plugin module (hooks/events and plugin tools).
|
||||
It does **not** automatically add ECC's full command/agent/instruction catalog to your project config.
|
||||
|
||||
For the full ECC OpenCode setup, either:
|
||||
- run OpenCode inside this repository, or
|
||||
- copy the bundled `.opencode/` config assets into your project and wire the `instructions`, `agent`, and `command` entries in `opencode.json`
|
||||
|
||||
### Documentation
|
||||
|
||||
- **Migration Guide**: `.opencode/MIGRATION.md`
|
||||
|
||||
@@ -337,8 +337,10 @@ For PMX, prioritize these E2E tests:
|
||||
|
||||
## Related Agents
|
||||
|
||||
This command invokes the `e2e-runner` agent located at:
|
||||
`~/.claude/agents/e2e-runner.md`
|
||||
This command invokes the `e2e-runner` agent provided by ECC.
|
||||
|
||||
For manual installs, the source file lives at:
|
||||
`agents/e2e-runner.md`
|
||||
|
||||
## Quick Commands
|
||||
|
||||
|
||||
@@ -109,5 +109,7 @@ After planning:
|
||||
|
||||
## Related Agents
|
||||
|
||||
This command invokes the `planner` agent located at:
|
||||
`~/.claude/agents/planner.md`
|
||||
This command invokes the `planner` agent provided by ECC.
|
||||
|
||||
For manual installs, the source file lives at:
|
||||
`agents/planner.md`
|
||||
|
||||
@@ -319,8 +319,10 @@ Never skip the RED phase. Never write code before tests.
|
||||
|
||||
## Related Agents
|
||||
|
||||
This command invokes the `tdd-guide` agent located at:
|
||||
`~/.claude/agents/tdd-guide.md`
|
||||
This command invokes the `tdd-guide` agent provided by ECC.
|
||||
|
||||
And can reference the `tdd-workflow` skill at:
|
||||
`~/.claude/skills/tdd-workflow/`
|
||||
The related `tdd-workflow` skill is also bundled with ECC.
|
||||
|
||||
For manual installs, the source files live at:
|
||||
- `agents/tdd-guide.md`
|
||||
- `skills/tdd-workflow/SKILL.md`
|
||||
|
||||
@@ -34,6 +34,25 @@
|
||||
"agents": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" }
|
||||
},
|
||||
"features": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"agents": { "type": "integer", "minimum": 0 },
|
||||
"commands": { "type": "integer", "minimum": 0 },
|
||||
"skills": { "type": "integer", "minimum": 0 },
|
||||
"configAssets": { "type": "boolean" },
|
||||
"hookEvents": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" }
|
||||
},
|
||||
"customTools": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" }
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
|
||||
@@ -10,11 +10,34 @@
|
||||
* Fails silently if no formatter is found or installed.
|
||||
*/
|
||||
|
||||
const { execFileSync } = require('child_process');
|
||||
const { execFileSync, spawnSync } = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { getPackageManager } = require('../lib/package-manager');
|
||||
|
||||
const MAX_STDIN = 1024 * 1024; // 1MB limit
|
||||
const BIOME_CONFIGS = ['biome.json', 'biome.jsonc'];
|
||||
const PRETTIER_CONFIGS = [
|
||||
'.prettierrc',
|
||||
'.prettierrc.json',
|
||||
'.prettierrc.json5',
|
||||
'.prettierrc.js',
|
||||
'.prettierrc.cjs',
|
||||
'.prettierrc.mjs',
|
||||
'.prettierrc.ts',
|
||||
'.prettierrc.cts',
|
||||
'.prettierrc.mts',
|
||||
'.prettierrc.yml',
|
||||
'.prettierrc.yaml',
|
||||
'.prettierrc.toml',
|
||||
'prettier.config.js',
|
||||
'prettier.config.cjs',
|
||||
'prettier.config.mjs',
|
||||
'prettier.config.ts',
|
||||
'prettier.config.cts',
|
||||
'prettier.config.mts',
|
||||
];
|
||||
const PROJECT_ROOT_MARKERS = ['package.json', ...BIOME_CONFIGS, ...PRETTIER_CONFIGS];
|
||||
let data = '';
|
||||
process.stdin.setEncoding('utf8');
|
||||
|
||||
@@ -27,50 +50,102 @@ process.stdin.on('data', chunk => {
|
||||
|
||||
function findProjectRoot(startDir) {
|
||||
let dir = startDir;
|
||||
while (dir !== path.dirname(dir)) {
|
||||
if (fs.existsSync(path.join(dir, 'package.json'))) return dir;
|
||||
dir = path.dirname(dir);
|
||||
let fallbackDir = null;
|
||||
|
||||
while (true) {
|
||||
if (detectFormatter(dir)) {
|
||||
return dir;
|
||||
}
|
||||
|
||||
if (!fallbackDir && PROJECT_ROOT_MARKERS.some(marker => fs.existsSync(path.join(dir, marker)))) {
|
||||
fallbackDir = dir;
|
||||
}
|
||||
|
||||
const parentDir = path.dirname(dir);
|
||||
if (parentDir === dir) break;
|
||||
dir = parentDir;
|
||||
}
|
||||
return startDir;
|
||||
|
||||
return fallbackDir || startDir;
|
||||
}
|
||||
|
||||
function detectFormatter(projectRoot) {
|
||||
const biomeConfigs = ['biome.json', 'biome.jsonc'];
|
||||
for (const cfg of biomeConfigs) {
|
||||
for (const cfg of BIOME_CONFIGS) {
|
||||
if (fs.existsSync(path.join(projectRoot, cfg))) return 'biome';
|
||||
}
|
||||
|
||||
const prettierConfigs = [
|
||||
'.prettierrc',
|
||||
'.prettierrc.json',
|
||||
'.prettierrc.js',
|
||||
'.prettierrc.cjs',
|
||||
'.prettierrc.mjs',
|
||||
'.prettierrc.yml',
|
||||
'.prettierrc.yaml',
|
||||
'.prettierrc.toml',
|
||||
'prettier.config.js',
|
||||
'prettier.config.cjs',
|
||||
'prettier.config.mjs',
|
||||
];
|
||||
for (const cfg of prettierConfigs) {
|
||||
for (const cfg of PRETTIER_CONFIGS) {
|
||||
if (fs.existsSync(path.join(projectRoot, cfg))) return 'prettier';
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function getFormatterCommand(formatter, filePath) {
|
||||
const npxBin = process.platform === 'win32' ? 'npx.cmd' : 'npx';
|
||||
function getRunnerBin(bin) {
|
||||
if (process.platform !== 'win32') return bin;
|
||||
if (bin === 'npx') return 'npx.cmd';
|
||||
if (bin === 'pnpm') return 'pnpm.cmd';
|
||||
if (bin === 'yarn') return 'yarn.cmd';
|
||||
if (bin === 'bunx') return 'bunx.cmd';
|
||||
return bin;
|
||||
}
|
||||
|
||||
function getFormatterRunner(projectRoot) {
|
||||
const pm = getPackageManager({ projectDir: projectRoot });
|
||||
const execCmd = pm?.config?.execCmd || 'npx';
|
||||
const [bin = 'npx', ...prefix] = execCmd.split(/\s+/).filter(Boolean);
|
||||
|
||||
return {
|
||||
bin: getRunnerBin(bin),
|
||||
prefix
|
||||
};
|
||||
}
|
||||
|
||||
function getFormatterCommand(formatter, filePath, projectRoot) {
|
||||
const runner = getFormatterRunner(projectRoot);
|
||||
|
||||
if (formatter === 'biome') {
|
||||
return { bin: npxBin, args: ['@biomejs/biome', 'format', '--write', filePath] };
|
||||
return {
|
||||
bin: runner.bin,
|
||||
args: [...runner.prefix, '@biomejs/biome', 'format', '--write', filePath]
|
||||
};
|
||||
}
|
||||
if (formatter === 'prettier') {
|
||||
return { bin: npxBin, args: ['prettier', '--write', filePath] };
|
||||
return {
|
||||
bin: runner.bin,
|
||||
args: [...runner.prefix, 'prettier', '--write', filePath]
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function runFormatterCommand(cmd, projectRoot) {
|
||||
if (process.platform === 'win32' && cmd.bin.endsWith('.cmd')) {
|
||||
const result = spawnSync(cmd.bin, cmd.args, {
|
||||
cwd: projectRoot,
|
||||
shell: true,
|
||||
stdio: 'pipe',
|
||||
timeout: 15000
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
throw result.error;
|
||||
}
|
||||
|
||||
if (typeof result.status === 'number' && result.status !== 0) {
|
||||
throw new Error(result.stderr?.toString() || `Formatter exited with status ${result.status}`);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
execFileSync(cmd.bin, cmd.args, {
|
||||
cwd: projectRoot,
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: 15000
|
||||
});
|
||||
}
|
||||
|
||||
process.stdin.on('end', () => {
|
||||
try {
|
||||
const input = JSON.parse(data);
|
||||
@@ -80,14 +155,10 @@ process.stdin.on('end', () => {
|
||||
try {
|
||||
const projectRoot = findProjectRoot(path.dirname(path.resolve(filePath)));
|
||||
const formatter = detectFormatter(projectRoot);
|
||||
const cmd = getFormatterCommand(formatter, filePath);
|
||||
const cmd = getFormatterCommand(formatter, filePath, projectRoot);
|
||||
|
||||
if (cmd) {
|
||||
execFileSync(cmd.bin, cmd.args, {
|
||||
cwd: projectRoot,
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: 15000
|
||||
});
|
||||
runFormatterCommand(cmd, projectRoot);
|
||||
}
|
||||
} catch {
|
||||
// Formatter not installed, file missing, or failed — non-blocking
|
||||
|
||||
@@ -2,8 +2,149 @@
|
||||
'use strict';
|
||||
|
||||
const MAX_STDIN = 1024 * 1024;
|
||||
const path = require('path');
|
||||
const { splitShellSegments } = require('../lib/shell-split');
|
||||
|
||||
const DEV_COMMAND_WORDS = new Set([
|
||||
'npm',
|
||||
'pnpm',
|
||||
'yarn',
|
||||
'bun',
|
||||
'npx',
|
||||
'tmux'
|
||||
]);
|
||||
const SKIPPABLE_PREFIX_WORDS = new Set(['env', 'command', 'builtin', 'exec', 'noglob', 'sudo', 'nohup']);
|
||||
const PREFIX_OPTION_VALUE_WORDS = {
|
||||
env: new Set(['-u', '-C', '-S', '--unset', '--chdir', '--split-string']),
|
||||
sudo: new Set([
|
||||
'-u',
|
||||
'-g',
|
||||
'-h',
|
||||
'-p',
|
||||
'-r',
|
||||
'-t',
|
||||
'-C',
|
||||
'--user',
|
||||
'--group',
|
||||
'--host',
|
||||
'--prompt',
|
||||
'--role',
|
||||
'--type',
|
||||
'--close-from'
|
||||
])
|
||||
};
|
||||
|
||||
function readToken(input, startIndex) {
|
||||
let index = startIndex;
|
||||
while (index < input.length && /\s/.test(input[index])) index += 1;
|
||||
if (index >= input.length) return null;
|
||||
|
||||
let token = '';
|
||||
let quote = null;
|
||||
|
||||
while (index < input.length) {
|
||||
const ch = input[index];
|
||||
|
||||
if (quote) {
|
||||
if (ch === quote) {
|
||||
quote = null;
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ch === '\\' && quote === '"' && index + 1 < input.length) {
|
||||
token += input[index + 1];
|
||||
index += 2;
|
||||
continue;
|
||||
}
|
||||
|
||||
token += ch;
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ch === '"' || ch === "'") {
|
||||
quote = ch;
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/\s/.test(ch)) break;
|
||||
|
||||
if (ch === '\\' && index + 1 < input.length) {
|
||||
token += input[index + 1];
|
||||
index += 2;
|
||||
continue;
|
||||
}
|
||||
|
||||
token += ch;
|
||||
index += 1;
|
||||
}
|
||||
|
||||
return { token, end: index };
|
||||
}
|
||||
|
||||
function shouldSkipOptionValue(wrapper, optionToken) {
|
||||
if (!wrapper || !optionToken || optionToken.includes('=')) return false;
|
||||
const optionSet = PREFIX_OPTION_VALUE_WORDS[wrapper];
|
||||
return Boolean(optionSet && optionSet.has(optionToken));
|
||||
}
|
||||
|
||||
function isOptionToken(token) {
|
||||
return token.startsWith('-') && token.length > 1;
|
||||
}
|
||||
|
||||
function normalizeCommandWord(token) {
|
||||
if (!token) return '';
|
||||
const base = path.basename(token).toLowerCase();
|
||||
return base.replace(/\.(cmd|exe|bat)$/i, '');
|
||||
}
|
||||
|
||||
function getLeadingCommandWord(segment) {
|
||||
let index = 0;
|
||||
let activeWrapper = null;
|
||||
let skipNextValue = false;
|
||||
|
||||
while (index < segment.length) {
|
||||
const parsed = readToken(segment, index);
|
||||
if (!parsed) return null;
|
||||
index = parsed.end;
|
||||
|
||||
const token = parsed.token;
|
||||
if (!token) continue;
|
||||
|
||||
if (skipNextValue) {
|
||||
skipNextValue = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token === '--') {
|
||||
activeWrapper = null;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[A-Za-z_][A-Za-z0-9_]*=.*/.test(token)) continue;
|
||||
|
||||
const normalizedToken = normalizeCommandWord(token);
|
||||
|
||||
if (SKIPPABLE_PREFIX_WORDS.has(normalizedToken)) {
|
||||
activeWrapper = normalizedToken;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (activeWrapper && isOptionToken(token)) {
|
||||
if (shouldSkipOptionValue(activeWrapper, token)) {
|
||||
skipNextValue = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
return normalizedToken;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
let raw = '';
|
||||
process.stdin.setEncoding('utf8');
|
||||
process.stdin.on('data', chunk => {
|
||||
@@ -23,7 +164,13 @@ process.stdin.on('end', () => {
|
||||
const tmuxLauncher = /^\s*tmux\s+(new|new-session|new-window|split-window)\b/;
|
||||
const devPattern = /\b(npm\s+run\s+dev|pnpm(?:\s+run)?\s+dev|yarn\s+dev|bun\s+run\s+dev)\b/;
|
||||
|
||||
const hasBlockedDev = segments.some(segment => devPattern.test(segment) && !tmuxLauncher.test(segment));
|
||||
const hasBlockedDev = segments.some(segment => {
|
||||
const commandWord = getLeadingCommandWord(segment);
|
||||
if (!commandWord || !DEV_COMMAND_WORDS.has(commandWord)) {
|
||||
return false;
|
||||
}
|
||||
return devPattern.test(segment) && !tmuxLauncher.test(segment);
|
||||
});
|
||||
|
||||
if (hasBlockedDev) {
|
||||
console.error('[Hook] BLOCKED: Dev server must run in tmux for log access');
|
||||
|
||||
@@ -4,6 +4,8 @@ set -euo pipefail
|
||||
HOOK_ID="${1:-}"
|
||||
REL_SCRIPT_PATH="${2:-}"
|
||||
PROFILES_CSV="${3:-standard,strict}"
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(cd "${SCRIPT_DIR}/../.." && pwd)}"
|
||||
|
||||
# Preserve stdin for passthrough or script execution
|
||||
INPUT="$(cat)"
|
||||
@@ -14,13 +16,13 @@ if [[ -z "$HOOK_ID" || -z "$REL_SCRIPT_PATH" ]]; then
|
||||
fi
|
||||
|
||||
# Ask Node helper if this hook is enabled
|
||||
ENABLED="$(node "${CLAUDE_PLUGIN_ROOT}/scripts/hooks/check-hook-enabled.js" "$HOOK_ID" "$PROFILES_CSV" 2>/dev/null || echo yes)"
|
||||
ENABLED="$(node "${PLUGIN_ROOT}/scripts/hooks/check-hook-enabled.js" "$HOOK_ID" "$PROFILES_CSV" 2>/dev/null || echo yes)"
|
||||
if [[ "$ENABLED" != "yes" ]]; then
|
||||
printf '%s' "$INPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
SCRIPT_PATH="${CLAUDE_PLUGIN_ROOT}/${REL_SCRIPT_PATH}"
|
||||
SCRIPT_PATH="${PLUGIN_ROOT}/${REL_SCRIPT_PATH}"
|
||||
if [[ ! -f "$SCRIPT_PATH" ]]; then
|
||||
echo "[Hook] Script not found for ${HOOK_ID}: ${SCRIPT_PATH}" >&2
|
||||
printf '%s' "$INPUT"
|
||||
|
||||
@@ -28,6 +28,7 @@ OBSERVER_LOOP_SCRIPT="${SCRIPT_DIR}/observer-loop.sh"
|
||||
# Source shared project detection helper
|
||||
# This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR
|
||||
source "${SKILL_ROOT}/scripts/detect-project.sh"
|
||||
PYTHON_CMD="${CLV2_PYTHON_CMD:-}"
|
||||
|
||||
# ─────────────────────────────────────────────
|
||||
# Configuration
|
||||
@@ -46,7 +47,10 @@ OBSERVER_INTERVAL_MINUTES=5
|
||||
MIN_OBSERVATIONS=20
|
||||
OBSERVER_ENABLED=false
|
||||
if [ -f "$CONFIG_FILE" ]; then
|
||||
_config=$(CLV2_CONFIG="$CONFIG_FILE" python3 -c "
|
||||
if [ -z "$PYTHON_CMD" ]; then
|
||||
echo "No python interpreter found; using built-in observer defaults." >&2
|
||||
else
|
||||
_config=$(CLV2_CONFIG="$CONFIG_FILE" "$PYTHON_CMD" -c "
|
||||
import json, os
|
||||
with open(os.environ['CLV2_CONFIG']) as f:
|
||||
cfg = json.load(f)
|
||||
@@ -57,17 +61,18 @@ print(str(obs.get('enabled', False)).lower())
|
||||
" 2>/dev/null || echo "5
|
||||
20
|
||||
false")
|
||||
_interval=$(echo "$_config" | sed -n '1p')
|
||||
_min_obs=$(echo "$_config" | sed -n '2p')
|
||||
_enabled=$(echo "$_config" | sed -n '3p')
|
||||
if [ "$_interval" -gt 0 ] 2>/dev/null; then
|
||||
OBSERVER_INTERVAL_MINUTES="$_interval"
|
||||
fi
|
||||
if [ "$_min_obs" -gt 0 ] 2>/dev/null; then
|
||||
MIN_OBSERVATIONS="$_min_obs"
|
||||
fi
|
||||
if [ "$_enabled" = "true" ]; then
|
||||
OBSERVER_ENABLED=true
|
||||
_interval=$(echo "$_config" | sed -n '1p')
|
||||
_min_obs=$(echo "$_config" | sed -n '2p')
|
||||
_enabled=$(echo "$_config" | sed -n '3p')
|
||||
if [ "$_interval" -gt 0 ] 2>/dev/null; then
|
||||
OBSERVER_INTERVAL_MINUTES="$_interval"
|
||||
fi
|
||||
if [ "$_min_obs" -gt 0 ] 2>/dev/null; then
|
||||
MIN_OBSERVATIONS="$_min_obs"
|
||||
fi
|
||||
if [ "$_enabled" = "true" ]; then
|
||||
OBSERVER_ENABLED=true
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
OBSERVER_INTERVAL_SECONDS=$((OBSERVER_INTERVAL_MINUTES * 60))
|
||||
|
||||
@@ -27,13 +27,38 @@ if [ -z "$INPUT_JSON" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
resolve_python_cmd() {
|
||||
if [ -n "${CLV2_PYTHON_CMD:-}" ] && command -v "$CLV2_PYTHON_CMD" >/dev/null 2>&1; then
|
||||
printf '%s\n' "$CLV2_PYTHON_CMD"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
printf '%s\n' python3
|
||||
return 0
|
||||
fi
|
||||
|
||||
if command -v python >/dev/null 2>&1; then
|
||||
printf '%s\n' python
|
||||
return 0
|
||||
fi
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
PYTHON_CMD="$(resolve_python_cmd 2>/dev/null || true)"
|
||||
if [ -z "$PYTHON_CMD" ]; then
|
||||
echo "[observe] No python interpreter found, skipping observation" >&2
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# ─────────────────────────────────────────────
|
||||
# Extract cwd from stdin for project detection
|
||||
# ─────────────────────────────────────────────
|
||||
|
||||
# Extract cwd from the hook JSON to use for project detection.
|
||||
# This avoids spawning a separate git subprocess when cwd is available.
|
||||
STDIN_CWD=$(echo "$INPUT_JSON" | python3 -c '
|
||||
STDIN_CWD=$(echo "$INPUT_JSON" | "$PYTHON_CMD" -c '
|
||||
import json, sys
|
||||
try:
|
||||
data = json.load(sys.stdin)
|
||||
@@ -58,6 +83,7 @@ SKILL_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
# Source shared project detection helper
|
||||
# This sets: PROJECT_ID, PROJECT_NAME, PROJECT_ROOT, PROJECT_DIR
|
||||
source "${SKILL_ROOT}/scripts/detect-project.sh"
|
||||
PYTHON_CMD="${CLV2_PYTHON_CMD:-$PYTHON_CMD}"
|
||||
|
||||
# ─────────────────────────────────────────────
|
||||
# Configuration
|
||||
@@ -79,9 +105,9 @@ if [ ! -f "$PURGE_MARKER" ] || [ "$(find "$PURGE_MARKER" -mtime +1 2>/dev/null)"
|
||||
touch "$PURGE_MARKER" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
# Parse using python via stdin pipe (safe for all JSON payloads)
|
||||
# Parse using Python via stdin pipe (safe for all JSON payloads)
|
||||
# Pass HOOK_PHASE via env var since Claude Code does not include hook type in stdin JSON
|
||||
PARSED=$(echo "$INPUT_JSON" | HOOK_PHASE="$HOOK_PHASE" python3 -c '
|
||||
PARSED=$(echo "$INPUT_JSON" | HOOK_PHASE="$HOOK_PHASE" "$PYTHON_CMD" -c '
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
@@ -129,13 +155,13 @@ except Exception as e:
|
||||
')
|
||||
|
||||
# Check if parsing succeeded
|
||||
PARSED_OK=$(echo "$PARSED" | python3 -c "import json,sys; print(json.load(sys.stdin).get('parsed', False))" 2>/dev/null || echo "False")
|
||||
PARSED_OK=$(echo "$PARSED" | "$PYTHON_CMD" -c "import json,sys; print(json.load(sys.stdin).get('parsed', False))" 2>/dev/null || echo "False")
|
||||
|
||||
if [ "$PARSED_OK" != "True" ]; then
|
||||
# Fallback: log raw input for debugging (scrub secrets before persisting)
|
||||
timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||
export TIMESTAMP="$timestamp"
|
||||
echo "$INPUT_JSON" | python3 -c '
|
||||
echo "$INPUT_JSON" | "$PYTHON_CMD" -c '
|
||||
import json, sys, os, re
|
||||
|
||||
_SECRET_RE = re.compile(
|
||||
@@ -170,7 +196,7 @@ export PROJECT_ID_ENV="$PROJECT_ID"
|
||||
export PROJECT_NAME_ENV="$PROJECT_NAME"
|
||||
export TIMESTAMP="$timestamp"
|
||||
|
||||
echo "$PARSED" | python3 -c '
|
||||
echo "$PARSED" | "$PYTHON_CMD" -c '
|
||||
import json, sys, os, re
|
||||
|
||||
parsed = json.load(sys.stdin)
|
||||
|
||||
@@ -23,6 +23,29 @@ _CLV2_HOMUNCULUS_DIR="${HOME}/.claude/homunculus"
|
||||
_CLV2_PROJECTS_DIR="${_CLV2_HOMUNCULUS_DIR}/projects"
|
||||
_CLV2_REGISTRY_FILE="${_CLV2_HOMUNCULUS_DIR}/projects.json"
|
||||
|
||||
_clv2_resolve_python_cmd() {
|
||||
if [ -n "${CLV2_PYTHON_CMD:-}" ] && command -v "$CLV2_PYTHON_CMD" >/dev/null 2>&1; then
|
||||
printf '%s\n' "$CLV2_PYTHON_CMD"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
printf '%s\n' python3
|
||||
return 0
|
||||
fi
|
||||
|
||||
if command -v python >/dev/null 2>&1; then
|
||||
printf '%s\n' python
|
||||
return 0
|
||||
fi
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
_CLV2_PYTHON_CMD="$(_clv2_resolve_python_cmd 2>/dev/null || true)"
|
||||
CLV2_PYTHON_CMD="$_CLV2_PYTHON_CMD"
|
||||
export CLV2_PYTHON_CMD
|
||||
|
||||
_clv2_detect_project() {
|
||||
local project_root=""
|
||||
local project_name=""
|
||||
@@ -73,10 +96,12 @@ _clv2_detect_project() {
|
||||
fi
|
||||
|
||||
local hash_input="${remote_url:-$project_root}"
|
||||
# Use SHA256 via python3 (portable across macOS/Linux, no shasum/sha256sum divergence)
|
||||
project_id=$(printf '%s' "$hash_input" | python3 -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
|
||||
# Prefer Python for consistent SHA256 behavior across shells/platforms.
|
||||
if [ -n "$_CLV2_PYTHON_CMD" ]; then
|
||||
project_id=$(printf '%s' "$hash_input" | "$_CLV2_PYTHON_CMD" -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
|
||||
fi
|
||||
|
||||
# Fallback if python3 failed
|
||||
# Fallback if Python is unavailable or hash generation failed.
|
||||
if [ -z "$project_id" ]; then
|
||||
project_id=$(printf '%s' "$hash_input" | shasum -a 256 2>/dev/null | cut -c1-12 || \
|
||||
printf '%s' "$hash_input" | sha256sum 2>/dev/null | cut -c1-12 || \
|
||||
@@ -85,9 +110,9 @@ _clv2_detect_project() {
|
||||
|
||||
# Backward compatibility: if credentials were stripped and the hash changed,
|
||||
# check if a project dir exists under the legacy hash and reuse it
|
||||
if [ "$legacy_hash_input" != "$hash_input" ]; then
|
||||
local legacy_id
|
||||
legacy_id=$(printf '%s' "$legacy_hash_input" | python3 -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
|
||||
if [ "$legacy_hash_input" != "$hash_input" ] && [ -n "$_CLV2_PYTHON_CMD" ]; then
|
||||
local legacy_id=""
|
||||
legacy_id=$(printf '%s' "$legacy_hash_input" | "$_CLV2_PYTHON_CMD" -c "import sys,hashlib; print(hashlib.sha256(sys.stdin.buffer.read()).hexdigest()[:12])" 2>/dev/null)
|
||||
if [ -n "$legacy_id" ] && [ -d "${_CLV2_PROJECTS_DIR}/${legacy_id}" ] && [ ! -d "${_CLV2_PROJECTS_DIR}/${project_id}" ]; then
|
||||
# Migrate legacy directory to new hash
|
||||
mv "${_CLV2_PROJECTS_DIR}/${legacy_id}" "${_CLV2_PROJECTS_DIR}/${project_id}" 2>/dev/null || project_id="$legacy_id"
|
||||
@@ -120,14 +145,18 @@ _clv2_update_project_registry() {
|
||||
|
||||
mkdir -p "$(dirname "$_CLV2_REGISTRY_FILE")"
|
||||
|
||||
if [ -z "$_CLV2_PYTHON_CMD" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Pass values via env vars to avoid shell→python injection.
|
||||
# python3 reads them with os.environ, which is safe for any string content.
|
||||
# Python reads them with os.environ, which is safe for any string content.
|
||||
_CLV2_REG_PID="$pid" \
|
||||
_CLV2_REG_PNAME="$pname" \
|
||||
_CLV2_REG_PROOT="$proot" \
|
||||
_CLV2_REG_PREMOTE="$premote" \
|
||||
_CLV2_REG_FILE="$_CLV2_REGISTRY_FILE" \
|
||||
python3 -c '
|
||||
"$_CLV2_PYTHON_CMD" -c '
|
||||
import json, os
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
@@ -208,4 +208,4 @@ When retrieving context for this task:
|
||||
|
||||
- [The Longform Guide](https://x.com/affaanmustafa/status/2014040193557471352) - Subagent orchestration section
|
||||
- `continuous-learning` skill - For patterns that improve over time
|
||||
- Agent definitions in `~/.claude/agents/`
|
||||
- Agent definitions bundled with ECC (manual install path: `agents/`)
|
||||
|
||||
@@ -75,6 +75,55 @@ function cleanupTestDir(testDir) {
|
||||
fs.rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
function createCommandShim(binDir, baseName, logFile) {
|
||||
fs.mkdirSync(binDir, { recursive: true });
|
||||
|
||||
const shimJs = path.join(binDir, `${baseName}-shim.js`);
|
||||
fs.writeFileSync(shimJs, [
|
||||
'const fs = require(\'fs\');',
|
||||
`fs.appendFileSync(${JSON.stringify(logFile)}, JSON.stringify({ bin: ${JSON.stringify(baseName)}, args: process.argv.slice(2), cwd: process.cwd() }) + '\\n');`
|
||||
].join('\n'));
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
const shimCmd = path.join(binDir, `${baseName}.cmd`);
|
||||
fs.writeFileSync(shimCmd, `@echo off\r\nnode "${shimJs}" %*\r\n`);
|
||||
return shimCmd;
|
||||
}
|
||||
|
||||
const shimPath = path.join(binDir, baseName);
|
||||
fs.writeFileSync(shimPath, `#!/usr/bin/env node\nrequire(${JSON.stringify(shimJs)});\n`);
|
||||
fs.chmodSync(shimPath, 0o755);
|
||||
return shimPath;
|
||||
}
|
||||
|
||||
function readCommandLog(logFile) {
|
||||
if (!fs.existsSync(logFile)) return [];
|
||||
return fs.readFileSync(logFile, 'utf8')
|
||||
.split('\n')
|
||||
.filter(Boolean)
|
||||
.map(line => {
|
||||
try {
|
||||
return JSON.parse(line);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
function withPrependedPath(binDir, env = {}) {
|
||||
const pathKey = Object.keys(process.env).find(key => key.toLowerCase() === 'path')
|
||||
|| (process.platform === 'win32' ? 'Path' : 'PATH');
|
||||
const currentPath = process.env[pathKey] || process.env.PATH || '';
|
||||
const nextPath = `${binDir}${path.delimiter}${currentPath}`;
|
||||
|
||||
return {
|
||||
...env,
|
||||
[pathKey]: nextPath,
|
||||
PATH: nextPath
|
||||
};
|
||||
}
|
||||
|
||||
// Test suite
|
||||
async function runTests() {
|
||||
console.log('\n=== Testing Hook Scripts ===\n');
|
||||
@@ -701,6 +750,162 @@ async function runTests() {
|
||||
assert.ok(result.stdout.includes('tool_input'), 'Should pass through original data');
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('finds formatter config in parent dirs without package.json', async () => {
|
||||
const testDir = createTestDir();
|
||||
const rootDir = path.join(testDir, 'config-only-repo');
|
||||
const nestedDir = path.join(rootDir, 'src', 'nested');
|
||||
const filePath = path.join(nestedDir, 'component.ts');
|
||||
const binDir = path.join(testDir, 'bin');
|
||||
const logFile = path.join(testDir, 'formatter.log');
|
||||
|
||||
fs.mkdirSync(nestedDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
|
||||
fs.writeFileSync(filePath, 'export const value = 1;\n');
|
||||
createCommandShim(binDir, 'npx', logFile);
|
||||
|
||||
const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
|
||||
const result = await runScript(
|
||||
path.join(scriptsDir, 'post-edit-format.js'),
|
||||
stdinJson,
|
||||
withPrependedPath(binDir)
|
||||
);
|
||||
|
||||
assert.strictEqual(result.code, 0, 'Should exit 0 for config-only repo');
|
||||
const logEntries = readCommandLog(logFile);
|
||||
assert.strictEqual(logEntries.length, 1, 'Should invoke formatter once');
|
||||
assert.strictEqual(
|
||||
fs.realpathSync(logEntries[0].cwd),
|
||||
fs.realpathSync(rootDir),
|
||||
'Should run formatter from config root'
|
||||
);
|
||||
assert.deepStrictEqual(
|
||||
logEntries[0].args,
|
||||
['prettier', '--write', filePath],
|
||||
'Should use the formatter on the nested file'
|
||||
);
|
||||
cleanupTestDir(testDir);
|
||||
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('respects CLAUDE_PACKAGE_MANAGER for formatter fallback runner', async () => {
  // Verify post-edit-format.js honors CLAUDE_PACKAGE_MANAGER=pnpm by invoking
  // the fallback formatter through `pnpm dlx` (captured via a pnpm shim on PATH).
  const testDir = createTestDir();
  try {
    const rootDir = path.join(testDir, 'pnpm-repo');
    const filePath = path.join(rootDir, 'index.ts');
    const binDir = path.join(testDir, 'bin');
    const logFile = path.join(testDir, 'pnpm.log');

    fs.mkdirSync(rootDir, { recursive: true });
    fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
    fs.writeFileSync(filePath, 'export const value = 1;\n');
    createCommandShim(binDir, 'pnpm', logFile);

    const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
    const result = await runScript(
      path.join(scriptsDir, 'post-edit-format.js'),
      stdinJson,
      withPrependedPath(binDir, { CLAUDE_PACKAGE_MANAGER: 'pnpm' })
    );

    assert.strictEqual(result.code, 0, 'Should exit 0 when pnpm fallback is used');
    const logEntries = readCommandLog(logFile);
    assert.strictEqual(logEntries.length, 1, 'Should invoke pnpm fallback runner once');
    assert.strictEqual(logEntries[0].bin, 'pnpm', 'Should use pnpm runner');
    assert.deepStrictEqual(
      logEntries[0].args,
      ['dlx', 'prettier', '--write', filePath],
      'Should use pnpm dlx for fallback formatter execution'
    );
  } finally {
    // Fix: cleanup previously ran only on success, leaking the temp dir
    // whenever an assertion above threw; finally guarantees removal.
    cleanupTestDir(testDir);
  }
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('respects project package-manager config for formatter fallback runner', async () => {
  // Verify post-edit-format.js reads .claude/package-manager.json and, when it
  // selects bun, runs the fallback formatter through bunx (captured via a shim).
  const testDir = createTestDir();
  try {
    const rootDir = path.join(testDir, 'bun-repo');
    const filePath = path.join(rootDir, 'index.ts');
    const binDir = path.join(testDir, 'bin');
    const logFile = path.join(testDir, 'bun.log');

    fs.mkdirSync(path.join(rootDir, '.claude'), { recursive: true });
    fs.writeFileSync(path.join(rootDir, '.claude', 'package-manager.json'), JSON.stringify({ packageManager: 'bun' }));
    fs.writeFileSync(path.join(rootDir, '.prettierrc'), '{}');
    fs.writeFileSync(filePath, 'export const value = 1;\n');
    createCommandShim(binDir, 'bunx', logFile);

    const stdinJson = JSON.stringify({ tool_input: { file_path: filePath } });
    const result = await runScript(
      path.join(scriptsDir, 'post-edit-format.js'),
      stdinJson,
      withPrependedPath(binDir)
    );

    assert.strictEqual(result.code, 0, 'Should exit 0 when project config selects bun');
    const logEntries = readCommandLog(logFile);
    assert.strictEqual(logEntries.length, 1, 'Should invoke bunx fallback runner once');
    assert.strictEqual(logEntries[0].bin, 'bunx', 'Should use bunx runner');
    assert.deepStrictEqual(
      logEntries[0].args,
      ['prettier', '--write', filePath],
      'Should use bunx for fallback formatter execution'
    );
  } finally {
    // Fix: cleanup previously ran only on success, leaking the temp dir
    // whenever an assertion above threw; finally guarantees removal.
    cleanupTestDir(testDir);
  }
})) passed++; else failed++;
|
||||
|
||||
console.log('\npre-bash-dev-server-block.js:');
|
||||
|
||||
if (await asyncTest('allows non-dev commands whose heredoc text mentions npm run dev', async () => {
  // A gh command whose heredoc *body text* mentions "npm run dev" must not be
  // mistaken for an actual dev-server launch.
  const heredocCommand = `gh pr create --title "fix: docs" --body "$(cat <<'EOF'
## Test plan
- run npm run dev to verify the site starts
EOF
)"`;
  const payload = JSON.stringify({ tool_input: { command: heredocCommand } });
  const outcome = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), payload);

  assert.strictEqual(outcome.code, 0, 'Non-dev commands should pass through');
  assert.strictEqual(outcome.stdout, payload, 'Should preserve original input');
  assert.ok(!outcome.stderr.includes('BLOCKED'), 'Should not emit a block message');
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('blocks bare npm run dev outside tmux on non-Windows platforms', async () => {
  // Bare `npm run dev` should be blocked on Unix; on Windows the hook passes through.
  const payload = JSON.stringify({ tool_input: { command: 'npm run dev' } });
  const outcome = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), payload);
  const onWindows = process.platform === 'win32';

  if (onWindows) {
    assert.strictEqual(outcome.code, 0, 'Windows path should pass through');
    assert.strictEqual(outcome.stdout, payload, 'Windows path should preserve original input');
  } else {
    assert.strictEqual(outcome.code, 2, 'Unix path should block bare dev servers');
    assert.ok(outcome.stderr.includes('BLOCKED'), 'Should explain why the command was blocked');
  }
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('blocks env-wrapped npm run dev outside tmux on non-Windows platforms', async () => {
  // `/usr/bin/env npm run dev` is still a dev-server launch and must be caught on Unix.
  const payload = JSON.stringify({ tool_input: { command: '/usr/bin/env npm run dev' } });
  const outcome = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), payload);
  const onWindows = process.platform === 'win32';

  if (onWindows) {
    assert.strictEqual(outcome.code, 0, 'Windows path should pass through');
    assert.strictEqual(outcome.stdout, payload, 'Windows path should preserve original input');
  } else {
    assert.strictEqual(outcome.code, 2, 'Unix path should block wrapped dev servers');
    assert.ok(outcome.stderr.includes('BLOCKED'), 'Should explain why the command was blocked');
  }
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('blocks nohup-wrapped npm run dev outside tmux on non-Windows platforms', async () => {
  // Backgrounding with nohup/redirection must not evade the Unix dev-server block.
  const payload = JSON.stringify({ tool_input: { command: 'nohup npm run dev >/tmp/dev.log 2>&1 &' } });
  const outcome = await runScript(path.join(scriptsDir, 'pre-bash-dev-server-block.js'), payload);
  const onWindows = process.platform === 'win32';

  if (onWindows) {
    assert.strictEqual(outcome.code, 0, 'Windows path should pass through');
    assert.strictEqual(outcome.stdout, payload, 'Windows path should preserve original input');
  } else {
    assert.strictEqual(outcome.code, 2, 'Unix path should block wrapped dev servers');
    assert.ok(outcome.stderr.includes('BLOCKED'), 'Should explain why the command was blocked');
  }
})) passed++; else failed++;
|
||||
|
||||
// post-edit-typecheck.js tests
|
||||
console.log('\npost-edit-typecheck.js:');
|
||||
|
||||
@@ -1487,7 +1692,14 @@ async function runTests() {
|
||||
const formatSource = fs.readFileSync(path.join(scriptsDir, 'post-edit-format.js'), 'utf8');
|
||||
// Strip comments to avoid matching "shell: true" in comment text
|
||||
const codeOnly = formatSource.replace(/\/\/.*$/gm, '').replace(/\/\*[\s\S]*?\*\//g, '');
|
||||
assert.ok(!codeOnly.includes('shell:'), 'post-edit-format.js should not pass shell option in code');
|
||||
assert.ok(
|
||||
!/execFileSync\([^)]*shell\s*:/.test(codeOnly),
|
||||
'post-edit-format.js should not pass shell option to execFileSync'
|
||||
);
|
||||
assert.ok(
|
||||
codeOnly.includes("process.platform === 'win32' && cmd.bin.endsWith('.cmd')"),
|
||||
'Windows shell execution must stay gated to .cmd shims'
|
||||
);
|
||||
assert.ok(formatSource.includes('npx.cmd'), 'Should use npx.cmd for Windows cross-platform safety');
|
||||
})) passed++; else failed++;
|
||||
|
||||
@@ -1516,6 +1728,55 @@ async function runTests() {
|
||||
assert.ok(typecheckSource.includes('npx.cmd'), 'Should use npx.cmd for Windows cross-platform safety');
|
||||
})) passed++; else failed++;
|
||||
|
||||
console.log('\nShell wrapper portability:');
|
||||
|
||||
if (test('run-with-flags-shell resolves plugin root when CLAUDE_PLUGIN_ROOT is unset', () => {
  // Static check: the wrapper must fall back to deriving PLUGIN_ROOT from its
  // own location via the ${CLAUDE_PLUGIN_ROOT:-...} default-expansion pattern.
  const wrapperPath = path.join(scriptsDir, 'run-with-flags-shell.sh');
  const shellText = fs.readFileSync(wrapperPath, 'utf8');
  assert.ok(
    shellText.includes('PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-'),
    'Shell wrapper should derive PLUGIN_ROOT from its own script path'
  );
})) passed++; else failed++;
|
||||
|
||||
if (test('continuous-learning shell scripts use resolved Python command instead of hardcoded python3 invocations', () => {
  // Static checks: the v2 skill scripts must resolve Python dynamically rather
  // than hardcoding `python3 -c` calls.
  const skillRoot = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2');
  const readSkillFile = (...segments) => fs.readFileSync(path.join(skillRoot, ...segments), 'utf8');

  const observeSource = readSkillFile('hooks', 'observe.sh');
  const startObserverSource = readSkillFile('agents', 'start-observer.sh');
  const detectProjectSource = readSkillFile('scripts', 'detect-project.sh');

  assert.ok(!/python3\s+-c/.test(observeSource), 'observe.sh should not invoke python3 directly');
  assert.ok(!/python3\s+-c/.test(startObserverSource), 'start-observer.sh should not invoke python3 directly');
  assert.ok(observeSource.includes('PYTHON_CMD'), 'observe.sh should resolve Python dynamically');
  assert.ok(startObserverSource.includes('CLV2_PYTHON_CMD'), 'start-observer.sh should reuse detected Python command');
  assert.ok(detectProjectSource.includes('_clv2_resolve_python_cmd'), 'detect-project.sh should provide shared Python resolution');
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('detect-project exports the resolved Python command for downstream scripts', async () => {
  // Source detect-project.sh in a login bash and confirm it exports a
  // non-empty CLV2_PYTHON_CMD for downstream skill scripts.
  const detectProjectPath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'scripts', 'detect-project.sh');
  const shellCommand = [
    `source "${detectProjectPath}" >/dev/null 2>&1`,
    'printf "%s\\n" "${CLV2_PYTHON_CMD:-}"'
  ].join('; ');

  // Fix: the previous `process.platform === 'win32' ? 'bash' : 'bash'` ternary
  // had identical branches — bash is required on every platform (Git Bash/WSL
  // on Windows), so use it unconditionally.
  const shell = 'bash';
  const proc = spawn(shell, ['-lc', shellCommand], {
    env: process.env,
    stdio: ['ignore', 'pipe', 'pipe']
  });

  let stdout = '';
  let stderr = '';
  // Decode explicitly instead of relying on implicit Buffer-to-string coercion.
  proc.stdout.setEncoding('utf8');
  proc.stderr.setEncoding('utf8');
  proc.stdout.on('data', (chunk) => { stdout += chunk; });
  proc.stderr.on('data', (chunk) => { stderr += chunk; });

  const code = await new Promise((resolve, reject) => {
    proc.on('close', resolve);
    proc.on('error', reject);
  });

  assert.strictEqual(code, 0, `detect-project.sh should source cleanly, stderr: ${stderr}`);
  assert.ok(stdout.trim().length > 0, 'CLV2_PYTHON_CMD should export a resolved interpreter path');
})) passed++; else failed++;
|
||||
|
||||
if (await asyncTest('matches .tsx extension for type checking', async () => {
|
||||
const testDir = createTestDir();
|
||||
const testFile = path.join(testDir, 'component.tsx');
|
||||
|
||||
Reference in New Issue
Block a user