fix: address remaining orchestration review comments

This commit is contained in:
Affaan Mustafa
2026-03-12 15:34:05 -07:00
parent 0d96876505
commit af318b8f04
12 changed files with 242 additions and 37 deletions

View File

@@ -8,14 +8,16 @@ origin: ECC
Distribute content across multiple social platforms with platform-native adaptation. Distribute content across multiple social platforms with platform-native adaptation.
## When to Activate ## When to Use
- User wants to post content to multiple platforms - User wants to post content to multiple platforms
- Publishing announcements, launches, or updates across social media - Publishing announcements, launches, or updates across social media
- Repurposing a post from one platform to others - Repurposing a post from one platform to others
- User says "crosspost", "post everywhere", "share on all platforms", or "distribute this" - User says "crosspost", "post everywhere", "share on all platforms", or "distribute this"
## Core Rules ## How It Works
### Core Rules
1. **Never post identical content cross-platform.** Each platform gets a native adaptation. 1. **Never post identical content cross-platform.** Each platform gets a native adaptation.
2. **Primary platform first.** Post to the main platform, then adapt for others. 2. **Primary platform first.** Post to the main platform, then adapt for others.
@@ -23,7 +25,7 @@ Distribute content across multiple social platforms with platform-native adaptat
4. **One idea per post.** If the source content has multiple ideas, split across posts. 4. **One idea per post.** If the source content has multiple ideas, split across posts.
5. **Attribution matters.** If crossposting someone else's content, credit the source. 5. **Attribution matters.** If crossposting someone else's content, credit the source.
## Platform Specifications ### Platform Specifications
| Platform | Max Length | Link Handling | Hashtags | Media | | Platform | Max Length | Link Handling | Hashtags | Media |
|----------|-----------|---------------|----------|-------| |----------|-----------|---------------|----------|-------|
@@ -32,7 +34,7 @@ Distribute content across multiple social platforms with platform-native adaptat
| Threads | 500 chars | Separate link attachment | None typical | Images, video | | Threads | 500 chars | Separate link attachment | None typical | Images, video |
| Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images | | Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images |
## Workflow ### Workflow
### Step 1: Create Source Content ### Step 1: Create Source Content
@@ -87,7 +89,7 @@ Post adapted versions to remaining platforms:
- Stagger timing (not all at once — 30-60 min gaps) - Stagger timing (not all at once — 30-60 min gaps)
- Include cross-platform references where appropriate ("longer thread on X" etc.) - Include cross-platform references where appropriate ("longer thread on X" etc.)
## Content Adaptation Examples ## Examples
### Source: Product Launch ### Source: Product Launch

View File

@@ -27,7 +27,7 @@ Exa MCP server must be configured. Add to `~/.claude.json`:
"args": [ "args": [
"-y", "-y",
"exa-mcp-server", "exa-mcp-server",
"tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,linkedin_search_exa,deep_researcher_start,deep_researcher_check" "tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,people_search_exa,deep_researcher_start,deep_researcher_check"
], ],
"env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" } "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }
} }

View File

@@ -106,11 +106,31 @@ function parseWorkerTask(content) {
}; };
} }
/**
 * Return the first non-empty section whose heading matches one of the
 * candidate headings, scanning candidates in priority order.
 *
 * @param {string} content - Markdown handoff content to search.
 * @param {string[]} headings - Candidate section headings, highest priority first.
 * @returns {string} The first truthy section found, or '' when none match.
 */
function parseFirstSection(content, headings) {
  for (let index = 0; index < headings.length; index += 1) {
    const candidate = parseSection(content, headings[index]);
    if (candidate) return candidate;
  }
  return '';
}
function parseWorkerHandoff(content) { function parseWorkerHandoff(content) {
return { return {
summary: parseBullets(parseSection(content, 'Summary')), summary: parseBullets(parseFirstSection(content, ['Summary'])),
validation: parseBullets(parseSection(content, 'Validation')), validation: parseBullets(parseFirstSection(content, [
remainingRisks: parseBullets(parseSection(content, 'Remaining Risks')) 'Validation',
'Tests / Verification',
'Tests',
'Verification'
])),
remainingRisks: parseBullets(parseFirstSection(content, [
'Remaining Risks',
'Follow-ups',
'Follow Ups'
]))
}; };
} }
@@ -250,18 +270,48 @@ function buildSessionSnapshot({ sessionName, coordinationDir, panes }) {
}; };
} }
/**
 * Read an orchestration plan file and validate that it contains a JSON object.
 *
 * @param {string} absoluteTarget - Absolute path to the plan JSON file.
 * @returns {object} The parsed plan object.
 * @throws {Error} When the file cannot be read/parsed, or when the parsed
 *   value is not a plain JSON object (null, array, or primitive).
 */
function readPlanConfig(absoluteTarget) {
  let config;
  try {
    config = JSON.parse(fs.readFileSync(absoluteTarget, 'utf8'));
  } catch (error) {
    // Preserve the underlying read/parse failure so callers can debug it;
    // engines without ES2022 `cause` support simply ignore the options bag.
    throw new Error(`Invalid orchestration plan JSON: ${absoluteTarget}`, { cause: error });
  }
  // JSON.parse can yield null, arrays, or primitives — reject all of those.
  if (!config || Array.isArray(config) || typeof config !== 'object') {
    throw new Error(`Invalid orchestration plan: expected a JSON object (${absoluteTarget})`);
  }
  return config;
}
/**
 * Read an optional string field from a plan config.
 *
 * @param {object} config - Parsed plan object.
 * @param {string} key - Field name to read.
 * @param {string} absoluteTarget - Plan file path, used in error messages.
 * @returns {string|undefined} The trimmed value, or undefined when absent.
 * @throws {Error} When the field is present but not a non-blank string.
 */
function readPlanString(config, key, absoluteTarget) {
  const raw = config[key];
  if (raw === undefined) {
    return undefined;
  }
  const isUsableString = typeof raw === 'string' && raw.trim().length > 0;
  if (!isUsableString) {
    throw new Error(`Invalid orchestration plan: ${key} must be a non-empty string (${absoluteTarget})`);
  }
  return raw.trim();
}
function resolveSnapshotTarget(targetPath, cwd = process.cwd()) { function resolveSnapshotTarget(targetPath, cwd = process.cwd()) {
const absoluteTarget = path.resolve(cwd, targetPath); const absoluteTarget = path.resolve(cwd, targetPath);
if (fs.existsSync(absoluteTarget) && fs.statSync(absoluteTarget).isFile()) { if (fs.existsSync(absoluteTarget) && fs.statSync(absoluteTarget).isFile()) {
const config = JSON.parse(fs.readFileSync(absoluteTarget, 'utf8')); const config = readPlanConfig(absoluteTarget);
const repoRoot = path.resolve(config.repoRoot || cwd); const repoRoot = path.resolve(readPlanString(config, 'repoRoot', absoluteTarget) || cwd);
const sessionName = normalizeSessionName( const sessionName = normalizeSessionName(
config.sessionName || path.basename(repoRoot), readPlanString(config, 'sessionName', absoluteTarget) || path.basename(repoRoot),
'session' 'session'
); );
const coordinationRoot = path.resolve( const coordinationRoot = path.resolve(
config.coordinationRoot || path.join(repoRoot, '.orchestration') readPlanString(config, 'coordinationRoot', absoluteTarget) || path.join(repoRoot, '.orchestration')
); );
return { return {

View File

@@ -34,6 +34,18 @@ function formatCommand(program, args) {
return [program, ...args.map(shellQuote)].join(' '); return [program, ...args.map(shellQuote)].join(' ');
} }
/**
 * Build launcher-template variables from a map of raw values.
 *
 * Each key is exposed three ways: the bare key (shell-quoted default),
 * `<key>_raw` (unquoted string), and `<key>_sh` (explicit shell-quoted alias).
 *
 * @param {Object<string, *>} values - Raw placeholder values.
 * @returns {Object<string, string>} Expanded template-variable map.
 */
function buildTemplateVariables(values) {
  const variables = {};
  for (const [name, rawValue] of Object.entries(values)) {
    const text = String(rawValue);
    const quoted = shellQuote(text);
    variables[name] = quoted;
    variables[`${name}_raw`] = text;
    variables[`${name}_sh`] = quoted;
  }
  return variables;
}
function normalizeSeedPaths(seedPaths, repoRoot) { function normalizeSeedPaths(seedPaths, repoRoot) {
const resolvedRepoRoot = path.resolve(repoRoot); const resolvedRepoRoot = path.resolve(repoRoot);
const entries = Array.isArray(seedPaths) ? seedPaths : []; const entries = Array.isArray(seedPaths) ? seedPaths : [];
@@ -126,6 +138,13 @@ function buildWorkerArtifacts(workerPlan) {
'## Completion', '## Completion',
'Do not spawn subagents or external agents for this task.', 'Do not spawn subagents or external agents for this task.',
'Report results in your final response.', 'Report results in your final response.',
'Respond with these exact sections so orchestration parsing can succeed:',
'## Summary',
'- ...',
'## Validation',
'- ...',
'## Remaining Risks',
'- ...',
`The worker launcher captures your response in \`${workerPlan.handoffFilePath}\` automatically.`, `The worker launcher captures your response in \`${workerPlan.handoffFilePath}\` automatically.`,
`The worker launcher updates \`${workerPlan.statusFilePath}\` automatically.` `The worker launcher updates \`${workerPlan.statusFilePath}\` automatically.`
].join('\n') ].join('\n')
@@ -138,13 +157,10 @@ function buildWorkerArtifacts(workerPlan) {
'## Summary', '## Summary',
'- Pending', '- Pending',
'', '',
'## Files Changed', '## Validation',
'- Pending', '- Pending',
'', '',
'## Tests / Verification', '## Remaining Risks',
'- Pending',
'',
'## Follow-ups',
'- Pending' '- Pending'
].join('\n') ].join('\n')
}, },
@@ -180,6 +196,7 @@ function buildOrchestrationPlan(config = {}) {
throw new Error('buildOrchestrationPlan requires at least one worker'); throw new Error('buildOrchestrationPlan requires at least one worker');
} }
const seenWorkerSlugs = new Map();
const workerPlans = workers.map((worker, index) => { const workerPlans = workers.map((worker, index) => {
if (!worker || typeof worker.task !== 'string' || worker.task.trim().length === 0) { if (!worker || typeof worker.task !== 'string' || worker.task.trim().length === 0) {
throw new Error(`Worker ${index + 1} is missing a task`); throw new Error(`Worker ${index + 1} is missing a task`);
@@ -187,6 +204,13 @@ function buildOrchestrationPlan(config = {}) {
const workerName = worker.name || `worker-${index + 1}`; const workerName = worker.name || `worker-${index + 1}`;
const workerSlug = slugify(workerName, `worker-${index + 1}`); const workerSlug = slugify(workerName, `worker-${index + 1}`);
if (seenWorkerSlugs.has(workerSlug)) {
const firstWorkerName = seenWorkerSlugs.get(workerSlug);
throw new Error(
`Worker names must map to unique slugs: ${workerSlug} (${firstWorkerName}, ${workerName})`
);
}
seenWorkerSlugs.set(workerSlug, workerName);
const branchName = `orchestrator-${sessionName}-${workerSlug}`; const branchName = `orchestrator-${sessionName}-${workerSlug}`;
const worktreePath = path.join(worktreeRoot, `${repoName}-${sessionName}-${workerSlug}`); const worktreePath = path.join(worktreeRoot, `${repoName}-${sessionName}-${workerSlug}`);
const workerCoordinationDir = path.join(coordinationDir, workerSlug); const workerCoordinationDir = path.join(coordinationDir, workerSlug);
@@ -196,7 +220,7 @@ function buildOrchestrationPlan(config = {}) {
const launcherCommand = worker.launcherCommand || defaultLauncher; const launcherCommand = worker.launcherCommand || defaultLauncher;
const workerSeedPaths = normalizeSeedPaths(worker.seedPaths, repoRoot); const workerSeedPaths = normalizeSeedPaths(worker.seedPaths, repoRoot);
const seedPaths = normalizeSeedPaths([...globalSeedPaths, ...workerSeedPaths], repoRoot); const seedPaths = normalizeSeedPaths([...globalSeedPaths, ...workerSeedPaths], repoRoot);
const templateVariables = { const templateVariables = buildTemplateVariables({
branch_name: branchName, branch_name: branchName,
handoff_file: handoffFilePath, handoff_file: handoffFilePath,
repo_root: repoRoot, repo_root: repoRoot,
@@ -206,7 +230,7 @@ function buildOrchestrationPlan(config = {}) {
worker_name: workerName, worker_name: workerName,
worker_slug: workerSlug, worker_slug: workerSlug,
worktree_path: worktreePath worktree_path: worktreePath
}; });
if (!launcherCommand) { if (!launcherCommand) {
throw new Error(`Worker ${workerName} is missing a launcherCommand`); throw new Error(`Worker ${workerName} is missing a launcherCommand`);

View File

@@ -51,11 +51,11 @@ Rules:
- Report progress and final results in stdout only. - Report progress and final results in stdout only.
- Do not write handoff or status files yourself; the launcher manages those artifacts. - Do not write handoff or status files yourself; the launcher manages those artifacts.
- If you change code or docs, keep the scope narrow and defensible. - If you change code or docs, keep the scope narrow and defensible.
- In your final response, include exactly these sections: - In your final response, include these exact sections:
1. Summary 1. Summary
2. Files Changed 2. Validation
3. Validation 3. Remaining Risks
4. Remaining Risks - You may include Files Changed if useful, but keep the three sections above exact.
Task file: $task_file Task file: $task_file

View File

@@ -17,8 +17,11 @@ function usage() {
' node scripts/orchestrate-worktrees.js <plan.json> [--write-only]', ' node scripts/orchestrate-worktrees.js <plan.json> [--write-only]',
'', '',
'Placeholders supported in launcherCommand:', 'Placeholders supported in launcherCommand:',
' {worker_name} {worker_slug} {session_name} {repo_root}', ' Shell-safe defaults: {worker_name} {worker_slug} {session_name} {repo_root}',
' {worktree_path} {branch_name} {task_file} {handoff_file} {status_file}', ' Shell-safe defaults: {worktree_path} {branch_name} {task_file} {handoff_file} {status_file}',
' Raw variants: {worker_name_raw} {worker_slug_raw} {session_name_raw} {repo_root_raw}',
' Raw variants: {worktree_path_raw} {branch_name_raw} {task_file_raw} {handoff_file_raw} {status_file_raw}',
' Explicit shell-safe aliases also exist with the _sh suffix.',
'', '',
'Without flags the script prints a dry-run plan only.' 'Without flags the script prints a dry-run plan only.'
].join('\n')); ].join('\n'));

View File

@@ -90,6 +90,7 @@ Options:
- "Framework & Language" — "Django, Spring Boot, Go, Python, Java, Frontend, Backend patterns" - "Framework & Language" — "Django, Spring Boot, Go, Python, Java, Frontend, Backend patterns"
- "Database" — "PostgreSQL, ClickHouse, JPA/Hibernate patterns" - "Database" — "PostgreSQL, ClickHouse, JPA/Hibernate patterns"
- "Workflow & Quality" — "TDD, verification, learning, security review, compaction" - "Workflow & Quality" — "TDD, verification, learning, security review, compaction"
- "Business & Content" — "Article writing, content engine, market research, investor materials, outreach"
- "Research & APIs" — "Deep research, Exa search, Claude API patterns" - "Research & APIs" — "Deep research, Exa search, Claude API patterns"
- "Social & Content Distribution" — "X/Twitter API, crossposting alongside content-engine" - "Social & Content Distribution" — "X/Twitter API, crossposting alongside content-engine"
- "Media Generation" — "fal.ai image/video/audio alongside VideoDB" - "Media Generation" — "fal.ai image/video/audio alongside VideoDB"

View File

@@ -8,14 +8,16 @@ origin: ECC
Distribute content across multiple social platforms with platform-native adaptation. Distribute content across multiple social platforms with platform-native adaptation.
## When to Activate ## When to Use
- User wants to post content to multiple platforms - User wants to post content to multiple platforms
- Publishing announcements, launches, or updates across social media - Publishing announcements, launches, or updates across social media
- Repurposing a post from one platform to others - Repurposing a post from one platform to others
- User says "crosspost", "post everywhere", "share on all platforms", or "distribute this" - User says "crosspost", "post everywhere", "share on all platforms", or "distribute this"
## Core Rules ## How It Works
### Core Rules
1. **Never post identical content cross-platform.** Each platform gets a native adaptation. 1. **Never post identical content cross-platform.** Each platform gets a native adaptation.
2. **Primary platform first.** Post to the main platform, then adapt for others. 2. **Primary platform first.** Post to the main platform, then adapt for others.
@@ -23,7 +25,7 @@ Distribute content across multiple social platforms with platform-native adaptat
4. **One idea per post.** If the source content has multiple ideas, split across posts. 4. **One idea per post.** If the source content has multiple ideas, split across posts.
5. **Attribution matters.** If crossposting someone else's content, credit the source. 5. **Attribution matters.** If crossposting someone else's content, credit the source.
## Platform Specifications ### Platform Specifications
| Platform | Max Length | Link Handling | Hashtags | Media | | Platform | Max Length | Link Handling | Hashtags | Media |
|----------|-----------|---------------|----------|-------| |----------|-----------|---------------|----------|-------|
@@ -32,7 +34,7 @@ Distribute content across multiple social platforms with platform-native adaptat
| Threads | 500 chars | Separate link attachment | None typical | Images, video | | Threads | 500 chars | Separate link attachment | None typical | Images, video |
| Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images | | Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images |
## Workflow ### Workflow
### Step 1: Create Source Content ### Step 1: Create Source Content
@@ -87,7 +89,7 @@ Post adapted versions to remaining platforms:
- Stagger timing (not all at once — 30-60 min gaps) - Stagger timing (not all at once — 30-60 min gaps)
- Include cross-platform references where appropriate ("longer thread on X" etc.) - Include cross-platform references where appropriate ("longer thread on X" etc.)
## Content Adaptation Examples ## Examples
### Source: Product Launch ### Source: Product Launch

View File

@@ -150,7 +150,7 @@ Example `plan.json`:
{ {
"sessionName": "skill-audit", "sessionName": "skill-audit",
"baseRef": "HEAD", "baseRef": "HEAD",
"launcherCommand": "codex exec --cwd {worktree_path} --task-file {task_file}", "launcherCommand": "codex exec --cwd {worktree_path_sh} --task-file {task_file_sh}",
"workers": [ "workers": [
{ "name": "docs-a", "task": "Fix skills 1-4 and write handoff notes." }, { "name": "docs-a", "task": "Fix skills 1-4 and write handoff notes." },
{ "name": "docs-b", "task": "Fix skills 5-8 and write handoff notes." } { "name": "docs-b", "task": "Fix skills 5-8 and write handoff notes." }
@@ -176,7 +176,7 @@ Use `seedPaths` when workers need access to dirty or untracked local files that
"scripts/lib/tmux-worktree-orchestrator.js", "scripts/lib/tmux-worktree-orchestrator.js",
".claude/plan/workflow-e2e-test.json" ".claude/plan/workflow-e2e-test.json"
], ],
"launcherCommand": "bash {repo_root}/scripts/orchestrate-codex-worker.sh {task_file} {handoff_file} {status_file}", "launcherCommand": "bash {repo_root_sh}/scripts/orchestrate-codex-worker.sh {task_file_sh} {handoff_file_sh} {status_file_sh}",
"workers": [ "workers": [
{ "name": "seed-check", "task": "Verify seeded files are present before starting work." } { "name": "seed-check", "task": "Verify seeded files are present before starting work." }
] ]

View File

@@ -27,7 +27,7 @@ Exa MCP server must be configured. Add to `~/.claude.json`:
"args": [ "args": [
"-y", "-y",
"exa-mcp-server", "exa-mcp-server",
"tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,linkedin_search_exa,deep_researcher_start,deep_researcher_check" "tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,people_search_exa,deep_researcher_start,deep_researcher_check"
], ],
"env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" } "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }
} }
@@ -103,11 +103,11 @@ company_research_exa(companyName: "Anthropic", numResults: 5)
| `companyName` | string | required | Company name | | `companyName` | string | required | Company name |
| `numResults` | number | 5 | Number of results | | `numResults` | number | 5 | Number of results |
### linkedin_search_exa ### people_search_exa
Find professional profiles and company-adjacent people research. Find professional profiles and bios.
``` ```
linkedin_search_exa(query: "AI safety researchers at Anthropic", numResults: 5) people_search_exa(query: "AI safety researchers at Anthropic", numResults: 5)
``` ```
### crawling_exa ### crawling_exa

View File

@@ -109,6 +109,25 @@ test('parseWorkerHandoff also supports bold section headers', () => {
assert.deepStrictEqual(handoff.remainingRisks, ['No runtime screenshot']); assert.deepStrictEqual(handoff.remainingRisks, ['No runtime screenshot']);
}); });
// Workers written before the heading rename may still emit the legacy
// "Tests / Verification" and "Follow-ups" sections; the parser must map
// them onto validation and remainingRisks.
test('parseWorkerHandoff accepts legacy verification and follow-up headings', () => {
  const legacyHandoffContent = `# Handoff

## Summary
- Worker completed successfully

## Tests / Verification
- Ran tests

## Follow-ups
- Re-run screenshots after deploy`;
  const parsed = parseWorkerHandoff(legacyHandoffContent);
  assert.deepStrictEqual(parsed.summary, ['Worker completed successfully']);
  assert.deepStrictEqual(parsed.validation, ['Ran tests']);
  assert.deepStrictEqual(parsed.remainingRisks, ['Re-run screenshots after deploy']);
});
test('loadWorkerSnapshots reads coordination worker directories', () => { test('loadWorkerSnapshots reads coordination worker directories', () => {
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-session-')); const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-session-'));
const coordinationDir = path.join(tempRoot, 'coordination'); const coordinationDir = path.join(tempRoot, 'coordination');
@@ -237,5 +256,52 @@ test('resolveSnapshotTarget normalizes plan session names and defaults to the re
} }
}); });
// Table-driven check that resolveSnapshotTarget surfaces a targeted error
// for each class of malformed plan file.
test('resolveSnapshotTarget rejects malformed plan files and invalid config fields', () => {
  const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-target-'));
  const repoRoot = path.join(tempRoot, 'repo');
  fs.mkdirSync(repoRoot, { recursive: true });
  // Each entry: plan file name, raw file contents, expected error pattern.
  const rejectionCases = [
    [
      'invalid-json.json',
      '{not valid json',
      /Invalid orchestration plan JSON/
    ],
    [
      'invalid-session.json',
      JSON.stringify({ sessionName: '', repoRoot }),
      /sessionName must be a non-empty string/
    ],
    [
      'invalid-repo-root.json',
      JSON.stringify({ sessionName: 'workflow', repoRoot: ['not-a-string'] }),
      /repoRoot must be a non-empty string/
    ],
    [
      'invalid-coordination-root.json',
      JSON.stringify({ sessionName: 'workflow', repoRoot, coordinationRoot: ' ' }),
      /coordinationRoot must be a non-empty string/
    ]
  ];
  try {
    for (const [fileName, contents, expectedError] of rejectionCases) {
      const planPath = path.join(repoRoot, fileName);
      fs.writeFileSync(planPath, contents);
      assert.throws(() => resolveSnapshotTarget(planPath, repoRoot), expectedError);
    }
  } finally {
    fs.rmSync(tempRoot, { recursive: true, force: true });
  }
});
console.log(`\n=== Results: ${passed} passed, ${failed} failed ===`); console.log(`\n=== Results: ${passed} passed, ${failed} failed ===`);
if (failed > 0) process.exit(1); if (failed > 0) process.exit(1);

View File

@@ -137,6 +137,44 @@ test('buildOrchestrationPlan normalizes global and worker seed paths', () => {
]); ]);
}); });
// Two distinct worker names that slugify to the same value would collide on
// branch/worktree paths, so the plan builder must reject them up front.
test('buildOrchestrationPlan rejects worker names that collapse to the same slug', () => {
  const planWithCollidingNames = {
    repoRoot: '/tmp/ecc',
    sessionName: 'duplicates',
    launcherCommand: 'echo run',
    workers: [
      { name: 'Docs A', task: 'Fix skill docs' },
      { name: 'Docs/A', task: 'Fix tests' }
    ]
  };
  assert.throws(() => buildOrchestrationPlan(planWithCollidingNames), /unique slugs/);
});
// Paths with spaces must come out shell-quoted by default, while the _raw
// aliases keep the unquoted value for callers that opt in explicitly.
test('buildOrchestrationPlan exposes shell-safe launcher placeholders with raw aliases', () => {
  const repoRoot = path.join('/tmp', 'My Repo');
  const plan = buildOrchestrationPlan({
    repoRoot,
    sessionName: 'Spacing Audit',
    launcherCommand: 'bash {repo_root}/scripts/orchestrate-codex-worker.sh {task_file} {handoff_file} {status_file} {worker_name} {worker_name_raw}',
    workers: [{ name: 'Docs Fixer', task: 'Update docs' }]
  });
  const [workerPlan] = plan.workerPlans;
  const { launchCommand } = workerPlan;
  assert.ok(
    launchCommand.includes(`bash '${repoRoot}'/scripts/orchestrate-codex-worker.sh`),
    'repo_root should be shell-safe by default'
  );
  assert.ok(
    launchCommand.includes(`'${workerPlan.taskFilePath}'`),
    'task_file should be shell-safe by default'
  );
  assert.ok(
    launchCommand.includes(`'${workerPlan.workerName}' ${workerPlan.workerName}`),
    'worker_name_raw should preserve the unquoted value when explicitly requested'
  );
});
test('normalizeSeedPaths rejects paths outside the repo root', () => { test('normalizeSeedPaths rejects paths outside the repo root', () => {
assert.throws( assert.throws(
() => normalizeSeedPaths(['../outside.txt'], '/tmp/ecc'), () => normalizeSeedPaths(['../outside.txt'], '/tmp/ecc'),
@@ -170,11 +208,18 @@ test('materializePlan keeps worker instructions inside the worktree boundary', (
materializePlan(plan); materializePlan(plan);
const taskFile = fs.readFileSync(plan.workerPlans[0].taskFilePath, 'utf8'); const taskFile = fs.readFileSync(plan.workerPlans[0].taskFilePath, 'utf8');
const handoffFile = fs.readFileSync(plan.workerPlans[0].handoffFilePath, 'utf8');
assert.ok( assert.ok(
taskFile.includes('Report results in your final response.'), taskFile.includes('Report results in your final response.'),
'Task file should tell the worker to report in stdout' 'Task file should tell the worker to report in stdout'
); );
assert.ok(
taskFile.includes('## Summary') &&
taskFile.includes('## Validation') &&
taskFile.includes('## Remaining Risks'),
'Task file should require parser-compatible headings'
);
assert.ok( assert.ok(
taskFile.includes('Do not spawn subagents or external agents for this task.'), taskFile.includes('Do not spawn subagents or external agents for this task.'),
'Task file should keep nested workers single-session' 'Task file should keep nested workers single-session'
@@ -187,6 +232,18 @@ test('materializePlan keeps worker instructions inside the worktree boundary', (
!taskFile.includes('Update `'), !taskFile.includes('Update `'),
'Task file should not instruct the nested worker to update orchestration status files' 'Task file should not instruct the nested worker to update orchestration status files'
); );
assert.ok(
handoffFile.includes('## Summary') &&
handoffFile.includes('## Validation') &&
handoffFile.includes('## Remaining Risks'),
'Handoff placeholder should seed parser-compatible headings'
);
assert.ok(
!handoffFile.includes('## Files Changed') &&
!handoffFile.includes('## Tests / Verification') &&
!handoffFile.includes('## Follow-ups'),
'Handoff placeholder should not use legacy headings'
);
} finally { } finally {
fs.rmSync(tempRoot, { recursive: true, force: true }); fs.rmSync(tempRoot, { recursive: true, force: true });
} }