3 Commits

Author SHA1 Message Date
Affaan Mustafa
424f3b3729 fix: resolve exa skill markdown lint 2026-03-12 10:20:42 -07:00
Affaan Mustafa
bdf4befb3e fix: resolve orchestration lint errors 2026-03-12 09:35:03 -07:00
Affaan Mustafa
2349e21731 feat: add orchestration workflows and harness skills 2026-03-12 09:26:36 -07:00
24 changed files with 123 additions and 572 deletions

View File

@@ -230,8 +230,6 @@ print(f"Cache creation: {message.usage.cache_creation_input_tokens}")
Process large volumes asynchronously at 50% cost reduction: Process large volumes asynchronously at 50% cost reduction:
```python ```python
import time
batch = client.messages.batches.create( batch = client.messages.batches.create(
requests=[ requests=[
{ {
@@ -308,8 +306,6 @@ while True:
## Error Handling ## Error Handling
```python ```python
import time
from anthropic import APIError, RateLimitError, APIConnectionError from anthropic import APIError, RateLimitError, APIConnectionError
try: try:

View File

@@ -8,16 +8,14 @@ origin: ECC
Distribute content across multiple social platforms with platform-native adaptation. Distribute content across multiple social platforms with platform-native adaptation.
## When to Use ## When to Activate
- User wants to post content to multiple platforms - User wants to post content to multiple platforms
- Publishing announcements, launches, or updates across social media - Publishing announcements, launches, or updates across social media
- Repurposing a post from one platform to others - Repurposing a post from one platform to others
- User says "crosspost", "post everywhere", "share on all platforms", or "distribute this" - User says "crosspost", "post everywhere", "share on all platforms", or "distribute this"
## How It Works ## Core Rules
### Core Rules
1. **Never post identical content cross-platform.** Each platform gets a native adaptation. 1. **Never post identical content cross-platform.** Each platform gets a native adaptation.
2. **Primary platform first.** Post to the main platform, then adapt for others. 2. **Primary platform first.** Post to the main platform, then adapt for others.
@@ -25,7 +23,7 @@ Distribute content across multiple social platforms with platform-native adaptat
4. **One idea per post.** If the source content has multiple ideas, split across posts. 4. **One idea per post.** If the source content has multiple ideas, split across posts.
5. **Attribution matters.** If crossposting someone else's content, credit the source. 5. **Attribution matters.** If crossposting someone else's content, credit the source.
### Platform Specifications ## Platform Specifications
| Platform | Max Length | Link Handling | Hashtags | Media | | Platform | Max Length | Link Handling | Hashtags | Media |
|----------|-----------|---------------|----------|-------| |----------|-----------|---------------|----------|-------|
@@ -34,7 +32,7 @@ Distribute content across multiple social platforms with platform-native adaptat
| Threads | 500 chars | Separate link attachment | None typical | Images, video | | Threads | 500 chars | Separate link attachment | None typical | Images, video |
| Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images | | Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images |
### Workflow ## Workflow
### Step 1: Create Source Content ### Step 1: Create Source Content
@@ -89,7 +87,7 @@ Post adapted versions to remaining platforms:
- Stagger timing (not all at once — 30-60 min gaps) - Stagger timing (not all at once — 30-60 min gaps)
- Include cross-platform references where appropriate ("longer thread on X" etc.) - Include cross-platform references where appropriate ("longer thread on X" etc.)
## Examples ## Content Adaptation Examples
### Source: Product Launch ### Source: Product Launch
@@ -150,7 +148,6 @@ A pattern I've been using that's made a real difference:
If using a crossposting service (e.g., Postbridge, Buffer, or a custom API), the pattern looks like: If using a crossposting service (e.g., Postbridge, Buffer, or a custom API), the pattern looks like:
```python ```python
import os
import requests import requests
resp = requests.post( resp = requests.post(
@@ -163,10 +160,8 @@ resp = requests.post(
"linkedin": {"text": linkedin_version}, "linkedin": {"text": linkedin_version},
"threads": {"text": threads_version} "threads": {"text": threads_version}
} }
}, }
timeout=30
) )
resp.raise_for_status()
``` ```
### Manual Posting ### Manual Posting

View File

@@ -24,11 +24,7 @@ Exa MCP server must be configured. Add to `~/.claude.json`:
```json ```json
"exa-web-search": { "exa-web-search": {
"command": "npx", "command": "npx",
"args": [ "args": ["-y", "exa-mcp-server"],
"-y",
"exa-mcp-server",
"tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,people_search_exa,deep_researcher_start,deep_researcher_check"
],
"env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" } "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }
} }
``` ```

View File

@@ -92,7 +92,6 @@ def post_thread(oauth, tweets: list[str]) -> list[str]:
if reply_to: if reply_to:
payload["reply"] = {"in_reply_to_tweet_id": reply_to} payload["reply"] = {"in_reply_to_tweet_id": reply_to}
resp = oauth.post("https://api.x.com/2/tweets", json=payload) resp = oauth.post("https://api.x.com/2/tweets", json=payload)
resp.raise_for_status()
tweet_id = resp.json()["data"]["id"] tweet_id = resp.json()["data"]["id"]
ids.append(tweet_id) ids.append(tweet_id)
reply_to = tweet_id reply_to = tweet_id
@@ -168,8 +167,6 @@ resp = oauth.post(
Always check `x-rate-limit-remaining` and `x-rate-limit-reset` headers. Always check `x-rate-limit-remaining` and `x-rate-limit-reset` headers.
```python ```python
import time
remaining = int(resp.headers.get("x-rate-limit-remaining", 0)) remaining = int(resp.headers.get("x-rate-limit-remaining", 0))
if remaining < 5: if remaining < 5:
reset = int(resp.headers.get("x-rate-limit-reset", 0)) reset = int(resp.headers.get("x-rate-limit-reset", 0))

109
package-lock.json generated
View File

@@ -18,7 +18,7 @@
"c8": "^10.1.2", "c8": "^10.1.2",
"eslint": "^9.39.2", "eslint": "^9.39.2",
"globals": "^17.1.0", "globals": "^17.1.0",
"markdownlint-cli": "^0.48.0" "markdownlint-cli": "^0.47.0"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
@@ -267,6 +267,29 @@
"url": "https://github.com/sponsors/nzakas" "url": "https://github.com/sponsors/nzakas"
} }
}, },
"node_modules/@isaacs/balanced-match": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
"integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@isaacs/brace-expansion": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
"integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@isaacs/balanced-match": "^4.0.1"
},
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@isaacs/cliui": { "node_modules/@isaacs/cliui": {
"version": "8.0.2", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -691,10 +714,11 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/commander": { "node_modules/commander": {
"version": "14.0.3", "version": "14.0.2",
"resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz",
"integrity": "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==", "integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==",
"dev": true, "dev": true,
"license": "MIT",
"engines": { "engines": {
"node": ">=20" "node": ">=20"
} }
@@ -820,6 +844,7 @@
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
"dev": true, "dev": true,
"license": "BSD-2-Clause",
"engines": { "engines": {
"node": ">=0.12" "node": ">=0.12"
}, },
@@ -1563,6 +1588,7 @@
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"uc.micro": "^2.0.0" "uc.micro": "^2.0.0"
} }
@@ -1614,10 +1640,11 @@
} }
}, },
"node_modules/markdown-it": { "node_modules/markdown-it": {
"version": "14.1.1", "version": "14.1.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.1.tgz", "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
"integrity": "sha512-BuU2qnTti9YKgK5N+IeMubp14ZUKUUw7yeJbkjtosvHiP0AZ5c8IAgEMk79D0eC8F23r4Ac/q8cAIFdm2FtyoA==", "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"argparse": "^2.0.1", "argparse": "^2.0.1",
"entities": "^4.4.0", "entities": "^4.4.0",
@@ -1655,22 +1682,23 @@
} }
}, },
"node_modules/markdownlint-cli": { "node_modules/markdownlint-cli": {
"version": "0.48.0", "version": "0.47.0",
"resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.48.0.tgz", "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.47.0.tgz",
"integrity": "sha512-NkZQNu2E0Q5qLEEHwWj674eYISTLD4jMHkBzDobujXd1kv+yCxi8jOaD/rZoQNW1FBBMMGQpuW5So8B51N/e0A==", "integrity": "sha512-HOcxeKFAdDoldvoYDofd85vI8LgNWy8vmYpCwnlLV46PJcodmGzD7COSSBlhHwsfT4o9KrAStGodImVBus31Bg==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"commander": "~14.0.3", "commander": "~14.0.2",
"deep-extend": "~0.6.0", "deep-extend": "~0.6.0",
"ignore": "~7.0.5", "ignore": "~7.0.5",
"js-yaml": "~4.1.1", "js-yaml": "~4.1.1",
"jsonc-parser": "~3.3.1", "jsonc-parser": "~3.3.1",
"jsonpointer": "~5.0.1", "jsonpointer": "~5.0.1",
"markdown-it": "~14.1.1", "markdown-it": "~14.1.0",
"markdownlint": "~0.40.0", "markdownlint": "~0.40.0",
"minimatch": "~10.2.4", "minimatch": "~10.1.1",
"run-con": "~1.3.2", "run-con": "~1.3.2",
"smol-toml": "~1.6.0", "smol-toml": "~1.5.2",
"tinyglobby": "~0.2.15" "tinyglobby": "~0.2.15"
}, },
"bin": { "bin": {
@@ -1680,27 +1708,6 @@
"node": ">=20" "node": ">=20"
} }
}, },
"node_modules/markdownlint-cli/node_modules/balanced-match": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
"integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
"dev": true,
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/markdownlint-cli/node_modules/brace-expansion": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
"dev": true,
"dependencies": {
"balanced-match": "^4.0.2"
},
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/markdownlint-cli/node_modules/ignore": { "node_modules/markdownlint-cli/node_modules/ignore": {
"version": "7.0.5", "version": "7.0.5",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz",
@@ -1712,15 +1719,16 @@
} }
}, },
"node_modules/markdownlint-cli/node_modules/minimatch": { "node_modules/markdownlint-cli/node_modules/minimatch": {
"version": "10.2.4", "version": "10.1.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
"dev": true, "dev": true,
"license": "BlueOak-1.0.0",
"dependencies": { "dependencies": {
"brace-expansion": "^5.0.2" "@isaacs/brace-expansion": "^5.0.0"
}, },
"engines": { "engines": {
"node": "18 || 20 || >=22" "node": "20 || >=22"
}, },
"funding": { "funding": {
"url": "https://github.com/sponsors/isaacs" "url": "https://github.com/sponsors/isaacs"
@@ -1730,7 +1738,8 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
"integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==",
"dev": true "dev": true,
"license": "MIT"
}, },
"node_modules/micromark": { "node_modules/micromark": {
"version": "4.0.2", "version": "4.0.2",
@@ -2269,10 +2278,11 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/minimatch": { "node_modules/minimatch": {
"version": "3.1.5", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true, "dev": true,
"license": "ISC",
"dependencies": { "dependencies": {
"brace-expansion": "^1.1.7" "brace-expansion": "^1.1.7"
}, },
@@ -2479,6 +2489,7 @@
"resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
"integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
"dev": true, "dev": true,
"license": "MIT",
"engines": { "engines": {
"node": ">=6" "node": ">=6"
} }
@@ -2579,10 +2590,11 @@
} }
}, },
"node_modules/smol-toml": { "node_modules/smol-toml": {
"version": "1.6.0", "version": "1.5.2",
"resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.6.0.tgz", "resolved": "https://registry.npmjs.org/smol-toml/-/smol-toml-1.5.2.tgz",
"integrity": "sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==", "integrity": "sha512-QlaZEqcAH3/RtNyet1IPIYPsEWAaYyXXv1Krsi+1L/QHppjX4Ifm8MQsBISz9vE8cHicIq3clogsheili5vhaQ==",
"dev": true, "dev": true,
"license": "BSD-3-Clause",
"engines": { "engines": {
"node": ">= 18" "node": ">= 18"
}, },
@@ -2800,7 +2812,8 @@
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
"integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
"dev": true "dev": true,
"license": "MIT"
}, },
"node_modules/uri-js": { "node_modules/uri-js": {
"version": "4.4.1", "version": "4.4.1",

View File

@@ -98,7 +98,7 @@
"c8": "^10.1.2", "c8": "^10.1.2",
"eslint": "^9.39.2", "eslint": "^9.39.2",
"globals": "^17.1.0", "globals": "^17.1.0",
"markdownlint-cli": "^0.48.0" "markdownlint-cli": "^0.47.0"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"

View File

@@ -17,16 +17,6 @@ function stripCodeTicks(value) {
return trimmed; return trimmed;
} }
function normalizeSessionName(value, fallback = 'session') {
const normalized = String(value || '')
.trim()
.toLowerCase()
.replace(/[^a-z0-9]+/g, '-')
.replace(/^-+|-+$/g, '');
return normalized || fallback;
}
function parseSection(content, heading) { function parseSection(content, heading) {
if (typeof content !== 'string' || content.length === 0) { if (typeof content !== 'string' || content.length === 0) {
return ''; return '';
@@ -106,31 +96,11 @@ function parseWorkerTask(content) {
}; };
} }
function parseFirstSection(content, headings) {
for (const heading of headings) {
const section = parseSection(content, heading);
if (section) {
return section;
}
}
return '';
}
function parseWorkerHandoff(content) { function parseWorkerHandoff(content) {
return { return {
summary: parseBullets(parseFirstSection(content, ['Summary'])), summary: parseBullets(parseSection(content, 'Summary')),
validation: parseBullets(parseFirstSection(content, [ validation: parseBullets(parseSection(content, 'Validation')),
'Validation', remainingRisks: parseBullets(parseSection(content, 'Remaining Risks'))
'Tests / Verification',
'Tests',
'Verification'
])),
remainingRisks: parseBullets(parseFirstSection(content, [
'Remaining Risks',
'Follow-ups',
'Follow Ups'
]))
}; };
} }
@@ -270,66 +240,28 @@ function buildSessionSnapshot({ sessionName, coordinationDir, panes }) {
}; };
} }
function readPlanConfig(absoluteTarget) {
let config;
try {
config = JSON.parse(fs.readFileSync(absoluteTarget, 'utf8'));
} catch (_error) {
throw new Error(`Invalid orchestration plan JSON: ${absoluteTarget}`);
}
if (!config || Array.isArray(config) || typeof config !== 'object') {
throw new Error(`Invalid orchestration plan: expected a JSON object (${absoluteTarget})`);
}
return config;
}
function readPlanString(config, key, absoluteTarget) {
const value = config[key];
if (value === undefined) {
return undefined;
}
if (typeof value !== 'string') {
throw new Error(`Invalid orchestration plan: ${key} must be a string when provided (${absoluteTarget})`);
}
const normalized = value.trim();
return normalized.length > 0 ? normalized : undefined;
}
function resolveSnapshotTarget(targetPath, cwd = process.cwd()) { function resolveSnapshotTarget(targetPath, cwd = process.cwd()) {
const absoluteTarget = path.resolve(cwd, targetPath); const absoluteTarget = path.resolve(cwd, targetPath);
if (fs.existsSync(absoluteTarget) && fs.statSync(absoluteTarget).isFile()) { if (fs.existsSync(absoluteTarget) && fs.statSync(absoluteTarget).isFile()) {
const config = readPlanConfig(absoluteTarget); const config = JSON.parse(fs.readFileSync(absoluteTarget, 'utf8'));
const repoRoot = path.resolve(readPlanString(config, 'repoRoot', absoluteTarget) || cwd); const repoRoot = path.resolve(config.repoRoot || cwd);
const sessionName = normalizeSessionName(
readPlanString(config, 'sessionName', absoluteTarget) || path.basename(repoRoot),
'session'
);
const coordinationRoot = path.resolve( const coordinationRoot = path.resolve(
readPlanString(config, 'coordinationRoot', absoluteTarget) || path.join(repoRoot, '.orchestration') config.coordinationRoot || path.join(repoRoot, '.orchestration')
); );
return { return {
sessionName, sessionName: config.sessionName,
coordinationDir: path.join(coordinationRoot, sessionName), coordinationDir: path.join(coordinationRoot, config.sessionName),
repoRoot, repoRoot,
targetType: 'plan' targetType: 'plan'
}; };
} }
const repoRoot = path.resolve(cwd);
const sessionName = normalizeSessionName(targetPath, path.basename(repoRoot));
return { return {
sessionName, sessionName: targetPath,
coordinationDir: path.join(repoRoot, '.orchestration', sessionName), coordinationDir: path.join(cwd, '.claude', 'orchestration', targetPath),
repoRoot, repoRoot: cwd,
targetType: 'session' targetType: 'session'
}; };
} }

View File

@@ -34,18 +34,6 @@ function formatCommand(program, args) {
return [program, ...args.map(shellQuote)].join(' '); return [program, ...args.map(shellQuote)].join(' ');
} }
function buildTemplateVariables(values) {
return Object.entries(values).reduce((accumulator, [key, value]) => {
const stringValue = String(value);
const quotedValue = shellQuote(stringValue);
accumulator[key] = stringValue;
accumulator[`${key}_raw`] = stringValue;
accumulator[`${key}_sh`] = quotedValue;
return accumulator;
}, {});
}
function normalizeSeedPaths(seedPaths, repoRoot) { function normalizeSeedPaths(seedPaths, repoRoot) {
const resolvedRepoRoot = path.resolve(repoRoot); const resolvedRepoRoot = path.resolve(repoRoot);
const entries = Array.isArray(seedPaths) ? seedPaths : []; const entries = Array.isArray(seedPaths) ? seedPaths : [];
@@ -68,12 +56,6 @@ function normalizeSeedPaths(seedPaths, repoRoot) {
} }
const normalizedPath = relativePath.split(path.sep).join('/'); const normalizedPath = relativePath.split(path.sep).join('/');
if (!normalizedPath || normalizedPath === '.') {
throw new Error('seedPaths entries must not target the repo root');
}
if (normalizedPath === '.git' || normalizedPath.startsWith('.git/')) {
throw new Error(`seedPaths entries must not target git metadata: ${entry}`);
}
if (seen.has(normalizedPath)) { if (seen.has(normalizedPath)) {
continue; continue;
} }
@@ -138,13 +120,6 @@ function buildWorkerArtifacts(workerPlan) {
'## Completion', '## Completion',
'Do not spawn subagents or external agents for this task.', 'Do not spawn subagents or external agents for this task.',
'Report results in your final response.', 'Report results in your final response.',
'Respond with these exact sections so orchestration parsing can succeed:',
'## Summary',
'- ...',
'## Validation',
'- ...',
'## Remaining Risks',
'- ...',
`The worker launcher captures your response in \`${workerPlan.handoffFilePath}\` automatically.`, `The worker launcher captures your response in \`${workerPlan.handoffFilePath}\` automatically.`,
`The worker launcher updates \`${workerPlan.statusFilePath}\` automatically.` `The worker launcher updates \`${workerPlan.statusFilePath}\` automatically.`
].join('\n') ].join('\n')
@@ -157,10 +132,13 @@ function buildWorkerArtifacts(workerPlan) {
'## Summary', '## Summary',
'- Pending', '- Pending',
'', '',
'## Validation', '## Files Changed',
'- Pending', '- Pending',
'', '',
'## Remaining Risks', '## Tests / Verification',
'- Pending',
'',
'## Follow-ups',
'- Pending' '- Pending'
].join('\n') ].join('\n')
}, },
@@ -196,7 +174,6 @@ function buildOrchestrationPlan(config = {}) {
throw new Error('buildOrchestrationPlan requires at least one worker'); throw new Error('buildOrchestrationPlan requires at least one worker');
} }
const seenWorkerSlugs = new Map();
const workerPlans = workers.map((worker, index) => { const workerPlans = workers.map((worker, index) => {
if (!worker || typeof worker.task !== 'string' || worker.task.trim().length === 0) { if (!worker || typeof worker.task !== 'string' || worker.task.trim().length === 0) {
throw new Error(`Worker ${index + 1} is missing a task`); throw new Error(`Worker ${index + 1} is missing a task`);
@@ -204,13 +181,6 @@ function buildOrchestrationPlan(config = {}) {
const workerName = worker.name || `worker-${index + 1}`; const workerName = worker.name || `worker-${index + 1}`;
const workerSlug = slugify(workerName, `worker-${index + 1}`); const workerSlug = slugify(workerName, `worker-${index + 1}`);
if (seenWorkerSlugs.has(workerSlug)) {
const firstWorkerName = seenWorkerSlugs.get(workerSlug);
throw new Error(
`Worker names must map to unique slugs: ${workerSlug} (${firstWorkerName}, ${workerName})`
);
}
seenWorkerSlugs.set(workerSlug, workerName);
const branchName = `orchestrator-${sessionName}-${workerSlug}`; const branchName = `orchestrator-${sessionName}-${workerSlug}`;
const worktreePath = path.join(worktreeRoot, `${repoName}-${sessionName}-${workerSlug}`); const worktreePath = path.join(worktreeRoot, `${repoName}-${sessionName}-${workerSlug}`);
const workerCoordinationDir = path.join(coordinationDir, workerSlug); const workerCoordinationDir = path.join(coordinationDir, workerSlug);
@@ -220,7 +190,7 @@ function buildOrchestrationPlan(config = {}) {
const launcherCommand = worker.launcherCommand || defaultLauncher; const launcherCommand = worker.launcherCommand || defaultLauncher;
const workerSeedPaths = normalizeSeedPaths(worker.seedPaths, repoRoot); const workerSeedPaths = normalizeSeedPaths(worker.seedPaths, repoRoot);
const seedPaths = normalizeSeedPaths([...globalSeedPaths, ...workerSeedPaths], repoRoot); const seedPaths = normalizeSeedPaths([...globalSeedPaths, ...workerSeedPaths], repoRoot);
const templateVariables = buildTemplateVariables({ const templateVariables = {
branch_name: branchName, branch_name: branchName,
handoff_file: handoffFilePath, handoff_file: handoffFilePath,
repo_root: repoRoot, repo_root: repoRoot,
@@ -230,7 +200,7 @@ function buildOrchestrationPlan(config = {}) {
worker_name: workerName, worker_name: workerName,
worker_slug: workerSlug, worker_slug: workerSlug,
worktree_path: worktreePath worktree_path: worktreePath
}); };
if (!launcherCommand) { if (!launcherCommand) {
throw new Error(`Worker ${workerName} is missing a launcherCommand`); throw new Error(`Worker ${workerName} is missing a launcherCommand`);

View File

@@ -51,11 +51,11 @@ Rules:
- Report progress and final results in stdout only. - Report progress and final results in stdout only.
- Do not write handoff or status files yourself; the launcher manages those artifacts. - Do not write handoff or status files yourself; the launcher manages those artifacts.
- If you change code or docs, keep the scope narrow and defensible. - If you change code or docs, keep the scope narrow and defensible.
- In your final response, include these exact sections: - In your final response, include exactly these sections:
1. Summary 1. Summary
2. Validation 2. Files Changed
3. Remaining Risks 3. Validation
- You may include Files Changed if useful, but keep the three sections above exact. 4. Remaining Risks
Task file: $task_file Task file: $task_file

View File

@@ -17,11 +17,8 @@ function usage() {
' node scripts/orchestrate-worktrees.js <plan.json> [--write-only]', ' node scripts/orchestrate-worktrees.js <plan.json> [--write-only]',
'', '',
'Placeholders supported in launcherCommand:', 'Placeholders supported in launcherCommand:',
' Raw defaults: {worker_name} {worker_slug} {session_name} {repo_root}', ' {worker_name} {worker_slug} {session_name} {repo_root}',
' Raw defaults: {worktree_path} {branch_name} {task_file} {handoff_file} {status_file}', ' {worktree_path} {branch_name} {task_file} {handoff_file} {status_file}',
' Shell-safe aliases: {worker_name_sh} {worker_slug_sh} {session_name_sh} {repo_root_sh}',
' Shell-safe aliases: {worktree_path_sh} {branch_name_sh} {task_file_sh} {handoff_file_sh} {status_file_sh}',
' Explicit raw aliases also exist with the _raw suffix.',
'', '',
'Without flags the script prints a dry-run plan only.' 'Without flags the script prints a dry-run plan only.'
].join('\n')); ].join('\n'));

View File

@@ -20,32 +20,9 @@ function usage() {
function parseArgs(argv) { function parseArgs(argv) {
const args = argv.slice(2); const args = argv.slice(2);
let target = null; const target = args.find(arg => !arg.startsWith('--'));
let writePath = null; const writeIndex = args.indexOf('--write');
const writePath = writeIndex >= 0 ? args[writeIndex + 1] : null;
for (let index = 0; index < args.length; index += 1) {
const arg = args[index];
if (arg === '--write') {
const candidate = args[index + 1];
if (!candidate || candidate.startsWith('--')) {
throw new Error('--write requires an output path');
}
writePath = candidate;
index += 1;
continue;
}
if (arg.startsWith('--')) {
throw new Error(`Unknown flag: ${arg}`);
}
if (target) {
throw new Error('Expected a single session name or plan path');
}
target = arg;
}
return { target, writePath }; return { target, writePath };
} }
@@ -79,4 +56,4 @@ if (require.main === module) {
} }
} }
module.exports = { main, parseArgs }; module.exports = { main };

View File

@@ -82,7 +82,7 @@ If the user chooses niche or core + niche, continue to category selection below
### 2b: Choose Skill Categories ### 2b: Choose Skill Categories
There are 41 skills organized into 8 categories. Use `AskUserQuestion` with `multiSelect: true`: There are 35 skills organized into 7 categories. Use `AskUserQuestion` with `multiSelect: true`:
``` ```
Question: "Which skill categories do you want to install?" Question: "Which skill categories do you want to install?"
@@ -90,7 +90,6 @@ Options:
- "Framework & Language" — "Django, Spring Boot, Go, Python, Java, Frontend, Backend patterns" - "Framework & Language" — "Django, Spring Boot, Go, Python, Java, Frontend, Backend patterns"
- "Database" — "PostgreSQL, ClickHouse, JPA/Hibernate patterns" - "Database" — "PostgreSQL, ClickHouse, JPA/Hibernate patterns"
- "Workflow & Quality" — "TDD, verification, learning, security review, compaction" - "Workflow & Quality" — "TDD, verification, learning, security review, compaction"
- "Business & Content" — "Article writing, content engine, market research, investor materials, outreach"
- "Research & APIs" — "Deep research, Exa search, Claude API patterns" - "Research & APIs" — "Deep research, Exa search, Claude API patterns"
- "Social & Content Distribution" — "X/Twitter API, crossposting alongside content-engine" - "Social & Content Distribution" — "X/Twitter API, crossposting alongside content-engine"
- "Media Generation" — "fal.ai image/video/audio alongside VideoDB" - "Media Generation" — "fal.ai image/video/audio alongside VideoDB"

View File

@@ -8,16 +8,14 @@ origin: ECC
Distribute content across multiple social platforms with platform-native adaptation. Distribute content across multiple social platforms with platform-native adaptation.
## When to Use ## When to Activate
- User wants to post content to multiple platforms - User wants to post content to multiple platforms
- Publishing announcements, launches, or updates across social media - Publishing announcements, launches, or updates across social media
- Repurposing a post from one platform to others - Repurposing a post from one platform to others
- User says "crosspost", "post everywhere", "share on all platforms", or "distribute this" - User says "crosspost", "post everywhere", "share on all platforms", or "distribute this"
## How It Works ## Core Rules
### Core Rules
1. **Never post identical content cross-platform.** Each platform gets a native adaptation. 1. **Never post identical content cross-platform.** Each platform gets a native adaptation.
2. **Primary platform first.** Post to the main platform, then adapt for others. 2. **Primary platform first.** Post to the main platform, then adapt for others.
@@ -25,7 +23,7 @@ Distribute content across multiple social platforms with platform-native adaptat
4. **One idea per post.** If the source content has multiple ideas, split across posts. 4. **One idea per post.** If the source content has multiple ideas, split across posts.
5. **Attribution matters.** If crossposting someone else's content, credit the source. 5. **Attribution matters.** If crossposting someone else's content, credit the source.
### Platform Specifications ## Platform Specifications
| Platform | Max Length | Link Handling | Hashtags | Media | | Platform | Max Length | Link Handling | Hashtags | Media |
|----------|-----------|---------------|----------|-------| |----------|-----------|---------------|----------|-------|
@@ -34,7 +32,7 @@ Distribute content across multiple social platforms with platform-native adaptat
| Threads | 500 chars | Separate link attachment | None typical | Images, video | | Threads | 500 chars | Separate link attachment | None typical | Images, video |
| Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images | | Bluesky | 300 chars | Via facets (rich text) | None (use feeds) | Images |
### Workflow ## Workflow
### Step 1: Create Source Content ### Step 1: Create Source Content
@@ -89,7 +87,7 @@ Post adapted versions to remaining platforms:
- Stagger timing (not all at once — 30-60 min gaps) - Stagger timing (not all at once — 30-60 min gaps)
- Include cross-platform references where appropriate ("longer thread on X" etc.) - Include cross-platform references where appropriate ("longer thread on X" etc.)
## Examples ## Content Adaptation Examples
### Source: Product Launch ### Source: Product Launch
@@ -163,10 +161,8 @@ resp = requests.post(
"linkedin": {"text": linkedin_version}, "linkedin": {"text": linkedin_version},
"threads": {"text": threads_version} "threads": {"text": threads_version}
} }
}, }
timeout=30
) )
resp.raise_for_status()
``` ```
### Manual Posting ### Manual Posting

View File

@@ -150,7 +150,7 @@ Example `plan.json`:
{ {
"sessionName": "skill-audit", "sessionName": "skill-audit",
"baseRef": "HEAD", "baseRef": "HEAD",
"launcherCommand": "codex exec --cwd {worktree_path_sh} --task-file {task_file_sh}", "launcherCommand": "codex exec --cwd {worktree_path} --task-file {task_file}",
"workers": [ "workers": [
{ "name": "docs-a", "task": "Fix skills 1-4 and write handoff notes." }, { "name": "docs-a", "task": "Fix skills 1-4 and write handoff notes." },
{ "name": "docs-b", "task": "Fix skills 5-8 and write handoff notes." } { "name": "docs-b", "task": "Fix skills 5-8 and write handoff notes." }
@@ -176,7 +176,7 @@ Use `seedPaths` when workers need access to dirty or untracked local files that
"scripts/lib/tmux-worktree-orchestrator.js", "scripts/lib/tmux-worktree-orchestrator.js",
".claude/plan/workflow-e2e-test.json" ".claude/plan/workflow-e2e-test.json"
], ],
"launcherCommand": "bash {repo_root_sh}/scripts/orchestrate-codex-worker.sh {task_file_sh} {handoff_file_sh} {status_file_sh}", "launcherCommand": "bash {repo_root}/scripts/orchestrate-codex-worker.sh {task_file} {handoff_file} {status_file}",
"workers": [ "workers": [
{ "name": "seed-check", "task": "Verify seeded files are present before starting work." } { "name": "seed-check", "task": "Verify seeded files are present before starting work." }
] ]

View File

@@ -27,7 +27,7 @@ Exa MCP server must be configured. Add to `~/.claude.json`:
"args": [ "args": [
"-y", "-y",
"exa-mcp-server", "exa-mcp-server",
"tools=web_search_exa,web_search_advanced_exa,get_code_context_exa,crawling_exa,company_research_exa,people_search_exa,deep_researcher_start,deep_researcher_check" "tools=web_search_exa,get_code_context_exa,crawling_exa,company_research_exa,linkedin_search_exa,deep_researcher_start,deep_researcher_check"
], ],
"env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" } "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }
} }
@@ -103,11 +103,11 @@ company_research_exa(companyName: "Anthropic", numResults: 5)
| `companyName` | string | required | Company name | | `companyName` | string | required | Company name |
| `numResults` | number | 5 | Number of results | | `numResults` | number | 5 | Number of results |
### people_search_exa ### linkedin_search_exa
Find professional profiles and bios. Find professional profiles and company-adjacent people research.
``` ```
people_search_exa(query: "AI safety researchers at Anthropic", numResults: 5) linkedin_search_exa(query: "AI safety researchers at Anthropic", numResults: 5)
``` ```
### crawling_exa ### crawling_exa

View File

@@ -253,7 +253,7 @@ estimate_cost(
estimate_type: "unit_price", estimate_type: "unit_price",
endpoints: { endpoints: {
"fal-ai/nano-banana-pro": { "fal-ai/nano-banana-pro": {
"unit_quantity": 1 "num_images": 1
} }
} }
) )

View File

@@ -108,7 +108,7 @@ The user must set `VIDEO_DB_API_KEY` using **either** method:
- **Export in terminal** (before starting Claude): `export VIDEO_DB_API_KEY=your-key` - **Export in terminal** (before starting Claude): `export VIDEO_DB_API_KEY=your-key`
- **Project `.env` file**: Save `VIDEO_DB_API_KEY=your-key` in the project's `.env` file - **Project `.env` file**: Save `VIDEO_DB_API_KEY=your-key` in the project's `.env` file
Get a free API key at [console.videodb.io](https://console.videodb.io) (50 free uploads, no credit card). Get a free API key at https://console.videodb.io (50 free uploads, no credit card).
**Do NOT** read, write, or handle the API key yourself. Always let the user set it. **Do NOT** read, write, or handle the API key yourself. Always let the user set it.
@@ -354,6 +354,7 @@ Reference documentation is in the `reference/` directory adjacent to this SKILL.
- [reference/capture-reference.md](reference/capture-reference.md) - Capture SDK and WebSocket events - [reference/capture-reference.md](reference/capture-reference.md) - Capture SDK and WebSocket events
- [reference/use-cases.md](reference/use-cases.md) - Common video processing patterns and examples - [reference/use-cases.md](reference/use-cases.md) - Common video processing patterns and examples
**Do not use ffmpeg, moviepy, or local encoding tools** when VideoDB supports the operation. The following are all handled server-side by VideoDB — trimming, combining clips, overlaying audio or music, adding subtitles, text/image overlays, transcoding, resolution changes, aspect-ratio conversion, resizing for platform requirements, transcription, and media generation. Only fall back to local tools for operations listed under Limitations in reference/editor.md (transitions, speed changes, crop/zoom, colour grading, volume mixing). **Do not use ffmpeg, moviepy, or local encoding tools** when VideoDB supports the operation. The following are all handled server-side by VideoDB — trimming, combining clips, overlaying audio or music, adding subtitles, text/image overlays, transcoding, resolution changes, aspect-ratio conversion, resizing for platform requirements, transcription, and media generation. Only fall back to local tools for operations listed under Limitations in reference/editor.md (transitions, speed changes, crop/zoom, colour grading, volume mixing).
### When to use what ### When to use what

View File

@@ -380,7 +380,7 @@ results = video.search(
``` ```
> **Note:** `filter` is an explicit named parameter in `video.search()`. `scene_index_id` is passed through `**kwargs` to the API. > **Note:** `filter` is an explicit named parameter in `video.search()`. `scene_index_id` is passed through `**kwargs` to the API.
>
> **Important:** `video.search()` raises `InvalidRequestError` with message `"No results found"` when there are no matches. Always wrap search calls in try/except. For scene search, use `score_threshold=0.3` or higher to filter low-relevance noise. > **Important:** `video.search()` raises `InvalidRequestError` with message `"No results found"` when there are no matches. Always wrap search calls in try/except. For scene search, use `score_threshold=0.3` or higher to filter low-relevance noise.
For scene search, use `search_type=SearchType.semantic` with `index_type=IndexType.scene`. Pass `scene_index_id` when targeting a specific scene index. See [search.md](search.md) for details. For scene search, use `search_type=SearchType.semantic` with `index_type=IndexType.scene`. Pass `scene_index_id` when targeting a specific scene index. See [search.md](search.md) for details.

View File

@@ -107,7 +107,7 @@ Use [scripts/ws_listener.py](../scripts/ws_listener.py) to connect and dump even
} }
``` ```
> For latest details, see [the realtime context docs](https://docs.videodb.io/pages/ingest/capture-sdks/realtime-context.md). > For latest details, see https://docs.videodb.io/pages/ingest/capture-sdks/realtime-context.md
--- ---

View File

@@ -92,7 +92,6 @@ def post_thread(oauth, tweets: list[str]) -> list[str]:
if reply_to: if reply_to:
payload["reply"] = {"in_reply_to_tweet_id": reply_to} payload["reply"] = {"in_reply_to_tweet_id": reply_to}
resp = oauth.post("https://api.x.com/2/tweets", json=payload) resp = oauth.post("https://api.x.com/2/tweets", json=payload)
resp.raise_for_status()
tweet_id = resp.json()["data"]["id"] tweet_id = resp.json()["data"]["id"]
ids.append(tweet_id) ids.append(tweet_id)
reply_to = tweet_id reply_to = tweet_id

View File

@@ -98,44 +98,6 @@ function cleanupTestDir(testDir) {
fs.rmSync(testDir, { recursive: true, force: true }); fs.rmSync(testDir, { recursive: true, force: true });
} }
/**
 * Produce a canonical, comparison-friendly form of a filesystem path.
 *
 * Steps: trim and flip backslashes to slashes, rewrite MSYS/Git-Bash style
 * "/c/…" prefixes to "c:/…", upper-case a leading drive letter, resolve via
 * the real filesystem when possible, then string-normalize the result.
 * Falsy input yields the empty string.
 */
function normalizeComparablePath(targetPath) {
  if (!targetPath) return '';

  let candidate = String(targetPath).trim().replace(/\\/g, '/');

  // MSYS/Git-Bash style "/c/…" becomes a drive-letter path "c:/…".
  const msysMatch = /^\/([a-zA-Z])\//.exec(candidate);
  if (msysMatch) {
    candidate = `${msysMatch[1]}:/${candidate.slice(3)}`;
  }

  // Upper-case the drive letter of "x:/…" paths for case-stable comparison.
  if (/^[a-zA-Z]:\//.test(candidate)) {
    candidate = `${candidate[0].toUpperCase()}:${candidate.slice(2)}`;
  }

  try {
    candidate = fs.realpathSync(candidate);
  } catch {
    // Path does not resolve on disk; fall back to pure string normalization.
  }

  return path
    .normalize(candidate)
    .replace(/\\/g, '/')
    .replace(/^([a-z]):/, (_, drive) => `${drive.toUpperCase()}:`);
}
/**
 * Decide whether two path strings point at the same filesystem location.
 *
 * Compares the normalized string forms first; when they differ, falls back
 * to a device/inode comparison so hard links and alternate spellings of the
 * same file still match. Returns false when either path is empty/falsy or
 * cannot be stat'ed.
 */
function pathsReferToSameLocation(leftPath, rightPath) {
  const left = normalizeComparablePath(leftPath);
  const right = normalizeComparablePath(rightPath);

  if (!left || !right) return false;
  if (left === right) return true;

  try {
    const leftInfo = fs.statSync(left);
    const rightInfo = fs.statSync(right);
    return leftInfo.dev === rightInfo.dev && leftInfo.ino === rightInfo.ino;
  } catch {
    // Either path is unstattable, so they cannot be proven identical.
    return false;
  }
}
function createCommandShim(binDir, baseName, logFile) { function createCommandShim(binDir, baseName, logFile) {
fs.mkdirSync(binDir, { recursive: true }); fs.mkdirSync(binDir, { recursive: true });
@@ -2267,9 +2229,9 @@ async function runTests() {
assert.strictEqual(code, 0, `detect-project should source cleanly, stderr: ${stderr}`); assert.strictEqual(code, 0, `detect-project should source cleanly, stderr: ${stderr}`);
const [projectId] = stdout.trim().split(/\r?\n/); const [projectId, projectDir] = stdout.trim().split(/\r?\n/);
const registryPath = path.join(homeDir, '.claude', 'homunculus', 'projects.json'); const registryPath = path.join(homeDir, '.claude', 'homunculus', 'projects.json');
const projectMetadataPath = path.join(homeDir, '.claude', 'homunculus', 'projects', projectId, 'project.json'); const projectMetadataPath = path.join(projectDir, 'project.json');
assert.ok(projectId, 'detect-project should emit a project id'); assert.ok(projectId, 'detect-project should emit a project id');
assert.ok(fs.existsSync(registryPath), 'projects.json should be created'); assert.ok(fs.existsSync(registryPath), 'projects.json should be created');
@@ -2281,13 +2243,7 @@ async function runTests() {
assert.ok(registry[projectId], 'registry should contain the detected project'); assert.ok(registry[projectId], 'registry should contain the detected project');
assert.strictEqual(metadata.id, projectId, 'project.json should include the detected id'); assert.strictEqual(metadata.id, projectId, 'project.json should include the detected id');
assert.strictEqual(metadata.name, path.basename(repoDir), 'project.json should include the repo name'); assert.strictEqual(metadata.name, path.basename(repoDir), 'project.json should include the repo name');
const normalizedMetadataRoot = normalizeComparablePath(metadata.root); assert.strictEqual(fs.realpathSync(metadata.root), fs.realpathSync(repoDir), 'project.json should include the repo root');
const normalizedRepoDir = normalizeComparablePath(repoDir);
assert.ok(normalizedMetadataRoot, 'project.json should include a non-empty repo root');
assert.ok(
pathsReferToSameLocation(normalizedMetadataRoot, normalizedRepoDir),
`project.json should include the repo root (expected ${normalizedRepoDir}, got ${normalizedMetadataRoot})`,
);
assert.strictEqual(metadata.remote, 'https://github.com/example/ecc-test.git', 'project.json should include the sanitized remote'); assert.strictEqual(metadata.remote, 'https://github.com/example/ecc-test.git', 'project.json should include the sanitized remote');
assert.ok(metadata.created_at, 'project.json should include created_at'); assert.ok(metadata.created_at, 'project.json should include created_at');
assert.ok(metadata.last_seen, 'project.json should include last_seen'); assert.ok(metadata.last_seen, 'project.json should include last_seen');
@@ -4570,20 +4526,10 @@ async function runTests() {
const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('.tmp')); const files = fs.readdirSync(sessionsDir).filter(f => f.endsWith('.tmp'));
assert.ok(files.length > 0, 'Should create session file'); assert.ok(files.length > 0, 'Should create session file');
const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8'); const content = fs.readFileSync(path.join(sessionsDir, files[0]), 'utf8');
const summaryMatch = content.match(
/<!-- ECC:SUMMARY:START -->([\s\S]*?)<!-- ECC:SUMMARY:END -->/
);
// The real string message should appear // The real string message should appear
assert.ok(content.includes('Real user message'), 'Should include the string content user message'); assert.ok(content.includes('Real user message'), 'Should include the string content user message');
assert.ok(summaryMatch, 'Should include a generated summary block'); // Numeric/boolean/object content should NOT appear as text
const summaryBlock = summaryMatch[1]; assert.ok(!content.includes('42'), 'Numeric content should be skipped (else branch → empty string → filtered)');
// Numeric/boolean/object content should NOT appear as task bullets
assert.ok(
!summaryBlock.includes('\n- 42\n'),
'Numeric content should be skipped (else branch → empty string → filtered)'
);
assert.ok(!summaryBlock.includes('\n- true\n'), 'Boolean content should be skipped');
assert.ok(!summaryBlock.includes('[object Object]'), 'Object content should be skipped');
} finally { } finally {
fs.rmSync(isoHome, { recursive: true, force: true }); fs.rmSync(isoHome, { recursive: true, force: true });
} }

View File

@@ -109,25 +109,6 @@ test('parseWorkerHandoff also supports bold section headers', () => {
assert.deepStrictEqual(handoff.remainingRisks, ['No runtime screenshot']); assert.deepStrictEqual(handoff.remainingRisks, ['No runtime screenshot']);
}); });
// Legacy handoff documents used "Tests / Verification" and "Follow-ups"
// headings; the parser must still map them onto the current fields.
test('parseWorkerHandoff accepts legacy verification and follow-up headings', () => {
  const legacyDocument = [
    '# Handoff',
    '',
    '## Summary',
    '- Worker completed successfully',
    '',
    '## Tests / Verification',
    '- Ran tests',
    '',
    '## Follow-ups',
    '- Re-run screenshots after deploy'
  ].join('\n');

  const parsed = parseWorkerHandoff(legacyDocument);

  assert.deepStrictEqual(parsed.summary, ['Worker completed successfully']);
  assert.deepStrictEqual(parsed.validation, ['Ran tests']);
  assert.deepStrictEqual(parsed.remainingRisks, ['Re-run screenshots after deploy']);
});
test('loadWorkerSnapshots reads coordination worker directories', () => { test('loadWorkerSnapshots reads coordination worker directories', () => {
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-session-')); const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-session-'));
const coordinationDir = path.join(tempRoot, 'coordination'); const coordinationDir = path.join(tempRoot, 'coordination');
@@ -223,92 +204,7 @@ test('resolveSnapshotTarget handles plan files and direct session names', () =>
const fromSession = resolveSnapshotTarget('workflow-visual-proof', repoRoot); const fromSession = resolveSnapshotTarget('workflow-visual-proof', repoRoot);
assert.strictEqual(fromSession.targetType, 'session'); assert.strictEqual(fromSession.targetType, 'session');
assert.ok(fromSession.coordinationDir.endsWith(path.join('.orchestration', 'workflow-visual-proof'))); assert.ok(fromSession.coordinationDir.endsWith(path.join('.claude', 'orchestration', 'workflow-visual-proof')));
} finally {
fs.rmSync(tempRoot, { recursive: true, force: true });
}
});
// A plan with an explicit (unnormalized) session name should be slugified;
// a plan without one should fall back to the slugified repo directory name.
test('resolveSnapshotTarget normalizes plan session names and defaults to the repo name', () => {
  const scratchRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-target-'));
  const repoRoot = path.join(scratchRoot, 'My Repo');
  fs.mkdirSync(repoRoot, { recursive: true });

  const namedPlanPath = path.join(repoRoot, 'named-plan.json');
  const defaultPlanPath = path.join(repoRoot, 'default-plan.json');
  fs.writeFileSync(namedPlanPath, JSON.stringify({ sessionName: 'Workflow Visual Proof', repoRoot }));
  fs.writeFileSync(defaultPlanPath, JSON.stringify({ repoRoot }));

  try {
    const named = resolveSnapshotTarget(namedPlanPath, repoRoot);
    assert.strictEqual(named.sessionName, 'workflow-visual-proof');
    assert.ok(named.coordinationDir.endsWith(path.join('.orchestration', 'workflow-visual-proof')));

    const fallback = resolveSnapshotTarget(defaultPlanPath, repoRoot);
    assert.strictEqual(fallback.sessionName, 'my-repo');
    assert.ok(fallback.coordinationDir.endsWith(path.join('.orchestration', 'my-repo')));
  } finally {
    fs.rmSync(scratchRoot, { recursive: true, force: true });
  }
});
test('resolveSnapshotTarget rejects malformed plan files and invalid config fields', () => {
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orch-target-'));
const repoRoot = path.join(tempRoot, 'repo');
fs.mkdirSync(repoRoot, { recursive: true });
const invalidJsonPath = path.join(repoRoot, 'invalid-json.json');
const blankFieldsPath = path.join(repoRoot, 'blank-fields.json');
const invalidSessionNamePath = path.join(repoRoot, 'invalid-session.json');
const invalidRepoRootPath = path.join(repoRoot, 'invalid-repo-root.json');
const invalidCoordinationRootPath = path.join(repoRoot, 'invalid-coordination-root.json');
fs.writeFileSync(invalidJsonPath, '{not valid json');
fs.writeFileSync(blankFieldsPath, JSON.stringify({
sessionName: ' ',
repoRoot: ' ',
coordinationRoot: ' '
}));
fs.writeFileSync(invalidSessionNamePath, JSON.stringify({
sessionName: 42,
repoRoot
}));
fs.writeFileSync(invalidRepoRootPath, JSON.stringify({
sessionName: 'workflow',
repoRoot: ['not-a-string']
}));
fs.writeFileSync(invalidCoordinationRootPath, JSON.stringify({
sessionName: 'workflow',
repoRoot,
coordinationRoot: false
}));
try {
const blankFields = resolveSnapshotTarget(blankFieldsPath, repoRoot);
assert.strictEqual(blankFields.sessionName, 'repo');
assert.strictEqual(blankFields.repoRoot, repoRoot);
assert.ok(blankFields.coordinationDir.endsWith(path.join('.orchestration', 'repo')));
assert.throws(
() => resolveSnapshotTarget(invalidJsonPath, repoRoot),
/Invalid orchestration plan JSON/
);
assert.throws(
() => resolveSnapshotTarget(invalidSessionNamePath, repoRoot),
/sessionName must be a string when provided/
);
assert.throws(
() => resolveSnapshotTarget(invalidRepoRootPath, repoRoot),
/repoRoot must be a string when provided/
);
assert.throws(
() => resolveSnapshotTarget(invalidCoordinationRootPath, repoRoot),
/coordinationRoot must be a string when provided/
);
} finally { } finally {
fs.rmSync(tempRoot, { recursive: true, force: true }); fs.rmSync(tempRoot, { recursive: true, force: true });
} }

View File

@@ -57,7 +57,7 @@ test('buildOrchestrationPlan creates worktrees, branches, and tmux commands', ()
repoRoot, repoRoot,
sessionName: 'Skill Audit', sessionName: 'Skill Audit',
baseRef: 'main', baseRef: 'main',
launcherCommand: 'codex exec --cwd {worktree_path_sh} --task-file {task_file_sh}', launcherCommand: 'codex exec --cwd {worktree_path} --task-file {task_file}',
workers: [ workers: [
{ name: 'Docs A', task: 'Fix skills 1-4' }, { name: 'Docs A', task: 'Fix skills 1-4' },
{ name: 'Docs B', task: 'Fix skills 5-8' } { name: 'Docs B', task: 'Fix skills 5-8' }
@@ -137,46 +137,6 @@ test('buildOrchestrationPlan normalizes global and worker seed paths', () => {
]); ]);
}); });
// "Docs A" and "Docs/A" both collapse to the slug "docs-a", which the
// planner must reject to keep worktree/branch names unambiguous.
test('buildOrchestrationPlan rejects worker names that collapse to the same slug', () => {
  const collidingConfig = {
    repoRoot: '/tmp/ecc',
    sessionName: 'duplicates',
    launcherCommand: 'echo run',
    workers: [
      { name: 'Docs A', task: 'Fix skill docs' },
      { name: 'Docs/A', task: 'Fix tests' }
    ]
  };
  assert.throws(() => buildOrchestrationPlan(collidingConfig), /unique slugs/);
});
// The launcher template exposes both shell-quoted (*_sh) and raw placeholder
// values; a path containing a space exercises the quoting.
test('buildOrchestrationPlan exposes shell-safe launcher aliases alongside raw defaults', () => {
  const repoRoot = path.join('/tmp', 'My Repo');
  const plan = buildOrchestrationPlan({
    repoRoot,
    sessionName: 'Spacing Audit',
    launcherCommand: 'bash {repo_root_sh}/scripts/orchestrate-codex-worker.sh {task_file_sh} {handoff_file_sh} {status_file_sh} {worker_name_sh} {worker_name}',
    workers: [{ name: 'Docs Fixer', task: 'Update docs' }]
  });

  // Mirror the orchestrator's POSIX single-quote escaping.
  const quote = value => `'${String(value).replace(/'/g, `'\\''`)}'`;
  const [workerPlan] = plan.workerPlans;

  assert.ok(
    workerPlan.launchCommand.includes(`bash ${quote(workerPlan.repoRoot)}/scripts/orchestrate-codex-worker.sh`),
    'repo_root_sh should provide a shell-safe path'
  );
  assert.ok(
    workerPlan.launchCommand.includes(quote(workerPlan.taskFilePath)),
    'task_file_sh should provide a shell-safe path'
  );
  assert.ok(
    workerPlan.launchCommand.includes(`${quote(workerPlan.workerName)} ${workerPlan.workerName}`),
    'raw defaults should remain available alongside shell-safe aliases'
  );
});
test('normalizeSeedPaths rejects paths outside the repo root', () => { test('normalizeSeedPaths rejects paths outside the repo root', () => {
assert.throws( assert.throws(
() => normalizeSeedPaths(['../outside.txt'], '/tmp/ecc'), () => normalizeSeedPaths(['../outside.txt'], '/tmp/ecc'),
@@ -184,17 +144,6 @@ test('normalizeSeedPaths rejects paths outside the repo root', () => {
); );
}); });
// Seeding the whole repo root or git metadata would let a worker clobber the
// primary checkout, so both must be rejected with distinct messages.
test('normalizeSeedPaths rejects repo root and git metadata paths', () => {
  const rejectedCases = [
    { seedPath: '.', pattern: /must not target the repo root/ },
    { seedPath: '.git/config', pattern: /must not target git metadata/ }
  ];
  for (const { seedPath, pattern } of rejectedCases) {
    assert.throws(() => normalizeSeedPaths([seedPath], '/tmp/ecc'), pattern);
  }
});
test('materializePlan keeps worker instructions inside the worktree boundary', () => { test('materializePlan keeps worker instructions inside the worktree boundary', () => {
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orchestrator-test-')); const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-orchestrator-test-'));
@@ -203,25 +152,18 @@ test('materializePlan keeps worker instructions inside the worktree boundary', (
repoRoot: tempRoot, repoRoot: tempRoot,
coordinationRoot: path.join(tempRoot, '.claude', 'orchestration'), coordinationRoot: path.join(tempRoot, '.claude', 'orchestration'),
sessionName: 'Workflow E2E', sessionName: 'Workflow E2E',
launcherCommand: 'bash {repo_root_sh}/scripts/orchestrate-codex-worker.sh {task_file_sh} {handoff_file_sh} {status_file_sh}', launcherCommand: 'bash {repo_root}/scripts/orchestrate-codex-worker.sh {task_file} {handoff_file} {status_file}',
workers: [{ name: 'Docs', task: 'Update the workflow docs.' }] workers: [{ name: 'Docs', task: 'Update the workflow docs.' }]
}); });
materializePlan(plan); materializePlan(plan);
const taskFile = fs.readFileSync(plan.workerPlans[0].taskFilePath, 'utf8'); const taskFile = fs.readFileSync(plan.workerPlans[0].taskFilePath, 'utf8');
const handoffFile = fs.readFileSync(plan.workerPlans[0].handoffFilePath, 'utf8');
assert.ok( assert.ok(
taskFile.includes('Report results in your final response.'), taskFile.includes('Report results in your final response.'),
'Task file should tell the worker to report in stdout' 'Task file should tell the worker to report in stdout'
); );
assert.ok(
taskFile.includes('## Summary') &&
taskFile.includes('## Validation') &&
taskFile.includes('## Remaining Risks'),
'Task file should require parser-compatible headings'
);
assert.ok( assert.ok(
taskFile.includes('Do not spawn subagents or external agents for this task.'), taskFile.includes('Do not spawn subagents or external agents for this task.'),
'Task file should keep nested workers single-session' 'Task file should keep nested workers single-session'
@@ -234,18 +176,6 @@ test('materializePlan keeps worker instructions inside the worktree boundary', (
!taskFile.includes('Update `'), !taskFile.includes('Update `'),
'Task file should not instruct the nested worker to update orchestration status files' 'Task file should not instruct the nested worker to update orchestration status files'
); );
assert.ok(
handoffFile.includes('## Summary') &&
handoffFile.includes('## Validation') &&
handoffFile.includes('## Remaining Risks'),
'Handoff placeholder should seed parser-compatible headings'
);
assert.ok(
!handoffFile.includes('## Files Changed') &&
!handoffFile.includes('## Tests / Verification') &&
!handoffFile.includes('## Follow-ups'),
'Handoff placeholder should not use legacy headings'
);
} finally { } finally {
fs.rmSync(tempRoot, { recursive: true, force: true }); fs.rmSync(tempRoot, { recursive: true, force: true });
} }

View File

@@ -1,89 +0,0 @@
'use strict';
// Minimal self-contained harness for parseArgs: no test framework, just
// Node's built-in assert plus pass/fail counters.
const assert = require('assert');
// parseArgs is the unit under test, exported by the orchestration-status CLI.
const { parseArgs } = require('../../scripts/orchestration-status');
console.log('=== Testing orchestration-status.js ===\n');
// Shared counters, updated by the test() helper and reported in the summary.
let passed = 0;
let failed = 0;
// Run a single named check. A check passes when its callback returns without
// throwing; the outcome is logged and recorded in the shared counters.
function test(desc, fn) {
  try {
    fn();
  } catch (error) {
    failed++;
    console.log(`${desc}: ${error.message}`);
    return;
  }
  passed++;
  console.log(`${desc}`);
}
// --- parseArgs behavior ---

test('parseArgs reads a target with an optional write path', () => {
  const argv = [
    'node',
    'scripts/orchestration-status.js',
    'workflow-visual-proof',
    '--write',
    '/tmp/snapshot.json'
  ];
  assert.deepStrictEqual(parseArgs(argv), {
    target: 'workflow-visual-proof',
    writePath: '/tmp/snapshot.json'
  });
});

test('parseArgs does not treat the write path as the target', () => {
  // The flag value precedes the positional target here; parsing must not
  // confuse the two.
  const argv = [
    'node',
    'scripts/orchestration-status.js',
    '--write',
    '/tmp/snapshot.json',
    'workflow-visual-proof'
  ];
  assert.deepStrictEqual(parseArgs(argv), {
    target: 'workflow-visual-proof',
    writePath: '/tmp/snapshot.json'
  });
});

test('parseArgs rejects missing write values and unknown flags', () => {
  const missingValueArgv = [
    'node',
    'scripts/orchestration-status.js',
    'workflow-visual-proof',
    '--write'
  ];
  assert.throws(() => parseArgs(missingValueArgv), /--write requires an output path/);

  const unknownFlagArgv = [
    'node',
    'scripts/orchestration-status.js',
    'workflow-visual-proof',
    '--unknown'
  ];
  assert.throws(() => parseArgs(unknownFlagArgv), /Unknown flag/);
});

test('parseArgs rejects multiple positional targets', () => {
  assert.throws(
    () => parseArgs(['node', 'scripts/orchestration-status.js', 'first', 'second']),
    /Expected a single session name or plan path/
  );
});

// --- summary ---

console.log(`\n=== Results: ${passed} passed, ${failed} failed ===`);
if (failed > 0) process.exit(1);