feat: sync the codex baseline and agent roles

This commit is contained in:
Affaan Mustafa
2026-04-01 16:08:03 -07:00
parent dba5ae779b
commit 9a6080f2e1
5 changed files with 406 additions and 23 deletions

View File

@@ -61,6 +61,7 @@ Public ECC plugin repo for agents, skills, commands, hooks, rules, install surfa
- `#844` ui-demo skill
- `#1110` install-time Claude hook root resolution
- `#1106` portable Codex Context7 key extraction
- `#1107` Codex baseline merge and sample agent-role sync
- Port or rebuild inside ECC after full audit:
- `#894` Jira integration
- `#814` + `#808` rebuild as a single consolidated notifications lane for Opencode and cross-harness surfaces
@@ -102,3 +103,4 @@ Keep this file detailed for only the current sprint, blockers, and next actions.
- 2026-04-01: Direct-ported the real fix from the unresolved hook-path PR lane into the active installer. Claude installs now replace `${CLAUDE_PLUGIN_ROOT}` with the concrete install root in both `settings.json` and the copied `hooks/hooks.json`, which keeps PreToolUse/PostToolUse hooks working outside plugin-managed env injection.
- 2026-04-01: Replaced the GNU-only `grep -P` parser in `scripts/sync-ecc-to-codex.sh` with a portable Node parser for Context7 key extraction. Added source-level regression coverage so BSD/macOS syncs do not drift back to non-portable parsing.
- 2026-04-01: Targeted regression suite after the direct ports is green: `tests/scripts/install-apply.test.js`, `tests/scripts/sync-ecc-to-codex.test.js`, and `tests/scripts/codex-hooks.test.js`.
- 2026-04-01: Ported the useful core of `#1107` directly into `main` as an add-only Codex baseline merge. `scripts/sync-ecc-to-codex.sh` now fills missing non-MCP defaults from `.codex/config.toml`, syncs sample agent role files into `~/.codex/agents`, and preserves user config instead of replacing it. Added regression coverage for sparse configs and implicit parent tables.

View File

@@ -80,6 +80,7 @@
"scripts/orchestrate-worktrees.js",
"scripts/setup-package-manager.js",
"scripts/skill-create-output.js",
"scripts/codex/merge-codex-config.js",
"scripts/codex/merge-mcp-config.js",
"scripts/repair.js",
"scripts/harness-audit.js",

View File

@@ -0,0 +1,317 @@
#!/usr/bin/env node
'use strict';
/**
* Merge the non-MCP Codex baseline from `.codex/config.toml` into a target
* `config.toml` without overwriting existing user choices.
*
* Strategy: add-only.
* - Missing root keys are inserted before the first TOML table.
* - Missing table keys are appended to existing tables.
* - Missing tables are appended to the end of the file.
*/
const fs = require('fs');
const path = require('path');
let TOML;
try {
  // @iarna/toml is the script's only non-stdlib dependency; it is loaded in a
  // try/catch so a missing install fails with an actionable message instead
  // of a bare MODULE_NOT_FOUND stack trace.
  TOML = require('@iarna/toml');
} catch {
  console.error('[ecc-codex] Missing dependency: @iarna/toml');
  console.error('[ecc-codex] Run: npm install (from the ECC repo root)');
  process.exit(1);
}
// Root-level scalar keys the baseline may contribute when the target lacks them.
const ROOT_KEYS = ['approval_policy', 'sandbox_mode', 'web_search', 'notify', 'persistent_instructions'];
// Dotted table paths checked for missing tables and missing scalar keys.
// Parent tables ('agents') are listed before their children ('agents.explorer')
// so parents are merged first.
const TABLE_PATHS = [
  'features',
  'profiles.strict',
  'profiles.yolo',
  'agents',
  'agents.explorer',
  'agents.reviewer',
  'agents.docs_researcher',
];
// Matches a standalone TOML header line — [table] or [[array-of-tables]] —
// optionally followed by a trailing comment. /m so ^/$ anchor per line.
const TOML_HEADER_RE = /^[ \t]*(?:\[[^[\]\n][^\]\n]*\]|\[\[[^[\]\n][^\]\n]*\]\])[ \t]*(?:#.*)?$/m;
/** Print an informational line with the shared `[ecc-codex]` prefix. */
function log(message) {
  const line = `[ecc-codex] ${message}`;
  console.log(line);
}
/** Print a warning line with the shared `[ecc-codex] WARNING:` prefix. */
function warn(message) {
  const line = `[ecc-codex] WARNING: ${message}`;
  console.warn(line);
}
/**
 * Walk `obj` along `pathParts` and return the value found there, or
 * `undefined` as soon as any step is missing or not an object.
 * An empty path returns `obj` itself.
 */
function getNested(obj, pathParts) {
  let node = obj;
  for (const segment of pathParts) {
    const traversable = node !== null && typeof node === 'object';
    if (!traversable || !(segment in node)) {
      return undefined;
    }
    node = node[segment];
  }
  return node;
}
/**
 * Assign `value` at the location `pathParts` describes inside `obj`,
 * creating (or replacing with) plain objects for any intermediate step that
 * is missing, non-object, or an array. Mutates `obj` in place.
 */
function setNested(obj, pathParts, value) {
  const lastIndex = pathParts.length - 1;
  let node = obj;
  pathParts.slice(0, lastIndex).forEach((segment) => {
    const child = node[segment];
    if (child === null || typeof child !== 'object' || Array.isArray(child)) {
      node[segment] = {};
    }
    node = node[segment];
  });
  node[pathParts[lastIndex]] = value;
}
/** Index of the first TOML table header line in `raw`, or -1 when none exists. */
function findFirstTableIndex(raw) {
  const hit = TOML_HEADER_RE.exec(raw);
  if (!hit) {
    return -1;
  }
  return hit.index;
}
/**
 * Locate the body of the standalone `[tablePath]` table inside `raw`.
 * Returns `{ bodyStart, bodyEnd }` character offsets (body runs from the line
 * after the header up to the next header or end of file), or null when the
 * table has no standalone header line.
 */
function findTableRange(raw, tablePath) {
  const escapedPath = tablePath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const headerRe = new RegExp(`^[ \\t]*\\[${escapedPath}\\][ \\t]*(?:#.*)?$`, 'm');
  const headerMatch = headerRe.exec(raw);
  if (!headerMatch) {
    return null;
  }
  const newlineAfterHeader = raw.indexOf('\n', headerMatch.index);
  const bodyStart = newlineAfterHeader === -1 ? raw.length : newlineAfterHeader + 1;
  const rest = raw.slice(bodyStart);
  const nextHeaderOffset = rest.search(TOML_HEADER_RE);
  const bodyEnd = nextHeaderOffset === -1 ? raw.length : bodyStart + nextHeaderOffset;
  return { bodyStart, bodyEnd };
}
/** Return `text` guaranteed to end with exactly one appended `\n` if absent. */
function ensureTrailingNewline(text) {
  if (text.endsWith('\n')) {
    return text;
  }
  return `${text}\n`;
}
/**
 * Insert `block` (root-level key assignments) into `raw` ahead of the first
 * TOML table header so the keys remain part of the root table. When `raw`
 * contains no table headers, the block is appended after the existing content.
 */
function insertBeforeFirstTable(raw, block) {
  const normalizedBlock = ensureTrailingNewline(block.trimEnd());
  const firstTableIndex = findFirstTableIndex(raw);
  if (firstTableIndex === -1) {
    const prefix = raw.trimEnd();
    return prefix ? `${prefix}\n${normalizedBlock}` : normalizedBlock;
  }
  const before = raw.slice(0, firstTableIndex).trimEnd();
  const after = raw.slice(firstTableIndex).replace(/^\n+/, '');
  if (!before) {
    // Fix: when the file starts directly with a table header there is no
    // root-key prefix; the previous unconditional `${before}\n\n…` template
    // emitted two spurious blank lines at the very top of the file.
    return `${normalizedBlock}\n${after}`;
  }
  return `${before}\n\n${normalizedBlock}\n${after}`;
}
/**
 * Append `block` to the end of `raw`, separated by one blank line, and end
 * the result with a single trailing newline. An effectively-empty `raw`
 * yields just the block.
 */
function appendBlock(raw, block) {
  const trimmedRaw = raw.trimEnd();
  const trimmedBlock = block.trimEnd();
  if (!trimmedRaw) {
    return `${trimmedBlock}\n`;
  }
  return `${trimmedRaw}\n\n${trimmedBlock}\n`;
}
/**
 * Serialize a single TOML value (e.g. for the right-hand side of an inline
 * table entry) by round-tripping it through `TOML.stringify` under a dummy
 * `value` key and stripping that key back off.
 */
function stringifyValue(value) {
  const serialized = TOML.stringify({ value });
  return serialized.trim().replace(/^value = /, '');
}
/**
 * Add `missingKeys` to an inline table (`key = { ... }`) declared inside the
 * parent table of `tablePath`. Returns the updated raw text, or null when the
 * path has no parent, the parent table is absent, or no matching inline-table
 * line is found (the caller then falls back to appending an explicit table).
 *
 * NOTE(review): matching is line-based and closes the inline table at a `}`
 * near the end of the line — presumably fine for the baseline's simple
 * values, but a `}` or `#` inside a string value could confuse it; verify if
 * richer values ever land in `.codex/config.toml`.
 */
function updateInlineTableKeys(raw, tablePath, missingKeys) {
  const pathParts = tablePath.split('.');
  if (pathParts.length < 2) {
    return null;
  }
  const parentPath = pathParts.slice(0, -1).join('.');
  const parentRange = findTableRange(raw, parentPath);
  if (!parentRange) {
    return null;
  }
  const tableKey = pathParts[pathParts.length - 1];
  const escapedKey = tableKey.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  // Hoisted out of the per-line loop: the pattern depends only on the
  // escaped key name, not on the line being scanned.
  const inlinePattern = new RegExp(`^(\\s*${escapedKey}\\s*=\\s*\\{)(.*?)(\\}\\s*(?:#.*)?)$`);
  const body = raw.slice(parentRange.bodyStart, parentRange.bodyEnd);
  const lines = body.split('\n');
  for (let index = 0; index < lines.length; index += 1) {
    const match = inlinePattern.exec(lines[index]);
    if (!match) {
      continue;
    }
    // Serialize the missing entries and splice them after any existing ones.
    const additions = Object.entries(missingKeys)
      .map(([key, value]) => `${key} = ${stringifyValue(value)}`)
      .join(', ');
    const existingEntries = match[2].trim();
    const nextEntries = existingEntries ? `${existingEntries}, ${additions}` : additions;
    lines[index] = `${match[1]}${nextEntries}${match[3]}`;
    return `${raw.slice(0, parentRange.bodyStart)}${lines.join('\n')}${raw.slice(parentRange.bodyEnd)}`;
  }
  return null;
}
/**
 * Append `[tablePath]` with `missingKeys` as an explicit table at the end of
 * `raw`. The candidate result is validated with a TOML parse round-trip;
 * null is returned when the appended text would not parse (e.g. it would
 * redefine an already-declared table).
 */
function appendImplicitTable(raw, tablePath, missingKeys) {
  const block = stringifyTable(tablePath, missingKeys);
  const candidate = appendBlock(raw, block);
  try {
    TOML.parse(candidate);
  } catch {
    return null;
  }
  return candidate;
}
/**
 * Append `block` (pre-serialized key lines) to the body of `[tablePath]`.
 * When the table has no standalone header and `missingKeys` is provided,
 * fall back to updating an inline-table form, then to appending an explicit
 * table; otherwise warn and return `raw` unchanged.
 */
function appendToTable(raw, tablePath, block, missingKeys = null) {
  const range = findTableRange(raw, tablePath);
  if (range) {
    // Standalone [header] exists: splice the new lines onto its body.
    const head = raw.slice(0, range.bodyEnd).trimEnd();
    const tail = raw.slice(range.bodyEnd).replace(/^\n*/, '\n');
    return `${head}\n${block.trimEnd()}\n${tail}`;
  }
  if (missingKeys) {
    const inlineResult = updateInlineTableKeys(raw, tablePath, missingKeys);
    if (inlineResult) {
      return inlineResult;
    }
    const appendedResult = appendImplicitTable(raw, tablePath, missingKeys);
    if (appendedResult) {
      return appendedResult;
    }
  }
  warn(`Skipping missing keys for [${tablePath}] because it has no standalone header and could not be safely updated`);
  return raw;
}
/** Serialize a flat object of root-level keys as trimmed TOML assignments. */
function stringifyRootKeys(keys) {
  const serialized = TOML.stringify(keys);
  return serialized.trim();
}
/**
 * Serialize `[tablePath]` with only the scalar/array entries of `value`;
 * nested sub-tables are dropped (each has its own entry in TABLE_PATHS).
 */
function stringifyTable(tablePath, value) {
  const scalarEntries = Object.entries(value).filter(
    ([, entryValue]) => !(entryValue && typeof entryValue === 'object' && !Array.isArray(entryValue))
  );
  const snippet = {};
  setNested(snippet, tablePath.split('.'), Object.fromEntries(scalarEntries));
  return TOML.stringify(snippet).trim();
}
/**
 * Serialize the scalar/array entries of a table object as one `key = value`
 * TOML line each (no header), skipping nested sub-tables.
 */
function stringifyTableKeys(tableValue) {
  const rendered = [];
  for (const [key, value] of Object.entries(tableValue)) {
    const isNestedTable = value && typeof value === 'object' && !Array.isArray(value);
    if (isNestedTable) {
      continue;
    }
    rendered.push(TOML.stringify({ [key]: value }).trim());
  }
  return rendered.join('\n');
}
/**
 * CLI entry point.
 *
 * Usage: merge-codex-config.js <config.toml> [--dry-run]
 *
 * Diffs the repo's reference baseline (`.codex/config.toml`) against the
 * target config and applies only the missing pieces, as raw-text splices so
 * existing user formatting and comments are preserved:
 *   1. missing root keys  -> inserted before the first table header
 *   2. missing table keys -> appended to the existing table (or inline form)
 *   3. missing tables     -> appended to the end of the file
 * Exits non-zero on usage errors, missing files, or unparseable TOML.
 */
function main() {
  const args = process.argv.slice(2);
  // First non-flag argument is the target config path.
  const configPath = args.find(arg => !arg.startsWith('-'));
  const dryRun = args.includes('--dry-run');
  if (!configPath) {
    console.error('Usage: merge-codex-config.js <config.toml> [--dry-run]');
    process.exit(1);
  }
  // The reference baseline ships with the repo at .codex/config.toml,
  // resolved relative to this script's directory (scripts/codex/).
  const referencePath = path.join(__dirname, '..', '..', '.codex', 'config.toml');
  if (!fs.existsSync(referencePath)) {
    console.error(`[ecc-codex] Reference config not found: ${referencePath}`);
    process.exit(1);
  }
  if (!fs.existsSync(configPath)) {
    console.error(`[ecc-codex] Config file not found: ${configPath}`);
    process.exit(1);
  }
  const raw = fs.readFileSync(configPath, 'utf8');
  const referenceRaw = fs.readFileSync(referencePath, 'utf8');
  let targetConfig;
  let referenceConfig;
  try {
    targetConfig = TOML.parse(raw);
    referenceConfig = TOML.parse(referenceRaw);
  } catch (error) {
    console.error(`[ecc-codex] Failed to parse TOML: ${error.message}`);
    process.exit(1);
  }
  // Root keys present in the reference but absent from the target.
  const missingRootKeys = {};
  for (const key of ROOT_KEYS) {
    if (referenceConfig[key] !== undefined && targetConfig[key] === undefined) {
      missingRootKeys[key] = referenceConfig[key];
    }
  }
  // Whole tables the target lacks, and per-table scalar keys it lacks.
  const missingTables = [];
  const missingTableKeys = [];
  for (const tablePath of TABLE_PATHS) {
    const pathParts = tablePath.split('.');
    const referenceValue = getNested(referenceConfig, pathParts);
    if (referenceValue === undefined) {
      continue;
    }
    const targetValue = getNested(targetConfig, pathParts);
    if (targetValue === undefined) {
      missingTables.push(tablePath);
      continue;
    }
    // Only scalar/array entries are diffed here; nested sub-tables are
    // covered by their own entries in TABLE_PATHS.
    const missingKeys = {};
    for (const [key, value] of Object.entries(referenceValue)) {
      if (value && typeof value === 'object' && !Array.isArray(value)) {
        continue;
      }
      if (targetValue[key] === undefined) {
        missingKeys[key] = value;
      }
    }
    if (Object.keys(missingKeys).length > 0) {
      missingTableKeys.push({ tablePath, missingKeys });
    }
  }
  if (
    Object.keys(missingRootKeys).length === 0 &&
    missingTables.length === 0 &&
    missingTableKeys.length === 0
  ) {
    log('All baseline Codex settings already present. Nothing to do.');
    return;
  }
  // Apply edits in a fixed order: root keys first (before the first table),
  // then per-table keys, then whole missing tables at the end of the file.
  let nextRaw = raw;
  if (Object.keys(missingRootKeys).length > 0) {
    log(` [add-root] ${Object.keys(missingRootKeys).join(', ')}`);
    nextRaw = insertBeforeFirstTable(nextRaw, stringifyRootKeys(missingRootKeys));
  }
  for (const { tablePath, missingKeys } of missingTableKeys) {
    log(` [add-keys] [${tablePath}] -> ${Object.keys(missingKeys).join(', ')}`);
    nextRaw = appendToTable(nextRaw, tablePath, stringifyTableKeys(missingKeys), missingKeys);
  }
  for (const tablePath of missingTables) {
    log(` [add-table] [${tablePath}]`);
    nextRaw = appendBlock(nextRaw, stringifyTable(tablePath, getNested(referenceConfig, tablePath.split('.'))));
  }
  if (dryRun) {
    log('Dry run — would write the merged Codex baseline.');
    return;
  }
  fs.writeFileSync(configPath, nextRaw, 'utf8');
  log('Done. Baseline Codex settings merged.');
}
main();

View File

@@ -27,8 +27,11 @@ CONFIG_FILE="$CODEX_HOME/config.toml"
AGENTS_FILE="$CODEX_HOME/AGENTS.md"
AGENTS_ROOT_SRC="$REPO_ROOT/AGENTS.md"
AGENTS_CODEX_SUPP_SRC="$REPO_ROOT/.codex/AGENTS.md"
CODEX_AGENTS_SRC="$REPO_ROOT/.codex/agents"
CODEX_AGENTS_DEST="$CODEX_HOME/agents"
PROMPTS_SRC="$REPO_ROOT/commands"
PROMPTS_DEST="$CODEX_HOME/prompts"
BASELINE_MERGE_SCRIPT="$REPO_ROOT/scripts/codex/merge-codex-config.js"
HOOKS_INSTALLER="$REPO_ROOT/scripts/codex/install-global-git-hooks.sh"
SANITY_CHECKER="$REPO_ROOT/scripts/codex/check-codex-global-state.sh"
CURSOR_RULES_DIR="$REPO_ROOT/.cursor/rules"
@@ -146,7 +149,9 @@ MCP_MERGE_SCRIPT="$REPO_ROOT/scripts/codex/merge-mcp-config.js"
require_path "$REPO_ROOT/AGENTS.md" "ECC AGENTS.md"
require_path "$AGENTS_CODEX_SUPP_SRC" "ECC Codex AGENTS supplement"
require_path "$CODEX_AGENTS_SRC" "ECC Codex agent roles"
require_path "$PROMPTS_SRC" "ECC commands directory"
require_path "$BASELINE_MERGE_SCRIPT" "ECC Codex baseline merge script"
require_path "$HOOKS_INSTALLER" "ECC global git hooks installer"
require_path "$SANITY_CHECKER" "ECC global sanity checker"
require_path "$CURSOR_RULES_DIR" "ECC Cursor rules directory"
@@ -247,6 +252,26 @@ else
fi
fi
log "Merging ECC Codex baseline into $CONFIG_FILE (add-only, preserving user config)"
if [[ "$MODE" == "dry-run" ]]; then
node "$BASELINE_MERGE_SCRIPT" "$CONFIG_FILE" --dry-run
else
node "$BASELINE_MERGE_SCRIPT" "$CONFIG_FILE"
fi
log "Syncing sample Codex agent role files"
run_or_echo mkdir -p "$CODEX_AGENTS_DEST"
for agent_file in "$CODEX_AGENTS_SRC"/*.toml; do
[[ -f "$agent_file" ]] || continue
agent_name="$(basename "$agent_file")"
dest="$CODEX_AGENTS_DEST/$agent_name"
if [[ -e "$dest" ]]; then
log "Keeping existing Codex agent role file: $dest"
else
run_or_echo cp "$agent_file" "$dest"
fi
done
# Skills are NOT synced here — Codex CLI reads directly from
# ~/.agents/skills/ (installed by ECC installer / npx skills).
# Copying into ~/.codex/skills/ was unnecessary.

View File

@@ -7,6 +7,7 @@ const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawnSync } = require('child_process');
const TOML = require('@iarna/toml');
const repoRoot = path.join(__dirname, '..', '..');
const installScript = path.join(repoRoot, 'scripts', 'codex', 'install-global-git-hooks.sh');
@@ -93,29 +94,16 @@ if (os.platform() === 'win32') {
else failed++;
if (
test('sync preserves baseline config and accepts the legacy context7 MCP section', () => {
test('sync installs the missing Codex baseline and accepts the legacy context7 MCP section', () => {
const homeDir = createTempDir('codex-sync-home-');
const codexDir = path.join(homeDir, '.codex');
const configPath = path.join(codexDir, 'config.toml');
const agentsPath = path.join(codexDir, 'AGENTS.md');
const config = [
'approval_policy = "on-request"',
'sandbox_mode = "workspace-write"',
'web_search = "live"',
'persistent_instructions = ""',
'',
'[features]',
'multi_agent = true',
'',
'[profiles.strict]',
'approval_policy = "on-request"',
'sandbox_mode = "read-only"',
'web_search = "cached"',
'',
'[profiles.yolo]',
'approval_policy = "never"',
'sandbox_mode = "workspace-write"',
'web_search = "live"',
'[agents]',
'explorer = { description = "Read-only codebase explorer for gathering evidence before changes are proposed." }',
'',
'[mcp_servers.context7]',
'command = "npx"',
@@ -147,13 +135,63 @@ if (
assert.match(syncedAgents, /^# Codex Supplement \(From ECC \.codex\/AGENTS\.md\)/m);
const syncedConfig = fs.readFileSync(configPath, 'utf8');
assert.match(syncedConfig, /^multi_agent\s*=\s*true$/m);
assert.match(syncedConfig, /^\[profiles\.strict\]$/m);
assert.match(syncedConfig, /^\[profiles\.yolo\]$/m);
assert.match(syncedConfig, /^\[mcp_servers\.github\]$/m);
assert.match(syncedConfig, /^\[mcp_servers\.memory\]$/m);
assert.match(syncedConfig, /^\[mcp_servers\.sequential-thinking\]$/m);
assert.match(syncedConfig, /^\[mcp_servers\.context7\]$/m);
const parsedConfig = TOML.parse(syncedConfig);
assert.strictEqual(parsedConfig.approval_policy, 'on-request');
assert.strictEqual(parsedConfig.sandbox_mode, 'workspace-write');
assert.strictEqual(parsedConfig.web_search, 'live');
assert.ok(!Object.prototype.hasOwnProperty.call(parsedConfig, 'multi_agent'));
assert.ok(parsedConfig.features);
assert.strictEqual(parsedConfig.features.multi_agent, true);
assert.ok(parsedConfig.profiles);
assert.strictEqual(parsedConfig.profiles.strict.approval_policy, 'on-request');
assert.strictEqual(parsedConfig.profiles.yolo.approval_policy, 'never');
assert.ok(parsedConfig.agents);
assert.strictEqual(parsedConfig.agents.max_threads, 6);
assert.strictEqual(parsedConfig.agents.max_depth, 1);
assert.strictEqual(parsedConfig.agents.explorer.config_file, 'agents/explorer.toml');
assert.strictEqual(parsedConfig.agents.reviewer.config_file, 'agents/reviewer.toml');
assert.strictEqual(parsedConfig.agents.docs_researcher.config_file, 'agents/docs-researcher.toml');
assert.ok(parsedConfig.mcp_servers.exa);
assert.ok(parsedConfig.mcp_servers.github);
assert.ok(parsedConfig.mcp_servers.memory);
assert.ok(parsedConfig.mcp_servers['sequential-thinking']);
assert.ok(parsedConfig.mcp_servers.context7);
for (const roleFile of ['explorer.toml', 'reviewer.toml', 'docs-researcher.toml']) {
assert.ok(fs.existsSync(path.join(codexDir, 'agents', roleFile)));
}
} finally {
cleanup(homeDir);
}
})
)
passed++;
else failed++;
if (
test('sync adds parent-table keys when the target only declares an implicit parent table', () => {
const homeDir = createTempDir('codex-sync-implicit-parent-home-');
const codexDir = path.join(homeDir, '.codex');
const configPath = path.join(codexDir, 'config.toml');
const config = [
'persistent_instructions = ""',
'',
'[agents.explorer]',
'description = "Read-only codebase explorer for gathering evidence before changes are proposed."',
'',
].join('\n');
try {
fs.mkdirSync(codexDir, { recursive: true });
fs.writeFileSync(configPath, config);
const syncResult = runBash(syncScript, [], makeHermeticCodexEnv(homeDir, codexDir));
assert.strictEqual(syncResult.status, 0, `${syncResult.stdout}\n${syncResult.stderr}`);
const parsedConfig = TOML.parse(fs.readFileSync(configPath, 'utf8'));
assert.strictEqual(parsedConfig.agents.max_threads, 6);
assert.strictEqual(parsedConfig.agents.max_depth, 1);
assert.strictEqual(parsedConfig.agents.explorer.config_file, 'agents/explorer.toml');
} finally {
cleanup(homeDir);
}