4 Commits

Author SHA1 Message Date
Affaan Mustafa
d5371d28aa feat: add skill evolution foundation (#514) 2026-03-15 21:47:39 -07:00
Affaan Mustafa
131f977841 feat: strengthen install lifecycle and target adapters (#512)
* fix: strengthen install lifecycle adapters

* fix: restore template content on uninstall
2026-03-15 21:47:31 -07:00
Affaan Mustafa
1e0238de96 feat: wire manifest resolution into install execution (#509) 2026-03-15 21:47:22 -07:00
Affaan Mustafa
8878c6d6b0 fix: harden observer hooks and test discovery (#513) 2026-03-15 21:47:15 -07:00
32 changed files with 4289 additions and 350 deletions

View File

@@ -232,7 +232,9 @@
"hooks": [
{
"type": "command",
"command": "node \"${CLAUDE_PLUGIN_ROOT}/scripts/hooks/run-with-flags.js\" \"session:end:marker\" \"scripts/hooks/session-end-marker.js\" \"minimal,standard,strict\""
"command": "node \"${CLAUDE_PLUGIN_ROOT}/scripts/hooks/run-with-flags.js\" \"session:end:marker\" \"scripts/hooks/session-end-marker.js\" \"minimal,standard,strict\"",
"async": true,
"timeout": 10
}
],
"description": "Session end lifecycle marker (non-blocking)"

View File

@@ -1,83 +1,208 @@
#!/usr/bin/env node
/**
* Catalog agents, commands, and skills from the repo.
* Outputs JSON with counts and lists for CI/docs sync.
* Verify repo catalog counts against README.md and AGENTS.md.
*
* Usage: node scripts/ci/catalog.js [--json|--md]
* Default: --json to stdout
* Usage:
* node scripts/ci/catalog.js
* node scripts/ci/catalog.js --json
* node scripts/ci/catalog.js --md
* node scripts/ci/catalog.js --text
*/
'use strict';
const fs = require('fs');
const path = require('path');
const ROOT = path.join(__dirname, '../..');
const AGENTS_DIR = path.join(ROOT, 'agents');
const COMMANDS_DIR = path.join(ROOT, 'commands');
const SKILLS_DIR = path.join(ROOT, 'skills');
const README_PATH = path.join(ROOT, 'README.md');
const AGENTS_PATH = path.join(ROOT, 'AGENTS.md');
function listAgents() {
if (!fs.existsSync(AGENTS_DIR)) return [];
try {
return fs.readdirSync(AGENTS_DIR)
.filter(f => f.endsWith('.md'))
.map(f => f.slice(0, -3))
.sort();
} catch (error) {
throw new Error(`Failed to read agents directory (${AGENTS_DIR}): ${error.message}`);
}
// Output format selected from CLI flags; JSON is the default when neither
// --md nor --text is present.
const OUTPUT_MODE = (() => {
  const args = process.argv;
  if (args.includes('--md')) {
    return 'md';
  }
  if (args.includes('--text')) {
    return 'text';
  }
  return 'json';
})();
// Convert OS-specific path separators to forward slashes for stable output.
function normalizePathSegments(relativePath) {
  return relativePath.replaceAll(path.sep, '/');
}
function listCommands() {
if (!fs.existsSync(COMMANDS_DIR)) return [];
try {
return fs.readdirSync(COMMANDS_DIR)
.filter(f => f.endsWith('.md'))
.map(f => f.slice(0, -3))
.sort();
} catch (error) {
throw new Error(`Failed to read commands directory (${COMMANDS_DIR}): ${error.message}`);
// List entries under ROOT/relativeDir accepted by `matcher`, returned as
// sorted repo-relative paths with forward-slash separators.
function listMatchingFiles(relativeDir, matcher) {
  const absoluteDir = path.join(ROOT, relativeDir);
  if (!fs.existsSync(absoluteDir)) {
    return [];
  }
  const matchedPaths = [];
  for (const entry of fs.readdirSync(absoluteDir, { withFileTypes: true })) {
    if (matcher(entry)) {
      matchedPaths.push(normalizePathSegments(path.join(relativeDir, entry.name)));
    }
  }
  return matchedPaths.sort();
}
// List skill directory names; a directory only counts as a skill when it
// contains a SKILL.md file. Returns a sorted array of names.
function listSkills() {
  if (!fs.existsSync(SKILLS_DIR)) {
    return [];
  }
  try {
    const skillNames = [];
    for (const entry of fs.readdirSync(SKILLS_DIR, { withFileTypes: true })) {
      if (!entry.isDirectory()) {
        continue;
      }
      if (!fs.existsSync(path.join(SKILLS_DIR, entry.name, 'SKILL.md'))) {
        continue;
      }
      skillNames.push(entry.name);
    }
    return skillNames.sort();
  } catch (error) {
    throw new Error(`Failed to read skills directory (${SKILLS_DIR}): ${error.message}`);
  }
}
function buildCatalog() {
const agents = listMatchingFiles('agents', entry => entry.isFile() && entry.name.endsWith('.md'));
const commands = listMatchingFiles('commands', entry => entry.isFile() && entry.name.endsWith('.md'));
const skills = listMatchingFiles('skills', entry => entry.isDirectory() && fs.existsSync(path.join(ROOT, 'skills', entry.name, 'SKILL.md')))
.map(skillDir => `${skillDir}/SKILL.md`);
function run() {
const agents = listAgents();
const commands = listCommands();
const skills = listSkills();
const catalog = {
agents: { count: agents.length, list: agents },
commands: { count: commands.length, list: commands },
skills: { count: skills.length, list: skills }
return {
agents: { count: agents.length, files: agents, glob: 'agents/*.md' },
commands: { count: commands.length, files: commands, glob: 'commands/*.md' },
skills: { count: skills.length, files: skills, glob: 'skills/*/SKILL.md' }
};
}
const format = process.argv[2] === '--md' ? 'md' : 'json';
if (format === 'md') {
console.log('# ECC Catalog (generated)\n');
console.log(`- **Agents:** ${catalog.agents.count}`);
console.log(`- **Commands:** ${catalog.commands.count}`);
console.log(`- **Skills:** ${catalog.skills.count}\n`);
console.log('## Agents\n');
catalog.agents.list.forEach(a => { console.log(`- ${a}`); });
console.log('\n## Commands\n');
catalog.commands.list.forEach(c => { console.log(`- ${c}`); });
console.log('\n## Skills\n');
catalog.skills.list.forEach(s => { console.log(`- ${s}`); });
} else {
console.log(JSON.stringify(catalog, null, 2));
/**
 * Read a file as UTF-8, wrapping low-level errors in a short message.
 * @param {string} filePath - Path of the file to read.
 * @returns {string} File contents decoded as UTF-8.
 * @throws {Error} When the file cannot be read; the original error is
 *   preserved as `cause` so the full stack/errno is not lost.
 */
function readFileOrThrow(filePath) {
  try {
    return fs.readFileSync(filePath, 'utf8');
  } catch (error) {
    throw new Error(`Failed to read ${path.basename(filePath)}: ${error.message}`, { cause: error });
  }
}
run();
// Extract documented catalog counts from README.md: the quick-start summary
// sentence plus the three comparison-table rows. Throws when any is missing.
function parseReadmeExpectations(readmeContent) {
  const quickStartMatch = readmeContent.match(/access to\s+(\d+)\s+agents,\s+(\d+)\s+skills,\s+and\s+(\d+)\s+commands/i);
  if (!quickStartMatch) {
    throw new Error('README.md is missing the quick-start catalog summary');
  }
  // Capture groups 1..3 map to agents, skills, commands in that order.
  const quickStartExpectations = ['agents', 'skills', 'commands'].map((category, index) => ({
    category,
    mode: 'exact',
    expected: Number(quickStartMatch[index + 1]),
    source: 'README.md quick-start summary',
  }));
  const tablePatterns = [
    { category: 'agents', regex: /\|\s*Agents\s*\|\s*✅\s*(\d+)\s+agents\s*\|/i, source: 'README.md comparison table' },
    { category: 'commands', regex: /\|\s*Commands\s*\|\s*✅\s*(\d+)\s+commands\s*\|/i, source: 'README.md comparison table' },
    { category: 'skills', regex: /\|\s*Skills\s*\|\s*✅\s*(\d+)\s+skills\s*\|/i, source: 'README.md comparison table' },
  ];
  const tableExpectations = tablePatterns.map(({ category, regex, source }) => {
    const match = readmeContent.match(regex);
    if (!match) {
      throw new Error(`${source} is missing the ${category} row`);
    }
    return {
      category,
      mode: 'exact',
      expected: Number(match[1]),
      source: `${source} (${category})`,
    };
  });
  return [...quickStartExpectations, ...tableExpectations];
}
// Extract documented counts from the AGENTS.md summary line. A trailing "+"
// on the skills figure marks it as a floor (mode 'minimum') rather than exact.
function parseAgentsDocExpectations(agentsContent) {
  const summaryMatch = agentsContent.match(/providing\s+(\d+)\s+specialized agents,\s+(\d+)(\+)?\s+skills,\s+(\d+)\s+commands/i);
  if (!summaryMatch) {
    throw new Error('AGENTS.md is missing the catalog summary line');
  }
  const [, agentCount, skillCount, skillPlus, commandCount] = summaryMatch;
  const source = 'AGENTS.md summary';
  return [
    { category: 'agents', mode: 'exact', expected: Number(agentCount), source },
    { category: 'skills', mode: skillPlus ? 'minimum' : 'exact', expected: Number(skillCount), source },
    { category: 'commands', mode: 'exact', expected: Number(commandCount), source },
  ];
}
// Compare each documented expectation against the actual catalog count and
// annotate it with `actual` and a pass/fail flag.
function evaluateExpectations(catalog, expectations) {
  return expectations.map(expectation => {
    const actual = catalog[expectation.category].count;
    let ok;
    if (expectation.mode === 'minimum') {
      ok = actual >= expectation.expected;
    } else {
      ok = actual === expectation.expected;
    }
    return { ...expectation, actual, ok };
  });
}
// Render one evaluated expectation as a single human-readable line.
function formatExpectation(expectation) {
  const { source, category, mode, expected, actual } = expectation;
  const comparator = mode === 'minimum' ? '>=' : '=';
  return `${source}: ${category} documented ${comparator} ${expected}, actual ${actual}`;
}
// Plain-text report: counts to stdout, then either a success line (stdout)
// or the mismatch list (stderr).
function renderText(result) {
  const { agents, commands, skills } = result.catalog;
  const summaryLines = [
    'Catalog counts:',
    `- agents: ${agents.count}`,
    `- commands: ${commands.count}`,
    `- skills: ${skills.count}`,
    '',
  ];
  for (const line of summaryLines) {
    console.log(line);
  }
  const failed = result.checks.filter(check => !check.ok);
  if (failed.length === 0) {
    console.log('Documentation counts match the repository catalog.');
    return;
  }
  console.error('Documentation count mismatches found:');
  failed.forEach(mismatch => console.error(`- ${formatExpectation(mismatch)}`));
}
// Markdown report: a count table, then either a success line or a
// "## Mismatches" section listing each failing check.
function renderMarkdown(result) {
  const { agents, commands, skills } = result.catalog;
  const tableLines = [
    '# ECC Catalog Verification\n',
    '| Category | Count | Pattern |',
    '| --- | ---: | --- |',
    `| Agents | ${agents.count} | \`${agents.glob}\` |`,
    `| Commands | ${commands.count} | \`${commands.glob}\` |`,
    `| Skills | ${skills.count} | \`${skills.glob}\` |`,
    '',
  ];
  for (const line of tableLines) {
    console.log(line);
  }
  const failed = result.checks.filter(check => !check.ok);
  if (failed.length === 0) {
    console.log('Documentation counts match the repository catalog.');
    return;
  }
  console.log('## Mismatches\n');
  failed.forEach(mismatch => console.log(`- ${formatExpectation(mismatch)}`));
}
// Orchestrates the verification: build the real catalog, parse the documented
// counts out of README.md and AGENTS.md, compare, render in the requested
// format, and exit non-zero on any mismatch.
function main() {
  const catalog = buildCatalog();
  const readmeContent = readFileOrThrow(README_PATH);
  const agentsContent = readFileOrThrow(AGENTS_PATH);
  // Expectations from both docs are checked against the same catalog counts.
  const expectations = [
    ...parseReadmeExpectations(readmeContent),
    ...parseAgentsDocExpectations(agentsContent)
  ];
  const checks = evaluateExpectations(catalog, expectations);
  const result = { catalog, checks };
  if (OUTPUT_MODE === 'json') {
    console.log(JSON.stringify(result, null, 2));
  } else if (OUTPUT_MODE === 'md') {
    renderMarkdown(result);
  } else {
    renderText(result);
  }
  // Any failing check makes the script CI-fail, regardless of output mode.
  if (checks.some(check => !check.ok)) {
    process.exit(1);
  }
}
// Entry point: run the verification and turn any thrown error into a
// single-line message plus a non-zero exit code for CI.
try {
  main();
} catch (error) {
  console.error(`ERROR: ${error.message}`);
  process.exit(1);
}

View File

@@ -8,19 +8,16 @@
const {
SUPPORTED_INSTALL_TARGETS,
listAvailableLanguages,
} = require('./lib/install-executor');
listLegacyCompatibilityLanguages,
} = require('./lib/install-manifests');
const {
LEGACY_INSTALL_TARGETS,
normalizeInstallRequest,
parseInstallArgs,
} = require('./lib/install/request');
const { loadInstallConfig } = require('./lib/install/config');
const { applyInstallPlan } = require('./lib/install/apply');
const { createInstallPlanFromRequest } = require('./lib/install/runtime');
function showHelp(exitCode = 0) {
const languages = listAvailableLanguages();
const languages = listLegacyCompatibilityLanguages();
console.log(`
Usage: install.sh [--target <${LEGACY_INSTALL_TARGETS.join('|')}>] [--dry-run] [--json] <language> [<language> ...]
@@ -61,6 +58,9 @@ function printHumanPlan(plan, dryRun) {
if (plan.mode === 'legacy') {
console.log(`Languages: ${plan.languages.join(', ')}`);
} else {
if (plan.mode === 'legacy-compat') {
console.log(`Legacy languages: ${plan.legacyLanguages.join(', ')}`);
}
console.log(`Profile: ${plan.profileId || '(custom modules)'}`);
console.log(`Included components: ${plan.includedComponentIds.join(', ') || '(none)'}`);
console.log(`Excluded components: ${plan.excludedComponentIds.join(', ') || '(none)'}`);
@@ -100,6 +100,9 @@ function main() {
showHelp(0);
}
const { loadInstallConfig } = require('./lib/install/config');
const { applyInstallPlan } = require('./lib/install-executor');
const { createInstallPlanFromRequest } = require('./lib/install/runtime');
const config = options.configPath
? loadInstallConfig(options.configPath, { cwd: process.cwd() })
: null;

View File

@@ -2,14 +2,14 @@ const fs = require('fs');
const path = require('path');
const { execFileSync } = require('child_process');
const { applyInstallPlan } = require('./install/apply');
const { LEGACY_INSTALL_TARGETS, parseInstallArgs } = require('./install/request');
const {
SUPPORTED_INSTALL_TARGETS,
listLegacyCompatibilityLanguages,
resolveLegacyCompatibilitySelection,
resolveInstallPlan,
} = require('./install-manifests');
const { getInstallTargetAdapter } = require('./install-targets/registry');
const { createInstallState } = require('./install-state');
const LANGUAGE_NAME_PATTERN = /^[a-zA-Z0-9_-]+$/;
const EXCLUDED_GENERATED_SOURCE_SUFFIXES = [
@@ -68,8 +68,11 @@ function readDirectoryNames(dirPath) {
}
function listAvailableLanguages(sourceRoot = getSourceRoot()) {
return readDirectoryNames(path.join(sourceRoot, 'rules'))
.filter(name => name !== 'common');
return [...new Set([
...listLegacyCompatibilityLanguages(),
...readDirectoryNames(path.join(sourceRoot, 'rules'))
.filter(name => name !== 'common'),
])].sort();
}
function validateLegacyTarget(target) {
@@ -108,6 +111,16 @@ function isGeneratedRuntimeSourcePath(sourceRelativePath) {
return EXCLUDED_GENERATED_SOURCE_SUFFIXES.some(suffix => normalizedPath.endsWith(suffix));
}
// Thin bridge to install-state, requiring it lazily at call time.
// NOTE(review): the deferred require presumably breaks a module load cycle — confirm.
function createStatePreview(options) {
  return require('./install-state').createInstallState(options);
}
// Thin bridge to install/apply, requiring it lazily at call time.
// NOTE(review): the deferred require presumably breaks a module load cycle — confirm.
function applyInstallPlan(plan) {
  return require('./install/apply').applyInstallPlan(plan);
}
function buildCopyFileOperation({ moduleId, sourcePath, sourceRelativePath, destinationPath, strategy }) {
return {
kind: 'copy-file',
@@ -449,7 +462,7 @@ function createLegacyInstallPlan(options = {}) {
manifestVersion: getManifestVersion(sourceRoot),
};
const statePreview = createInstallState({
const statePreview = createStatePreview({
adapter: plan.adapter,
targetRoot: plan.targetRoot,
installStatePath: plan.installStatePath,
@@ -485,6 +498,38 @@ function createLegacyInstallPlan(options = {}) {
};
}
// Build a manifest-backed install plan from a legacy language selection.
// Validates the target, resolves the legacy languages to module ids, and
// delegates to createManifestInstallPlan with legacy-compat bookkeeping.
function createLegacyCompatInstallPlan(options = {}) {
  const sourceRoot = options.sourceRoot || getSourceRoot();
  const projectRoot = options.projectRoot || process.cwd();
  const target = options.target || 'claude';
  validateLegacyTarget(target);
  const { moduleIds, legacyLanguages } = resolveLegacyCompatibilitySelection({
    repoRoot: sourceRoot,
    target,
    legacyLanguages: options.legacyLanguages || [],
  });
  return createManifestInstallPlan({
    sourceRoot,
    projectRoot,
    homeDir: options.homeDir,
    target,
    profileId: null,
    moduleIds,
    includeComponentIds: [],
    excludeComponentIds: [],
    legacyLanguages,
    legacyMode: true,
    // The recorded request is intentionally empty: the user asked for legacy
    // languages, not explicit profiles/modules/components.
    requestProfileId: null,
    requestModuleIds: [],
    requestIncludeComponentIds: [],
    requestExcludeComponentIds: [],
    mode: 'legacy-compat',
  });
}
function materializeScaffoldOperation(sourceRoot, operation) {
const sourcePath = path.join(sourceRoot, operation.sourceRelativePath);
if (!fs.existsSync(sourcePath)) {
@@ -526,6 +571,21 @@ function createManifestInstallPlan(options = {}) {
const sourceRoot = options.sourceRoot || getSourceRoot();
const projectRoot = options.projectRoot || process.cwd();
const target = options.target || 'claude';
const legacyLanguages = Array.isArray(options.legacyLanguages)
? [...options.legacyLanguages]
: [];
const requestProfileId = Object.hasOwn(options, 'requestProfileId')
? options.requestProfileId
: (options.profileId || null);
const requestModuleIds = Object.hasOwn(options, 'requestModuleIds')
? [...options.requestModuleIds]
: (Array.isArray(options.moduleIds) ? [...options.moduleIds] : []);
const requestIncludeComponentIds = Object.hasOwn(options, 'requestIncludeComponentIds')
? [...options.requestIncludeComponentIds]
: (Array.isArray(options.includeComponentIds) ? [...options.includeComponentIds] : []);
const requestExcludeComponentIds = Object.hasOwn(options, 'requestExcludeComponentIds')
? [...options.requestExcludeComponentIds]
: (Array.isArray(options.excludeComponentIds) ? [...options.excludeComponentIds] : []);
const plan = resolveInstallPlan({
repoRoot: sourceRoot,
projectRoot,
@@ -543,21 +603,17 @@ function createManifestInstallPlan(options = {}) {
repoCommit: getRepoCommit(sourceRoot),
manifestVersion: getManifestVersion(sourceRoot),
};
const statePreview = createInstallState({
const statePreview = createStatePreview({
adapter,
targetRoot: plan.targetRoot,
installStatePath: plan.installStatePath,
request: {
profile: plan.profileId,
modules: Array.isArray(options.moduleIds) ? [...options.moduleIds] : [],
includeComponents: Array.isArray(options.includeComponentIds)
? [...options.includeComponentIds]
: [],
excludeComponents: Array.isArray(options.excludeComponentIds)
? [...options.excludeComponentIds]
: [],
legacyLanguages: [],
legacyMode: false,
profile: requestProfileId,
modules: requestModuleIds,
includeComponents: requestIncludeComponentIds,
excludeComponents: requestExcludeComponentIds,
legacyLanguages,
legacyMode: Boolean(options.legacyMode),
},
resolution: {
selectedModules: plan.selectedModuleIds,
@@ -568,7 +624,7 @@ function createManifestInstallPlan(options = {}) {
});
return {
mode: 'manifest',
mode: options.mode || 'manifest',
target,
adapter: {
id: adapter.id,
@@ -578,8 +634,9 @@ function createManifestInstallPlan(options = {}) {
targetRoot: plan.targetRoot,
installRoot: plan.targetRoot,
installStatePath: plan.installStatePath,
warnings: [],
languages: [],
warnings: Array.isArray(options.warnings) ? [...options.warnings] : [],
languages: legacyLanguages,
legacyLanguages,
profileId: plan.profileId,
requestedModuleIds: plan.requestedModuleIds,
explicitModuleIds: plan.explicitModuleIds,
@@ -597,6 +654,7 @@ module.exports = {
SUPPORTED_INSTALL_TARGETS,
LEGACY_INSTALL_TARGETS,
applyInstallPlan,
createLegacyCompatInstallPlan,
createManifestInstallPlan,
createLegacyInstallPlan,
getSourceRoot,

View File

@@ -4,7 +4,6 @@ const path = require('path');
const { resolveInstallPlan, loadInstallManifests } = require('./install-manifests');
const { readInstallState, writeInstallState } = require('./install-state');
const {
applyInstallPlan,
createLegacyInstallPlan,
createManifestInstallPlan,
} = require('./install-executor');
@@ -79,6 +78,420 @@ function areFilesEqual(leftPath, rightPath) {
}
}
// Read a file's contents as UTF-8 text.
function readFileUtf8(filePath) {
  return fs.readFileSync(filePath, { encoding: 'utf8' });
}
// True for non-null, non-array objects (the "plain object" shape used by the
// JSON merge/remove helpers below).
function isPlainObject(value) {
  if (value === null || typeof value !== 'object') {
    return false;
  }
  return !Array.isArray(value);
}
// Deep-copy a JSON-compatible value via a JSON round-trip. `undefined` passes
// through unchanged; undefined/function members are intentionally dropped.
function cloneJsonValue(value) {
  return value === undefined ? undefined : JSON.parse(JSON.stringify(value));
}
// Normalize a recorded value into JSON data: strings are parsed as JSON,
// JSON-compatible values are deep-copied, anything else is rejected.
function parseJsonLikeValue(value, label) {
  if (value === undefined) {
    return undefined;
  }
  if (typeof value === 'string') {
    try {
      return JSON.parse(value);
    } catch (error) {
      throw new Error(`Invalid ${label}: ${error.message}`);
    }
  }
  const isJsonCompatible = value === null
    || Array.isArray(value)
    || isPlainObject(value)
    || typeof value === 'number'
    || typeof value === 'boolean';
  if (isJsonCompatible) {
    return cloneJsonValue(value);
  }
  throw new Error(`Invalid ${label}: expected JSON-compatible data`);
}
// Return the first recorded text payload on the operation, probing a fixed
// list of keys in priority order; null when no string payload exists.
function getOperationTextContent(operation) {
  const candidateKeys = [
    'renderedContent',
    'content',
    'managedContent',
    'expectedContent',
    'templateOutput',
  ];
  for (const key of candidateKeys) {
    const candidate = operation[key];
    if (typeof candidate === 'string') {
      return candidate;
    }
  }
  return null;
}
// Return the first recorded JSON payload on the operation (parsed and
// deep-copied), probing keys in priority order; undefined when absent.
function getOperationJsonPayload(operation) {
  const candidateKeys = [
    'mergePayload',
    'managedPayload',
    'payload',
    'value',
    'expectedValue',
  ];
  for (const key of candidateKeys) {
    if (operation[key] !== undefined) {
      return parseJsonLikeValue(operation[key], `${operation.kind}.${key}`);
    }
  }
  return undefined;
}
// Return the pre-install text recorded for this operation (used to restore
// files on uninstall); null when no string backup exists.
function getOperationPreviousContent(operation) {
  const candidateKeys = ['previousContent', 'originalContent', 'backupContent'];
  for (const key of candidateKeys) {
    const candidate = operation[key];
    if (typeof candidate === 'string') {
      return candidate;
    }
  }
  return null;
}
// Return the pre-install JSON value recorded for this operation (parsed and
// deep-copied); undefined when no JSON backup exists.
function getOperationPreviousJson(operation) {
  const candidateKeys = ['previousValue', 'previousJson', 'originalValue'];
  for (const key of candidateKeys) {
    if (operation[key] !== undefined) {
      return parseJsonLikeValue(operation[key], `${operation.kind}.${key}`);
    }
  }
  return undefined;
}
// Serialize with two-space indent and a trailing newline, matching the
// on-disk config file style.
function formatJson(value) {
  return JSON.stringify(value, null, 2) + '\n';
}
// Read and parse a JSON file from disk.
function readJsonFile(filePath) {
  const rawText = readFileUtf8(filePath);
  return JSON.parse(rawText);
}
// Create the parent directory of `filePath` (and any ancestors) if missing.
function ensureParentDir(filePath) {
  const parentDir = path.dirname(filePath);
  fs.mkdirSync(parentDir, { recursive: true });
}
// Recursively merge `patchValue` into `baseValue`. When either side is not a
// plain object the patch wins wholesale (as a deep copy); matching object
// keys are merged recursively, everything else is overwritten.
function deepMergeJson(baseValue, patchValue) {
  if (!isPlainObject(baseValue) || !isPlainObject(patchValue)) {
    return cloneJsonValue(patchValue);
  }
  const result = { ...baseValue };
  for (const key of Object.keys(patchValue)) {
    const patchEntry = patchValue[key];
    result[key] = isPlainObject(patchEntry) && isPlainObject(result[key])
      ? deepMergeJson(result[key], patchEntry)
      : cloneJsonValue(patchEntry);
  }
  return result;
}
// True when `actualValue` structurally contains `expectedValue`: objects may
// carry extra keys, arrays must match exactly (same length, element-wise),
// primitives compare with strict equality.
function jsonContainsSubset(actualValue, expectedValue) {
  if (isPlainObject(expectedValue)) {
    if (!isPlainObject(actualValue)) {
      return false;
    }
    for (const [key, value] of Object.entries(expectedValue)) {
      if (!Object.prototype.hasOwnProperty.call(actualValue, key)) {
        return false;
      }
      if (!jsonContainsSubset(actualValue[key], value)) {
        return false;
      }
    }
    return true;
  }
  if (Array.isArray(expectedValue)) {
    return Array.isArray(actualValue)
      && actualValue.length === expectedValue.length
      && expectedValue.every((item, index) => jsonContainsSubset(actualValue[index], item));
  }
  return actualValue === expectedValue;
}
// Sentinel returned when a node is fully consumed by the removal and the
// caller should delete it (or the whole file) rather than keep an empty shell.
const JSON_REMOVE_SENTINEL = Symbol('json-remove');
// Strip the managed subset (`managedValue`) out of `currentValue` without
// touching user-added data. Returns the pruned value, or JSON_REMOVE_SENTINEL
// when nothing of the node remains. Arrays are removed only when they match
// the managed array exactly; primitives only when strictly equal.
function deepRemoveJsonSubset(currentValue, managedValue) {
  if (isPlainObject(managedValue)) {
    // Managed object vs non-object on disk: user replaced it — leave as-is.
    if (!isPlainObject(currentValue)) {
      return currentValue;
    }
    const nextValue = { ...currentValue };
    for (const [key, value] of Object.entries(managedValue)) {
      if (!Object.prototype.hasOwnProperty.call(nextValue, key)) {
        continue;
      }
      if (isPlainObject(value)) {
        // Recurse; a sentinel from below means the nested object emptied out.
        const nestedValue = deepRemoveJsonSubset(nextValue[key], value);
        if (nestedValue === JSON_REMOVE_SENTINEL) {
          delete nextValue[key];
        } else {
          nextValue[key] = nestedValue;
        }
        continue;
      }
      if (Array.isArray(value)) {
        // Arrays are all-or-nothing: only remove an exact match.
        if (Array.isArray(nextValue[key]) && jsonContainsSubset(nextValue[key], value)) {
          delete nextValue[key];
        }
        continue;
      }
      if (nextValue[key] === value) {
        delete nextValue[key];
      }
    }
    // An object with no keys left collapses to the removal sentinel.
    return Object.keys(nextValue).length === 0 ? JSON_REMOVE_SENTINEL : nextValue;
  }
  if (Array.isArray(managedValue)) {
    return jsonContainsSubset(currentValue, managedValue) ? JSON_REMOVE_SENTINEL : currentValue;
  }
  return currentValue === managedValue ? JSON_REMOVE_SENTINEL : currentValue;
}
// Shallow-copy recorded operations, re-resolving the absolute source path for
// copy-file operations against the current repo root.
function hydrateRecordedOperations(repoRoot, operations) {
  return operations.map(operation => (
    operation.kind === 'copy-file'
      ? { ...operation, sourcePath: resolveOperationSourcePath(repoRoot, operation) }
      : { ...operation }
  ));
}
// Build the install-state snapshot to persist after replaying recorded
// operations: same state with copied operations, refreshed source versions,
// and a new validation timestamp.
function buildRecordedStatePreview(state, context, operations) {
  const copiedOperations = operations.map(operation => ({ ...operation }));
  const refreshedSource = {
    ...state.source,
    repoVersion: context.packageVersion,
    manifestVersion: context.manifestVersion,
  };
  return {
    ...state,
    operations: copiedOperations,
    source: refreshedSource,
    lastValidatedAt: new Date().toISOString(),
  };
}
// True when the state contains any operation that cannot be rebuilt from repo
// sources alone (anything other than copy-file must replay recorded data).
function shouldRepairFromRecordedOperations(state) {
  return getManagedOperations(state).some(({ kind }) => kind !== 'copy-file');
}
// Re-apply a single managed operation to bring the target file back to its
// desired state. Throws when the recorded data needed to repair is missing or
// the operation kind is unknown.
function executeRepairOperation(repoRoot, operation) {
  if (operation.kind === 'copy-file') {
    // Re-copy from the repo source; fail loudly if the source vanished.
    const sourcePath = resolveOperationSourcePath(repoRoot, operation);
    if (!sourcePath || !fs.existsSync(sourcePath)) {
      throw new Error(`Missing source file for repair: ${sourcePath || operation.sourceRelativePath}`);
    }
    ensureParentDir(operation.destinationPath);
    fs.copyFileSync(sourcePath, operation.destinationPath);
    return;
  }
  if (operation.kind === 'render-template') {
    // Rewrite the file from the recorded rendered output.
    const renderedContent = getOperationTextContent(operation);
    if (renderedContent === null) {
      throw new Error(`Missing rendered content for repair: ${operation.destinationPath}`);
    }
    ensureParentDir(operation.destinationPath);
    fs.writeFileSync(operation.destinationPath, renderedContent);
    return;
  }
  if (operation.kind === 'merge-json') {
    // Re-merge the managed payload into whatever is currently on disk
    // (an absent file merges into an empty object).
    const payload = getOperationJsonPayload(operation);
    if (payload === undefined) {
      throw new Error(`Missing merge payload for repair: ${operation.destinationPath}`);
    }
    const currentValue = fs.existsSync(operation.destinationPath)
      ? readJsonFile(operation.destinationPath)
      : {};
    const mergedValue = deepMergeJson(currentValue, payload);
    ensureParentDir(operation.destinationPath);
    fs.writeFileSync(operation.destinationPath, formatJson(mergedValue));
    return;
  }
  if (operation.kind === 'remove') {
    // Desired state is "absent": delete the path if it reappeared.
    if (!fs.existsSync(operation.destinationPath)) {
      return;
    }
    fs.rmSync(operation.destinationPath, { recursive: true, force: true });
    return;
  }
  throw new Error(`Unsupported repair operation kind: ${operation.kind}`);
}
// Empty uninstall outcome: nothing removed, nothing to clean up.
// Returns a fresh object each call so callers can never share/mutate state.
function noUninstallChanges() {
  return {
    removedPaths: [],
    cleanupTargets: [],
  };
}

// Delete a managed file and report it as removed + eligible for cleanup.
function removeManagedPath(destinationPath) {
  fs.rmSync(destinationPath, { force: true });
  return {
    removedPaths: [destinationPath],
    cleanupTargets: [destinationPath],
  };
}

// Restore the pre-install content recorded on the operation, preferring the
// text backup over the JSON backup. Returns true when a restore happened.
function restorePreviousState(operation) {
  const previousContent = getOperationPreviousContent(operation);
  if (previousContent !== null) {
    ensureParentDir(operation.destinationPath);
    fs.writeFileSync(operation.destinationPath, previousContent);
    return true;
  }
  const previousJson = getOperationPreviousJson(operation);
  if (previousJson !== undefined) {
    ensureParentDir(operation.destinationPath);
    fs.writeFileSync(operation.destinationPath, formatJson(previousJson));
    return true;
  }
  return false;
}

/**
 * Undo a single managed operation. Restores recorded pre-install content
 * where available; otherwise removes the managed file, or (for merge-json)
 * strips only the managed subset from the current file.
 * @param {object} operation - Recorded install operation.
 * @returns {{removedPaths: string[], cleanupTargets: string[]}} Paths deleted
 *   by this step and candidates for empty-directory cleanup.
 * @throws {Error} On an unknown operation kind, or a merge-json operation
 *   with no recorded payload.
 */
function executeUninstallOperation(operation) {
  if (operation.kind === 'copy-file') {
    if (!fs.existsSync(operation.destinationPath)) {
      return noUninstallChanges();
    }
    return removeManagedPath(operation.destinationPath);
  }
  if (operation.kind === 'render-template') {
    if (restorePreviousState(operation)) {
      return noUninstallChanges();
    }
    if (!fs.existsSync(operation.destinationPath)) {
      return noUninstallChanges();
    }
    return removeManagedPath(operation.destinationPath);
  }
  if (operation.kind === 'merge-json') {
    if (restorePreviousState(operation)) {
      return noUninstallChanges();
    }
    if (!fs.existsSync(operation.destinationPath)) {
      return noUninstallChanges();
    }
    const payload = getOperationJsonPayload(operation);
    if (payload === undefined) {
      throw new Error(`Missing merge payload for uninstall: ${operation.destinationPath}`);
    }
    // Strip only the managed subset; delete the file when nothing remains.
    const currentValue = readJsonFile(operation.destinationPath);
    const nextValue = deepRemoveJsonSubset(currentValue, payload);
    if (nextValue === JSON_REMOVE_SENTINEL) {
      return removeManagedPath(operation.destinationPath);
    }
    ensureParentDir(operation.destinationPath);
    fs.writeFileSync(operation.destinationPath, formatJson(nextValue));
    return noUninstallChanges();
  }
  if (operation.kind === 'remove') {
    // Install removed this path; uninstall restores the backup if one exists,
    // and otherwise has nothing to do.
    restorePreviousState(operation);
    return noUninstallChanges();
  }
  throw new Error(`Unsupported uninstall operation kind: ${operation.kind}`);
}
function inspectManagedOperation(repoRoot, operation) {
const destinationPath = operation.destinationPath;
if (!destinationPath) {
@@ -88,6 +501,22 @@ function inspectManagedOperation(repoRoot, operation) {
};
}
if (operation.kind === 'remove') {
if (fs.existsSync(destinationPath)) {
return {
status: 'drifted',
operation,
destinationPath,
};
}
return {
status: 'ok',
operation,
destinationPath,
};
}
if (!fs.existsSync(destinationPath)) {
return {
status: 'missing',
@@ -96,38 +525,97 @@ function inspectManagedOperation(repoRoot, operation) {
};
}
if (operation.kind !== 'copy-file') {
return {
status: 'unverified',
operation,
destinationPath,
};
}
if (operation.kind === 'copy-file') {
const sourcePath = resolveOperationSourcePath(repoRoot, operation);
if (!sourcePath || !fs.existsSync(sourcePath)) {
return {
status: 'missing-source',
operation,
destinationPath,
sourcePath,
};
}
if (!areFilesEqual(sourcePath, destinationPath)) {
return {
status: 'drifted',
operation,
destinationPath,
sourcePath,
};
}
const sourcePath = resolveOperationSourcePath(repoRoot, operation);
if (!sourcePath || !fs.existsSync(sourcePath)) {
return {
status: 'missing-source',
status: 'ok',
operation,
destinationPath,
sourcePath,
};
}
if (!areFilesEqual(sourcePath, destinationPath)) {
if (operation.kind === 'render-template') {
const renderedContent = getOperationTextContent(operation);
if (renderedContent === null) {
return {
status: 'unverified',
operation,
destinationPath,
};
}
if (readFileUtf8(destinationPath) !== renderedContent) {
return {
status: 'drifted',
operation,
destinationPath,
};
}
return {
status: 'drifted',
status: 'ok',
operation,
destinationPath,
};
}
if (operation.kind === 'merge-json') {
const payload = getOperationJsonPayload(operation);
if (payload === undefined) {
return {
status: 'unverified',
operation,
destinationPath,
};
}
try {
const currentValue = readJsonFile(destinationPath);
if (!jsonContainsSubset(currentValue, payload)) {
return {
status: 'drifted',
operation,
destinationPath,
};
}
} catch (_error) {
return {
status: 'drifted',
operation,
destinationPath,
};
}
return {
status: 'ok',
operation,
destinationPath,
sourcePath,
};
}
return {
status: 'ok',
status: 'unverified',
operation,
destinationPath,
sourcePath,
};
}
@@ -455,25 +943,12 @@ function createRepairPlanFromRecord(record, context) {
throw new Error('No install-state available for repair');
}
if (state.request.legacyMode) {
const operations = getManagedOperations(state).map(operation => ({
...operation,
sourcePath: resolveOperationSourcePath(context.repoRoot, operation),
}));
const statePreview = {
...state,
operations: operations.map(operation => ({ ...operation })),
source: {
...state.source,
repoVersion: context.packageVersion,
manifestVersion: context.manifestVersion,
},
lastValidatedAt: new Date().toISOString(),
};
if (state.request.legacyMode || shouldRepairFromRecordedOperations(state)) {
const operations = hydrateRecordedOperations(context.repoRoot, getManagedOperations(state));
const statePreview = buildRecordedStatePreview(state, context, operations);
return {
mode: 'legacy',
mode: state.request.legacyMode ? 'legacy' : 'recorded',
target: record.adapter.target,
adapter: record.adapter,
targetRoot: state.target.root,
@@ -571,11 +1046,10 @@ function repairInstalledStates(options = {}) {
}
if (repairOperations.length > 0) {
applyInstallPlan({
...desiredPlan,
operations: repairOperations,
statePreview: desiredPlan.statePreview,
});
for (const operation of repairOperations) {
executeRepairOperation(context.repoRoot, operation);
}
writeInstallState(desiredPlan.installStatePath, desiredPlan.statePreview);
} else {
writeInstallState(desiredPlan.installStatePath, desiredPlan.statePreview);
}
@@ -684,23 +1158,12 @@ function uninstallInstalledStates(options = {}) {
try {
const removedPaths = [];
const cleanupTargets = [];
const filePaths = Array.from(new Set(
getManagedOperations(state).map(operation => operation.destinationPath)
)).sort((left, right) => right.length - left.length);
const operations = getManagedOperations(state);
for (const filePath of filePaths) {
if (!fs.existsSync(filePath)) {
continue;
}
const stat = fs.lstatSync(filePath);
if (stat.isDirectory()) {
throw new Error(`Refusing to remove managed directory path without explicit support: ${filePath}`);
}
fs.rmSync(filePath, { force: true });
removedPaths.push(filePath);
cleanupTargets.push(filePath);
for (const operation of operations) {
const outcome = executeUninstallOperation(operation);
removedPaths.push(...outcome.removedPaths);
cleanupTargets.push(...outcome.cleanupTargets);
}
if (fs.existsSync(state.target.installStatePath)) {

View File

@@ -11,6 +11,50 @@ const COMPONENT_FAMILY_PREFIXES = {
framework: 'framework:',
capability: 'capability:',
};
// Base module ids installed for a legacy-compat (language-list) request,
// keyed by install target. Antigravity only receives the core content
// modules; Claude and Cursor also get runtime hooks and platform configs.
const LEGACY_COMPAT_BASE_MODULE_IDS_BY_TARGET = Object.freeze({
  claude: [
    'rules-core',
    'agents-core',
    'commands-core',
    'hooks-runtime',
    'platform-configs',
    'workflow-quality',
  ],
  cursor: [
    'rules-core',
    'agents-core',
    'commands-core',
    'hooks-runtime',
    'platform-configs',
    'workflow-quality',
  ],
  antigravity: [
    'rules-core',
    'agents-core',
    'commands-core',
  ],
});
// Accepted legacy language aliases mapped to their canonical language id
// (e.g. both "javascript" and "typescript" resolve to "typescript").
const LEGACY_LANGUAGE_ALIAS_TO_CANONICAL = Object.freeze({
  go: 'go',
  golang: 'go',
  java: 'java',
  javascript: 'typescript',
  kotlin: 'java',
  perl: 'perl',
  php: 'php',
  python: 'python',
  swift: 'swift',
  typescript: 'typescript',
});
// Extra module ids pulled in per canonical language; empty arrays mean the
// language needs nothing beyond the base module set.
const LEGACY_LANGUAGE_EXTRA_MODULE_IDS = Object.freeze({
  go: ['framework-language'],
  java: ['framework-language'],
  perl: [],
  php: [],
  python: ['framework-language'],
  swift: [],
  typescript: ['framework-language'],
});
function readJson(filePath, label) {
try {
@@ -24,6 +68,19 @@ function dedupeStrings(values) {
return [...new Set((Array.isArray(values) ? values : []).map(value => String(value).trim()).filter(Boolean))];
}
/**
 * Throw when any requested module id is not present in the loaded manifests.
 * Deduplicates before checking so repeated ids are reported once.
 * @param {string[]} moduleIds - requested module ids (may contain duplicates).
 * @param {{modulesById: Map<string, object>}} manifests - loaded manifests.
 * @throws {Error} listing the unknown module id(s).
 */
function assertKnownModuleIds(moduleIds, manifests) {
  const missing = dedupeStrings(moduleIds)
    .filter(id => !manifests.modulesById.has(id));
  if (missing.length === 0) {
    return;
  }
  const noun = missing.length === 1 ? 'module' : 'modules';
  throw new Error(`Unknown install ${noun}: ${missing.join(', ')}`);
}
function intersectTargets(modules) {
if (!Array.isArray(modules) || modules.length === 0) {
return [];
@@ -102,6 +159,17 @@ function listInstallModules(options = {}) {
}));
}
/**
 * List every accepted legacy language alias, sorted alphabetically.
 * @returns {string[]} sorted alias names.
 */
function listLegacyCompatibilityLanguages() {
  const aliases = Object.keys(LEGACY_LANGUAGE_ALIAS_TO_CANONICAL);
  aliases.sort();
  return aliases;
}
/**
 * Normalize a list of module ids and verify each one exists in the manifests.
 * @param {string[]} moduleIds - raw module ids.
 * @param {object} [options] - forwarded to loadInstallManifests.
 * @returns {string[]} deduplicated, trimmed module ids, all known.
 * @throws {Error} when any id is unknown.
 */
function validateInstallModuleIds(moduleIds, options = {}) {
  const normalizedIds = dedupeStrings(moduleIds);
  assertKnownModuleIds(normalizedIds, loadInstallManifests(options));
  return normalizedIds;
}
function listInstallComponents(options = {}) {
const manifests = loadInstallManifests(options);
const family = options.family || null;
@@ -154,6 +222,59 @@ function expandComponentIdsToModuleIds(componentIds, manifests) {
return dedupeStrings(expandedModuleIds);
}
/**
 * Resolve a legacy language list into a concrete module selection.
 * Aliases are lowercased, deduplicated, and mapped to canonical languages;
 * the target's base module set is combined with per-language extras
 * (antigravity gets no language extras).
 * @param {object} [options] - { target, legacyLanguages, ...manifest options }.
 * @returns {{legacyLanguages: string[], canonicalLegacyLanguages: string[], moduleIds: string[]}}
 * @throws {Error} on unknown target, empty language list, or unknown alias.
 */
function resolveLegacyCompatibilitySelection(options = {}) {
  const manifests = loadInstallManifests(options);
  const target = options.target || null;
  if (target && !SUPPORTED_INSTALL_TARGETS.includes(target)) {
    throw new Error(
      `Unknown install target: ${target}. Expected one of ${SUPPORTED_INSTALL_TARGETS.join(', ')}`
    );
  }
  // Dedupe twice: lowercasing may collapse previously-distinct aliases.
  const normalizedLegacyLanguages = dedupeStrings(
    dedupeStrings(options.legacyLanguages).map(language => language.toLowerCase())
  );
  if (normalizedLegacyLanguages.length === 0) {
    throw new Error('No legacy languages were provided');
  }
  const unknown = normalizedLegacyLanguages.filter(
    language => !Object.hasOwn(LEGACY_LANGUAGE_ALIAS_TO_CANONICAL, language)
  );
  if (unknown.length === 1) {
    throw new Error(
      `Unknown legacy language: ${unknown[0]}. Expected one of ${listLegacyCompatibilityLanguages().join(', ')}`
    );
  }
  if (unknown.length > 1) {
    throw new Error(
      `Unknown legacy languages: ${unknown.join(', ')}. Expected one of ${listLegacyCompatibilityLanguages().join(', ')}`
    );
  }
  const canonicalLegacyLanguages = normalizedLegacyLanguages.map(
    language => LEGACY_LANGUAGE_ALIAS_TO_CANONICAL[language]
  );
  const baseModuleIds = LEGACY_COMPAT_BASE_MODULE_IDS_BY_TARGET[target || 'claude']
    || LEGACY_COMPAT_BASE_MODULE_IDS_BY_TARGET.claude;
  const extraModuleIds = target === 'antigravity'
    ? []
    : canonicalLegacyLanguages.flatMap(language => LEGACY_LANGUAGE_EXTRA_MODULE_IDS[language] || []);
  const moduleIds = dedupeStrings([...baseModuleIds, ...extraModuleIds]);
  assertKnownModuleIds(moduleIds, manifests);
  return {
    legacyLanguages: normalizedLegacyLanguages,
    canonicalLegacyLanguages,
    moduleIds,
  };
}
function resolveInstallPlan(options = {}) {
const manifests = loadInstallManifests(options);
const profileId = options.profileId || null;
@@ -212,7 +333,7 @@ function resolveInstallPlan(options = {}) {
const visitingIds = new Set();
const resolvedIds = new Set();
function resolveModule(moduleId, dependencyOf) {
function resolveModule(moduleId, dependencyOf, rootRequesterId) {
const module = manifests.modulesById.get(moduleId);
if (!module) {
throw new Error(`Unknown install module: ${moduleId}`);
@@ -230,16 +351,15 @@ function resolveInstallPlan(options = {}) {
if (target && !module.targets.includes(target)) {
if (dependencyOf) {
throw new Error(
`Module ${dependencyOf} depends on ${moduleId}, which does not support target ${target}`
);
skippedTargetIds.add(rootRequesterId || dependencyOf);
return false;
}
skippedTargetIds.add(moduleId);
return;
return false;
}
if (resolvedIds.has(moduleId)) {
return;
return true;
}
if (visitingIds.has(moduleId)) {
@@ -248,15 +368,27 @@ function resolveInstallPlan(options = {}) {
visitingIds.add(moduleId);
for (const dependencyId of module.dependencies) {
resolveModule(dependencyId, moduleId);
const dependencyResolved = resolveModule(
dependencyId,
moduleId,
rootRequesterId || moduleId
);
if (!dependencyResolved) {
visitingIds.delete(moduleId);
if (!dependencyOf) {
skippedTargetIds.add(moduleId);
}
return false;
}
}
visitingIds.delete(moduleId);
resolvedIds.add(moduleId);
selectedIds.add(moduleId);
return true;
}
for (const moduleId of effectiveRequestedIds) {
resolveModule(moduleId, null);
resolveModule(moduleId, null, moduleId);
}
const selectedModules = manifests.modules.filter(module => selectedIds.has(module.id));
@@ -299,7 +431,10 @@ module.exports = {
getManifestPaths,
loadInstallManifests,
listInstallComponents,
listLegacyCompatibilityLanguages,
listInstallModules,
listInstallProfiles,
resolveInstallPlan,
resolveLegacyCompatibilitySelection,
validateInstallModuleIds,
};

View File

@@ -1,11 +1,28 @@
const fs = require('fs');
const path = require('path');
const Ajv = require('ajv');
let Ajv = null;
try {
// Prefer schema-backed validation when dependencies are installed.
// The fallback validator below keeps source checkouts usable in bare environments.
const ajvModule = require('ajv');
Ajv = ajvModule.default || ajvModule;
} catch (_error) {
Ajv = null;
}
const SCHEMA_PATH = path.join(__dirname, '..', '..', 'schemas', 'install-state.schema.json');
let cachedValidator = null;
/**
 * Deep-copy a JSON-serializable value via a stringify/parse round-trip.
 * `undefined` is passed through unchanged because JSON.stringify(undefined)
 * would otherwise yield invalid JSON input for JSON.parse.
 * @param {*} value - any JSON-serializable value, or undefined.
 * @returns {*} an independent deep copy, or undefined.
 */
function cloneJsonValue(value) {
  return value === undefined ? undefined : JSON.parse(JSON.stringify(value));
}
function readJson(filePath, label) {
try {
return JSON.parse(fs.readFileSync(filePath, 'utf8'));
@@ -19,12 +36,188 @@ function getValidator() {
return cachedValidator;
}
const schema = readJson(SCHEMA_PATH, 'install-state schema');
const ajv = new Ajv({ allErrors: true });
cachedValidator = ajv.compile(schema);
if (Ajv) {
const schema = readJson(SCHEMA_PATH, 'install-state schema');
const ajv = new Ajv({ allErrors: true });
cachedValidator = ajv.compile(schema);
return cachedValidator;
}
cachedValidator = createFallbackValidator();
return cachedValidator;
}
// Build a minimal structural validator used when the optional `ajv`
// dependency is unavailable. It mirrors schemas/install-state.schema.json
// closely enough for bare source checkouts; error objects mimic Ajv's
// { instancePath, message } shape so formatValidationErrors works unchanged.
function createFallbackValidator() {
  const validate = state => {
    const errors = [];
    // Mimic Ajv: `validate.errors` always reflects the most recent call.
    validate.errors = errors;
    function pushError(instancePath, message) {
      errors.push({
        instancePath,
        message,
      });
    }
    function isNonEmptyString(value) {
      return typeof value === 'string' && value.length > 0;
    }
    function validateNoAdditionalProperties(value, instancePath, allowedKeys) {
      for (const key of Object.keys(value)) {
        if (!allowedKeys.includes(key)) {
          pushError(`${instancePath}/${key}`, 'must NOT have additional properties');
        }
      }
    }
    function validateStringArray(value, instancePath) {
      if (!Array.isArray(value)) {
        pushError(instancePath, 'must be array');
        return;
      }
      for (let index = 0; index < value.length; index += 1) {
        if (!isNonEmptyString(value[index])) {
          pushError(`${instancePath}/${index}`, 'must be non-empty string');
        }
      }
    }
    function validateOptionalString(value, instancePath) {
      if (value !== undefined && value !== null && !isNonEmptyString(value)) {
        pushError(instancePath, 'must be string or null');
      }
    }
    // Root must be a plain object; nothing else is worth checking otherwise.
    if (!state || typeof state !== 'object' || Array.isArray(state)) {
      pushError('/', 'must be object');
      return false;
    }
    validateNoAdditionalProperties(
      state,
      '',
      ['schemaVersion', 'installedAt', 'lastValidatedAt', 'target', 'request', 'resolution', 'source', 'operations']
    );
    if (state.schemaVersion !== 'ecc.install.v1') {
      pushError('/schemaVersion', 'must equal ecc.install.v1');
    }
    if (!isNonEmptyString(state.installedAt)) {
      pushError('/installedAt', 'must be non-empty string');
    }
    // lastValidatedAt is optional but must be a non-empty string when present.
    if (state.lastValidatedAt !== undefined && !isNonEmptyString(state.lastValidatedAt)) {
      pushError('/lastValidatedAt', 'must be non-empty string');
    }
    const target = state.target;
    if (!target || typeof target !== 'object' || Array.isArray(target)) {
      pushError('/target', 'must be object');
    } else {
      validateNoAdditionalProperties(target, '/target', ['id', 'target', 'kind', 'root', 'installStatePath']);
      if (!isNonEmptyString(target.id)) {
        pushError('/target/id', 'must be non-empty string');
      }
      validateOptionalString(target.target, '/target/target');
      if (target.kind !== undefined && !['home', 'project'].includes(target.kind)) {
        pushError('/target/kind', 'must be equal to one of the allowed values');
      }
      if (!isNonEmptyString(target.root)) {
        pushError('/target/root', 'must be non-empty string');
      }
      if (!isNonEmptyString(target.installStatePath)) {
        pushError('/target/installStatePath', 'must be non-empty string');
      }
    }
    const request = state.request;
    if (!request || typeof request !== 'object' || Array.isArray(request)) {
      pushError('/request', 'must be object');
    } else {
      validateNoAdditionalProperties(
        request,
        '/request',
        ['profile', 'modules', 'includeComponents', 'excludeComponents', 'legacyLanguages', 'legacyMode']
      );
      // `profile` must be explicitly present and either a string or null.
      if (!(Object.prototype.hasOwnProperty.call(request, 'profile') && (request.profile === null || typeof request.profile === 'string'))) {
        pushError('/request/profile', 'must be string or null');
      }
      validateStringArray(request.modules, '/request/modules');
      validateStringArray(request.includeComponents, '/request/includeComponents');
      validateStringArray(request.excludeComponents, '/request/excludeComponents');
      validateStringArray(request.legacyLanguages, '/request/legacyLanguages');
      if (typeof request.legacyMode !== 'boolean') {
        pushError('/request/legacyMode', 'must be boolean');
      }
    }
    const resolution = state.resolution;
    if (!resolution || typeof resolution !== 'object' || Array.isArray(resolution)) {
      pushError('/resolution', 'must be object');
    } else {
      validateNoAdditionalProperties(resolution, '/resolution', ['selectedModules', 'skippedModules']);
      validateStringArray(resolution.selectedModules, '/resolution/selectedModules');
      validateStringArray(resolution.skippedModules, '/resolution/skippedModules');
    }
    const source = state.source;
    if (!source || typeof source !== 'object' || Array.isArray(source)) {
      pushError('/source', 'must be object');
    } else {
      validateNoAdditionalProperties(source, '/source', ['repoVersion', 'repoCommit', 'manifestVersion']);
      validateOptionalString(source.repoVersion, '/source/repoVersion');
      validateOptionalString(source.repoCommit, '/source/repoCommit');
      if (!Number.isInteger(source.manifestVersion) || source.manifestVersion < 1) {
        pushError('/source/manifestVersion', 'must be integer >= 1');
      }
    }
    if (!Array.isArray(state.operations)) {
      pushError('/operations', 'must be array');
    } else {
      for (let index = 0; index < state.operations.length; index += 1) {
        const operation = state.operations[index];
        const instancePath = `/operations/${index}`;
        if (!operation || typeof operation !== 'object' || Array.isArray(operation)) {
          pushError(instancePath, 'must be object');
          continue;
        }
        if (!isNonEmptyString(operation.kind)) {
          pushError(`${instancePath}/kind`, 'must be non-empty string');
        }
        if (!isNonEmptyString(operation.moduleId)) {
          pushError(`${instancePath}/moduleId`, 'must be non-empty string');
        }
        if (!isNonEmptyString(operation.sourceRelativePath)) {
          pushError(`${instancePath}/sourceRelativePath`, 'must be non-empty string');
        }
        if (!isNonEmptyString(operation.destinationPath)) {
          pushError(`${instancePath}/destinationPath`, 'must be non-empty string');
        }
        if (!isNonEmptyString(operation.strategy)) {
          pushError(`${instancePath}/strategy`, 'must be non-empty string');
        }
        if (!isNonEmptyString(operation.ownership)) {
          pushError(`${instancePath}/ownership`, 'must be non-empty string');
        }
        if (typeof operation.scaffoldOnly !== 'boolean') {
          pushError(`${instancePath}/scaffoldOnly`, 'must be boolean');
        }
      }
    }
    return errors.length === 0;
  };
  // Start with an empty errors array so callers can read it before first use.
  validate.errors = [];
  return validate;
}
function formatValidationErrors(errors = []) {
return errors
.map(error => `${error.instancePath || '/'} ${error.message}`)
@@ -87,7 +280,7 @@ function createInstallState(options) {
manifestVersion: options.source.manifestVersion,
},
operations: Array.isArray(options.operations)
? options.operations.map(operation => ({ ...operation }))
? options.operations.map(operation => cloneJsonValue(operation))
: [],
};

View File

@@ -1,4 +1,10 @@
const { createInstallTargetAdapter } = require('./helpers');
const path = require('path');
const {
createFlatRuleOperations,
createInstallTargetAdapter,
createManagedScaffoldOperation,
} = require('./helpers');
module.exports = createInstallTargetAdapter({
id: 'antigravity-project',
@@ -6,4 +12,58 @@ module.exports = createInstallTargetAdapter({
kind: 'project',
rootSegments: ['.agent'],
installStatePathSegments: ['ecc-install-state.json'],
planOperations(input, adapter) {
const modules = Array.isArray(input.modules)
? input.modules
: (input.module ? [input.module] : []);
const {
repoRoot,
projectRoot,
homeDir,
} = input;
const planningInput = {
repoRoot,
projectRoot,
homeDir,
};
const targetRoot = adapter.resolveRoot(planningInput);
return modules.flatMap(module => {
const paths = Array.isArray(module.paths) ? module.paths : [];
return paths.flatMap(sourceRelativePath => {
if (sourceRelativePath === 'rules') {
return createFlatRuleOperations({
moduleId: module.id,
repoRoot,
sourceRelativePath,
destinationDir: path.join(targetRoot, 'rules'),
});
}
if (sourceRelativePath === 'commands') {
return [
createManagedScaffoldOperation(
module.id,
sourceRelativePath,
path.join(targetRoot, 'workflows'),
'preserve-relative-path'
),
];
}
if (sourceRelativePath === 'agents') {
return [
createManagedScaffoldOperation(
module.id,
sourceRelativePath,
path.join(targetRoot, 'skills'),
'preserve-relative-path'
),
];
}
return [adapter.createScaffoldOperation(module.id, sourceRelativePath, planningInput)];
});
});
},
});

View File

@@ -1,4 +1,9 @@
const { createInstallTargetAdapter } = require('./helpers');
const path = require('path');
const {
createFlatRuleOperations,
createInstallTargetAdapter,
} = require('./helpers');
module.exports = createInstallTargetAdapter({
id: 'cursor-project',
@@ -7,4 +12,36 @@ module.exports = createInstallTargetAdapter({
rootSegments: ['.cursor'],
installStatePathSegments: ['ecc-install-state.json'],
nativeRootRelativePath: '.cursor',
planOperations(input, adapter) {
const modules = Array.isArray(input.modules)
? input.modules
: (input.module ? [input.module] : []);
const {
repoRoot,
projectRoot,
homeDir,
} = input;
const planningInput = {
repoRoot,
projectRoot,
homeDir,
};
const targetRoot = adapter.resolveRoot(planningInput);
return modules.flatMap(module => {
const paths = Array.isArray(module.paths) ? module.paths : [];
return paths.flatMap(sourceRelativePath => {
if (sourceRelativePath === 'rules') {
return createFlatRuleOperations({
moduleId: module.id,
repoRoot,
sourceRelativePath,
destinationDir: path.join(targetRoot, 'rules'),
});
}
return [adapter.createScaffoldOperation(module.id, sourceRelativePath, planningInput)];
});
});
},
});

View File

@@ -1,3 +1,4 @@
const fs = require('fs');
const os = require('os');
const path = require('path');
@@ -24,6 +25,182 @@ function resolveBaseRoot(scope, input = {}) {
throw new Error(`Unsupported install target scope: ${scope}`);
}
/**
 * Build a normalized validation-issue record.
 * Extra fields are merged last, so they may intentionally override the
 * severity/code/message keys.
 * @param {string} severity - e.g. 'error' or 'warning'.
 * @param {string} code - machine-readable issue code.
 * @param {string} message - human-readable description.
 * @param {object} [extra] - additional fields to merge into the record.
 * @returns {object} the issue record.
 */
function buildValidationIssue(severity, code, message, extra = {}) {
  return Object.assign({ severity, code, message }, extra);
}
/**
 * Recursively list the files under `dirPath` as normalized relative paths,
 * in deterministic (name-sorted) order. Missing directories yield [].
 * Non-file, non-directory entries (symlinks, sockets) are skipped.
 * @param {string} dirPath - absolute directory to walk.
 * @param {string} [prefix] - relative prefix accumulated during recursion.
 * @returns {string[]} normalized relative file paths.
 */
function listRelativeFiles(dirPath, prefix = '') {
  if (!fs.existsSync(dirPath)) {
    return [];
  }
  const sortedEntries = fs
    .readdirSync(dirPath, { withFileTypes: true })
    .sort((a, b) => a.name.localeCompare(b.name));
  const collected = [];
  for (const entry of sortedEntries) {
    const relativeName = prefix ? path.join(prefix, entry.name) : entry.name;
    const absolutePath = path.join(dirPath, entry.name);
    if (entry.isDirectory()) {
      collected.push(...listRelativeFiles(absolutePath, relativeName));
    } else if (entry.isFile()) {
      collected.push(normalizeRelativePath(relativeName));
    }
  }
  return collected;
}
/**
 * Build a managed install-operation record with sensible defaults.
 * The source path is normalized; unrecognized option fields are carried
 * through onto the returned record.
 * @param {object} options - { kind, moduleId, sourceRelativePath,
 *   destinationPath, strategy, ownership, scaffoldOnly, ...extra }.
 * @returns {object} the operation record.
 */
function createManagedOperation(options) {
  const {
    kind = 'copy-path',
    moduleId,
    sourceRelativePath,
    destinationPath,
    strategy = 'preserve-relative-path',
    ownership = 'managed',
    scaffoldOnly = true,
    ...extraFields
  } = options;
  return {
    kind,
    moduleId,
    sourceRelativePath: normalizeRelativePath(sourceRelativePath),
    destinationPath,
    strategy,
    ownership,
    scaffoldOnly,
    ...extraFields,
  };
}
/**
 * Default input validation shared by all install target adapters.
 * Project targets need a projectRoot or repoRoot; home targets need a
 * homeDir (or a resolvable os.homedir()).
 * @param {{kind: string}} config - adapter configuration.
 * @param {object} [input] - planning input ({ projectRoot, repoRoot, homeDir }).
 * @returns {object[]} zero or one blocking validation issue.
 */
function defaultValidateAdapterInput(config, input = {}) {
  const projectRootMissing = config.kind === 'project'
    && !input.projectRoot
    && !input.repoRoot;
  if (projectRootMissing) {
    return [
      buildValidationIssue(
        'error',
        'missing-project-root',
        'projectRoot or repoRoot is required for project install targets'
      ),
    ];
  }
  const homeDirMissing = config.kind === 'home'
    && !input.homeDir
    && !os.homedir();
  if (homeDirMissing) {
    return [
      buildValidationIssue(
        'error',
        'missing-home-dir',
        'homeDir is required for home install targets'
      ),
    ];
  }
  return [];
}
/**
 * Build a managed operation whose destination was remapped by the caller.
 * NOTE(review): `adapter` is currently unused; it is kept for signature
 * compatibility with existing call sites — confirm before removing.
 * @param {object} adapter - the owning target adapter (unused).
 * @param {string} moduleId - owning module id.
 * @param {string} sourceRelativePath - repo-relative source path.
 * @param {string} destinationPath - absolute destination path.
 * @param {object} [options] - { kind, strategy, ownership, scaffoldOnly, extra }.
 * @returns {object} the operation record.
 */
function createRemappedOperation(adapter, moduleId, sourceRelativePath, destinationPath, options = {}) {
  const scaffoldOnly = Object.hasOwn(options, 'scaffoldOnly')
    ? options.scaffoldOnly
    : true;
  return createManagedOperation({
    kind: options.kind || 'copy-path',
    moduleId,
    sourceRelativePath,
    destinationPath,
    strategy: options.strategy || 'preserve-relative-path',
    ownership: options.ownership || 'managed',
    scaffoldOnly,
    ...options.extra,
  });
}
/**
 * Plan flattened rule-copy operations into `<adapter root>/rules`.
 * This is the adapter-aware form of createFlatRuleOperations; it previously
 * duplicated that function's walk/flatten logic verbatim, so it now
 * delegates to it. The missing-source guard is kept here so
 * adapter.resolveRoot is only invoked when there is something to plan,
 * matching the original behavior.
 * @param {object} adapter - target adapter providing resolveRoot().
 * @param {string} moduleId - owning module id.
 * @param {string} sourceRelativePath - repo-relative rules directory.
 * @param {object} [input] - planning input ({ repoRoot, ... }).
 * @returns {object[]} flatten-copy operations (empty when source is absent).
 */
function createNamespacedFlatRuleOperations(adapter, moduleId, sourceRelativePath, input = {}) {
  const normalizedSourcePath = normalizeRelativePath(sourceRelativePath);
  const sourceRoot = path.join(input.repoRoot || '', normalizedSourcePath);
  if (!input.repoRoot || !fs.existsSync(sourceRoot) || !fs.statSync(sourceRoot).isDirectory()) {
    return [];
  }
  return createFlatRuleOperations({
    moduleId,
    repoRoot: input.repoRoot,
    sourceRelativePath,
    destinationDir: path.join(adapter.resolveRoot(input), 'rules'),
  });
}
/**
 * Plan flatten-copy operations for a rules tree.
 * Top-level files copy straight into `destinationDir`; files inside a
 * namespace subdirectory are flattened to `<namespace>-<path-with-dashes>`.
 * Returns [] when repoRoot is missing or the source is not a directory.
 * @param {{moduleId: string, repoRoot: string, sourceRelativePath: string,
 *   destinationDir: string}} params
 * @returns {object[]} flatten-copy operation records in name-sorted order.
 */
function createFlatRuleOperations({ moduleId, repoRoot, sourceRelativePath, destinationDir }) {
  const normalizedSourcePath = normalizeRelativePath(sourceRelativePath);
  const sourceRoot = path.join(repoRoot || '', normalizedSourcePath);
  const sourceIsDirectory = Boolean(repoRoot)
    && fs.existsSync(sourceRoot)
    && fs.statSync(sourceRoot).isDirectory();
  if (!sourceIsDirectory) {
    return [];
  }
  const sortedEntries = fs
    .readdirSync(sourceRoot, { withFileTypes: true })
    .sort((a, b) => a.name.localeCompare(b.name));
  const operations = [];
  for (const entry of sortedEntries) {
    if (entry.isFile()) {
      // Top-level rule file: keep its name as-is.
      operations.push(createManagedOperation({
        moduleId,
        sourceRelativePath: path.join(normalizedSourcePath, entry.name),
        destinationPath: path.join(destinationDir, entry.name),
        strategy: 'flatten-copy',
      }));
      continue;
    }
    if (!entry.isDirectory()) {
      continue;
    }
    // Namespace directory: flatten nested paths into dash-joined file names.
    const namespace = entry.name;
    for (const relativeFile of listRelativeFiles(path.join(sourceRoot, namespace))) {
      const flattenedFileName = `${namespace}-${normalizeRelativePath(relativeFile).replace(/\//g, '-')}`;
      operations.push(createManagedOperation({
        moduleId,
        sourceRelativePath: path.join(normalizedSourcePath, namespace, relativeFile),
        destinationPath: path.join(destinationDir, flattenedFileName),
        strategy: 'flatten-copy',
      }));
    }
  }
  return operations;
}
function createInstallTargetAdapter(config) {
const adapter = {
id: config.id,
@@ -68,15 +245,43 @@ function createInstallTargetAdapter(config) {
},
createScaffoldOperation(moduleId, sourceRelativePath, input = {}) {
const normalizedSourcePath = normalizeRelativePath(sourceRelativePath);
return {
kind: 'copy-path',
return createManagedOperation({
moduleId,
sourceRelativePath: normalizedSourcePath,
destinationPath: adapter.resolveDestinationPath(normalizedSourcePath, input),
strategy: adapter.determineStrategy(normalizedSourcePath),
ownership: 'managed',
scaffoldOnly: true,
};
});
},
// Plan copy operations for the adapter. A config-supplied planner takes
// precedence; otherwise every module path becomes a default scaffold
// operation. Accepts either `input.modules` (array) or a single
// `input.module` for backward compatibility.
planOperations(input = {}) {
  if (typeof config.planOperations === 'function') {
    return config.planOperations(input, adapter);
  }
  if (Array.isArray(input.modules)) {
    return input.modules.flatMap(module => {
      const paths = Array.isArray(module.paths) ? module.paths : [];
      return paths.map(sourceRelativePath => adapter.createScaffoldOperation(
        module.id,
        sourceRelativePath,
        input
      ));
    });
  }
  // Single-module fallback path.
  const module = input.module || {};
  const paths = Array.isArray(module.paths) ? module.paths : [];
  return paths.map(sourceRelativePath => adapter.createScaffoldOperation(
    module.id,
    sourceRelativePath,
    input
  ));
},
// Validate planning input; a custom config.validate overrides the shared
// default checks (project root / home dir presence).
validate(input = {}) {
  if (typeof config.validate === 'function') {
    return config.validate(input, adapter);
  }
  return defaultValidateAdapterInput(config, input);
},
};
@@ -84,6 +289,19 @@ function createInstallTargetAdapter(config) {
}
// Public helper API shared by the install target adapters.
module.exports = {
  buildValidationIssue,
  createFlatRuleOperations,
  createInstallTargetAdapter,
  createManagedOperation,
  // Convenience wrapper: a managed operation addressed to an explicit
  // destination path with the given strategy.
  createManagedScaffoldOperation: (moduleId, sourceRelativePath, destinationPath, strategy) => (
    createManagedOperation({
      moduleId,
      sourceRelativePath,
      destinationPath,
      strategy,
    })
  ),
  createNamespacedFlatRuleOperations,
  createRemappedOperation,
  normalizeRelativePath,
};

View File

@@ -34,15 +34,16 @@ function planInstallTargetScaffold(options = {}) {
projectRoot: options.projectRoot || options.repoRoot,
homeDir: options.homeDir,
};
const validationIssues = adapter.validate(planningInput);
const blockingIssues = validationIssues.filter(issue => issue.severity === 'error');
if (blockingIssues.length > 0) {
throw new Error(blockingIssues.map(issue => issue.message).join('; '));
}
const targetRoot = adapter.resolveRoot(planningInput);
const installStatePath = adapter.getInstallStatePath(planningInput);
const operations = modules.flatMap(module => {
const paths = Array.isArray(module.paths) ? module.paths : [];
return paths.map(sourceRelativePath => adapter.createScaffoldOperation(
module.id,
sourceRelativePath,
planningInput
));
const operations = adapter.planOperations({
...planningInput,
modules,
});
return {
@@ -53,6 +54,7 @@ function planInstallTargetScaffold(options = {}) {
},
targetRoot,
installStatePath,
validationIssues,
operations,
};
}

View File

@@ -1,5 +1,7 @@
'use strict';
const { validateInstallModuleIds } = require('../install-manifests');
const LEGACY_INSTALL_TARGETS = ['claude', 'cursor', 'antigravity'];
function dedupeStrings(values) {
@@ -35,7 +37,7 @@ function parseInstallArgs(argv) {
index += 1;
} else if (arg === '--modules') {
const raw = args[index + 1] || '';
parsed.moduleIds = raw.split(',').map(value => value.trim()).filter(Boolean);
parsed.moduleIds = dedupeStrings(raw.split(','));
index += 1;
} else if (arg === '--with') {
const componentId = args[index + 1] || '';
@@ -70,7 +72,9 @@ function normalizeInstallRequest(options = {}) {
? options.config
: null;
const profileId = options.profileId || config?.profileId || null;
const moduleIds = dedupeStrings([...(config?.moduleIds || []), ...(options.moduleIds || [])]);
const moduleIds = validateInstallModuleIds(
dedupeStrings([...(config?.moduleIds || []), ...(options.moduleIds || [])])
);
const includeComponentIds = dedupeStrings([
...(config?.includeComponentIds || []),
...(options.includeComponentIds || []),
@@ -79,29 +83,32 @@ function normalizeInstallRequest(options = {}) {
...(config?.excludeComponentIds || []),
...(options.excludeComponentIds || []),
]);
const languages = Array.isArray(options.languages) ? [...options.languages] : [];
const legacyLanguages = dedupeStrings(dedupeStrings([
...(Array.isArray(options.legacyLanguages) ? options.legacyLanguages : []),
...(Array.isArray(options.languages) ? options.languages : []),
]).map(language => language.toLowerCase()));
const target = options.target || config?.target || 'claude';
const hasManifestBaseSelection = Boolean(profileId) || moduleIds.length > 0 || includeComponentIds.length > 0;
const usingManifestMode = hasManifestBaseSelection || excludeComponentIds.length > 0;
if (usingManifestMode && languages.length > 0) {
if (usingManifestMode && legacyLanguages.length > 0) {
throw new Error(
'Legacy language arguments cannot be combined with --profile, --modules, --with, --without, or manifest config selections'
);
}
if (!options.help && !hasManifestBaseSelection && languages.length === 0) {
if (!options.help && !hasManifestBaseSelection && legacyLanguages.length === 0) {
throw new Error('No install profile, module IDs, included components, or legacy languages were provided');
}
return {
mode: usingManifestMode ? 'manifest' : 'legacy',
mode: usingManifestMode ? 'manifest' : 'legacy-compat',
target,
profileId,
moduleIds,
includeComponentIds,
excludeComponentIds,
languages,
legacyLanguages,
configPath: config?.path || options.configPath || null,
};
}

View File

@@ -1,6 +1,7 @@
'use strict';
const {
createLegacyCompatInstallPlan,
createLegacyInstallPlan,
createManifestInstallPlan,
} = require('../install-executor');
@@ -23,6 +24,17 @@ function createInstallPlanFromRequest(request, options = {}) {
});
}
if (request.mode === 'legacy-compat') {
return createLegacyCompatInstallPlan({
target: request.target,
legacyLanguages: request.legacyLanguages,
projectRoot: options.projectRoot,
homeDir: options.homeDir,
claudeRulesDir: options.claudeRulesDir,
sourceRoot: options.sourceRoot,
});
}
if (request.mode === 'legacy') {
return createLegacyInstallPlan({
target: request.target,

View File

@@ -0,0 +1,260 @@
'use strict';
const fs = require('fs');
const path = require('path');
const provenance = require('./provenance');
const tracker = require('./tracker');
const versioning = require('./versioning');
// Milliseconds in one day; used for the 7-day/30-day rolling windows.
const DAY_IN_MS = 24 * 60 * 60 * 1000;
// Amendment-log statuses treated as "still pending". Entries without a
// string status fall back to checking `event === 'proposal'`.
const PENDING_AMENDMENT_STATUSES = new Set(['pending', 'proposed', 'queued', 'open']);
/**
 * Round a success rate to four decimal places; null (no data) passes through.
 * @param {number|null} value
 * @returns {number|null}
 */
function roundRate(value) {
  if (value === null) {
    return null;
  }
  const scaled = Math.round(value * 10000);
  return scaled / 10000;
}
/**
 * Format a success rate as a whole-number percentage; null renders as 'n/a'.
 * @param {number|null} value
 * @returns {string}
 */
function formatRate(value) {
  return value === null ? 'n/a' : `${Math.round(value * 100)}%`;
}
/**
 * Summarize a health report into total/healthy/declining skill counts.
 * @param {{skills: Array<{declining: boolean}>}} report
 * @returns {{total_skills: number, healthy_skills: number, declining_skills: number}}
 */
function summarizeHealthReport(report) {
  const declining = report.skills.reduce(
    (count, skill) => (skill.declining ? count + 1 : count),
    0
  );
  return {
    total_skills: report.skills.length,
    healthy_skills: report.skills.length - declining,
    declining_skills: declining,
  };
}
/**
 * List skill directories directly under `rootPath` that contain a SKILL.md.
 * A falsy or missing root yields an empty list.
 * @param {string|null} rootPath
 * @returns {Array<{skill_id: string, skill_dir: string}>}
 */
function listSkillsInRoot(rootPath) {
  if (!rootPath || !fs.existsSync(rootPath)) {
    return [];
  }
  const skills = [];
  for (const entry of fs.readdirSync(rootPath, { withFileTypes: true })) {
    if (!entry.isDirectory()) {
      continue;
    }
    const skillDir = path.join(rootPath, entry.name);
    if (fs.existsSync(path.join(skillDir, 'SKILL.md'))) {
      skills.push({ skill_id: entry.name, skill_dir: skillDir });
    }
  }
  return skills;
}
/**
 * Discover skills across the curated, learned, and imported roots.
 * When the same skill id exists in more than one root, the first root in
 * precedence order (curated, learned, imported) wins.
 * @param {object} [options] - optional root overrides
 *   ({ skillsRoot, learnedRoot, importedRoot }) plus provenance options.
 * @returns {Map<string, {skill_id: string, skill_dir: string, skill_type: string}>}
 */
function discoverSkills(options = {}) {
  const roots = provenance.getSkillRoots(options);
  const rootConfigs = [
    { rootPath: options.skillsRoot || roots.curated, skillType: provenance.SKILL_TYPES.CURATED },
    { rootPath: options.learnedRoot || roots.learned, skillType: provenance.SKILL_TYPES.LEARNED },
    { rootPath: options.importedRoot || roots.imported, skillType: provenance.SKILL_TYPES.IMPORTED },
  ];
  const skillsById = new Map();
  for (const { rootPath, skillType } of rootConfigs) {
    for (const skill of listSkillsInRoot(rootPath)) {
      if (!skillsById.has(skill.skill_id)) {
        skillsById.set(skill.skill_id, { ...skill, skill_type: skillType });
      }
    }
  }
  return skillsById;
}
/**
 * Compute the rounded success rate over a set of execution records.
 * @param {Array<{outcome: string}>} records
 * @returns {number|null} rate in [0, 1], or null when there are no records.
 */
function calculateSuccessRate(records) {
  if (records.length === 0) {
    return null;
  }
  let successes = 0;
  for (const record of records) {
    if (record.outcome === 'success') {
      successes += 1;
    }
  }
  return roundRate(successes / records.length);
}
/**
 * Keep records whose `recorded_at` falls within the last `days` days
 * (inclusive of `nowMs`); unparseable or future timestamps are dropped.
 * @param {Array<{recorded_at: string}>} records
 * @param {number} nowMs - reference time in epoch milliseconds.
 * @param {number} days - window length in days.
 * @returns {Array<object>} filtered records.
 */
function filterRecordsWithinDays(records, nowMs, days) {
  const windowStartMs = nowMs - days * DAY_IN_MS;
  return records.filter(record => {
    const recordedAtMs = Date.parse(record.recorded_at);
    if (Number.isNaN(recordedAtMs)) {
      return false;
    }
    return recordedAtMs >= windowStartMs && recordedAtMs <= nowMs;
  });
}
/**
 * Classify the 7d-vs-30d success-rate trend.
 * Missing data on either side is 'stable'; a rounded delta at or beyond the
 * warn threshold is 'worsening' (negative) or 'improving' (positive).
 * @param {number|null} successRate7d
 * @param {number|null} successRate30d
 * @param {number} warnThreshold - minimum delta to count as a trend.
 * @returns {'stable'|'worsening'|'improving'}
 */
function getFailureTrend(successRate7d, successRate30d, warnThreshold) {
  if (successRate7d === null || successRate30d === null) {
    return 'stable';
  }
  const delta = roundRate(successRate7d - successRate30d);
  if (delta <= -warnThreshold) {
    return 'worsening';
  }
  return delta >= warnThreshold ? 'improving' : 'stable';
}
/**
 * Count pending amendment entries in a skill's evolution log.
 * Entries with a string `status` are checked against the pending-status
 * set; legacy entries without one count when `event === 'proposal'`.
 * @param {string|null} skillDir - skill directory, or null for record-only skills.
 * @returns {number}
 */
function countPendingAmendments(skillDir) {
  if (!skillDir) {
    return 0;
  }
  let pending = 0;
  for (const entry of versioning.getEvolutionLog(skillDir, 'amendments')) {
    const isPending = typeof entry.status === 'string'
      ? PENDING_AMENDMENT_STATUSES.has(entry.status)
      : entry.event === 'proposal';
    if (isPending) {
      pending += 1;
    }
  }
  return pending;
}
/**
 * Return the `recorded_at` timestamp of the most recent parseable record.
 * Records with unparseable timestamps are ignored; ties keep the
 * later-positioned record (matching the previous stable-sort behavior).
 * Replaces the prior map/filter/sort pipeline (O(n log n)) with a single
 * O(n) pass.
 * @param {Array<{recorded_at: string}>} records
 * @returns {string|null} latest timestamp, or null when none parse.
 */
function getLastRun(records) {
  let latestTimestamp = null;
  let latestTimeMs = -Infinity;
  for (const record of records) {
    const timeMs = Date.parse(record.recorded_at);
    // `>=` keeps the last occurrence among equal timestamps, as before.
    if (!Number.isNaN(timeMs) && timeMs >= latestTimeMs) {
      latestTimeMs = timeMs;
      latestTimestamp = record.recorded_at;
    }
  }
  return latestTimestamp;
}
/**
 * Build the full skill-health report: discovers skills across all roots,
 * joins them with recorded executions, and computes per-skill 7d/30d
 * success metrics and trend flags.
 * @param {object} [options] - forwarded to tracker/provenance/versioning
 *   helpers; may carry `now` (ISO timestamp) and `warnThreshold` (rate
 *   delta, must be a finite number >= 0).
 * @returns {{generated_at: string, warn_threshold: number, skills: object[]}}
 * @throws {Error} when `now` is unparseable or the threshold is invalid.
 */
function collectSkillHealth(options = {}) {
  const now = options.now || new Date().toISOString();
  const nowMs = Date.parse(now);
  if (Number.isNaN(nowMs)) {
    throw new Error(`Invalid now timestamp: ${now}`);
  }
  // Default threshold is 0.1 when options.warnThreshold is absent/falsy.
  const warnThreshold = typeof options.warnThreshold === 'number'
    ? options.warnThreshold
    : Number(options.warnThreshold || 0.1);
  if (!Number.isFinite(warnThreshold) || warnThreshold < 0) {
    throw new Error(`Invalid warn threshold: ${options.warnThreshold}`);
  }
  const records = tracker.readSkillExecutionRecords(options);
  const skillsById = discoverSkills(options);
  // Group execution records by skill id.
  const recordsBySkill = records.reduce((groupedRecords, record) => {
    if (!groupedRecords.has(record.skill_id)) {
      groupedRecords.set(record.skill_id, []);
    }
    groupedRecords.get(record.skill_id).push(record);
    return groupedRecords;
  }, new Map());
  // Records may reference skills no longer on disk: keep them visible in
  // the report with an unknown type and no directory.
  for (const skillId of recordsBySkill.keys()) {
    if (!skillsById.has(skillId)) {
      skillsById.set(skillId, {
        skill_id: skillId,
        skill_dir: null,
        skill_type: provenance.SKILL_TYPES.UNKNOWN,
      });
    }
  }
  const skills = Array.from(skillsById.values())
    .sort((left, right) => left.skill_id.localeCompare(right.skill_id))
    .map(skill => {
      const skillRecords = recordsBySkill.get(skill.skill_id) || [];
      const records7d = filterRecordsWithinDays(skillRecords, nowMs, 7);
      const records30d = filterRecordsWithinDays(skillRecords, nowMs, 30);
      const successRate7d = calculateSuccessRate(records7d);
      const successRate30d = calculateSuccessRate(records30d);
      const currentVersionNumber = skill.skill_dir ? versioning.getCurrentVersion(skill.skill_dir) : 0;
      const failureTrend = getFailureTrend(successRate7d, successRate30d, warnThreshold);
      return {
        skill_id: skill.skill_id,
        skill_type: skill.skill_type,
        current_version: currentVersionNumber > 0 ? `v${currentVersionNumber}` : null,
        pending_amendments: countPendingAmendments(skill.skill_dir),
        success_rate_7d: successRate7d,
        success_rate_30d: successRate30d,
        failure_trend: failureTrend,
        declining: failureTrend === 'worsening',
        last_run: getLastRun(skillRecords),
        run_count_7d: records7d.length,
        run_count_30d: records30d.length,
      };
    });
  return {
    generated_at: now,
    warn_threshold: warnThreshold,
    skills,
  };
}
/**
 * Render a skill health report.
 *
 * With options.json set, emits pretty-printed JSON. Otherwise emits a
 * fixed-width text table with one row per skill; declining skills are
 * flagged with a leading '!'.
 *
 * @param {object} report - Output of collectSkillHealth().
 * @param {object} [options]
 * @param {boolean} [options.json] - Emit JSON instead of the text table.
 * @returns {string} Rendered report, newline-terminated.
 */
function formatHealthReport(report, options = {}) {
  if (options.json) {
    return `${JSON.stringify(report, null, 2)}\n`;
  }
  if (!report.skills.length) {
    return [
      'ECC skill health',
      `Generated: ${report.generated_at}`,
      '',
      'No skill execution records found.',
      '',
    ].join('\n');
  }
  // Summarize only when there is something to report; previously the
  // summary was computed before the empty-report early return and unused.
  const summary = summarizeHealthReport(report);
  const lines = [
    'ECC skill health',
    `Generated: ${report.generated_at}`,
    `Skills: ${summary.total_skills} total, ${summary.healthy_skills} healthy, ${summary.declining_skills} declining`,
    '',
    'skill version 7d 30d trend pending last run',
    '--------------------------------------------------------------------------',
  ];
  for (const skill of report.skills) {
    const statusLabel = skill.declining ? '!' : ' ';
    lines.push([
      `${statusLabel}${skill.skill_id}`.padEnd(16),
      String(skill.current_version || '-').padEnd(9),
      formatRate(skill.success_rate_7d).padEnd(6),
      formatRate(skill.success_rate_30d).padEnd(6),
      skill.failure_trend.padEnd(11),
      String(skill.pending_amendments).padEnd(9),
      skill.last_run || '-',
    ].join(' '));
  }
  return `${lines.join('\n')}\n`;
}
// Public health-reporting API; summarizeHealthReport and discoverSkills
// are exposed for callers and tests alongside the main entry points.
module.exports = {
  collectSkillHealth,
  discoverSkills,
  formatHealthReport,
  summarizeHealthReport,
};

View File

@@ -0,0 +1,17 @@
'use strict';
// Aggregated entry point for the skill-evolution library.
const provenance = require('./provenance');
const versioning = require('./versioning');
const tracker = require('./tracker');
const health = require('./health');
// Expose both a flattened API (spreads — later modules win on duplicate
// export names) and each module as a namespace for disambiguation.
module.exports = {
  ...provenance,
  ...versioning,
  ...tracker,
  ...health,
  provenance,
  versioning,
  tracker,
  health,
};

View File

@@ -0,0 +1,187 @@
'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const { ensureDir } = require('../utils');
const PROVENANCE_FILE_NAME = '.provenance.json';
const SKILL_TYPES = Object.freeze({
CURATED: 'curated',
LEARNED: 'learned',
IMPORTED: 'imported',
UNKNOWN: 'unknown',
});
/**
 * Resolve the repository root, defaulting to three levels above this module.
 * @param {string} [repoRoot] - Optional explicit root path.
 * @returns {string} Absolute repository root path.
 */
function resolveRepoRoot(repoRoot) {
  return repoRoot
    ? path.resolve(repoRoot)
    : path.resolve(__dirname, '..', '..', '..');
}
// Resolve an explicit home directory, or fall back to the OS home.
function resolveHomeDir(homeDir) {
  if (homeDir) {
    return path.resolve(homeDir);
  }
  return os.homedir();
}
/**
 * Normalize a skill path to its directory, stripping a trailing SKILL.md.
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @returns {string} Absolute skill directory.
 * @throws {Error} when skillPath is missing or not a string.
 */
function normalizeSkillDir(skillPath) {
  const isUsable = typeof skillPath === 'string' && skillPath.length > 0;
  if (!isUsable) {
    throw new Error('skillPath is required');
  }
  const resolved = path.resolve(skillPath);
  return path.basename(resolved) === 'SKILL.md'
    ? path.dirname(resolved)
    : resolved;
}
// True when targetPath equals rootPath or lives beneath it.
function isWithinRoot(targetPath, rootPath) {
  const relative = path.relative(rootPath, targetPath);
  if (relative === '') {
    return true;
  }
  return !relative.startsWith('..') && !path.isAbsolute(relative);
}
/**
 * Compute the root directories for the three skill classes.
 * @param {object} [options] - { repoRoot, homeDir } overrides.
 * @returns {{curated: string, learned: string, imported: string}}
 */
function getSkillRoots(options = {}) {
  const repoRoot = resolveRepoRoot(options.repoRoot);
  const claudeSkillsDir = path.join(resolveHomeDir(options.homeDir), '.claude', 'skills');
  return {
    curated: path.join(repoRoot, 'skills'),
    learned: path.join(claudeSkillsDir, 'learned'),
    imported: path.join(claudeSkillsDir, 'imported'),
  };
}
/**
 * Classify a skill path as curated, learned, imported, or unknown based on
 * which configured root directory contains it (checked in that order).
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @param {object} [options] - { repoRoot, homeDir } overrides.
 * @returns {string} One of SKILL_TYPES.
 */
function classifySkillPath(skillPath, options = {}) {
  const skillDir = normalizeSkillDir(skillPath);
  const roots = getSkillRoots(options);
  const orderedChecks = [
    [roots.curated, SKILL_TYPES.CURATED],
    [roots.learned, SKILL_TYPES.LEARNED],
    [roots.imported, SKILL_TYPES.IMPORTED],
  ];
  for (const [root, skillType] of orderedChecks) {
    if (isWithinRoot(skillDir, root)) {
      return skillType;
    }
  }
  return SKILL_TYPES.UNKNOWN;
}
// Learned and imported skills must carry provenance metadata.
function requiresProvenance(skillPath, options = {}) {
  const skillType = classifySkillPath(skillPath, options);
  switch (skillType) {
    case SKILL_TYPES.LEARNED:
    case SKILL_TYPES.IMPORTED:
      return true;
    default:
      return false;
  }
}
// Path of the provenance metadata file inside a skill directory.
function getProvenancePath(skillPath) {
  const skillDir = normalizeSkillDir(skillPath);
  return path.join(skillDir, PROVENANCE_FILE_NAME);
}
/**
 * Loose timestamp check: a non-empty string that Date.parse understands.
 * NOTE(review): Date.parse also accepts many non-ISO formats, so this is a
 * parseability check rather than strict ISO-8601 validation — confirm that
 * is acceptable for provenance records.
 */
function isIsoTimestamp(value) {
  const isNonEmptyString = typeof value === 'string' && value.trim().length > 0;
  return isNonEmptyString && !Number.isNaN(Date.parse(value));
}
/**
 * Validate a provenance record's shape.
 *
 * Required fields: source (non-empty string), created_at (parseable
 * timestamp), confidence (number in [0, 1]), author (non-empty string).
 *
 * @param {object} record - Candidate provenance record.
 * @returns {{valid: boolean, errors: string[]}} Validation outcome.
 */
function validateProvenance(record) {
  const isPlainObject = record && typeof record === 'object' && !Array.isArray(record);
  if (!isPlainObject) {
    return { valid: false, errors: ['provenance record must be an object'] };
  }
  const errors = [];
  const isNonEmptyString = value => typeof value === 'string' && value.trim().length > 0;
  if (!isNonEmptyString(record.source)) {
    errors.push('source is required');
  }
  const createdAtParses = isNonEmptyString(record.created_at)
    && !Number.isNaN(Date.parse(record.created_at));
  if (!createdAtParses) {
    errors.push('created_at must be an ISO timestamp');
  }
  if (typeof record.confidence !== 'number' || Number.isNaN(record.confidence)) {
    errors.push('confidence must be a number');
  } else if (record.confidence < 0 || record.confidence > 1) {
    errors.push('confidence must be between 0 and 1');
  }
  if (!isNonEmptyString(record.author)) {
    errors.push('author is required');
  }
  return { valid: errors.length === 0, errors };
}
// Throw a descriptive error when a provenance record fails validation.
function assertValidProvenance(record) {
  const { valid, errors } = validateProvenance(record);
  if (valid) {
    return;
  }
  throw new Error(`Invalid provenance metadata: ${errors.join('; ')}`);
}
/**
 * Read and validate the provenance metadata for a skill.
 *
 * Returns null when the file is absent and provenance is optional for the
 * skill's type; throws when it is required (learned/imported skills, or
 * options.required === true) but missing or invalid.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @param {object} [options] - { required, repoRoot, homeDir }.
 * @returns {object|null} Validated provenance record, or null.
 * @throws {Error} when required metadata is missing or fails validation.
 *   NOTE(review): a malformed JSON file surfaces as a raw SyntaxError from
 *   JSON.parse with no file path in the message — confirm callers cope.
 */
function readProvenance(skillPath, options = {}) {
  const skillDir = normalizeSkillDir(skillPath);
  const provenancePath = getProvenancePath(skillDir);
  const provenanceRequired = options.required === true || requiresProvenance(skillDir, options);
  if (!fs.existsSync(provenancePath)) {
    if (provenanceRequired) {
      throw new Error(`Missing provenance metadata for ${skillDir}`);
    }
    return null;
  }
  const record = JSON.parse(fs.readFileSync(provenancePath, 'utf8'));
  assertValidProvenance(record);
  return record;
}
/**
 * Persist provenance metadata for a learned/imported skill.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @param {object} record - Provenance record to write.
 * @param {object} [options] - { repoRoot, homeDir } overrides.
 * @returns {{path: string, record: object}} Written path and a shallow copy.
 * @throws {Error} for curated/unknown skills or invalid records.
 */
function writeProvenance(skillPath, record, options = {}) {
  const skillDir = normalizeSkillDir(skillPath);
  if (!requiresProvenance(skillDir, options)) {
    throw new Error(`Provenance metadata is only required for learned or imported skills: ${skillDir}`);
  }
  assertValidProvenance(record);
  ensureDir(skillDir);
  const provenancePath = getProvenancePath(skillDir);
  const serialized = `${JSON.stringify(record, null, 2)}\n`;
  fs.writeFileSync(provenancePath, serialized, 'utf8');
  return {
    path: provenancePath,
    record: { ...record },
  };
}
// Public provenance API. assertValidProvenance and isIsoTimestamp remain
// module-private helpers.
module.exports = {
  PROVENANCE_FILE_NAME,
  SKILL_TYPES,
  classifySkillPath,
  getProvenancePath,
  getSkillRoots,
  readProvenance,
  requiresProvenance,
  validateProvenance,
  writeProvenance,
};

View File

@@ -0,0 +1,146 @@
'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const { appendFile } = require('../utils');
const VALID_OUTCOMES = new Set(['success', 'failure', 'partial']);
const VALID_FEEDBACK = new Set(['accepted', 'corrected', 'rejected']);
// Resolve an explicit home directory, falling back to the OS home.
function resolveHomeDir(homeDir) {
  if (!homeDir) {
    return os.homedir();
  }
  return path.resolve(homeDir);
}
/**
 * Resolve the JSONL file that stores skill execution records.
 * An explicit options.runsFilePath wins; otherwise the default is
 * ~/.claude/state/skill-runs.jsonl under the (possibly overridden) home.
 */
function getRunsFilePath(options = {}) {
  const explicitPath = options.runsFilePath;
  if (explicitPath) {
    return path.resolve(explicitPath);
  }
  const homeDir = resolveHomeDir(options.homeDir);
  return path.join(homeDir, '.claude', 'state', 'skill-runs.jsonl');
}
/**
 * Coerce an optional numeric field: null/undefined pass through as null;
 * any other value must coerce to a finite number.
 * @throws {Error} when the value is present but not a finite number.
 */
function toNullableNumber(value, fieldName) {
  const isAbsent = value === null || typeof value === 'undefined';
  if (isAbsent) {
    return null;
  }
  const coerced = Number(value);
  if (Number.isFinite(coerced)) {
    return coerced;
  }
  throw new Error(`${fieldName} must be a number`);
}
/**
 * Validate and normalize a raw skill-execution payload into the canonical
 * snake_case record shape. Accepts both snake_case and camelCase fields.
 *
 * @param {object} input - Raw execution payload.
 * @param {object} [options] - { now } timestamp override.
 * @returns {object} Normalized record.
 * @throws {Error} on missing or invalid required fields.
 */
function normalizeExecutionRecord(input, options = {}) {
  const isPlainObject = input && typeof input === 'object' && !Array.isArray(input);
  if (!isPlainObject) {
    throw new Error('skill execution payload must be an object');
  }
  const requireText = (value, message) => {
    if (typeof value !== 'string' || value.trim().length === 0) {
      throw new Error(message);
    }
    return value;
  };
  const skillId = requireText(input.skill_id || input.skillId, 'skill_id is required');
  const skillVersion = requireText(input.skill_version || input.skillVersion, 'skill_version is required');
  const taskDescription = requireText(
    input.task_description || input.task_attempted || input.taskAttempted,
    'task_description is required',
  );
  if (!VALID_OUTCOMES.has(input.outcome)) {
    throw new Error('outcome must be one of success, failure, or partial');
  }
  const userFeedback = input.user_feedback || input.userFeedback || null;
  if (userFeedback !== null && !VALID_FEEDBACK.has(userFeedback)) {
    throw new Error('user_feedback must be accepted, corrected, rejected, or null');
  }
  const recordedAt = input.recorded_at || options.now || new Date().toISOString();
  if (Number.isNaN(Date.parse(recordedAt))) {
    throw new Error('recorded_at must be an ISO timestamp');
  }
  return {
    skill_id: skillId,
    skill_version: skillVersion,
    task_description: taskDescription,
    outcome: input.outcome,
    failure_reason: input.failure_reason || input.failureReason || null,
    tokens_used: toNullableNumber(input.tokens_used ?? input.tokensUsed, 'tokens_used'),
    duration_ms: toNullableNumber(input.duration_ms ?? input.durationMs, 'duration_ms'),
    user_feedback: userFeedback,
    recorded_at: recordedAt,
  };
}
/**
 * Read a JSONL file into an array of parsed rows. A missing file yields
 * an empty array.
 */
function readJsonl(filePath) {
  if (!fs.existsSync(filePath)) {
    return [];
  }
  const rows = [];
  const rawLines = fs.readFileSync(filePath, 'utf8').split('\n');
  for (const rawLine of rawLines) {
    const line = rawLine.trim();
    if (!line) {
      continue;
    }
    try {
      rows.push(JSON.parse(line));
    } catch {
      // Ignore malformed rows so analytics remain best-effort.
    }
  }
  return rows;
}
/**
 * Persist one skill execution record.
 *
 * Prefers a caller-provided state store; any store failure silently falls
 * back to the append-only JSONL log so recording stays best-effort.
 *
 * @param {object} input - Raw payload (snake_case or camelCase fields).
 * @param {object} [options] - { stateStore, runsFilePath, homeDir, now }.
 * @returns {{storage: string, record: object, result?: *, path?: string}}
 *   storage indicates which backend accepted the record.
 * @throws {Error} when the payload fails normalization.
 */
function recordSkillExecution(input, options = {}) {
  const record = normalizeExecutionRecord(input, options);
  if (options.stateStore && typeof options.stateStore.recordSkillExecution === 'function') {
    try {
      const result = options.stateStore.recordSkillExecution(record);
      return {
        storage: 'state-store',
        record,
        result,
      };
    } catch {
      // Fall back to JSONL until the formal state-store exists on this branch.
    }
  }
  const runsFilePath = getRunsFilePath(options);
  appendFile(runsFilePath, `${JSON.stringify(record)}\n`);
  return {
    storage: 'jsonl',
    path: runsFilePath,
    record,
  };
}
/**
 * Load all execution records, preferring a provided state store and
 * falling back to the JSONL log otherwise.
 */
function readSkillExecutionRecords(options = {}) {
  const store = options.stateStore;
  const canUseStore = store && typeof store.listSkillExecutionRecords === 'function';
  if (canUseStore) {
    return store.listSkillExecutionRecords();
  }
  return readJsonl(getRunsFilePath(options));
}
// Public tracker API; the validation sets are exported for reuse in
// callers and tests.
module.exports = {
  VALID_FEEDBACK,
  VALID_OUTCOMES,
  getRunsFilePath,
  normalizeExecutionRecord,
  readSkillExecutionRecords,
  recordSkillExecution,
};

View File

@@ -0,0 +1,237 @@
'use strict';
const fs = require('fs');
const path = require('path');
const { appendFile, ensureDir } = require('../utils');
const VERSION_DIRECTORY_NAME = '.versions';
const EVOLUTION_DIRECTORY_NAME = '.evolution';
const EVOLUTION_LOG_TYPES = Object.freeze([
'observations',
'inspections',
'amendments',
]);
/**
 * Resolve a skill path to its directory, stripping a trailing SKILL.md.
 * @throws {Error} when skillPath is missing or not a string.
 */
function normalizeSkillDir(skillPath) {
  if (typeof skillPath !== 'string' || skillPath === '') {
    throw new Error('skillPath is required');
  }
  const resolved = path.resolve(skillPath);
  if (path.basename(resolved) !== 'SKILL.md') {
    return resolved;
  }
  return path.dirname(resolved);
}
// Absolute path of the SKILL.md file for a skill directory or path.
function getSkillFilePath(skillPath) {
  const skillDir = normalizeSkillDir(skillPath);
  return path.join(skillDir, 'SKILL.md');
}
// Resolve the SKILL.md path and fail fast when the skill file is absent.
function ensureSkillExists(skillPath) {
  const skillFilePath = getSkillFilePath(skillPath);
  if (fs.existsSync(skillFilePath)) {
    return skillFilePath;
  }
  throw new Error(`Skill file not found: ${skillFilePath}`);
}
// Directory holding immutable SKILL.md version snapshots.
function getVersionsDir(skillPath) {
  const skillDir = normalizeSkillDir(skillPath);
  return path.join(skillDir, VERSION_DIRECTORY_NAME);
}
// Directory holding the append-only evolution logs.
function getEvolutionDir(skillPath) {
  const skillDir = normalizeSkillDir(skillPath);
  return path.join(skillDir, EVOLUTION_DIRECTORY_NAME);
}
// Path of one evolution JSONL log; rejects unknown log types.
function getEvolutionLogPath(skillPath, logType) {
  if (!EVOLUTION_LOG_TYPES.includes(logType)) {
    throw new Error(`Unknown evolution log type: ${logType}`);
  }
  const evolutionDir = getEvolutionDir(skillPath);
  return path.join(evolutionDir, `${logType}.jsonl`);
}
/**
 * Create the versioning scaffolding for a skill: the .versions/ and
 * .evolution/ directories plus one empty JSONL file per evolution log
 * type. Existing log files are left untouched.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @returns {{versionsDir: string, evolutionDir: string}}
 * @throws {Error} when SKILL.md does not exist.
 */
function ensureSkillVersioning(skillPath) {
  ensureSkillExists(skillPath);
  const versionsDir = getVersionsDir(skillPath);
  const evolutionDir = getEvolutionDir(skillPath);
  ensureDir(versionsDir);
  ensureDir(evolutionDir);
  for (const logType of EVOLUTION_LOG_TYPES) {
    const logPath = getEvolutionLogPath(skillPath, logType);
    // Seed empty logs so readers never have to special-case missing files.
    if (!fs.existsSync(logPath)) {
      fs.writeFileSync(logPath, '', 'utf8');
    }
  }
  return {
    versionsDir,
    evolutionDir,
  };
}
/**
 * Extract the numeric version from a snapshot filename like "v3.md".
 * @returns {number|null} The version number, or null for any other name.
 */
function parseVersionNumber(fileName) {
  const match = /^v(\d+)\.md$/.exec(fileName);
  return match ? Number(match[1]) : null;
}
/**
 * List snapshot versions for a skill, sorted ascending by version number.
 * Files that do not match the vN.md pattern are ignored; a missing
 * .versions directory yields an empty list.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @returns {Array<{version: number, path: string, created_at: string}>}
 *   NOTE(review): created_at is derived from the snapshot file's mtime, so
 *   it changes if snapshots are ever touched or copied — confirm acceptable.
 */
function listVersions(skillPath) {
  const versionsDir = getVersionsDir(skillPath);
  if (!fs.existsSync(versionsDir)) {
    return [];
  }
  return fs.readdirSync(versionsDir)
    .map(fileName => {
      const version = parseVersionNumber(fileName);
      if (version === null) {
        return null;
      }
      const filePath = path.join(versionsDir, fileName);
      const stats = fs.statSync(filePath);
      return {
        version,
        path: filePath,
        created_at: stats.mtime.toISOString(),
      };
    })
    .filter(Boolean)
    .sort((left, right) => left.version - right.version);
}
/**
 * Current version number of a skill: 0 when SKILL.md is missing, 1 when it
 * exists with no snapshots yet, otherwise the newest snapshot's number.
 */
function getCurrentVersion(skillPath) {
  if (!fs.existsSync(getSkillFilePath(skillPath))) {
    return 0;
  }
  const versions = listVersions(skillPath);
  return versions.length === 0 ? 1 : versions[versions.length - 1].version;
}
// Append one record to an evolution log (scaffolding is created first);
// returns a shallow copy of the record.
function appendEvolutionRecord(skillPath, logType, record) {
  ensureSkillVersioning(skillPath);
  const logPath = getEvolutionLogPath(skillPath, logType);
  appendFile(logPath, `${JSON.stringify(record)}\n`);
  return { ...record };
}
/**
 * Parse a JSONL file into rows; missing files yield an empty array.
 */
function readJsonl(filePath) {
  if (!fs.existsSync(filePath)) {
    return [];
  }
  const parsedRows = [];
  for (const rawLine of fs.readFileSync(filePath, 'utf8').split('\n')) {
    const trimmed = rawLine.trim();
    if (trimmed.length === 0) {
      continue;
    }
    try {
      parsedRows.push(JSON.parse(trimmed));
    } catch {
      // Ignore malformed rows so the log remains append-only and resilient.
    }
  }
  return parsedRows;
}
// Read every row from one evolution log for a skill.
function getEvolutionLog(skillPath, logType) {
  const logPath = getEvolutionLogPath(skillPath, logType);
  return readJsonl(logPath);
}
/**
 * Snapshot the current SKILL.md content as the next version (vN.md) and
 * log an 'applied' snapshot amendment.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @param {object} [options] - { timestamp, reason, author }.
 * @returns {{version: number, path: string, created_at: string}}
 * @throws {Error} when SKILL.md does not exist.
 */
function createVersion(skillPath, options = {}) {
  const skillFilePath = ensureSkillExists(skillPath);
  ensureSkillVersioning(skillPath);
  const versions = listVersions(skillPath);
  // Next version is one past the highest existing snapshot (1 when none).
  const nextVersion = versions.length === 0 ? 1 : versions[versions.length - 1].version + 1;
  const snapshotPath = path.join(getVersionsDir(skillPath), `v${nextVersion}.md`);
  const skillContent = fs.readFileSync(skillFilePath, 'utf8');
  const createdAt = options.timestamp || new Date().toISOString();
  fs.writeFileSync(snapshotPath, skillContent, 'utf8');
  appendEvolutionRecord(skillPath, 'amendments', {
    event: 'snapshot',
    version: nextVersion,
    reason: options.reason || null,
    author: options.author || null,
    status: 'applied',
    created_at: createdAt,
  });
  return {
    version: nextVersion,
    path: snapshotPath,
    created_at: createdAt,
  };
}
/**
 * Restore SKILL.md to a prior snapshot's content, then record the restored
 * content as a brand-new version plus a 'rollback' amendment.
 *
 * NOTE(review): the pre-rollback SKILL.md content is overwritten before any
 * snapshot of it is taken, so edits made since the last createVersion()
 * are lost by a rollback — confirm this is intended.
 *
 * @param {string} skillPath - Skill directory or SKILL.md path.
 * @param {number|string} targetVersion - Positive integer version to restore.
 * @param {object} [options] - { timestamp, reason, author }.
 * @returns {{version: number, path: string, created_at: string}} Info for
 *   the newly created (rolled-back) version.
 * @throws {Error} for invalid/missing target versions or a missing skill.
 */
function rollbackTo(skillPath, targetVersion, options = {}) {
  const normalizedTargetVersion = Number(targetVersion);
  if (!Number.isInteger(normalizedTargetVersion) || normalizedTargetVersion <= 0) {
    throw new Error(`Invalid target version: ${targetVersion}`);
  }
  ensureSkillExists(skillPath);
  ensureSkillVersioning(skillPath);
  const targetPath = path.join(getVersionsDir(skillPath), `v${normalizedTargetVersion}.md`);
  if (!fs.existsSync(targetPath)) {
    throw new Error(`Version not found: v${normalizedTargetVersion}`);
  }
  // Capture the version we are leaving before overwriting SKILL.md.
  const currentVersion = getCurrentVersion(skillPath);
  const targetContent = fs.readFileSync(targetPath, 'utf8');
  fs.writeFileSync(getSkillFilePath(skillPath), targetContent, 'utf8');
  // The rolled-back content becomes a new snapshot (history stays linear).
  const createdVersion = createVersion(skillPath, {
    timestamp: options.timestamp,
    reason: options.reason || `rollback to v${normalizedTargetVersion}`,
    author: options.author || null,
  });
  appendEvolutionRecord(skillPath, 'amendments', {
    event: 'rollback',
    version: createdVersion.version,
    source_version: currentVersion,
    target_version: normalizedTargetVersion,
    reason: options.reason || null,
    author: options.author || null,
    status: 'applied',
    created_at: options.timestamp || new Date().toISOString(),
  });
  return createdVersion;
}
// Public versioning API: snapshot management, rollback, and the
// append-only evolution logs.
module.exports = {
  EVOLUTION_DIRECTORY_NAME,
  EVOLUTION_LOG_TYPES,
  VERSION_DIRECTORY_NAME,
  appendEvolutionRecord,
  createVersion,
  ensureSkillVersioning,
  getCurrentVersion,
  getEvolutionDir,
  getEvolutionLog,
  getEvolutionLogPath,
  getVersionsDir,
  listVersions,
  rollbackTo,
};

113
scripts/skills-health.js Normal file
View File

@@ -0,0 +1,113 @@
#!/usr/bin/env node
'use strict';
const { collectSkillHealth, formatHealthReport } = require('./lib/skill-evolution/health');
/**
 * Print CLI usage to stdout. The template literal below is user-facing
 * output, so its exact content and spacing are significant.
 */
function showHelp() {
  console.log(`
Usage: node scripts/skills-health.js [options]
Options:
  --json Emit machine-readable JSON
  --skills-root <path> Override curated skills root
  --learned-root <path> Override learned skills root
  --imported-root <path> Override imported skills root
  --home <path> Override home directory for learned/imported skill roots
  --runs-file <path> Override skill run JSONL path
  --now <timestamp> Override current time for deterministic reports
  --warn-threshold <n> Decline sensitivity threshold (default: 0.1)
  --help Show this help text
`);
}
/**
 * Fetch the value that follows the flag at argv[index], rejecting a missing
 * value or one that looks like another long flag.
 * @throws {Error} when no usable value follows the flag.
 */
function requireValue(argv, index, argName) {
  const candidate = argv[index + 1];
  const isUsable = Boolean(candidate) && !candidate.startsWith('--');
  if (!isUsable) {
    throw new Error(`Missing value for ${argName}`);
  }
  return candidate;
}
/**
 * Parse CLI arguments into an options object.
 *
 * Boolean flags: --json, --help/-h. Value flags consume the next token,
 * which must exist and must not look like another long flag.
 *
 * @param {string[]} argv - Arguments after the script name.
 * @returns {object} Parsed options.
 * @throws {Error} on unknown flags or missing values.
 */
function parseArgs(argv) {
  // Null prototype so inherited Object.prototype keys never match a flag.
  const valueFlags = {
    __proto__: null,
    '--skills-root': value => ({ skillsRoot: value }),
    '--learned-root': value => ({ learnedRoot: value }),
    '--imported-root': value => ({ importedRoot: value }),
    '--home': value => ({ homeDir: value }),
    '--runs-file': value => ({ runsFilePath: value }),
    '--now': value => ({ now: value }),
    '--warn-threshold': value => ({ warnThreshold: Number(value) }),
  };
  const options = {};
  let index = 0;
  while (index < argv.length) {
    const arg = argv[index];
    if (arg === '--json') {
      options.json = true;
      index += 1;
      continue;
    }
    if (arg === '--help' || arg === '-h') {
      options.help = true;
      index += 1;
      continue;
    }
    const applyFlag = valueFlags[arg];
    if (!applyFlag) {
      throw new Error(`Unknown argument: ${arg}`);
    }
    const value = argv[index + 1];
    if (!value || value.startsWith('--')) {
      throw new Error(`Missing value for ${arg}`);
    }
    Object.assign(options, applyFlag(value));
    index += 2;
  }
  return options;
}
/**
 * CLI entry point: parse argv, collect the health report, and print it.
 * Exits 0 on success or --help, 1 with an error message on any failure.
 */
function main() {
  try {
    const options = parseArgs(process.argv.slice(2));
    if (options.help) {
      showHelp();
      process.exit(0);
    }
    const report = collectSkillHealth(options);
    process.stdout.write(formatHealthReport(report, { json: options.json }));
  } catch (error) {
    // Single catch covers both argument parsing and report collection.
    process.stderr.write(`Error: ${error.message}\n`);
    process.exit(1);
  }
}
main();

View File

@@ -76,6 +76,10 @@ fi
# ─────────────────────────────────────────────
# Lightweight config and automated session guards
# ─────────────────────────────────────────────
#
# IMPORTANT: keep these guards above detect-project.sh.
# Sourcing detect-project.sh creates project-scoped directories and updates
# projects.json, so automated sessions must return before that point.
CONFIG_DIR="${HOME}/.claude/homunculus"

View File

@@ -147,6 +147,71 @@ function withPrependedPath(binDir, env = {}) {
};
}
/**
 * Assert that a hook run left no project-detection artifacts in homeDir:
 * neither a projects.json registry nor any project directories.
 */
function assertNoProjectDetectionSideEffects(homeDir, testName) {
  const homunculusDir = path.join(homeDir, '.claude', 'homunculus');
  const registryPath = path.join(homunculusDir, 'projects.json');
  assert.ok(!fs.existsSync(registryPath), `${testName} should not create projects.json`);
  const projectsDir = path.join(homunculusDir, 'projects');
  let projectEntries = [];
  if (fs.existsSync(projectsDir)) {
    projectEntries = fs.readdirSync(projectsDir)
      .filter(entry => fs.statSync(path.join(projectsDir, entry)).isDirectory());
  }
  assert.strictEqual(projectEntries.length, 0, `${testName} should not create project directories`);
}
/**
 * Run observe.sh with a post-tool payload under an isolated HOME and assert
 * that the automated-session guard short-circuits before project detection
 * can write anything (no projects.json, no project directories).
 *
 * @param {object} testCase - { name, env, payload?, cwdSuffix? }.
 */
async function assertObserveSkipBeforeProjectDetection(testCase) {
  const observePath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'hooks', 'observe.sh');
  const homeDir = createTestDir();
  const projectDir = createTestDir();
  try {
    // Optionally run from a nested cwd (used by the skip-path cases).
    const cwd = testCase.cwdSuffix ? path.join(projectDir, testCase.cwdSuffix) : projectDir;
    fs.mkdirSync(cwd, { recursive: true });
    const payload = JSON.stringify({
      tool_name: 'Bash',
      tool_input: { command: 'echo hello' },
      tool_response: 'ok',
      session_id: `session-${testCase.name.replace(/[^a-z0-9]+/gi, '-')}`,
      cwd,
      ...(testCase.payload || {})
    });
    // USERPROFILE mirrors HOME so the isolation also holds on Windows shells.
    const result = await runShellScript(observePath, ['post'], payload, {
      HOME: homeDir,
      USERPROFILE: homeDir,
      ...testCase.env
    }, projectDir);
    assert.strictEqual(result.code, 0, `${testCase.name} should exit successfully, stderr: ${result.stderr}`);
    assertNoProjectDetectionSideEffects(homeDir, testCase.name);
  } finally {
    cleanupTestDir(homeDir);
    cleanupTestDir(projectDir);
  }
}
/**
 * Copy run-all.js with its tests directory rewired to tempRoot/tests, run
 * it synchronously under node, and return its exit code and output.
 *
 * @param {string} tempRoot - Directory that contains a `tests/` tree.
 * @returns {{code: number, stdout: string, stderr: string}}
 */
function runPatchedRunAll(tempRoot) {
  const wrapperPath = path.join(tempRoot, 'run-all-wrapper.js');
  const tempTestsDir = path.join(tempRoot, 'tests');
  let source = fs.readFileSync(path.join(__dirname, '..', 'run-all.js'), 'utf8');
  // Point the runner at the temp tests dir instead of its own directory.
  source = source.replace('const testsDir = __dirname;', `const testsDir = ${JSON.stringify(tempTestsDir)};`);
  fs.writeFileSync(wrapperPath, source);
  const result = spawnSync('node', [wrapperPath], {
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe'],
    timeout: 15000,
  });
  // status is null when the child was killed (e.g. timeout); treat as failure.
  return {
    code: result.status ?? 1,
    stdout: result.stdout || '',
    stderr: result.stderr || '',
  };
}
// Test suite
async function runTests() {
console.log('\n=== Testing Hook Scripts ===\n');
@@ -389,22 +454,28 @@ async function runTests() {
if (
await asyncTest('includes session ID in filename', async () => {
const isoHome = path.join(os.tmpdir(), `ecc-session-id-${Date.now()}`);
const testSessionId = 'test-session-abc12345';
const expectedShortId = 'abc12345'; // Last 8 chars
// Run with custom session ID
await runScript(path.join(scriptsDir, 'session-end.js'), '', {
CLAUDE_SESSION_ID: testSessionId
});
try {
await runScript(path.join(scriptsDir, 'session-end.js'), '', {
HOME: isoHome,
USERPROFILE: isoHome,
CLAUDE_SESSION_ID: testSessionId
});
// Check if session file was created with session ID
// Use local time to match the script's getDateString() function
const sessionsDir = path.join(os.homedir(), '.claude', 'sessions');
const now = new Date();
const today = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}-${String(now.getDate()).padStart(2, '0')}`;
const sessionFile = path.join(sessionsDir, `${today}-${expectedShortId}-session.tmp`);
// Check if session file was created with session ID
// Use local time to match the script's getDateString() function
const sessionsDir = path.join(isoHome, '.claude', 'sessions');
const now = new Date();
const today = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, '0')}-${String(now.getDate()).padStart(2, '0')}`;
const sessionFile = path.join(sessionsDir, `${today}-${expectedShortId}-session.tmp`);
assert.ok(fs.existsSync(sessionFile), `Session file should exist: ${sessionFile}`);
assert.ok(fs.existsSync(sessionFile), `Session file should exist: ${sessionFile}`);
} finally {
fs.rmSync(isoHome, { recursive: true, force: true });
}
})
)
passed++;
@@ -1660,6 +1731,21 @@ async function runTests() {
passed++;
else failed++;
if (
test('SessionEnd marker hook is async and cleanup-safe', () => {
const hooksPath = path.join(__dirname, '..', '..', 'hooks', 'hooks.json');
const hooks = JSON.parse(fs.readFileSync(hooksPath, 'utf8'));
const sessionEndHooks = hooks.hooks.SessionEnd.flatMap(entry => entry.hooks);
const markerHook = sessionEndHooks.find(hook => hook.command.includes('session-end-marker.js'));
assert.ok(markerHook, 'SessionEnd should invoke session-end-marker.js');
assert.strictEqual(markerHook.async, true, 'SessionEnd marker hook should run async during cleanup');
assert.ok(Number.isInteger(markerHook.timeout) && markerHook.timeout > 0, 'SessionEnd marker hook should define a timeout');
})
)
passed++;
else failed++;
if (
test('all hook commands use node or approved shell wrappers', () => {
const hooksPath = path.join(__dirname, '..', '..', 'hooks', 'hooks.json');
@@ -2292,75 +2378,44 @@ async function runTests() {
}
})) passed++; else failed++;
if (await asyncTest('observe.sh skips automated sessions before project detection side effects', async () => {
const observePath = path.join(__dirname, '..', '..', 'skills', 'continuous-learning-v2', 'hooks', 'observe.sh');
const cases = [
{
name: 'non-cli entrypoint',
env: { CLAUDE_CODE_ENTRYPOINT: 'mcp' }
if (await asyncTest('observe.sh skips non-cli entrypoints before project detection side effects', async () => {
await assertObserveSkipBeforeProjectDetection({
name: 'non-cli entrypoint',
env: { CLAUDE_CODE_ENTRYPOINT: 'mcp' }
});
})) passed++; else failed++;
if (await asyncTest('observe.sh skips minimal hook profile before project detection side effects', async () => {
await assertObserveSkipBeforeProjectDetection({
name: 'minimal hook profile',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_HOOK_PROFILE: 'minimal' }
});
})) passed++; else failed++;
if (await asyncTest('observe.sh skips cooperative skip env before project detection side effects', async () => {
await assertObserveSkipBeforeProjectDetection({
name: 'cooperative skip env',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_SKIP_OBSERVE: '1' }
});
})) passed++; else failed++;
if (await asyncTest('observe.sh skips subagent payloads before project detection side effects', async () => {
await assertObserveSkipBeforeProjectDetection({
name: 'subagent payload',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli' },
payload: { agent_id: 'agent-123' }
});
})) passed++; else failed++;
if (await asyncTest('observe.sh skips configured observer-session paths before project detection side effects', async () => {
await assertObserveSkipBeforeProjectDetection({
name: 'cwd skip path',
env: {
CLAUDE_CODE_ENTRYPOINT: 'cli',
ECC_OBSERVE_SKIP_PATHS: ' observer-sessions , .claude-mem '
},
{
name: 'minimal hook profile',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_HOOK_PROFILE: 'minimal' }
},
{
name: 'cooperative skip env',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli', ECC_SKIP_OBSERVE: '1' }
},
{
name: 'subagent payload',
env: { CLAUDE_CODE_ENTRYPOINT: 'cli' },
payload: { agent_id: 'agent-123' }
},
{
name: 'cwd skip path',
env: {
CLAUDE_CODE_ENTRYPOINT: 'cli',
ECC_OBSERVE_SKIP_PATHS: ' observer-sessions , .claude-mem '
},
cwdSuffix: path.join('observer-sessions', 'worker')
}
];
for (const testCase of cases) {
const homeDir = createTestDir();
const projectDir = createTestDir();
try {
const cwd = testCase.cwdSuffix ? path.join(projectDir, testCase.cwdSuffix) : projectDir;
fs.mkdirSync(cwd, { recursive: true });
const payload = JSON.stringify({
tool_name: 'Bash',
tool_input: { command: 'echo hello' },
tool_response: 'ok',
session_id: `session-${testCase.name.replace(/[^a-z0-9]+/gi, '-')}`,
cwd,
...(testCase.payload || {})
});
const result = await runShellScript(observePath, ['post'], payload, {
HOME: homeDir,
...testCase.env
}, projectDir);
assert.strictEqual(result.code, 0, `${testCase.name} should exit successfully, stderr: ${result.stderr}`);
const homunculusDir = path.join(homeDir, '.claude', 'homunculus');
const registryPath = path.join(homunculusDir, 'projects.json');
const projectsDir = path.join(homunculusDir, 'projects');
assert.ok(!fs.existsSync(registryPath), `${testCase.name} should not create projects.json`);
const projectEntries = fs.existsSync(projectsDir)
? fs.readdirSync(projectsDir).filter(entry => fs.statSync(path.join(projectsDir, entry)).isDirectory())
: [];
assert.strictEqual(projectEntries.length, 0, `${testCase.name} should not create project directories`);
} finally {
cleanupTestDir(homeDir);
cleanupTestDir(projectDir);
}
}
cwdSuffix: path.join('observer-sessions', 'worker')
});
})) passed++; else failed++;
if (await asyncTest('matches .tsx extension for type checking', async () => {
@@ -3320,6 +3375,32 @@ async function runTests() {
passed++;
else failed++;
if (
await asyncTest('test runner discovers nested tests via tests/**/*.test.js glob', async () => {
const testRoot = createTestDir();
const testsDir = path.join(testRoot, 'tests');
const nestedDir = path.join(testsDir, 'nested');
fs.mkdirSync(nestedDir, { recursive: true });
fs.writeFileSync(path.join(testsDir, 'top.test.js'), "console.log('Passed: 1\\nFailed: 0');\n");
fs.writeFileSync(path.join(nestedDir, 'deep.test.js'), "console.log('Passed: 2\\nFailed: 0');\n");
fs.writeFileSync(path.join(nestedDir, 'ignore.js'), "console.log('Passed: 999\\nFailed: 999');\n");
try {
const result = runPatchedRunAll(testRoot);
assert.strictEqual(result.code, 0, `run-all wrapper should succeed, stderr: ${result.stderr}`);
assert.ok(result.stdout.includes('Running top.test.js'), 'Should run the top-level test');
assert.ok(result.stdout.includes('Running nested/deep.test.js'), 'Should run nested .test.js files');
assert.ok(!result.stdout.includes('ignore.js'), 'Should ignore non-.test.js files');
assert.ok(result.stdout.includes('Total Tests: 3'), `Should aggregate nested test totals, got: ${result.stdout}`);
} finally {
cleanupTestDir(testRoot);
}
})
)
passed++;
else failed++;
// ── Round 32: post-edit-typecheck special characters & check-console-log ──
console.log('\nRound 32: post-edit-typecheck (special character paths):');

View File

@@ -10,6 +10,8 @@ const path = require('path');
const {
buildDoctorReport,
discoverInstalledStates,
repairInstalledStates,
uninstallInstalledStates,
} = require('../../scripts/lib/install-lifecycle');
const {
createInstallState,
@@ -350,6 +352,385 @@ function runTests() {
}
})) passed++; else failed++;
if (test('repair restores render-template outputs from recorded rendered content', () => {
const homeDir = createTempDir('install-lifecycle-home-');
const projectRoot = createTempDir('install-lifecycle-project-');
try {
const targetRoot = path.join(homeDir, '.claude');
const statePath = path.join(targetRoot, 'ecc', 'install-state.json');
const destinationPath = path.join(targetRoot, 'plugin.json');
fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
fs.writeFileSync(destinationPath, '{"drifted":true}\n');
writeState(statePath, {
adapter: { id: 'claude-home', target: 'claude', kind: 'home' },
targetRoot,
installStatePath: statePath,
request: {
profile: null,
modules: [],
legacyLanguages: ['typescript'],
legacyMode: true,
},
resolution: {
selectedModules: ['legacy-claude-rules'],
skippedModules: [],
},
operations: [
{
kind: 'render-template',
moduleId: 'platform-configs',
sourceRelativePath: '.claude-plugin/plugin.json.template',
destinationPath,
strategy: 'render-template',
ownership: 'managed',
scaffoldOnly: false,
renderedContent: '{"ok":true}\n',
},
],
source: {
repoVersion: CURRENT_PACKAGE_VERSION,
repoCommit: 'abc123',
manifestVersion: CURRENT_MANIFEST_VERSION,
},
});
const result = repairInstalledStates({
repoRoot: REPO_ROOT,
homeDir,
projectRoot,
targets: ['claude'],
});
assert.strictEqual(result.results[0].status, 'repaired');
assert.strictEqual(fs.readFileSync(destinationPath, 'utf8'), '{"ok":true}\n');
} finally {
cleanup(homeDir);
cleanup(projectRoot);
}
})) passed++; else failed++;
// Verifies that `repairInstalledStates` re-applies a recorded merge-json
// operation: the managed payload keys are merged back into the destination
// file while pre-existing unrelated keys are left untouched.
if (test('repair reapplies merge-json operations without clobbering unrelated keys', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  const projectRoot = createTempDir('install-lifecycle-project-');
  try {
    const targetRoot = path.join(projectRoot, '.cursor');
    const statePath = path.join(targetRoot, 'ecc-install-state.json');
    const destinationPath = path.join(targetRoot, 'hooks.json');
    fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
    // Seed the destination with a user-owned key plus a stale managed value
    // (`nested.enabled: false`) that repair is expected to correct.
    fs.writeFileSync(destinationPath, JSON.stringify({
      existing: true,
      nested: {
        enabled: false,
      },
    }, null, 2));
    // Recorded install state describing a single managed merge-json operation.
    writeState(statePath, {
      adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
      targetRoot,
      installStatePath: statePath,
      request: {
        profile: null,
        modules: [],
        legacyLanguages: ['typescript'],
        legacyMode: true,
      },
      resolution: {
        selectedModules: ['legacy-cursor-install'],
        skippedModules: [],
      },
      operations: [
        {
          kind: 'merge-json',
          moduleId: 'platform-configs',
          sourceRelativePath: '.cursor/hooks.json',
          destinationPath,
          strategy: 'merge-json',
          ownership: 'managed',
          scaffoldOnly: false,
          mergePayload: {
            nested: {
              enabled: true,
            },
            managed: 'yes',
          },
        },
      ],
      source: {
        repoVersion: CURRENT_PACKAGE_VERSION,
        repoCommit: 'abc123',
        manifestVersion: CURRENT_MANIFEST_VERSION,
      },
    });
    const result = repairInstalledStates({
      repoRoot: REPO_ROOT,
      homeDir,
      projectRoot,
      targets: ['cursor'],
    });
    assert.strictEqual(result.results[0].status, 'repaired');
    // Managed keys are merged in; the pre-existing `existing` key survives.
    assert.deepStrictEqual(JSON.parse(fs.readFileSync(destinationPath, 'utf8')), {
      existing: true,
      nested: {
        enabled: true,
      },
      managed: 'yes',
    });
  } finally {
    cleanup(homeDir);
    cleanup(projectRoot);
  }
})) passed++; else failed++;
// Repair should delete files that reappeared after a managed `remove`
// operation, re-enforcing the recorded install plan.
if (test('repair re-applies managed remove operations when files reappear', () => {
  const home = createTempDir('install-lifecycle-home-');
  const project = createTempDir('install-lifecycle-project-');
  try {
    const cursorRoot = path.join(project, '.cursor');
    const stateFile = path.join(cursorRoot, 'ecc-install-state.json');
    const removedFile = path.join(cursorRoot, 'legacy-note.txt');
    // Recreate the file that the recorded plan says must stay removed.
    fs.mkdirSync(path.dirname(removedFile), { recursive: true });
    fs.writeFileSync(removedFile, 'stale');
    const removeOperation = {
      kind: 'remove',
      moduleId: 'platform-configs',
      sourceRelativePath: '.cursor/legacy-note.txt',
      destinationPath: removedFile,
      strategy: 'remove',
      ownership: 'managed',
      scaffoldOnly: false,
    };
    writeState(stateFile, {
      adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
      targetRoot: cursorRoot,
      installStatePath: stateFile,
      request: {
        profile: null,
        modules: [],
        legacyLanguages: ['typescript'],
        legacyMode: true,
      },
      resolution: {
        selectedModules: ['legacy-cursor-install'],
        skippedModules: [],
      },
      operations: [removeOperation],
      source: {
        repoVersion: CURRENT_PACKAGE_VERSION,
        repoCommit: 'abc123',
        manifestVersion: CURRENT_MANIFEST_VERSION,
      },
    });
    const outcome = repairInstalledStates({
      repoRoot: REPO_ROOT,
      homeDir: home,
      projectRoot: project,
      targets: ['cursor'],
    });
    // Repair succeeds and the stale file is gone again.
    assert.strictEqual(outcome.results[0].status, 'repaired');
    assert.ok(!fs.existsSync(removedFile));
  } finally {
    cleanup(home);
    cleanup(project);
  }
})) passed++; else failed++;
// Verifies that uninstall reverses a merge-json operation by restoring the
// destination file from the `previousContent` snapshot recorded at install
// time, and that the install-state file itself is removed afterwards.
if (test('uninstall restores JSON merged files from recorded previous content', () => {
  const homeDir = createTempDir('install-lifecycle-home-');
  const projectRoot = createTempDir('install-lifecycle-project-');
  try {
    const targetRoot = path.join(projectRoot, '.cursor');
    const statePath = path.join(targetRoot, 'ecc-install-state.json');
    const destinationPath = path.join(targetRoot, 'hooks.json');
    fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
    // Current on-disk state: user key plus the managed key added by install.
    fs.writeFileSync(destinationPath, JSON.stringify({
      existing: true,
      managed: true,
    }, null, 2));
    writeState(statePath, {
      adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
      targetRoot,
      installStatePath: statePath,
      request: {
        profile: null,
        modules: [],
        legacyLanguages: ['typescript'],
        legacyMode: true,
      },
      resolution: {
        selectedModules: ['legacy-cursor-install'],
        skippedModules: [],
      },
      operations: [
        {
          kind: 'merge-json',
          moduleId: 'platform-configs',
          sourceRelativePath: '.cursor/hooks.json',
          destinationPath,
          strategy: 'merge-json',
          ownership: 'managed',
          scaffoldOnly: false,
          mergePayload: {
            managed: true,
          },
          // Pre-install file content; uninstall should restore exactly this.
          previousContent: JSON.stringify({
            existing: true,
          }, null, 2),
        },
      ],
      source: {
        repoVersion: CURRENT_PACKAGE_VERSION,
        repoCommit: 'abc123',
        manifestVersion: CURRENT_MANIFEST_VERSION,
      },
    });
    const result = uninstallInstalledStates({
      homeDir,
      projectRoot,
      targets: ['cursor'],
    });
    assert.strictEqual(result.results[0].status, 'uninstalled');
    // Only the pre-install content remains, and the state file is deleted.
    assert.deepStrictEqual(JSON.parse(fs.readFileSync(destinationPath, 'utf8')), {
      existing: true,
    });
    assert.ok(!fs.existsSync(statePath));
  } finally {
    cleanup(homeDir);
    cleanup(projectRoot);
  }
})) passed++; else failed++;
// Verifies that uninstall reverts a render-template operation: the rendered
// file is replaced by its recorded `previousContent` and the install-state
// file is removed. This case builds its fixture through the
// createInstallState/writeInstallState helpers instead of raw writeState.
if (test('uninstall restores rendered template files from recorded previous content', () => {
  const tempDir = createTempDir('install-lifecycle-');
  try {
    const targetRoot = path.join(tempDir, '.claude');
    const statePath = path.join(targetRoot, 'ecc', 'install-state.json');
    const destinationPath = path.join(targetRoot, 'plugin.json');
    fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
    // On-disk file currently holds the rendered template output.
    fs.writeFileSync(destinationPath, '{"generated":true}\n');
    writeInstallState(statePath, createInstallState({
      adapter: { id: 'claude-home', target: 'claude', kind: 'home' },
      targetRoot,
      installStatePath: statePath,
      request: {
        profile: 'core',
        modules: ['platform-configs'],
        includeComponents: [],
        excludeComponents: [],
        legacyLanguages: [],
        legacyMode: false,
      },
      resolution: {
        selectedModules: ['platform-configs'],
        skippedModules: [],
      },
      source: {
        repoVersion: '1.8.0',
        repoCommit: 'abc123',
        manifestVersion: 1,
      },
      operations: [
        {
          kind: 'render-template',
          moduleId: 'platform-configs',
          sourceRelativePath: '.claude/plugin.json.template',
          destinationPath,
          strategy: 'render-template',
          ownership: 'managed',
          scaffoldOnly: false,
          renderedContent: '{"generated":true}\n',
          // Pre-install content that uninstall is expected to restore.
          previousContent: '{"existing":true}\n',
        },
      ],
    }));
    // The same temp dir serves as both home and project root for this test.
    const result = uninstallInstalledStates({
      homeDir: tempDir,
      projectRoot: tempDir,
      targets: ['claude'],
    });
    assert.strictEqual(result.summary.uninstalledCount, 1);
    assert.strictEqual(fs.readFileSync(destinationPath, 'utf8'), '{"existing":true}\n');
    assert.ok(!fs.existsSync(statePath));
  } finally {
    cleanup(tempDir);
  }
})) passed++; else failed++;
// A managed `remove` operation with recorded previous content should be
// undone on uninstall: the deleted file comes back with its original bytes.
if (test('uninstall restores files removed during install when previous content is recorded', () => {
  const home = createTempDir('install-lifecycle-home-');
  const project = createTempDir('install-lifecycle-project-');
  try {
    const cursorRoot = path.join(project, '.cursor');
    const stateFile = path.join(cursorRoot, 'ecc-install-state.json');
    const restoredFile = path.join(cursorRoot, 'legacy-note.txt');
    fs.mkdirSync(cursorRoot, { recursive: true });
    writeState(stateFile, {
      adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
      targetRoot: cursorRoot,
      installStatePath: stateFile,
      request: {
        profile: null,
        modules: [],
        legacyLanguages: ['typescript'],
        legacyMode: true,
      },
      resolution: {
        selectedModules: ['legacy-cursor-install'],
        skippedModules: [],
      },
      operations: [
        {
          kind: 'remove',
          moduleId: 'platform-configs',
          sourceRelativePath: '.cursor/legacy-note.txt',
          destinationPath: restoredFile,
          strategy: 'remove',
          ownership: 'managed',
          scaffoldOnly: false,
          // Captured before install removed the file; uninstall restores it.
          previousContent: 'restore me\n',
        },
      ],
      source: {
        repoVersion: CURRENT_PACKAGE_VERSION,
        repoCommit: 'abc123',
        manifestVersion: CURRENT_MANIFEST_VERSION,
      },
    });
    const outcome = uninstallInstalledStates({
      homeDir: home,
      projectRoot: project,
      targets: ['cursor'],
    });
    assert.strictEqual(outcome.results[0].status, 'uninstalled');
    assert.strictEqual(fs.readFileSync(restoredFile, 'utf8'), 'restore me\n');
    assert.ok(!fs.existsSync(stateFile));
  } finally {
    cleanup(home);
    cleanup(project);
  }
})) passed++; else failed++;
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0);
}

View File

@@ -10,9 +10,12 @@ const path = require('path');
const {
loadInstallManifests,
listInstallComponents,
listLegacyCompatibilityLanguages,
listInstallModules,
listInstallProfiles,
resolveInstallPlan,
resolveLegacyCompatibilitySelection,
validateInstallModuleIds,
} = require('../../scripts/lib/install-manifests');
function test(name, fn) {
@@ -75,6 +78,15 @@ function runTests() {
'Should include capability:security');
})) passed++; else failed++;
if (test('lists supported legacy compatibility languages', () => {
const languages = listLegacyCompatibilityLanguages();
assert.ok(languages.includes('typescript'));
assert.ok(languages.includes('python'));
assert.ok(languages.includes('go'));
assert.ok(languages.includes('golang'));
assert.ok(languages.includes('kotlin'));
})) passed++; else failed++;
if (test('resolves a real project profile with target-specific skips', () => {
const projectRoot = '/workspace/app';
const plan = resolveInstallPlan({ profileId: 'developer', target: 'cursor', projectRoot });
@@ -97,6 +109,18 @@ function runTests() {
);
})) passed++; else failed++;
if (test('resolves antigravity profiles by skipping incompatible dependency trees', () => {
const projectRoot = '/workspace/app';
const plan = resolveInstallPlan({ profileId: 'core', target: 'antigravity', projectRoot });
assert.deepStrictEqual(plan.selectedModuleIds, ['rules-core', 'agents-core', 'commands-core']);
assert.ok(plan.skippedModuleIds.includes('hooks-runtime'));
assert.ok(plan.skippedModuleIds.includes('platform-configs'));
assert.ok(plan.skippedModuleIds.includes('workflow-quality'));
assert.strictEqual(plan.targetAdapterId, 'antigravity-project');
assert.strictEqual(plan.targetRoot, path.join(projectRoot, '.agent'));
})) passed++; else failed++;
if (test('resolves explicit modules with dependency expansion', () => {
const plan = resolveInstallPlan({ moduleIds: ['security'] });
assert.ok(plan.selectedModuleIds.includes('security'), 'Should include requested module');
@@ -106,6 +130,50 @@ function runTests() {
'Should include nested dependency');
})) passed++; else failed++;
if (test('validates explicit module IDs against the real manifest catalog', () => {
const moduleIds = validateInstallModuleIds(['security', 'security', 'platform-configs']);
assert.deepStrictEqual(moduleIds, ['security', 'platform-configs']);
assert.throws(
() => validateInstallModuleIds(['ghost-module']),
/Unknown install module: ghost-module/
);
})) passed++; else failed++;
if (test('resolves legacy compatibility selections into manifest module IDs', () => {
const selection = resolveLegacyCompatibilitySelection({
target: 'cursor',
legacyLanguages: ['typescript', 'go', 'golang'],
});
assert.deepStrictEqual(selection.legacyLanguages, ['typescript', 'go', 'golang']);
assert.ok(selection.moduleIds.includes('rules-core'));
assert.ok(selection.moduleIds.includes('agents-core'));
assert.ok(selection.moduleIds.includes('commands-core'));
assert.ok(selection.moduleIds.includes('hooks-runtime'));
assert.ok(selection.moduleIds.includes('platform-configs'));
assert.ok(selection.moduleIds.includes('workflow-quality'));
assert.ok(selection.moduleIds.includes('framework-language'));
})) passed++; else failed++;
if (test('keeps antigravity legacy compatibility selections target-safe', () => {
const selection = resolveLegacyCompatibilitySelection({
target: 'antigravity',
legacyLanguages: ['typescript'],
});
assert.deepStrictEqual(selection.moduleIds, ['rules-core', 'agents-core', 'commands-core']);
})) passed++; else failed++;
if (test('rejects unknown legacy compatibility languages', () => {
assert.throws(
() => resolveLegacyCompatibilitySelection({
target: 'cursor',
legacyLanguages: ['brainfuck'],
}),
/Unknown legacy language: brainfuck/
);
})) passed++; else failed++;
if (test('resolves included and excluded user-facing components', () => {
const plan = resolveInstallPlan({
profileId: 'core',
@@ -146,7 +214,7 @@ function runTests() {
);
})) passed++; else failed++;
if (test('throws when a dependency does not support the requested target', () => {
if (test('skips a requested module when its dependency chain does not support the target', () => {
const repoRoot = createTestRepo();
writeJson(path.join(repoRoot, 'manifests', 'install-modules.json'), {
version: 1,
@@ -182,10 +250,9 @@ function runTests() {
}
});
assert.throws(
() => resolveInstallPlan({ repoRoot, profileId: 'core', target: 'claude' }),
/does not support target claude/
);
const plan = resolveInstallPlan({ repoRoot, profileId: 'core', target: 'claude' });
assert.deepStrictEqual(plan.selectedModuleIds, []);
assert.deepStrictEqual(plan.skippedModuleIds, ['parent']);
cleanupTestRepo(repoRoot);
})) passed++; else failed++;

View File

@@ -33,6 +33,7 @@ function runTests() {
'scripts/install-apply.js',
'--target', 'cursor',
'--profile', 'developer',
'--modules', 'platform-configs, workflow-quality ,platform-configs',
'--with', 'lang:typescript',
'--without', 'capability:media',
'--config', 'ecc-install.json',
@@ -43,6 +44,7 @@ function runTests() {
assert.strictEqual(parsed.target, 'cursor');
assert.strictEqual(parsed.profileId, 'developer');
assert.strictEqual(parsed.configPath, 'ecc-install.json');
assert.deepStrictEqual(parsed.moduleIds, ['platform-configs', 'workflow-quality']);
assert.deepStrictEqual(parsed.includeComponentIds, ['lang:typescript']);
assert.deepStrictEqual(parsed.excludeComponentIds, ['capability:media']);
assert.strictEqual(parsed.dryRun, true);
@@ -58,9 +60,9 @@ function runTests() {
languages: ['typescript', 'python']
});
assert.strictEqual(request.mode, 'legacy');
assert.strictEqual(request.mode, 'legacy-compat');
assert.strictEqual(request.target, 'claude');
assert.deepStrictEqual(request.languages, ['typescript', 'python']);
assert.deepStrictEqual(request.legacyLanguages, ['typescript', 'python']);
assert.deepStrictEqual(request.moduleIds, []);
assert.strictEqual(request.profileId, null);
})) passed++; else failed++;
@@ -80,7 +82,7 @@ function runTests() {
assert.strictEqual(request.profileId, 'developer');
assert.deepStrictEqual(request.includeComponentIds, ['lang:typescript']);
assert.deepStrictEqual(request.excludeComponentIds, ['capability:media']);
assert.deepStrictEqual(request.languages, []);
assert.deepStrictEqual(request.legacyLanguages, []);
})) passed++; else failed++;
if (test('merges config-backed component selections with CLI overrides', () => {
@@ -111,6 +113,20 @@ function runTests() {
assert.strictEqual(request.configPath, '/workspace/app/ecc-install.json');
})) passed++; else failed++;
if (test('validates explicit module IDs against the manifest catalog', () => {
assert.throws(
() => normalizeInstallRequest({
target: 'cursor',
profileId: null,
moduleIds: ['ghost-module'],
includeComponentIds: [],
excludeComponentIds: [],
languages: [],
}),
/Unknown install module: ghost-module/
);
})) passed++; else failed++;
if (test('rejects mixing legacy languages with manifest flags', () => {
assert.throws(
() => normalizeInstallRequest({

View File

@@ -117,6 +117,56 @@ function runTests() {
}
})) passed++; else failed++;
if (test('deep-clones nested operation metadata for lifecycle-managed operations', () => {
const operation = {
kind: 'merge-json',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/hooks.json',
destinationPath: '/repo/.cursor/hooks.json',
strategy: 'merge-json',
ownership: 'managed',
scaffoldOnly: false,
mergePayload: {
nested: {
enabled: true,
},
},
previousValue: {
nested: {
enabled: false,
},
},
};
const state = createInstallState({
adapter: { id: 'cursor-project' },
targetRoot: '/repo/.cursor',
installStatePath: '/repo/.cursor/ecc-install-state.json',
request: {
profile: null,
modules: ['platform-configs'],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: ['platform-configs'],
skippedModules: [],
},
operations: [operation],
source: {
repoVersion: '1.9.0',
repoCommit: 'abc123',
manifestVersion: 1,
},
});
operation.mergePayload.nested.enabled = false;
operation.previousValue.nested.enabled = true;
assert.strictEqual(state.operations[0].mergePayload.nested.enabled, true);
assert.strictEqual(state.operations[0].previousValue.nested.enabled, false);
})) passed++; else failed++;
if (test('rejects invalid install-state payloads on read', () => {
const testDir = createTestDir();
const statePath = path.join(testDir, 'ecc-install-state.json');
@@ -132,6 +182,48 @@ function runTests() {
}
})) passed++; else failed++;
if (test('rejects unexpected properties and missing required request fields', () => {
const testDir = createTestDir();
const statePath = path.join(testDir, 'ecc-install-state.json');
try {
fs.writeFileSync(statePath, JSON.stringify({
schemaVersion: 'ecc.install.v1',
installedAt: '2026-03-13T00:00:00Z',
unexpected: true,
target: {
id: 'cursor-project',
root: '/repo/.cursor',
installStatePath: '/repo/.cursor/ecc-install-state.json',
},
request: {
modules: [],
includeComponents: [],
excludeComponents: [],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: [],
skippedModules: [],
},
source: {
repoVersion: '1.9.0',
repoCommit: 'abc123',
manifestVersion: 1,
},
operations: [],
}, null, 2));
assert.throws(
() => readInstallState(statePath),
/Invalid install-state/
);
} finally {
cleanupTestDir(testDir);
}
})) passed++; else failed++;
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0);
}

View File

@@ -60,7 +60,7 @@ function runTests() {
})) passed++; else failed++;
if (test('plans scaffold operations and flattens native target roots', () => {
const repoRoot = '/repo/ecc';
const repoRoot = path.join(__dirname, '..', '..');
const projectRoot = '/workspace/app';
const modules = [
{
@@ -85,15 +85,124 @@ function runTests() {
assert.strictEqual(plan.installStatePath, path.join(projectRoot, '.cursor', 'ecc-install-state.json'));
const flattened = plan.operations.find(operation => operation.sourceRelativePath === '.cursor');
const preserved = plan.operations.find(operation => operation.sourceRelativePath === 'rules');
const preserved = plan.operations.find(operation => (
operation.sourceRelativePath === path.join('rules', 'common', 'coding-style.md')
));
assert.ok(flattened, 'Should include .cursor scaffold operation');
assert.strictEqual(flattened.strategy, 'sync-root-children');
assert.strictEqual(flattened.destinationPath, path.join(projectRoot, '.cursor'));
assert.ok(preserved, 'Should include rules scaffold operation');
assert.strictEqual(preserved.strategy, 'preserve-relative-path');
assert.strictEqual(preserved.destinationPath, path.join(projectRoot, '.cursor', 'rules'));
assert.ok(preserved, 'Should include flattened rules scaffold operations');
assert.strictEqual(preserved.strategy, 'flatten-copy');
assert.strictEqual(
preserved.destinationPath,
path.join(projectRoot, '.cursor', 'rules', 'common-coding-style.md')
);
})) passed++; else failed++;
if (test('plans cursor rules with flat namespaced filenames to avoid rule collisions', () => {
const repoRoot = path.join(__dirname, '..', '..');
const projectRoot = '/workspace/app';
const plan = planInstallTargetScaffold({
target: 'cursor',
repoRoot,
projectRoot,
modules: [
{
id: 'rules-core',
paths: ['rules'],
},
],
});
assert.ok(
plan.operations.some(operation => (
operation.sourceRelativePath === path.join('rules', 'common', 'coding-style.md')
&& operation.destinationPath === path.join(projectRoot, '.cursor', 'rules', 'common-coding-style.md')
)),
'Should flatten common rules into namespaced files'
);
assert.ok(
plan.operations.some(operation => (
operation.sourceRelativePath === path.join('rules', 'typescript', 'testing.md')
&& operation.destinationPath === path.join(projectRoot, '.cursor', 'rules', 'typescript-testing.md')
)),
'Should flatten language rules into namespaced files'
);
assert.ok(
!plan.operations.some(operation => (
operation.destinationPath === path.join(projectRoot, '.cursor', 'rules', 'common', 'coding-style.md')
)),
'Should not preserve nested rule directories for cursor installs'
);
})) passed++; else failed++;
if (test('plans antigravity remaps for workflows, skills, and flat rules', () => {
const repoRoot = path.join(__dirname, '..', '..');
const projectRoot = '/workspace/app';
const plan = planInstallTargetScaffold({
target: 'antigravity',
repoRoot,
projectRoot,
modules: [
{
id: 'commands-core',
paths: ['commands'],
},
{
id: 'agents-core',
paths: ['agents'],
},
{
id: 'rules-core',
paths: ['rules'],
},
],
});
assert.ok(
plan.operations.some(operation => (
operation.sourceRelativePath === 'commands'
&& operation.destinationPath === path.join(projectRoot, '.agent', 'workflows')
)),
'Should remap commands into workflows'
);
assert.ok(
plan.operations.some(operation => (
operation.sourceRelativePath === 'agents'
&& operation.destinationPath === path.join(projectRoot, '.agent', 'skills')
)),
'Should remap agents into skills'
);
assert.ok(
plan.operations.some(operation => (
operation.sourceRelativePath === path.join('rules', 'common', 'coding-style.md')
&& operation.destinationPath === path.join(projectRoot, '.agent', 'rules', 'common-coding-style.md')
)),
'Should flatten common rules for antigravity'
);
})) passed++; else failed++;
if (test('exposes validate and planOperations on adapters', () => {
const claudeAdapter = getInstallTargetAdapter('claude');
const cursorAdapter = getInstallTargetAdapter('cursor');
assert.strictEqual(typeof claudeAdapter.planOperations, 'function');
assert.strictEqual(typeof claudeAdapter.validate, 'function');
assert.deepStrictEqual(
claudeAdapter.validate({ homeDir: '/Users/example', repoRoot: '/repo/ecc' }),
[]
);
assert.strictEqual(typeof cursorAdapter.planOperations, 'function');
assert.strictEqual(typeof cursorAdapter.validate, 'function');
assert.deepStrictEqual(
cursorAdapter.validate({ projectRoot: '/workspace/app', repoRoot: '/repo/ecc' }),
[]
);
})) passed++; else failed++;
if (test('throws on unknown target adapter', () => {

View File

@@ -0,0 +1,536 @@
/**
* Tests for skill evolution helpers.
*
* Run with: node tests/lib/skill-evolution.test.js
*/
const assert = require('assert');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawnSync } = require('child_process');
const provenance = require('../../scripts/lib/skill-evolution/provenance');
const versioning = require('../../scripts/lib/skill-evolution/versioning');
const tracker = require('../../scripts/lib/skill-evolution/tracker');
const health = require('../../scripts/lib/skill-evolution/health');
const skillEvolution = require('../../scripts/lib/skill-evolution');
const HEALTH_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'skills-health.js');
/**
 * Run a single named test case.
 * Logs a check mark on success, or a cross plus the error message on failure.
 * @param {string} name - Human-readable test label.
 * @param {Function} fn - Test body; throwing marks the test as failed.
 * @returns {boolean} true when `fn` completed without throwing.
 */
function test(name, fn) {
  let ok = true;
  let caught = null;
  try {
    fn();
  } catch (error) {
    ok = false;
    caught = error;
  }
  if (ok) {
    console.log(`  \u2713 ${name}`);
  } else {
    console.log(`  \u2717 ${name}`);
    console.log(`    Error: ${caught.message}`);
  }
  return ok;
}
/**
 * Create a fresh, uniquely named temporary directory.
 * @param {string} prefix - Directory-name prefix passed to mkdtemp.
 * @returns {string} Absolute path of the newly created directory.
 */
function createTempDir(prefix) {
  const base = path.join(os.tmpdir(), prefix);
  return fs.mkdtempSync(base);
}
/**
 * Recursively delete a temporary directory, tolerating missing paths.
 * @param {string} dirPath - Directory to remove.
 */
function cleanupTempDir(dirPath) {
  fs.rmSync(dirPath, { force: true, recursive: true });
}
/**
 * Scaffold a skill directory containing a single SKILL.md file.
 * @param {string} skillRoot - Parent directory for the skill.
 * @param {string} name - Skill directory name.
 * @param {string} content - Markdown body written to SKILL.md.
 * @returns {string} Path of the created skill directory.
 */
function createSkill(skillRoot, name, content) {
  const dir = path.join(skillRoot, name);
  const manifest = path.join(dir, 'SKILL.md');
  fs.mkdirSync(dir, { recursive: true });
  fs.writeFileSync(manifest, content);
  return dir;
}
/**
 * Serialize rows as JSON Lines and write them to `filePath`.
 * NOTE: despite the name, this uses `writeFileSync`, so any existing file
 * content is replaced rather than appended to — later fixtures in this file
 * reuse the runs file and rely on that overwrite behavior.
 * @param {string} filePath - Destination JSONL file (parent dirs are created).
 * @param {Array<object>} rows - Records, serialized one JSON object per line.
 */
function appendJsonl(filePath, rows) {
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  const payload = rows.map(row => JSON.stringify(row)).join('\n');
  fs.writeFileSync(filePath, `${payload}\n`);
}
/**
 * Read and parse a JSON file from disk.
 * @param {string} filePath - File to read (decoded as UTF-8).
 * @returns {*} The parsed JSON value.
 */
function readJson(filePath) {
  const raw = fs.readFileSync(filePath, 'utf8');
  return JSON.parse(raw);
}
/**
 * Invoke the skills-health CLI synchronously with the current Node binary.
 * @param {string[]} args - CLI arguments appended after the script path.
 * @param {{ env?: object }} [options] - Optional extra environment variables,
 *   layered on top of the current process environment.
 * @returns {object} spawnSync result with stdout/stderr decoded as UTF-8.
 */
function runCli(args, options = {}) {
  const extraEnv = options.env || {};
  const spawnOptions = {
    encoding: 'utf8',
    env: { ...process.env, ...extraEnv },
  };
  return spawnSync(process.execPath, [HEALTH_SCRIPT, ...args], spawnOptions);
}
function runTests() {
console.log('\n=== Testing skill evolution ===\n');
let passed = 0;
let failed = 0;
const repoRoot = createTempDir('skill-evolution-repo-');
const homeDir = createTempDir('skill-evolution-home-');
const skillsRoot = path.join(repoRoot, 'skills');
const learnedRoot = path.join(homeDir, '.claude', 'skills', 'learned');
const importedRoot = path.join(homeDir, '.claude', 'skills', 'imported');
const runsFile = path.join(homeDir, '.claude', 'state', 'skill-runs.jsonl');
const now = '2026-03-15T12:00:00.000Z';
fs.mkdirSync(skillsRoot, { recursive: true });
fs.mkdirSync(learnedRoot, { recursive: true });
fs.mkdirSync(importedRoot, { recursive: true });
try {
console.log('Provenance:');
if (test('classifies curated, learned, and imported skill directories', () => {
const curatedSkillDir = createSkill(skillsRoot, 'curated-alpha', '# Curated\n');
const learnedSkillDir = createSkill(learnedRoot, 'learned-beta', '# Learned\n');
const importedSkillDir = createSkill(importedRoot, 'imported-gamma', '# Imported\n');
const roots = provenance.getSkillRoots({ repoRoot, homeDir });
assert.strictEqual(roots.curated, skillsRoot);
assert.strictEqual(roots.learned, learnedRoot);
assert.strictEqual(roots.imported, importedRoot);
assert.strictEqual(
provenance.classifySkillPath(curatedSkillDir, { repoRoot, homeDir }),
provenance.SKILL_TYPES.CURATED
);
assert.strictEqual(
provenance.classifySkillPath(learnedSkillDir, { repoRoot, homeDir }),
provenance.SKILL_TYPES.LEARNED
);
assert.strictEqual(
provenance.classifySkillPath(importedSkillDir, { repoRoot, homeDir }),
provenance.SKILL_TYPES.IMPORTED
);
assert.strictEqual(
provenance.requiresProvenance(curatedSkillDir, { repoRoot, homeDir }),
false
);
assert.strictEqual(
provenance.requiresProvenance(learnedSkillDir, { repoRoot, homeDir }),
true
);
})) passed++; else failed++;
if (test('writes and validates provenance metadata for non-curated skills', () => {
const importedSkillDir = createSkill(importedRoot, 'imported-delta', '# Imported\n');
const provenanceRecord = {
source: 'https://example.com/skills/imported-delta',
created_at: '2026-03-15T10:00:00.000Z',
confidence: 0.86,
author: 'external-importer',
};
const writeResult = provenance.writeProvenance(importedSkillDir, provenanceRecord, {
repoRoot,
homeDir,
});
assert.strictEqual(writeResult.path, path.join(importedSkillDir, '.provenance.json'));
assert.deepStrictEqual(readJson(writeResult.path), provenanceRecord);
assert.deepStrictEqual(
provenance.readProvenance(importedSkillDir, { repoRoot, homeDir }),
provenanceRecord
);
assert.throws(
() => provenance.writeProvenance(importedSkillDir, {
source: 'bad',
created_at: '2026-03-15T10:00:00.000Z',
author: 'external-importer',
}, { repoRoot, homeDir }),
/confidence/
);
assert.throws(
() => provenance.readProvenance(path.join(learnedRoot, 'missing-provenance'), {
repoRoot,
homeDir,
required: true,
}),
/Missing provenance metadata/
);
})) passed++; else failed++;
if (test('exports the consolidated module surface from index.js', () => {
assert.strictEqual(skillEvolution.provenance, provenance);
assert.strictEqual(skillEvolution.versioning, versioning);
assert.strictEqual(skillEvolution.tracker, tracker);
assert.strictEqual(skillEvolution.health, health);
assert.strictEqual(typeof skillEvolution.collectSkillHealth, 'function');
assert.strictEqual(typeof skillEvolution.recordSkillExecution, 'function');
})) passed++; else failed++;
console.log('\nVersioning:');
if (test('creates version snapshots and evolution logs for a skill', () => {
const skillDir = createSkill(skillsRoot, 'alpha', '# Alpha v1\n');
const versionOne = versioning.createVersion(skillDir, {
timestamp: '2026-03-15T11:00:00.000Z',
reason: 'bootstrap',
author: 'observer',
});
assert.strictEqual(versionOne.version, 1);
assert.ok(fs.existsSync(path.join(skillDir, '.versions', 'v1.md')));
assert.ok(fs.existsSync(path.join(skillDir, '.evolution', 'observations.jsonl')));
assert.ok(fs.existsSync(path.join(skillDir, '.evolution', 'inspections.jsonl')));
assert.ok(fs.existsSync(path.join(skillDir, '.evolution', 'amendments.jsonl')));
assert.strictEqual(versioning.getCurrentVersion(skillDir), 1);
fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# Alpha v2\n');
const versionTwo = versioning.createVersion(skillDir, {
timestamp: '2026-03-16T11:00:00.000Z',
reason: 'accepted-amendment',
author: 'observer',
});
assert.strictEqual(versionTwo.version, 2);
assert.deepStrictEqual(
versioning.listVersions(skillDir).map(entry => entry.version),
[1, 2]
);
const amendments = versioning.getEvolutionLog(skillDir, 'amendments');
assert.strictEqual(amendments.length, 2);
assert.strictEqual(amendments[0].event, 'snapshot');
assert.strictEqual(amendments[1].version, 2);
})) passed++; else failed++;
if (test('rolls back to a previous snapshot without losing history', () => {
const skillDir = path.join(skillsRoot, 'alpha');
const rollback = versioning.rollbackTo(skillDir, 1, {
timestamp: '2026-03-17T11:00:00.000Z',
author: 'maintainer',
reason: 'restore known-good version',
});
assert.strictEqual(rollback.version, 3);
assert.strictEqual(
fs.readFileSync(path.join(skillDir, 'SKILL.md'), 'utf8'),
'# Alpha v1\n'
);
assert.deepStrictEqual(
versioning.listVersions(skillDir).map(entry => entry.version),
[1, 2, 3]
);
assert.strictEqual(versioning.getCurrentVersion(skillDir), 3);
const amendments = versioning.getEvolutionLog(skillDir, 'amendments');
const rollbackEntry = amendments[amendments.length - 1];
assert.strictEqual(rollbackEntry.event, 'rollback');
assert.strictEqual(rollbackEntry.target_version, 1);
assert.strictEqual(rollbackEntry.version, 3);
})) passed++; else failed++;
console.log('\nTracking:');
if (test('records skill execution rows to JSONL fallback storage', () => {
const result = tracker.recordSkillExecution({
skill_id: 'alpha',
skill_version: 'v3',
task_description: 'Fix flaky tests',
outcome: 'partial',
failure_reason: 'One integration test still flakes',
tokens_used: 812,
duration_ms: 4400,
user_feedback: 'corrected',
recorded_at: '2026-03-15T11:30:00.000Z',
}, {
runsFilePath: runsFile,
});
assert.strictEqual(result.storage, 'jsonl');
assert.strictEqual(result.path, runsFile);
const records = tracker.readSkillExecutionRecords({ runsFilePath: runsFile });
assert.strictEqual(records.length, 1);
assert.strictEqual(records[0].skill_id, 'alpha');
assert.strictEqual(records[0].task_description, 'Fix flaky tests');
assert.strictEqual(records[0].outcome, 'partial');
})) passed++; else failed++;
if (test('falls back to JSONL when a state-store adapter is unavailable', () => {
const result = tracker.recordSkillExecution({
skill_id: 'beta',
skill_version: 'v1',
task_description: 'Import external skill',
outcome: 'success',
failure_reason: null,
tokens_used: 215,
duration_ms: 900,
user_feedback: 'accepted',
recorded_at: '2026-03-15T11:35:00.000Z',
}, {
runsFilePath: runsFile,
stateStore: {
recordSkillExecution() {
throw new Error('state store offline');
},
},
});
assert.strictEqual(result.storage, 'jsonl');
assert.strictEqual(tracker.readSkillExecutionRecords({ runsFilePath: runsFile }).length, 2);
})) passed++; else failed++;
if (test('ignores malformed JSONL rows when reading execution records', () => {
const malformedRunsFile = path.join(homeDir, '.claude', 'state', 'malformed-skill-runs.jsonl');
fs.writeFileSync(
malformedRunsFile,
`${JSON.stringify({
skill_id: 'alpha',
skill_version: 'v3',
task_description: 'Good row',
outcome: 'success',
failure_reason: null,
tokens_used: 1,
duration_ms: 1,
user_feedback: 'accepted',
recorded_at: '2026-03-15T11:45:00.000Z',
})}\n{bad-json}\n`,
'utf8'
);
const records = tracker.readSkillExecutionRecords({ runsFilePath: malformedRunsFile });
assert.strictEqual(records.length, 1);
assert.strictEqual(records[0].skill_id, 'alpha');
})) passed++; else failed++;
if (test('preserves zero-valued telemetry fields during normalization', () => {
const record = tracker.normalizeExecutionRecord({
skill_id: 'zero-telemetry',
skill_version: 'v1',
task_description: 'No-op hook',
outcome: 'success',
tokens_used: 0,
duration_ms: 0,
user_feedback: 'accepted',
recorded_at: '2026-03-15T11:40:00.000Z',
});
assert.strictEqual(record.tokens_used, 0);
assert.strictEqual(record.duration_ms, 0);
})) passed++; else failed++;
console.log('\nHealth:');
if (test('computes per-skill health metrics and flags declining skills', () => {
  // Seed a learned "beta" skill with provenance metadata and one snapshot version.
  const betaDir = createSkill(learnedRoot, 'beta', '# Beta v1\n');
  provenance.writeProvenance(
    betaDir,
    {
      source: 'observer://session/123',
      created_at: '2026-03-14T10:00:00.000Z',
      confidence: 0.72,
      author: 'observer',
    },
    { repoRoot, homeDir }
  );
  versioning.createVersion(betaDir, {
    timestamp: '2026-03-14T11:00:00.000Z',
    author: 'observer',
    reason: 'bootstrap',
  });
  // One pending amendment for "alpha" so its pending_amendments count is non-zero.
  appendJsonl(path.join(skillsRoot, 'alpha', '.evolution', 'amendments.jsonl'), [
    {
      event: 'proposal',
      status: 'pending',
      created_at: '2026-03-15T07:00:00.000Z',
    },
  ]);
  // Builds a run record with the exact key order used by the serialized JSONL rows.
  const runRecord = (skillId, version, description, outcome, failureReason, feedback, recordedAt, tokens, durationMs) => ({
    skill_id: skillId,
    skill_version: version,
    task_description: description,
    outcome,
    failure_reason: failureReason,
    tokens_used: tokens,
    duration_ms: durationMs,
    user_feedback: feedback,
    recorded_at: recordedAt,
  });
  // Alpha: recent 7d window is 1 success / 1 failure; 30d window adds two successes.
  // Beta: its only failure is old, so its trend should read as improving.
  appendJsonl(runsFile, [
    runRecord('alpha', 'v3', 'Recent success', 'success', null, 'accepted', '2026-03-14T10:00:00.000Z', 100, 1000),
    runRecord('alpha', 'v3', 'Recent failure', 'failure', 'Regression', 'rejected', '2026-03-13T10:00:00.000Z', 100, 1000),
    runRecord('alpha', 'v2', 'Prior success', 'success', null, 'accepted', '2026-03-06T10:00:00.000Z', 100, 1000),
    runRecord('alpha', 'v1', 'Older success', 'success', null, 'accepted', '2026-02-24T10:00:00.000Z', 100, 1000),
    runRecord('beta', 'v1', 'Recent success', 'success', null, 'accepted', '2026-03-15T09:00:00.000Z', 90, 800),
    runRecord('beta', 'v1', 'Older failure', 'failure', 'Bad import', 'corrected', '2026-02-20T09:00:00.000Z', 90, 800),
  ]);
  const healthReport = health.collectSkillHealth({
    repoRoot,
    homeDir,
    runsFilePath: runsFile,
    now,
    warnThreshold: 0.1,
  });
  const alphaHealth = healthReport.skills.find(entry => entry.skill_id === 'alpha');
  const betaHealth = healthReport.skills.find(entry => entry.skill_id === 'beta');
  assert.ok(alphaHealth);
  assert.ok(betaHealth);
  assert.strictEqual(alphaHealth.current_version, 'v3');
  assert.strictEqual(alphaHealth.pending_amendments, 1);
  assert.strictEqual(alphaHealth.success_rate_7d, 0.5);
  assert.strictEqual(alphaHealth.success_rate_30d, 0.75);
  assert.strictEqual(alphaHealth.failure_trend, 'worsening');
  assert.strictEqual(alphaHealth.declining, true);
  assert.strictEqual(betaHealth.failure_trend, 'improving');
  // The aggregate summary should count every discovered skill, not just the seeded ones.
  const reportSummary = health.summarizeHealthReport(healthReport);
  assert.deepStrictEqual(reportSummary, {
    total_skills: 6,
    healthy_skills: 5,
    declining_skills: 1,
  });
  // Human-readable output must surface the declining skill and the summary line.
  const textReport = health.formatHealthReport(healthReport, { json: false });
  assert.match(textReport, /alpha/);
  assert.match(textReport, /worsening/);
  assert.match(
    textReport,
    new RegExp(`Skills: ${reportSummary.total_skills} total, ${reportSummary.healthy_skills} healthy, ${reportSummary.declining_skills} declining`)
  );
})) passed++; else failed++;
if (test('treats an unsnapshotted SKILL.md as v1 and orders last_run by actual time', () => {
  // A skill with no version snapshots should still report current_version "v1".
  const gammaDir = createSkill(skillsRoot, 'gamma', '# Gamma v1\n');
  const offsetRunsFile = path.join(homeDir, '.claude', 'state', 'offset-skill-runs.jsonl');
  // The "+02:00" timestamp sorts later as a string but is earlier than the UTC
  // run once normalized, so last_run must reflect actual time, not lexical order.
  appendJsonl(offsetRunsFile, [
    {
      skill_id: 'gamma',
      skill_version: 'v1',
      task_description: 'Offset timestamp run',
      outcome: 'success',
      failure_reason: null,
      tokens_used: 10,
      duration_ms: 100,
      user_feedback: 'accepted',
      recorded_at: '2026-03-15T00:00:00+02:00',
    },
    {
      skill_id: 'gamma',
      skill_version: 'v1',
      task_description: 'UTC timestamp run',
      outcome: 'success',
      failure_reason: null,
      tokens_used: 11,
      duration_ms: 110,
      user_feedback: 'accepted',
      recorded_at: '2026-03-14T23:30:00Z',
    },
  ]);
  const offsetReport = health.collectSkillHealth({
    repoRoot,
    homeDir,
    runsFilePath: offsetRunsFile,
    now,
    warnThreshold: 0.1,
  });
  const gammaId = path.basename(gammaDir);
  const gammaHealth = offsetReport.skills.find(entry => entry.skill_id === gammaId);
  assert.ok(gammaHealth);
  assert.strictEqual(gammaHealth.current_version, 'v1');
  assert.strictEqual(gammaHealth.last_run, '2026-03-14T23:30:00Z');
})) passed++; else failed++;
if (test('CLI emits JSON health output for standalone integration', () => {
  // Drive the CLI with every root/override flag so no defaults leak into the run.
  const cliArgs = [
    '--json',
    '--skills-root', skillsRoot,
    '--learned-root', learnedRoot,
    '--imported-root', importedRoot,
    '--home', homeDir,
    '--runs-file', runsFile,
    '--now', now,
    '--warn-threshold', '0.1',
  ];
  const cliResult = runCli(cliArgs);
  assert.strictEqual(cliResult.status, 0, cliResult.stderr);
  // stdout must be a single parseable JSON document with a skills array.
  const parsedPayload = JSON.parse(cliResult.stdout.trim());
  assert.ok(Array.isArray(parsedPayload.skills));
  const [firstSkill] = parsedPayload.skills;
  assert.strictEqual(firstSkill.skill_id, 'alpha');
  assert.strictEqual(firstSkill.declining, true);
})) passed++; else failed++;
if (test('CLI shows help and rejects missing option values', () => {
  // --help exits zero and documents both optional root-override flags.
  const helpRun = runCli(['--help']);
  assert.strictEqual(helpRun.status, 0);
  assert.match(helpRun.stdout, /--learned-root <path>/);
  assert.match(helpRun.stdout, /--imported-root <path>/);
  // An option flag given without a value must fail with a targeted error message.
  const missingValueRun = runCli(['--skills-root']);
  assert.strictEqual(missingValueRun.status, 1);
  assert.match(missingValueRun.stderr, /Missing value for --skills-root/);
})) passed++; else failed++;
console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0);
} finally {
cleanupTempDir(repoRoot);
cleanupTempDir(homeDir);
}
}
runTests();

View File

@@ -10,25 +10,40 @@ const path = require('path');
const fs = require('fs');
const testsDir = __dirname;
const repoRoot = path.resolve(testsDir, '..');
const TEST_GLOB = 'tests/**/*.test.js';
/**
* Discover all *.test.js files under testsDir (relative paths for stable output order).
*/
function discoverTestFiles(dir, baseDir = dir, acc = []) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const e of entries) {
const full = path.join(dir, e.name);
const rel = path.relative(baseDir, full);
if (e.isDirectory()) {
discoverTestFiles(full, baseDir, acc);
} else if (e.isFile() && e.name.endsWith('.test.js')) {
acc.push(rel);
}
function matchesTestGlob(relativePath) {
const normalized = relativePath.split(path.sep).join('/');
if (typeof path.matchesGlob === 'function') {
return path.matchesGlob(normalized, TEST_GLOB);
}
return acc.sort();
return /^tests\/(?:.+\/)?[^/]+\.test\.js$/.test(normalized);
}
const testFiles = discoverTestFiles(testsDir);
function walkFiles(dir, acc = []) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
walkFiles(fullPath, acc);
} else if (entry.isFile()) {
acc.push(fullPath);
}
}
return acc;
}
function discoverTestFiles() {
return walkFiles(testsDir)
.map(fullPath => path.relative(repoRoot, fullPath))
.filter(matchesTestGlob)
.map(repoRelativePath => path.relative(testsDir, path.join(repoRoot, repoRelativePath)))
.sort();
}
const testFiles = discoverTestFiles();
const BOX_W = 58; // inner width between ║ delimiters
const boxLine = s => `${s.padEnd(BOX_W)}`;
@@ -38,6 +53,11 @@ console.log(boxLine(' Everything Claude Code - Test Suite'));
console.log('╚' + '═'.repeat(BOX_W) + '╝');
console.log();
if (testFiles.length === 0) {
console.log(`✗ No test files matched ${TEST_GLOB}`);
process.exit(1);
}
let totalPassed = 0;
let totalFailed = 0;
let totalTests = 0;

View File

@@ -60,16 +60,18 @@ function main() {
assert.strictEqual(result.status, 0, result.stderr);
const payload = parseJson(result.stdout);
assert.strictEqual(payload.dryRun, true);
assert.strictEqual(payload.plan.mode, 'legacy');
assert.deepStrictEqual(payload.plan.languages, ['typescript']);
assert.strictEqual(payload.plan.mode, 'legacy-compat');
assert.deepStrictEqual(payload.plan.legacyLanguages, ['typescript']);
assert.ok(payload.plan.selectedModuleIds.includes('framework-language'));
}],
['routes implicit top-level args to install', () => {
const result = runCli(['--dry-run', '--json', 'typescript']);
assert.strictEqual(result.status, 0, result.stderr);
const payload = parseJson(result.stdout);
assert.strictEqual(payload.dryRun, true);
assert.strictEqual(payload.plan.mode, 'legacy');
assert.deepStrictEqual(payload.plan.languages, ['typescript']);
assert.strictEqual(payload.plan.mode, 'legacy-compat');
assert.deepStrictEqual(payload.plan.legacyLanguages, ['typescript']);
assert.ok(payload.plan.selectedModuleIds.includes('framework-language'));
}],
['delegates plan command', () => {
const result = runCli(['plan', '--list-profiles', '--json']);

View File

@@ -89,18 +89,26 @@ function runTests() {
const result = run(['typescript'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
const rulesDir = path.join(homeDir, '.claude', 'rules');
assert.ok(fs.existsSync(path.join(rulesDir, 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(rulesDir, 'typescript', 'testing.md')));
const claudeRoot = path.join(homeDir, '.claude');
assert.ok(fs.existsSync(path.join(claudeRoot, 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'rules', 'typescript', 'testing.md')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'scripts', 'hooks', 'session-end.js')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'skills', 'tdd-workflow', 'SKILL.md')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'skills', 'coding-standards', 'SKILL.md')));
assert.ok(fs.existsSync(path.join(claudeRoot, 'plugin.json')));
const statePath = path.join(homeDir, '.claude', 'ecc', 'install-state.json');
const state = readJson(statePath);
assert.strictEqual(state.target.id, 'claude-home');
assert.deepStrictEqual(state.request.legacyLanguages, ['typescript']);
assert.strictEqual(state.request.legacyMode, true);
assert.deepStrictEqual(state.request.modules, []);
assert.ok(state.resolution.selectedModules.includes('rules-core'));
assert.ok(state.resolution.selectedModules.includes('framework-language'));
assert.ok(
state.operations.some(operation => (
operation.destinationPath === path.join(rulesDir, 'common', 'coding-style.md')
operation.destinationPath === path.join(claudeRoot, 'rules', 'common', 'coding-style.md')
)),
'Should record common rule file operation'
);
@@ -118,22 +126,28 @@ function runTests() {
const result = run(['--target', 'cursor', 'typescript'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'common-coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'typescript-testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'typescript', 'testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'agents', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'hooks.json')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'hooks', 'session-start.js')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'skills', 'article-writing', 'SKILL.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'skills', 'tdd-workflow', 'SKILL.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'skills', 'coding-standards', 'SKILL.md')));
const statePath = path.join(projectDir, '.cursor', 'ecc-install-state.json');
const state = readJson(statePath);
const normalizedProjectDir = fs.realpathSync(projectDir);
assert.strictEqual(state.target.id, 'cursor-project');
assert.strictEqual(state.target.root, path.join(normalizedProjectDir, '.cursor'));
assert.deepStrictEqual(state.request.legacyLanguages, ['typescript']);
assert.strictEqual(state.request.legacyMode, true);
assert.ok(state.resolution.selectedModules.includes('framework-language'));
assert.ok(
state.operations.some(operation => (
operation.destinationPath === path.join(normalizedProjectDir, '.cursor', 'hooks', 'session-start.js')
operation.destinationPath === path.join(normalizedProjectDir, '.cursor', 'commands', 'plan.md')
)),
'Should record hook file copy operation'
'Should record manifest command file copy operation'
);
} finally {
cleanup(homeDir);
@@ -149,20 +163,22 @@ function runTests() {
const result = run(['--target', 'antigravity', 'typescript'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common-coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'typescript-testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'workflows', 'code-review.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'skills', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'skills', 'article-writing', 'SKILL.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'typescript', 'testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'agents', 'architect.md')));
const statePath = path.join(projectDir, '.agent', 'ecc-install-state.json');
const state = readJson(statePath);
assert.strictEqual(state.target.id, 'antigravity-project');
assert.deepStrictEqual(state.request.legacyLanguages, ['typescript']);
assert.strictEqual(state.request.legacyMode, true);
assert.deepStrictEqual(state.resolution.selectedModules, ['rules-core', 'agents-core', 'commands-core']);
assert.ok(
state.operations.some(operation => (
operation.destinationPath.endsWith(path.join('.agent', 'workflows', 'code-review.md'))
operation.destinationPath.endsWith(path.join('.agent', 'commands', 'plan.md'))
)),
'Should record workflow file copy operation'
'Should record manifest command file copy operation'
);
} finally {
cleanup(homeDir);
@@ -181,6 +197,8 @@ function runTests() {
});
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(result.stdout.includes('Dry-run install plan'));
assert.ok(result.stdout.includes('Mode: legacy-compat'));
assert.ok(result.stdout.includes('Legacy languages: typescript'));
assert.ok(!fs.existsSync(path.join(projectDir, '.cursor', 'hooks.json')));
assert.ok(!fs.existsSync(path.join(projectDir, '.cursor', 'ecc-install-state.json')));
} finally {
@@ -240,6 +258,31 @@ function runTests() {
}
})) passed++; else failed++;
if (test('installs antigravity manifest profiles while skipping incompatible modules', () => {
const homeDir = createTempDir('install-apply-home-');
const projectDir = createTempDir('install-apply-project-');
try {
const result = run(['--target', 'antigravity', '--profile', 'core'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'agents', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'commands', 'plan.md')));
assert.ok(!fs.existsSync(path.join(projectDir, '.agent', 'skills', 'tdd-workflow', 'SKILL.md')));
const state = readJson(path.join(projectDir, '.agent', 'ecc-install-state.json'));
assert.strictEqual(state.request.profile, 'core');
assert.strictEqual(state.request.legacyMode, false);
assert.deepStrictEqual(state.resolution.selectedModules, ['rules-core', 'agents-core', 'commands-core']);
assert.ok(state.resolution.skippedModules.includes('workflow-quality'));
assert.ok(state.resolution.skippedModules.includes('platform-configs'));
} finally {
cleanup(homeDir);
cleanup(projectDir);
}
})) passed++; else failed++;
if (test('installs explicit modules for cursor using manifest operations', () => {
const homeDir = createTempDir('install-apply-home-');
const projectDir = createTempDir('install-apply-project-');
@@ -270,6 +313,12 @@ function runTests() {
}
})) passed++; else failed++;
if (test('rejects unknown explicit manifest modules before resolution', () => {
const result = run(['--modules', 'ghost-module']);
assert.strictEqual(result.code, 1);
assert.ok(result.stderr.includes('Unknown install module: ghost-module'));
})) passed++; else failed++;
if (test('installs from ecc-install.json and persists component selections', () => {
const homeDir = createTempDir('install-apply-home-');
const projectDir = createTempDir('install-apply-project-');

View File

@@ -12,6 +12,16 @@ const INSTALL_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'install-appl
const DOCTOR_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'doctor.js');
const REPAIR_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'repair.js');
const REPO_ROOT = path.join(__dirname, '..', '..');
const CURRENT_PACKAGE_VERSION = JSON.parse(
fs.readFileSync(path.join(REPO_ROOT, 'package.json'), 'utf8')
).version;
const CURRENT_MANIFEST_VERSION = JSON.parse(
fs.readFileSync(path.join(REPO_ROOT, 'manifests', 'install-modules.json'), 'utf8')
).version;
const {
createInstallState,
writeInstallState,
} = require('../../scripts/lib/install-state');
function createTempDir(prefix) {
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
@@ -21,6 +31,12 @@ function cleanup(dirPath) {
fs.rmSync(dirPath, { recursive: true, force: true });
}
function writeState(filePath, options) {
const state = createInstallState(options);
writeInstallState(filePath, state);
return state;
}
function runNode(scriptPath, args = [], options = {}) {
const env = {
...process.env,
@@ -64,26 +80,25 @@ function runTests() {
let passed = 0;
let failed = 0;
if (test('repairs drifted managed files and refreshes install-state', () => {
if (test('repairs drifted files from a real install-apply state', () => {
const homeDir = createTempDir('repair-home-');
const projectRoot = createTempDir('repair-project-');
try {
const installResult = runNode(INSTALL_SCRIPT, ['--target', 'cursor', '--modules', 'platform-configs'], {
const installResult = runNode(INSTALL_SCRIPT, ['--target', 'cursor', 'typescript'], {
cwd: projectRoot,
homeDir,
});
assert.strictEqual(installResult.code, 0, installResult.stderr);
const cursorRoot = path.join(projectRoot, '.cursor');
const managedPath = path.join(cursorRoot, 'hooks.json');
const statePath = path.join(cursorRoot, 'ecc-install-state.json');
const managedRealPath = fs.realpathSync(cursorRoot);
const expectedManagedPath = path.join(managedRealPath, 'hooks.json');
const expectedContent = fs.readFileSync(path.join(REPO_ROOT, '.cursor', 'hooks.json'), 'utf8');
const installedAtBefore = JSON.parse(fs.readFileSync(statePath, 'utf8')).installedAt;
fs.writeFileSync(managedPath, '{"drifted":true}\n');
const normalizedProjectRoot = fs.realpathSync(projectRoot);
const managedPath = path.join(normalizedProjectRoot, '.cursor', 'hooks', 'session-start.js');
const statePath = path.join(normalizedProjectRoot, '.cursor', 'ecc-install-state.json');
const expectedContent = fs.readFileSync(
path.join(REPO_ROOT, '.cursor', 'hooks', 'session-start.js'),
'utf8'
);
fs.writeFileSync(managedPath, '// drifted\n');
const doctorBefore = runNode(DOCTOR_SCRIPT, ['--target', 'cursor', '--json'], {
cwd: projectRoot,
@@ -100,8 +115,118 @@ function runTests() {
const parsed = JSON.parse(repairResult.stdout);
assert.strictEqual(parsed.results[0].status, 'repaired');
assert.ok(parsed.results[0].repairedPaths.includes(expectedManagedPath));
assert.ok(parsed.results[0].repairedPaths.includes(managedPath));
assert.strictEqual(fs.readFileSync(managedPath, 'utf8'), expectedContent);
assert.ok(fs.existsSync(statePath));
} finally {
cleanup(homeDir);
cleanup(projectRoot);
}
})) passed++; else failed++;
if (test('repairs drifted non-copy managed operations and refreshes install-state', () => {
const homeDir = createTempDir('repair-home-');
const projectRoot = createTempDir('repair-project-');
try {
const targetRoot = path.join(projectRoot, '.cursor');
fs.mkdirSync(targetRoot, { recursive: true });
const normalizedTargetRoot = fs.realpathSync(targetRoot);
const statePath = path.join(normalizedTargetRoot, 'ecc-install-state.json');
const jsonPath = path.join(normalizedTargetRoot, 'hooks.json');
const renderedPath = path.join(normalizedTargetRoot, 'generated.md');
const removedPath = path.join(normalizedTargetRoot, 'legacy-note.txt');
fs.writeFileSync(jsonPath, JSON.stringify({ existing: true, managed: false }, null, 2));
fs.writeFileSync(renderedPath, '# drifted\n');
fs.writeFileSync(removedPath, 'stale\n');
writeState(statePath, {
adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
targetRoot: normalizedTargetRoot,
installStatePath: statePath,
request: {
profile: null,
modules: ['platform-configs'],
includeComponents: [],
excludeComponents: [],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: ['platform-configs'],
skippedModules: [],
},
operations: [
{
kind: 'merge-json',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/hooks.json',
destinationPath: jsonPath,
strategy: 'merge-json',
ownership: 'managed',
scaffoldOnly: false,
mergePayload: {
managed: true,
nested: {
enabled: true,
},
},
},
{
kind: 'render-template',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/generated.md.template',
destinationPath: renderedPath,
strategy: 'render-template',
ownership: 'managed',
scaffoldOnly: false,
renderedContent: '# generated\n',
},
{
kind: 'remove',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/legacy-note.txt',
destinationPath: removedPath,
strategy: 'remove',
ownership: 'managed',
scaffoldOnly: false,
},
],
source: {
repoVersion: CURRENT_PACKAGE_VERSION,
repoCommit: 'abc123',
manifestVersion: CURRENT_MANIFEST_VERSION,
},
});
const doctorBefore = runNode(DOCTOR_SCRIPT, ['--target', 'cursor', '--json'], {
cwd: projectRoot,
homeDir,
});
assert.strictEqual(doctorBefore.code, 1);
assert.ok(JSON.parse(doctorBefore.stdout).results[0].issues.some(issue => issue.code === 'drifted-managed-files'));
const installedAtBefore = JSON.parse(fs.readFileSync(statePath, 'utf8')).installedAt;
const repairResult = runNode(REPAIR_SCRIPT, ['--target', 'cursor', '--json'], {
cwd: projectRoot,
homeDir,
});
assert.strictEqual(repairResult.code, 0, repairResult.stderr);
const parsed = JSON.parse(repairResult.stdout);
assert.strictEqual(parsed.results[0].status, 'repaired');
assert.ok(parsed.results[0].repairedPaths.includes(jsonPath));
assert.ok(parsed.results[0].repairedPaths.includes(renderedPath));
assert.ok(parsed.results[0].repairedPaths.includes(removedPath));
assert.deepStrictEqual(JSON.parse(fs.readFileSync(jsonPath, 'utf8')), {
existing: true,
managed: true,
nested: {
enabled: true,
},
});
assert.strictEqual(fs.readFileSync(renderedPath, 'utf8'), '# generated\n');
assert.ok(!fs.existsSync(removedPath));
const repairedState = JSON.parse(fs.readFileSync(statePath, 'utf8'));
assert.strictEqual(repairedState.installedAt, installedAtBefore);
@@ -119,23 +244,52 @@ function runTests() {
}
})) passed++; else failed++;
if (test('supports dry-run without mutating drifted files', () => {
if (test('supports dry-run without mutating drifted non-copy operations', () => {
const homeDir = createTempDir('repair-home-');
const projectRoot = createTempDir('repair-project-');
try {
const installResult = runNode(INSTALL_SCRIPT, ['--target', 'cursor', '--modules', 'platform-configs'], {
cwd: projectRoot,
homeDir,
});
assert.strictEqual(installResult.code, 0, installResult.stderr);
const targetRoot = path.join(projectRoot, '.cursor');
fs.mkdirSync(targetRoot, { recursive: true });
const normalizedTargetRoot = fs.realpathSync(targetRoot);
const statePath = path.join(normalizedTargetRoot, 'ecc-install-state.json');
const renderedPath = path.join(normalizedTargetRoot, 'generated.md');
fs.writeFileSync(renderedPath, '# drifted\n');
const cursorRoot = path.join(projectRoot, '.cursor');
const managedPath = path.join(cursorRoot, 'hooks.json');
const managedRealPath = fs.realpathSync(cursorRoot);
const expectedManagedPath = path.join(managedRealPath, 'hooks.json');
const driftedContent = '{"drifted":true}\n';
fs.writeFileSync(managedPath, driftedContent);
writeState(statePath, {
adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
targetRoot: normalizedTargetRoot,
installStatePath: statePath,
request: {
profile: null,
modules: ['platform-configs'],
includeComponents: [],
excludeComponents: [],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: ['platform-configs'],
skippedModules: [],
},
operations: [
{
kind: 'render-template',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/generated.md.template',
destinationPath: renderedPath,
strategy: 'render-template',
ownership: 'managed',
scaffoldOnly: false,
renderedContent: '# generated\n',
},
],
source: {
repoVersion: CURRENT_PACKAGE_VERSION,
repoCommit: 'abc123',
manifestVersion: CURRENT_MANIFEST_VERSION,
},
});
const repairResult = runNode(REPAIR_SCRIPT, ['--target', 'cursor', '--dry-run', '--json'], {
cwd: projectRoot,
@@ -144,8 +298,8 @@ function runTests() {
assert.strictEqual(repairResult.code, 0, repairResult.stderr);
const parsed = JSON.parse(repairResult.stdout);
assert.strictEqual(parsed.dryRun, true);
assert.ok(parsed.results[0].plannedRepairs.includes(expectedManagedPath));
assert.strictEqual(fs.readFileSync(managedPath, 'utf8'), driftedContent);
assert.ok(parsed.results[0].plannedRepairs.includes(renderedPath));
assert.strictEqual(fs.readFileSync(renderedPath, 'utf8'), '# drifted\n');
} finally {
cleanup(homeDir);
cleanup(projectRoot);

View File

@@ -9,7 +9,18 @@ const path = require('path');
const { execFileSync } = require('child_process');
const INSTALL_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'install-apply.js');
const UNINSTALL_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'uninstall.js');
const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'uninstall.js');
const REPO_ROOT = path.join(__dirname, '..', '..');
const CURRENT_PACKAGE_VERSION = JSON.parse(
fs.readFileSync(path.join(REPO_ROOT, 'package.json'), 'utf8')
).version;
const CURRENT_MANIFEST_VERSION = JSON.parse(
fs.readFileSync(path.join(REPO_ROOT, 'manifests', 'install-modules.json'), 'utf8')
).version;
const {
createInstallState,
writeInstallState,
} = require('../../scripts/lib/install-state');
function createTempDir(prefix) {
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
@@ -19,14 +30,20 @@ function cleanup(dirPath) {
fs.rmSync(dirPath, { recursive: true, force: true });
}
function runNode(scriptPath, args = [], options = {}) {
function writeState(filePath, options) {
const state = createInstallState(options);
writeInstallState(filePath, state);
return state;
}
function run(args = [], options = {}) {
const env = {
...process.env,
HOME: options.homeDir || process.env.HOME,
};
try {
const stdout = execFileSync('node', [scriptPath, ...args], {
const stdout = execFileSync('node', [SCRIPT, ...args], {
cwd: options.cwd,
env,
encoding: 'utf8',
@@ -62,24 +79,30 @@ function runTests() {
let passed = 0;
let failed = 0;
if (test('removes managed files and keeps unrelated files', () => {
if (test('uninstalls files from a real install-apply state and preserves unrelated files', () => {
const homeDir = createTempDir('uninstall-home-');
const projectRoot = createTempDir('uninstall-project-');
try {
const installResult = runNode(INSTALL_SCRIPT, ['--target', 'cursor', '--modules', 'platform-configs'], {
const installStdout = execFileSync('node', [INSTALL_SCRIPT, '--target', 'cursor', 'typescript'], {
cwd: projectRoot,
homeDir,
env: {
...process.env,
HOME: homeDir,
},
encoding: 'utf8',
stdio: ['pipe', 'pipe', 'pipe'],
timeout: 10000,
});
assert.strictEqual(installResult.code, 0, installResult.stderr);
assert.ok(installStdout.includes('Done. Install-state written'));
const cursorRoot = path.join(projectRoot, '.cursor');
const managedPath = path.join(cursorRoot, 'hooks.json');
const statePath = path.join(cursorRoot, 'ecc-install-state.json');
const unrelatedPath = path.join(cursorRoot, 'custom-user-note.txt');
const normalizedProjectRoot = fs.realpathSync(projectRoot);
const managedPath = path.join(normalizedProjectRoot, '.cursor', 'hooks.json');
const statePath = path.join(normalizedProjectRoot, '.cursor', 'ecc-install-state.json');
const unrelatedPath = path.join(normalizedProjectRoot, '.cursor', 'custom-user-note.txt');
fs.writeFileSync(unrelatedPath, 'leave me alone');
const uninstallResult = runNode(UNINSTALL_SCRIPT, ['--target', 'cursor'], {
const uninstallResult = run(['--target', 'cursor'], {
cwd: projectRoot,
homeDir,
});
@@ -94,22 +117,152 @@ function runTests() {
}
})) passed++; else failed++;
if (test('supports dry-run without removing files', () => {
if (test('reverses non-copy operations and keeps unrelated files', () => {
const homeDir = createTempDir('uninstall-home-');
const projectRoot = createTempDir('uninstall-project-');
try {
const installResult = runNode(INSTALL_SCRIPT, ['--target', 'cursor', '--modules', 'platform-configs'], {
const targetRoot = path.join(projectRoot, '.cursor');
fs.mkdirSync(targetRoot, { recursive: true });
const normalizedTargetRoot = fs.realpathSync(targetRoot);
const statePath = path.join(normalizedTargetRoot, 'ecc-install-state.json');
const copiedPath = path.join(normalizedTargetRoot, 'managed-rule.md');
const mergedPath = path.join(normalizedTargetRoot, 'hooks.json');
const removedPath = path.join(normalizedTargetRoot, 'legacy-note.txt');
const unrelatedPath = path.join(normalizedTargetRoot, 'custom-user-note.txt');
fs.writeFileSync(copiedPath, 'managed\n');
fs.writeFileSync(mergedPath, JSON.stringify({
existing: true,
managed: true,
}, null, 2));
fs.writeFileSync(unrelatedPath, 'leave me alone');
writeState(statePath, {
adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
targetRoot: normalizedTargetRoot,
installStatePath: statePath,
request: {
profile: null,
modules: ['platform-configs'],
includeComponents: [],
excludeComponents: [],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: ['platform-configs'],
skippedModules: [],
},
operations: [
{
kind: 'copy-file',
moduleId: 'platform-configs',
sourceRelativePath: 'rules/common/coding-style.md',
destinationPath: copiedPath,
strategy: 'preserve-relative-path',
ownership: 'managed',
scaffoldOnly: false,
},
{
kind: 'merge-json',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/hooks.json',
destinationPath: mergedPath,
strategy: 'merge-json',
ownership: 'managed',
scaffoldOnly: false,
mergePayload: {
managed: true,
},
previousContent: JSON.stringify({
existing: true,
}, null, 2),
},
{
kind: 'remove',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/legacy-note.txt',
destinationPath: removedPath,
strategy: 'remove',
ownership: 'managed',
scaffoldOnly: false,
previousContent: 'restore me\n',
},
],
source: {
repoVersion: CURRENT_PACKAGE_VERSION,
repoCommit: 'abc123',
manifestVersion: CURRENT_MANIFEST_VERSION,
},
});
const uninstallResult = run(['--target', 'cursor'], {
cwd: projectRoot,
homeDir,
});
assert.strictEqual(installResult.code, 0, installResult.stderr);
assert.strictEqual(uninstallResult.code, 0, uninstallResult.stderr);
assert.ok(uninstallResult.stdout.includes('Uninstall summary'));
assert.ok(!fs.existsSync(copiedPath));
assert.deepStrictEqual(JSON.parse(fs.readFileSync(mergedPath, 'utf8')), {
existing: true,
});
assert.strictEqual(fs.readFileSync(removedPath, 'utf8'), 'restore me\n');
assert.ok(!fs.existsSync(statePath));
assert.ok(fs.existsSync(unrelatedPath));
} finally {
cleanup(homeDir);
cleanup(projectRoot);
}
})) passed++; else failed++;
const cursorRoot = path.join(projectRoot, '.cursor');
const managedPath = path.join(cursorRoot, 'hooks.json');
const statePath = path.join(cursorRoot, 'ecc-install-state.json');
if (test('supports dry-run without mutating managed files', () => {
const homeDir = createTempDir('uninstall-home-');
const projectRoot = createTempDir('uninstall-project-');
const uninstallResult = runNode(UNINSTALL_SCRIPT, ['--target', 'cursor', '--dry-run', '--json'], {
try {
const targetRoot = path.join(projectRoot, '.cursor');
fs.mkdirSync(targetRoot, { recursive: true });
const normalizedTargetRoot = fs.realpathSync(targetRoot);
const statePath = path.join(normalizedTargetRoot, 'ecc-install-state.json');
const renderedPath = path.join(normalizedTargetRoot, 'generated.md');
fs.writeFileSync(renderedPath, '# generated\n');
writeState(statePath, {
adapter: { id: 'cursor-project', target: 'cursor', kind: 'project' },
targetRoot: normalizedTargetRoot,
installStatePath: statePath,
request: {
profile: null,
modules: ['platform-configs'],
includeComponents: [],
excludeComponents: [],
legacyLanguages: [],
legacyMode: false,
},
resolution: {
selectedModules: ['platform-configs'],
skippedModules: [],
},
operations: [
{
kind: 'render-template',
moduleId: 'platform-configs',
sourceRelativePath: '.cursor/generated.md.template',
destinationPath: renderedPath,
strategy: 'render-template',
ownership: 'managed',
scaffoldOnly: false,
renderedContent: '# generated\n',
},
],
source: {
repoVersion: CURRENT_PACKAGE_VERSION,
repoCommit: 'abc123',
manifestVersion: CURRENT_MANIFEST_VERSION,
},
});
const uninstallResult = run(['--target', 'cursor', '--dry-run', '--json'], {
cwd: projectRoot,
homeDir,
});
@@ -117,8 +270,8 @@ function runTests() {
const parsed = JSON.parse(uninstallResult.stdout);
assert.strictEqual(parsed.dryRun, true);
assert.ok(parsed.results[0].plannedRemovals.length > 0);
assert.ok(fs.existsSync(managedPath));
assert.ok(parsed.results[0].plannedRemovals.includes(renderedPath));
assert.ok(fs.existsSync(renderedPath));
assert.ok(fs.existsSync(statePath));
} finally {
cleanup(homeDir);