3 Commits

Author SHA1 Message Date
Affaan Mustafa
b48930974b fix: resolve all CI test failures (19 fixes across 6 files) (#519)
- canonical-session: fall back to JSON file recording when the loaded
  state-store module has no writer methods (factory vs instance)
- install-executor: skip node_modules and .git dirs in listFilesRecursive
  to prevent ETIMEDOUT copying thousands of .opencode dependency files
- ecc.js: increase maxBuffer to 10MB for spawned subcommands to prevent
  ENOBUFS on large install plan JSON output
- install-apply.test: update Cursor and Antigravity path assertions to
  match flattened rule layout and remapped dirs (workflows, skills)
- ecc.test: increase maxBuffer in test runner to handle large output
- orchestrate-codex-worker.sh: guard against unreadable task file before
  cat, write failure status and handoff artifacts on early exit
2026-03-16 01:59:53 -07:00
Affaan Mustafa
426fc54456 feat: record canonical session snapshots via adapters (#511) 2026-03-16 01:35:45 -07:00
Affaan Mustafa
bae1129209 feat: add SQLite state store and query CLI (#510)
* feat: add SQLite state store and ECC status CLI

* fix: replace better-sqlite3 with sql.js to eliminate native module CI failures

better-sqlite3 requires native C++ compilation (node-gyp, prebuild-install)
which fails in CI across npm/pnpm on all platforms:
- npm ci: lock file out of sync with native transitive deps
- pnpm: native bindings not found at runtime
- Windows: native compilation fails entirely

sql.js is a pure JavaScript/WASM SQLite implementation with zero native
dependencies. The adapter in index.js wraps the sql.js API to match the
better-sqlite3 interface used by migrations.js and queries.js.

Key implementation detail: sql.js db.export() implicitly ends active
transactions, so the adapter defers disk writes (saveToDisk) until
after transaction commit via an inTransaction guard flag.

createStateStore is now async (sql.js requires async WASM init).
Updated status.js, sessions-cli.js, and tests accordingly.
2026-03-16 01:32:21 -07:00
22 changed files with 3406 additions and 92 deletions

View File

@@ -0,0 +1,285 @@
# Session Adapter Contract
This document defines the canonical ECC session snapshot contract for
`ecc.session.v1`.
The contract is implemented in
`scripts/lib/session-adapters/canonical-session.js`. This document is the
normative specification for adapters and consumers.
## Purpose
ECC has multiple session sources:
- tmux-orchestrated worktree sessions
- Claude local session history
- future harnesses and control-plane backends
Adapters normalize those sources into one control-plane-safe snapshot shape so
inspection, persistence, and future UI layers do not depend on harness-specific
files or runtime details.
## Canonical Snapshot
Every adapter MUST return a JSON-serializable object with this top-level shape:
```json
{
"schemaVersion": "ecc.session.v1",
"adapterId": "dmux-tmux",
"session": {
"id": "workflow-visual-proof",
"kind": "orchestrated",
"state": "active",
"repoRoot": "/tmp/repo",
"sourceTarget": {
"type": "session",
"value": "workflow-visual-proof"
}
},
"workers": [
{
"id": "seed-check",
"label": "seed-check",
"state": "running",
"branch": "feature/seed-check",
"worktree": "/tmp/worktree",
"runtime": {
"kind": "tmux-pane",
"command": "codex",
"pid": 1234,
"active": false,
"dead": false
},
"intent": {
"objective": "Inspect seeded files.",
"seedPaths": ["scripts/orchestrate-worktrees.js"]
},
"outputs": {
"summary": [],
"validation": [],
"remainingRisks": []
},
"artifacts": {
"statusFile": "/tmp/status.md",
"taskFile": "/tmp/task.md",
"handoffFile": "/tmp/handoff.md"
}
}
],
"aggregates": {
"workerCount": 1,
"states": {
"running": 1
}
}
}
```
## Required Fields
### Top level
| Field | Type | Notes |
| --- | --- | --- |
| `schemaVersion` | string | MUST be exactly `ecc.session.v1` for this contract |
| `adapterId` | string | Stable adapter identifier such as `dmux-tmux` or `claude-history` |
| `session` | object | Canonical session metadata |
| `workers` | array | Canonical worker records; may be empty |
| `aggregates` | object | Derived worker counts |
### `session`
| Field | Type | Notes |
| --- | --- | --- |
| `id` | string | Stable identifier within the adapter domain |
| `kind` | string | High-level session family such as `orchestrated` or `history` |
| `state` | string | Canonical session state |
| `sourceTarget` | object | Provenance for the target that opened the session |
### `session.sourceTarget`
| Field | Type | Notes |
| --- | --- | --- |
| `type` | string | Lookup class such as `plan`, `session`, `claude-history`, `claude-alias`, or `session-file` |
| `value` | string | Raw target value or resolved path |
### `workers[]`
| Field | Type | Notes |
| --- | --- | --- |
| `id` | string | Stable worker identifier in adapter scope |
| `label` | string | Operator-facing label |
| `state` | string | Canonical worker state |
| `runtime` | object | Execution/runtime metadata |
| `intent` | object | Why this worker/session exists |
| `outputs` | object | Structured outcomes and checks |
| `artifacts` | object | Adapter-owned file/path references |
### `workers[].runtime`
| Field | Type | Notes |
| --- | --- | --- |
| `kind` | string | Runtime family such as `tmux-pane` or `claude-session` |
| `active` | boolean | Whether the runtime is active now |
| `dead` | boolean | Whether the runtime is known dead/finished |
### `workers[].intent`
| Field | Type | Notes |
| --- | --- | --- |
| `objective` | string | Primary objective or title |
| `seedPaths` | string[] | Seed or context paths associated with the worker/session |
### `workers[].outputs`
| Field | Type | Notes |
| --- | --- | --- |
| `summary` | string[] | Completed outputs or summary items |
| `validation` | string[] | Validation evidence or checks |
| `remainingRisks` | string[] | Open risks, follow-ups, or notes |
### `aggregates`
| Field | Type | Notes |
| --- | --- | --- |
| `workerCount` | integer | MUST equal `workers.length` |
| `states` | object | Count map derived from `workers[].state` |
## Optional Fields
Optional fields MAY be omitted, but if emitted they MUST preserve the documented
type:
| Field | Type | Notes |
| --- | --- | --- |
| `session.repoRoot` | `string \| null` | Repo/worktree root when known |
| `workers[].branch` | `string \| null` | Branch name when known |
| `workers[].worktree` | `string \| null` | Worktree path when known |
| `workers[].runtime.command` | `string \| null` | Active command when known |
| `workers[].runtime.pid` | `number \| null` | Process id when known |
| `workers[].artifacts.*` | adapter-defined | File paths or structured references owned by the adapter |
Adapter-specific optional fields belong inside `runtime`, `artifacts`, or other
documented nested objects. Adapters MUST NOT invent new top-level fields without
updating this contract.
## State Semantics
The contract intentionally keeps `session.state` and `workers[].state` flexible
enough for multiple harnesses, but current adapters use these values:
- `dmux-tmux`
- session states: `active`, `completed`, `failed`, `idle`, `missing`
- worker states: derived from worker status files, for example `running` or
`completed`
- `claude-history`
- session state: `recorded`
- worker state: `recorded`
Consumers MUST treat unknown state strings as valid adapter-specific values and
degrade gracefully.
## Versioning Strategy
`schemaVersion` is the only compatibility gate. Consumers MUST branch on it.
### Allowed in `ecc.session.v1`
- adding new optional nested fields
- adding new adapter ids
- adding new state string values
- adding new artifact keys inside `workers[].artifacts`
### Requires a new schema version
- removing a required field
- renaming a field
- changing a field type
- changing the meaning of an existing field in a non-compatible way
- moving data from one field to another while keeping the same version string
If any of those happen, the producer MUST emit a new version string such as
`ecc.session.v2`.
## Adapter Compliance Requirements
Every ECC session adapter MUST:
1. Emit `schemaVersion: "ecc.session.v1"` exactly.
2. Return a snapshot that satisfies all required fields and types.
3. Use `null` for unknown optional scalar values and empty arrays for unknown
list values.
4. Keep adapter-specific details nested under `runtime`, `artifacts`, or other
documented nested objects.
5. Ensure `aggregates.workerCount === workers.length`.
6. Ensure `aggregates.states` matches the emitted worker states.
7. Produce plain JSON-serializable values only.
8. Validate the canonical shape before persistence or downstream use.
9. Persist the normalized canonical snapshot through the session recording shim.
   In this repo, that shim first attempts `scripts/lib/state-store` and falls
   back to a JSON recording file when the state store module is not available
   or exposes no snapshot writer methods.
## Consumer Expectations
Consumers SHOULD:
- rely only on documented fields for `ecc.session.v1`
- ignore unknown optional fields
- treat `adapterId`, `session.kind`, and `runtime.kind` as routing hints rather
than exhaustive enums
- expect adapter-specific artifact keys inside `workers[].artifacts`
Consumers MUST NOT:
- infer harness-specific behavior from undocumented fields
- assume all adapters have tmux panes, git worktrees, or markdown coordination
files
- reject snapshots only because a state string is unfamiliar
## Current Adapter Mappings
### `dmux-tmux`
- Source: `scripts/lib/orchestration-session.js`
- Session id: orchestration session name
- Session kind: `orchestrated`
- Session source target: plan path or session name
- Worker runtime kind: `tmux-pane`
- Artifacts: `statusFile`, `taskFile`, `handoffFile`
### `claude-history`
- Source: `scripts/lib/session-manager.js`
- Session id: Claude short id when present, otherwise session filename-derived id
- Session kind: `history`
- Session source target: explicit history target, alias, or `.tmp` session file
- Worker runtime kind: `claude-session`
- Intent seed paths: parsed from `### Context to Load`
- Artifacts: `sessionFile`, `context`
## Validation Reference
The repo implementation validates:
- required object structure
- required string fields
- boolean runtime flags
- string-array outputs and seed paths
- aggregate count consistency
Adapters should treat validation failures as contract bugs, not user input
errors.
## Recording Fallback Behavior
The JSON fallback recorder is a compatibility shim used whenever the dedicated
state store is unavailable or exposes no snapshot writer. Its behavior is:
- latest snapshot is always replaced in-place
- history records only distinct snapshot bodies
- unchanged repeated reads do not append duplicate history entries
This keeps `session-inspect` and other polling-style reads from growing
unbounded history for the same unchanged session snapshot.

9
package-lock.json generated
View File

@@ -9,6 +9,9 @@
"version": "1.8.0",
"hasInstallScript": true,
"license": "MIT",
"dependencies": {
"sql.js": "^1.14.1"
},
"bin": {
"ecc": "scripts/ecc.js",
"ecc-install": "install.sh"
@@ -2596,6 +2599,12 @@
"url": "https://github.com/sponsors/cyyynthia"
}
},
"node_modules/sql.js": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/sql.js/-/sql.js-1.14.1.tgz",
"integrity": "sha512-gcj8zBWU5cFsi9WUP+4bFNXAyF1iRpA3LLyS/DP5xlrNzGmPIizUeBggKa8DbDwdqaKwUcTEnChtd2grWo/x/A==",
"license": "MIT"
},
"node_modules/string-width": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",

View File

@@ -70,6 +70,8 @@
"scripts/lib/",
"scripts/claw.js",
"scripts/doctor.js",
"scripts/status.js",
"scripts/sessions-cli.js",
"scripts/install-apply.js",
"scripts/install-plan.js",
"scripts/list-installed.js",
@@ -102,6 +104,9 @@
"test": "node scripts/ci/validate-agents.js && node scripts/ci/validate-commands.js && node scripts/ci/validate-rules.js && node scripts/ci/validate-skills.js && node scripts/ci/validate-hooks.js && node scripts/ci/validate-install-manifests.js && node scripts/ci/validate-no-personal-paths.js && node tests/run-all.js",
"coverage": "c8 --all --include=\"scripts/**/*.js\" --check-coverage --lines 80 --functions 80 --branches 80 --statements 80 --reporter=text --reporter=lcov node tests/run-all.js"
},
"dependencies": {
"sql.js": "^1.14.1"
},
"devDependencies": {
"@eslint/js": "^9.39.2",
"ajv": "^8.18.0",

View File

@@ -0,0 +1,316 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "ecc.state-store.v1",
"title": "ECC State Store Schema",
"type": "object",
"additionalProperties": false,
"properties": {
"sessions": {
"type": "array",
"items": {
"$ref": "#/$defs/session"
}
},
"skillRuns": {
"type": "array",
"items": {
"$ref": "#/$defs/skillRun"
}
},
"skillVersions": {
"type": "array",
"items": {
"$ref": "#/$defs/skillVersion"
}
},
"decisions": {
"type": "array",
"items": {
"$ref": "#/$defs/decision"
}
},
"installState": {
"type": "array",
"items": {
"$ref": "#/$defs/installState"
}
},
"governanceEvents": {
"type": "array",
"items": {
"$ref": "#/$defs/governanceEvent"
}
}
},
"$defs": {
"nonEmptyString": {
"type": "string",
"minLength": 1
},
"nullableString": {
"type": [
"string",
"null"
]
},
"nullableInteger": {
"type": [
"integer",
"null"
],
"minimum": 0
},
"jsonValue": {
"type": [
"object",
"array",
"string",
"number",
"boolean",
"null"
]
},
"jsonArray": {
"type": "array"
},
"session": {
"type": "object",
"additionalProperties": false,
"required": [
"id",
"adapterId",
"harness",
"state",
"repoRoot",
"startedAt",
"endedAt",
"snapshot"
],
"properties": {
"id": {
"$ref": "#/$defs/nonEmptyString"
},
"adapterId": {
"$ref": "#/$defs/nonEmptyString"
},
"harness": {
"$ref": "#/$defs/nonEmptyString"
},
"state": {
"$ref": "#/$defs/nonEmptyString"
},
"repoRoot": {
"$ref": "#/$defs/nullableString"
},
"startedAt": {
"$ref": "#/$defs/nullableString"
},
"endedAt": {
"$ref": "#/$defs/nullableString"
},
"snapshot": {
"type": [
"object",
"array"
]
}
}
},
"skillRun": {
"type": "object",
"additionalProperties": false,
"required": [
"id",
"skillId",
"skillVersion",
"sessionId",
"taskDescription",
"outcome",
"failureReason",
"tokensUsed",
"durationMs",
"userFeedback",
"createdAt"
],
"properties": {
"id": {
"$ref": "#/$defs/nonEmptyString"
},
"skillId": {
"$ref": "#/$defs/nonEmptyString"
},
"skillVersion": {
"$ref": "#/$defs/nonEmptyString"
},
"sessionId": {
"$ref": "#/$defs/nonEmptyString"
},
"taskDescription": {
"$ref": "#/$defs/nonEmptyString"
},
"outcome": {
"$ref": "#/$defs/nonEmptyString"
},
"failureReason": {
"$ref": "#/$defs/nullableString"
},
"tokensUsed": {
"$ref": "#/$defs/nullableInteger"
},
"durationMs": {
"$ref": "#/$defs/nullableInteger"
},
"userFeedback": {
"$ref": "#/$defs/nullableString"
},
"createdAt": {
"$ref": "#/$defs/nonEmptyString"
}
}
},
"skillVersion": {
"type": "object",
"additionalProperties": false,
"required": [
"skillId",
"version",
"contentHash",
"amendmentReason",
"promotedAt",
"rolledBackAt"
],
"properties": {
"skillId": {
"$ref": "#/$defs/nonEmptyString"
},
"version": {
"$ref": "#/$defs/nonEmptyString"
},
"contentHash": {
"$ref": "#/$defs/nonEmptyString"
},
"amendmentReason": {
"$ref": "#/$defs/nullableString"
},
"promotedAt": {
"$ref": "#/$defs/nullableString"
},
"rolledBackAt": {
"$ref": "#/$defs/nullableString"
}
}
},
"decision": {
"type": "object",
"additionalProperties": false,
"required": [
"id",
"sessionId",
"title",
"rationale",
"alternatives",
"supersedes",
"status",
"createdAt"
],
"properties": {
"id": {
"$ref": "#/$defs/nonEmptyString"
},
"sessionId": {
"$ref": "#/$defs/nonEmptyString"
},
"title": {
"$ref": "#/$defs/nonEmptyString"
},
"rationale": {
"$ref": "#/$defs/nonEmptyString"
},
"alternatives": {
"$ref": "#/$defs/jsonArray"
},
"supersedes": {
"$ref": "#/$defs/nullableString"
},
"status": {
"$ref": "#/$defs/nonEmptyString"
},
"createdAt": {
"$ref": "#/$defs/nonEmptyString"
}
}
},
"installState": {
"type": "object",
"additionalProperties": false,
"required": [
"targetId",
"targetRoot",
"profile",
"modules",
"operations",
"installedAt",
"sourceVersion"
],
"properties": {
"targetId": {
"$ref": "#/$defs/nonEmptyString"
},
"targetRoot": {
"$ref": "#/$defs/nonEmptyString"
},
"profile": {
"$ref": "#/$defs/nullableString"
},
"modules": {
"$ref": "#/$defs/jsonArray"
},
"operations": {
"$ref": "#/$defs/jsonArray"
},
"installedAt": {
"$ref": "#/$defs/nonEmptyString"
},
"sourceVersion": {
"$ref": "#/$defs/nullableString"
}
}
},
"governanceEvent": {
"type": "object",
"additionalProperties": false,
"required": [
"id",
"sessionId",
"eventType",
"payload",
"resolvedAt",
"resolution",
"createdAt"
],
"properties": {
"id": {
"$ref": "#/$defs/nonEmptyString"
},
"sessionId": {
"$ref": "#/$defs/nullableString"
},
"eventType": {
"$ref": "#/$defs/nonEmptyString"
},
"payload": {
"$ref": "#/$defs/jsonValue"
},
"resolvedAt": {
"$ref": "#/$defs/nullableString"
},
"resolution": {
"$ref": "#/$defs/nullableString"
},
"createdAt": {
"$ref": "#/$defs/nonEmptyString"
}
}
}
}
}

View File

@@ -29,6 +29,14 @@ const COMMANDS = {
script: 'repair.js',
description: 'Restore drifted or missing ECC-managed files',
},
status: {
script: 'status.js',
description: 'Query the ECC SQLite state store status summary',
},
sessions: {
script: 'sessions-cli.js',
description: 'List or inspect ECC sessions from the SQLite state store',
},
'session-inspect': {
script: 'session-inspect.js',
description: 'Emit canonical ECC session snapshots from dmux or Claude history targets',
@@ -45,6 +53,8 @@ const PRIMARY_COMMANDS = [
'list-installed',
'doctor',
'repair',
'status',
'sessions',
'session-inspect',
'uninstall',
];
@@ -72,6 +82,9 @@ Examples:
ecc list-installed --json
ecc doctor --target cursor
ecc repair --dry-run
ecc status --json
ecc sessions
ecc sessions session-active --json
ecc session-inspect claude:latest
ecc uninstall --target antigravity --dry-run
`);
@@ -137,6 +150,7 @@ function runCommand(commandName, args) {
cwd: process.cwd(),
env: process.env,
encoding: 'utf8',
maxBuffer: 10 * 1024 * 1024,
}
);

View File

@@ -83,6 +83,11 @@ function validateLegacyTarget(target) {
}
}
const IGNORED_DIRECTORY_NAMES = new Set([
'node_modules',
'.git',
]);
function listFilesRecursive(dirPath) {
if (!fs.existsSync(dirPath)) {
return [];
@@ -94,6 +99,9 @@ function listFilesRecursive(dirPath) {
for (const entry of entries) {
const absolutePath = path.join(dirPath, entry.name);
if (entry.isDirectory()) {
if (IGNORED_DIRECTORY_NAMES.has(entry.name)) {
continue;
}
const childFiles = listFilesRecursive(absolutePath);
for (const childFile of childFiles) {
files.push(path.join(entry.name, childFile));

View File

@@ -1,8 +1,64 @@
'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const SESSION_SCHEMA_VERSION = 'ecc.session.v1';
const SESSION_RECORDING_SCHEMA_VERSION = 'ecc.session.recording.v1';
const DEFAULT_RECORDING_DIR = path.join(os.tmpdir(), 'ecc-session-recordings');
// Returns true only for non-null, non-array object values.
function isObject(value) {
  if (value === null || value === undefined) {
    return false;
  }
  return typeof value === 'object' && Array.isArray(value) === false;
}
// Normalize an arbitrary value into a filesystem-safe path segment.
// Disallowed characters collapse to single underscores; leading/trailing
// underscores are stripped; anything empty or falsy becomes 'unknown'.
function sanitizePathSegment(value) {
  const raw = String(value || 'unknown').trim();
  const collapsed = raw.replace(/[^A-Za-z0-9._-]+/g, '_');
  const trimmed = collapsed.replace(/^_+|_+$/g, '');
  return trimmed.length > 0 ? trimmed : 'unknown';
}
// Split a free-form context string into one trimmed, non-empty entry per
// line. Non-string or blank input yields an empty list.
function parseContextSeedPaths(context) {
  if (typeof context !== 'string') {
    return [];
  }
  if (context.trim().length === 0) {
    return [];
  }
  const seedPaths = [];
  for (const rawLine of context.split('\n')) {
    const line = rawLine.trim();
    if (line) {
      seedPaths.push(line);
    }
  }
  return seedPaths;
}
// Assert that a required field is a non-empty string; throws otherwise.
function ensureString(value, fieldPath) {
  const valid = typeof value === 'string' && value.length > 0;
  if (!valid) {
    throw new Error(`Canonical session snapshot requires ${fieldPath} to be a non-empty string`);
  }
}
// Assert that an optional field is a string, null, or undefined.
function ensureOptionalString(value, fieldPath) {
  if (value == null) {
    return;
  }
  if (typeof value !== 'string') {
    throw new Error(`Canonical session snapshot requires ${fieldPath} to be a string or null`);
  }
}
// Assert that a field is a boolean primitive (exactly true or false).
function ensureBoolean(value, fieldPath) {
  if (value !== true && value !== false) {
    throw new Error(`Canonical session snapshot requires ${fieldPath} to be a boolean`);
  }
}
// Assert that a field is an array containing only strings (may be empty).
function ensureArrayOfStrings(value, fieldPath) {
  const valid = Array.isArray(value)
    && value.every(item => typeof item === 'string');
  if (!valid) {
    throw new Error(`Canonical session snapshot requires ${fieldPath} to be an array of strings`);
  }
}
// Assert that a field is a non-negative integer (zero allowed).
function ensureInteger(value, fieldPath) {
  if (Number.isInteger(value) && value >= 0) {
    return;
  }
  throw new Error(`Canonical session snapshot requires ${fieldPath} to be a non-negative integer`);
}
function buildAggregates(workers) {
const states = workers.reduce((accumulator, worker) => {
@@ -17,16 +73,302 @@ function buildAggregates(workers) {
};
}
// Produce a state -> count map for a raw (pre-canonical) dmux snapshot.
// A precomputed snapshot.workerStates object is used as-is when present;
// otherwise counts are tallied from each worker's status.state, with
// 'unknown' for workers missing a status.
function summarizeRawWorkerStates(snapshot) {
  if (isObject(snapshot.workerStates)) {
    return snapshot.workerStates;
  }
  const counts = {};
  for (const worker of snapshot.workers || []) {
    let state = 'unknown';
    if (worker && worker.status && worker.status.state) {
      state = worker.status.state;
    }
    counts[state] = (counts[state] || 0) + 1;
  }
  return counts;
}
/**
 * Derive the canonical session state for a raw dmux/tmux snapshot.
 *
 * Fix: the previous text of this function was a mangled diff merge — stale
 * old-version lines (`if (snapshot.workerCount > 0) { return 'idle';` and a
 * duplicate `return 'missing';`) were interleaved with the new body, leaving
 * unbalanced braces and dead statements. This is the reconstructed
 * post-change version.
 *
 * Precedence: active > missing (no workers) > failed > completed > idle.
 * Returns one of 'active' | 'missing' | 'failed' | 'completed' | 'idle'.
 */
function deriveDmuxSessionState(snapshot) {
  const workerStates = summarizeRawWorkerStates(snapshot);
  // Prefer the snapshot's own worker count; otherwise total the state counts.
  const totalWorkers = Number.isInteger(snapshot.workerCount)
    ? snapshot.workerCount
    : Object.values(workerStates).reduce((sum, count) => sum + count, 0);
  if (snapshot.sessionActive) {
    return 'active';
  }
  if (totalWorkers === 0) {
    return 'missing';
  }
  const failedCount = (workerStates.failed || 0) + (workerStates.error || 0);
  if (failedCount > 0) {
    return 'failed';
  }
  // Several harnesses spell "finished" differently; treat them all as done.
  const completedCount = (workerStates.completed || 0)
    + (workerStates.succeeded || 0)
    + (workerStates.success || 0)
    + (workerStates.done || 0);
  if (completedCount === totalWorkers) {
    return 'completed';
  }
  return 'idle';
}
/**
 * Validate that `snapshot` satisfies the `ecc.session.v1` canonical session
 * contract. Throws an Error describing the first violation encountered;
 * returns the snapshot unchanged on success so callers can chain it.
 *
 * Order of checks (error ordering is part of the observable behavior):
 * top-level shape and schema version, session metadata and provenance, each
 * worker record, then aggregate consistency against the workers array.
 */
function validateCanonicalSnapshot(snapshot) {
  if (!isObject(snapshot)) {
    throw new Error('Canonical session snapshot must be an object');
  }
  // Version gate: only the exact ecc.session.v1 version string is accepted.
  ensureString(snapshot.schemaVersion, 'schemaVersion');
  if (snapshot.schemaVersion !== SESSION_SCHEMA_VERSION) {
    throw new Error(`Unsupported canonical session schema version: ${snapshot.schemaVersion}`);
  }
  ensureString(snapshot.adapterId, 'adapterId');
  // Canonical session metadata: required strings plus the optional repoRoot.
  if (!isObject(snapshot.session)) {
    throw new Error('Canonical session snapshot requires session to be an object');
  }
  ensureString(snapshot.session.id, 'session.id');
  ensureString(snapshot.session.kind, 'session.kind');
  ensureString(snapshot.session.state, 'session.state');
  ensureOptionalString(snapshot.session.repoRoot, 'session.repoRoot');
  // Provenance of the target that opened the session.
  if (!isObject(snapshot.session.sourceTarget)) {
    throw new Error('Canonical session snapshot requires session.sourceTarget to be an object');
  }
  ensureString(snapshot.session.sourceTarget.type, 'session.sourceTarget.type');
  ensureString(snapshot.session.sourceTarget.value, 'session.sourceTarget.value');
  if (!Array.isArray(snapshot.workers)) {
    throw new Error('Canonical session snapshot requires workers to be an array');
  }
  snapshot.workers.forEach((worker, index) => {
    if (!isObject(worker)) {
      throw new Error(`Canonical session snapshot requires workers[${index}] to be an object`);
    }
    ensureString(worker.id, `workers[${index}].id`);
    ensureString(worker.label, `workers[${index}].label`);
    ensureString(worker.state, `workers[${index}].state`);
    // branch/worktree are optional scalars: string, null, or absent.
    ensureOptionalString(worker.branch, `workers[${index}].branch`);
    ensureOptionalString(worker.worktree, `workers[${index}].worktree`);
    // Runtime metadata: kind required, command optional, flags strictly boolean.
    if (!isObject(worker.runtime)) {
      throw new Error(`Canonical session snapshot requires workers[${index}].runtime to be an object`);
    }
    ensureString(worker.runtime.kind, `workers[${index}].runtime.kind`);
    ensureOptionalString(worker.runtime.command, `workers[${index}].runtime.command`);
    ensureBoolean(worker.runtime.active, `workers[${index}].runtime.active`);
    ensureBoolean(worker.runtime.dead, `workers[${index}].runtime.dead`);
    // Intent: a non-empty objective and a string array of seed paths.
    if (!isObject(worker.intent)) {
      throw new Error(`Canonical session snapshot requires workers[${index}].intent to be an object`);
    }
    ensureString(worker.intent.objective, `workers[${index}].intent.objective`);
    ensureArrayOfStrings(worker.intent.seedPaths, `workers[${index}].intent.seedPaths`);
    // Outputs: three string arrays, each of which may be empty.
    if (!isObject(worker.outputs)) {
      throw new Error(`Canonical session snapshot requires workers[${index}].outputs to be an object`);
    }
    ensureArrayOfStrings(worker.outputs.summary, `workers[${index}].outputs.summary`);
    ensureArrayOfStrings(worker.outputs.validation, `workers[${index}].outputs.validation`);
    ensureArrayOfStrings(worker.outputs.remainingRisks, `workers[${index}].outputs.remainingRisks`);
    // Artifacts are adapter-owned; only the container shape is checked here.
    if (!isObject(worker.artifacts)) {
      throw new Error(`Canonical session snapshot requires workers[${index}].artifacts to be an object`);
    }
  });
  // Aggregates must be consistent with the workers array: the count matches
  // and the states map holds non-negative integer counts only.
  if (!isObject(snapshot.aggregates)) {
    throw new Error('Canonical session snapshot requires aggregates to be an object');
  }
  ensureInteger(snapshot.aggregates.workerCount, 'aggregates.workerCount');
  if (snapshot.aggregates.workerCount !== snapshot.workers.length) {
    throw new Error('Canonical session snapshot requires aggregates.workerCount to match workers.length');
  }
  if (!isObject(snapshot.aggregates.states)) {
    throw new Error('Canonical session snapshot requires aggregates.states to be an object');
  }
  for (const [state, count] of Object.entries(snapshot.aggregates.states)) {
    ensureString(state, 'aggregates.states key');
    ensureInteger(count, `aggregates.states.${state}`);
  }
  return snapshot;
}
// Pick the JSON recording directory: explicit option first, then the
// ECC_SESSION_RECORDING_DIR environment variable, then the tmpdir default.
function resolveRecordingDir(options = {}) {
  const explicit = options.recordingDir;
  if (typeof explicit === 'string' && explicit.length > 0) {
    return path.resolve(explicit);
  }
  const fromEnv = process.env.ECC_SESSION_RECORDING_DIR;
  if (typeof fromEnv === 'string' && fromEnv.length > 0) {
    return path.resolve(fromEnv);
  }
  return DEFAULT_RECORDING_DIR;
}
// Compute the JSON-fallback recording path for a validated snapshot:
// <recordingDir>/<adapterId>/<sessionId>.json, with both ids sanitized into
// safe path segments. Throws if the snapshot is not contract-compliant.
function getFallbackSessionRecordingPath(snapshot, options = {}) {
  validateCanonicalSnapshot(snapshot);
  const recordingDir = resolveRecordingDir(options);
  const adapterSegment = sanitizePathSegment(snapshot.adapterId);
  const fileName = `${sanitizePathSegment(snapshot.session.id)}.json`;
  return path.join(recordingDir, adapterSegment, fileName);
}
// Read and parse an existing recording file. Returns null when the file is
// absent or holds invalid JSON (a corrupt recording is treated as missing).
function readExistingRecording(filePath) {
  if (fs.existsSync(filePath) === false) {
    return null;
  }
  try {
    const raw = fs.readFileSync(filePath, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
// Persist a canonical snapshot to the JSON-fallback recording file.
// `latest` is always replaced in place; a history entry is appended only
// when the serialized snapshot differs from the previous `latest`, so
// repeated identical reads do not grow the history. `createdAt` from an
// existing recording is preserved; `updatedAt` is always refreshed.
function writeFallbackSessionRecording(snapshot, options = {}) {
  const filePath = getFallbackSessionRecordingPath(snapshot, options);
  const recordedAt = new Date().toISOString();
  const existing = readExistingRecording(filePath);

  // Compare serialized bodies to detect a genuinely new snapshot.
  const previousBody = existing ? JSON.stringify(existing.latest) : null;
  const snapshotChanged = previousBody === null
    || previousBody !== JSON.stringify(snapshot);

  let createdAt = recordedAt;
  if (existing && typeof existing.createdAt === 'string') {
    createdAt = existing.createdAt;
  }

  let history;
  if (Array.isArray(existing && existing.history)) {
    history = snapshotChanged
      ? existing.history.concat([{ recordedAt, snapshot }])
      : existing.history;
  } else {
    history = [{ recordedAt, snapshot }];
  }

  const payload = {
    schemaVersion: SESSION_RECORDING_SCHEMA_VERSION,
    adapterId: snapshot.adapterId,
    sessionId: snapshot.session.id,
    createdAt,
    updatedAt: recordedAt,
    latest: snapshot,
    history
  };
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  fs.writeFileSync(filePath, JSON.stringify(payload, null, 2) + '\n', 'utf8');
  return {
    backend: 'json-file',
    path: filePath,
    recordedAt
  };
}
// Resolve the state-store module: an injected instance wins, then the
// injectable loader (defaulting to require('../state-store')). A
// MODULE_NOT_FOUND error naming '../state-store' itself means the store has
// not landed yet and yields null; any other failure is rethrown.
function loadStateStore(options = {}) {
  if (options.stateStore) {
    return options.stateStore;
  }
  const load = options.loadStateStoreImpl || (() => require('../state-store'));
  try {
    return load();
  } catch (error) {
    const isMissingStateStore = Boolean(error)
      && error.code === 'MODULE_NOT_FOUND'
      && typeof error.message === 'string'
      && error.message.includes('../state-store');
    if (!isMissingStateStore) {
      throw error;
    }
    return null;
  }
}
// Find a snapshot-writer function on the state store, checking the store
// itself before its `sessions` namespace, in documented preference order.
// Returns the writer bound to its owner, or null if no writer method exists.
function resolveStateStoreWriter(stateStore) {
  if (!stateStore) {
    return null;
  }
  const directNames = [
    'persistCanonicalSessionSnapshot',
    'recordCanonicalSessionSnapshot',
    'persistSessionSnapshot',
    'recordSessionSnapshot',
    'writeSessionSnapshot'
  ];
  for (const name of directNames) {
    if (typeof stateStore[name] === 'function') {
      return stateStore[name].bind(stateStore);
    }
  }
  const sessions = stateStore.sessions;
  if (sessions) {
    const sessionNames = [
      'persistCanonicalSessionSnapshot',
      'recordCanonicalSessionSnapshot',
      'persistSessionSnapshot',
      'recordSessionSnapshot'
    ];
    for (const name of sessionNames) {
      if (typeof sessions[name] === 'function') {
        return sessions[name].bind(sessions);
      }
    }
  }
  return null;
}
// Validate and persist a canonical snapshot. With persist === false the call
// is a validation-only no-op. Otherwise a state-store writer is preferred;
// when no usable writer exists (module missing, or it is a factory module
// without snapshot writer methods) the JSON-file fallback recorder is used.
// Returns { backend, path, recordedAt } describing where the write went.
function persistCanonicalSnapshot(snapshot, options = {}) {
  validateCanonicalSnapshot(snapshot);
  if (options.persist === false) {
    return {
      backend: 'skipped',
      path: null,
      recordedAt: null
    };
  }
  const writer = resolveStateStoreWriter(loadStateStore(options));
  if (!writer) {
    return writeFallbackSessionRecording(snapshot, options);
  }
  writer(snapshot, {
    adapterId: snapshot.adapterId,
    schemaVersion: snapshot.schemaVersion,
    sessionId: snapshot.session.id
  });
  return {
    backend: 'state-store',
    path: null,
    recordedAt: null
  };
}
function normalizeDmuxSnapshot(snapshot, sourceTarget) {
@@ -59,7 +401,7 @@ function normalizeDmuxSnapshot(snapshot, sourceTarget) {
}
}));
return {
return validateCanonicalSnapshot({
schemaVersion: SESSION_SCHEMA_VERSION,
adapterId: 'dmux-tmux',
session: {
@@ -71,7 +413,7 @@ function normalizeDmuxSnapshot(snapshot, sourceTarget) {
},
workers,
aggregates: buildAggregates(workers)
};
});
}
function deriveClaudeWorkerId(session) {
@@ -102,7 +444,7 @@ function normalizeClaudeHistorySession(session, sourceTarget) {
objective: metadata.inProgress && metadata.inProgress.length > 0
? metadata.inProgress[0]
: (metadata.title || ''),
seedPaths: []
seedPaths: parseContextSeedPaths(metadata.context)
},
outputs: {
summary: Array.isArray(metadata.completed) ? metadata.completed : [],
@@ -115,7 +457,7 @@ function normalizeClaudeHistorySession(session, sourceTarget) {
}
};
return {
return validateCanonicalSnapshot({
schemaVersion: SESSION_SCHEMA_VERSION,
adapterId: 'claude-history',
session: {
@@ -127,12 +469,15 @@ function normalizeClaudeHistorySession(session, sourceTarget) {
},
workers: [worker],
aggregates: buildAggregates([worker])
};
});
}
module.exports = {
SESSION_SCHEMA_VERSION,
buildAggregates,
getFallbackSessionRecordingPath,
normalizeClaudeHistorySession,
normalizeDmuxSnapshot
normalizeDmuxSnapshot,
persistCanonicalSnapshot,
validateCanonicalSnapshot
};

View File

@@ -5,7 +5,7 @@ const path = require('path');
const sessionManager = require('../session-manager');
const sessionAliases = require('../session-aliases');
const { normalizeClaudeHistorySession } = require('./canonical-session');
const { normalizeClaudeHistorySession, persistCanonicalSnapshot } = require('./canonical-session');
function parseClaudeTarget(target) {
if (typeof target !== 'string') {
@@ -111,7 +111,9 @@ function resolveSessionRecord(target, cwd) {
throw new Error(`Unsupported Claude session target: ${target}`);
}
function createClaudeHistoryAdapter() {
function createClaudeHistoryAdapter(options = {}) {
const persistCanonicalSnapshotImpl = options.persistCanonicalSnapshotImpl || persistCanonicalSnapshot;
return {
id: 'claude-history',
description: 'Claude local session history and session-file snapshots',
@@ -135,7 +137,16 @@ function createClaudeHistoryAdapter() {
adapterId: 'claude-history',
getSnapshot() {
const { session, sourceTarget } = resolveSessionRecord(target, cwd);
return normalizeClaudeHistorySession(session, sourceTarget);
const canonicalSnapshot = normalizeClaudeHistorySession(session, sourceTarget);
persistCanonicalSnapshotImpl(canonicalSnapshot, {
loadStateStoreImpl: options.loadStateStoreImpl,
persist: context.persistSnapshots !== false && options.persistSnapshots !== false,
recordingDir: context.recordingDir || options.recordingDir,
stateStore: options.stateStore
});
return canonicalSnapshot;
}
};
}

View File

@@ -4,7 +4,7 @@ const fs = require('fs');
const path = require('path');
const { collectSessionSnapshot } = require('../orchestration-session');
const { normalizeDmuxSnapshot } = require('./canonical-session');
const { normalizeDmuxSnapshot, persistCanonicalSnapshot } = require('./canonical-session');
function isPlanFileTarget(target, cwd) {
if (typeof target !== 'string' || target.length === 0) {
@@ -42,6 +42,7 @@ function buildSourceTarget(target, cwd) {
function createDmuxTmuxAdapter(options = {}) {
const collectSessionSnapshotImpl = options.collectSessionSnapshotImpl || collectSessionSnapshot;
const persistCanonicalSnapshotImpl = options.persistCanonicalSnapshotImpl || persistCanonicalSnapshot;
return {
id: 'dmux-tmux',
@@ -66,7 +67,16 @@ function createDmuxTmuxAdapter(options = {}) {
adapterId: 'dmux-tmux',
getSnapshot() {
const snapshot = collectSessionSnapshotImpl(target, cwd);
return normalizeDmuxSnapshot(snapshot, buildSourceTarget(target, cwd));
const canonicalSnapshot = normalizeDmuxSnapshot(snapshot, buildSourceTarget(target, cwd));
persistCanonicalSnapshotImpl(canonicalSnapshot, {
loadStateStoreImpl: options.loadStateStoreImpl,
persist: context.persistSnapshots !== false && options.persistSnapshots !== false,
recordingDir: context.recordingDir || options.recordingDir,
stateStore: options.stateStore
});
return canonicalSnapshot;
}
};
}

View File

@@ -11,10 +11,26 @@ const TARGET_TYPE_TO_ADAPTER_ID = Object.freeze({
'session-file': 'claude-history'
});
function createDefaultAdapters() {
function buildDefaultAdapterOptions(options, adapterId) {
const sharedOptions = {
loadStateStoreImpl: options.loadStateStoreImpl,
persistSnapshots: options.persistSnapshots,
recordingDir: options.recordingDir,
stateStore: options.stateStore
};
return {
...sharedOptions,
...(options.adapterOptions && options.adapterOptions[adapterId]
? options.adapterOptions[adapterId]
: {})
};
}
function createDefaultAdapters(options = {}) {
return [
createClaudeHistoryAdapter(),
createDmuxTmuxAdapter()
createClaudeHistoryAdapter(buildDefaultAdapterOptions(options, 'claude-history')),
createDmuxTmuxAdapter(buildDefaultAdapterOptions(options, 'dmux-tmux'))
];
}
@@ -60,7 +76,7 @@ function normalizeStructuredTarget(target, context = {}) {
}
function createAdapterRegistry(options = {}) {
const adapters = options.adapters || createDefaultAdapters();
const adapters = options.adapters || createDefaultAdapters(options);
return {
adapters,

View File

@@ -0,0 +1,191 @@
'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const initSqlJs = require('sql.js');
const { applyMigrations, getAppliedMigrations } = require('./migrations');
const { createQueryApi } = require('./queries');
const { assertValidEntity, validateEntity } = require('./schema');
// Default on-disk location of the state database, relative to the home dir.
const DEFAULT_STATE_STORE_RELATIVE_PATH = path.join('.claude', 'ecc', 'state.db');
/**
 * Resolve the absolute path of the SQLite state database.
 * `:memory:` is passed through untouched; an explicit dbPath is made
 * absolute; otherwise the default path under the home directory is used.
 * @param {{dbPath?: string, homeDir?: string}} [options]
 * @returns {string} absolute path or the literal ':memory:'.
 */
function resolveStateStorePath(options = {}) {
  const { dbPath, homeDir } = options;
  if (dbPath) {
    return dbPath === ':memory:' ? dbPath : path.resolve(dbPath);
  }
  const baseDir = homeDir || process.env.HOME || os.homedir();
  return path.join(baseDir, DEFAULT_STATE_STORE_RELATIVE_PATH);
}
/**
* Wraps a sql.js Database with a better-sqlite3-compatible API surface so
* that the rest of the state-store code (migrations.js, queries.js) can
* operate without knowing which driver is in use.
*
* IMPORTANT: sql.js db.export() implicitly ends any active transaction, so
* we must defer all disk writes until after the transaction commits.
*/
function wrapSqlJsDatabase(rawDb, dbPath) {
  // True while a wrapped transaction is open. Disk writes are suppressed in
  // that window because sql.js db.export() implicitly ends the transaction.
  let inTransaction = false;
  // Serialize the whole database image back to dbPath. No-op for in-memory
  // databases and while a transaction is in flight (see transaction()).
  function saveToDisk() {
    if (dbPath === ':memory:' || inTransaction) {
      return;
    }
    const data = rawDb.export();
    const buffer = Buffer.from(data);
    fs.writeFileSync(dbPath, buffer);
  }
  const db = {
    // Run one or more SQL statements with no result, then persist.
    exec(sql) {
      rawDb.run(sql);
      saveToDisk();
    },
    // Best-effort PRAGMA: failures are deliberately swallowed because sql.js
    // rejects some pragmas outright.
    pragma(pragmaStr) {
      try {
        rawDb.run(`PRAGMA ${pragmaStr}`);
      } catch (_error) {
        // Ignore unsupported pragmas (e.g. WAL for in-memory databases).
      }
    },
    // Statement object mimicking better-sqlite3's prepare().
    // NOTE(review): all()/get() bind positional parameters only, while run()
    // binds named parameters only (keys rewritten to '@key'); positional
    // arguments passed to run() are silently ignored. Confirm all callers
    // use exactly these forms.
    prepare(sql) {
      return {
        all(...positionalArgs) {
          const stmt = rawDb.prepare(sql);
          if (positionalArgs.length === 1 && typeof positionalArgs[0] !== 'object') {
            stmt.bind([positionalArgs[0]]);
          } else if (positionalArgs.length > 1) {
            stmt.bind(positionalArgs);
          }
          const rows = [];
          while (stmt.step()) {
            rows.push(stmt.getAsObject());
          }
          stmt.free();
          return rows;
        },
        // Like all() but returns only the first row, or null when empty
        // (better-sqlite3 returns undefined here; callers must treat both
        // as falsy).
        get(...positionalArgs) {
          const stmt = rawDb.prepare(sql);
          if (positionalArgs.length === 1 && typeof positionalArgs[0] !== 'object') {
            stmt.bind([positionalArgs[0]]);
          } else if (positionalArgs.length > 1) {
            stmt.bind(positionalArgs);
          }
          let row = null;
          if (stmt.step()) {
            row = stmt.getAsObject();
          }
          stmt.free();
          return row;
        },
        // Execute a write statement with named parameters; undefined values
        // are coerced to null because sql.js cannot bind undefined.
        run(namedParams) {
          const stmt = rawDb.prepare(sql);
          if (namedParams && typeof namedParams === 'object' && !Array.isArray(namedParams)) {
            const sqlJsParams = {};
            for (const [key, value] of Object.entries(namedParams)) {
              sqlJsParams[`@${key}`] = value === undefined ? null : value;
            }
            stmt.bind(sqlJsParams);
          }
          stmt.step();
          stmt.free();
          saveToDisk();
        },
      };
    },
    // Return a callable that runs fn inside BEGIN/COMMIT. The disk write is
    // deferred until after COMMIT (see the inTransaction flag) and skipped
    // entirely on rollback.
    transaction(fn) {
      return (...args) => {
        rawDb.run('BEGIN');
        inTransaction = true;
        try {
          const result = fn(...args);
          rawDb.run('COMMIT');
          inTransaction = false;
          saveToDisk();
          return result;
        } catch (error) {
          try {
            rawDb.run('ROLLBACK');
          } catch (_rollbackError) {
            // Transaction may already be rolled back.
          }
          inTransaction = false;
          throw error;
        }
      };
    },
    // Flush the final image to disk, then release the sql.js handle.
    close() {
      saveToDisk();
      rawDb.close();
    },
  };
  return db;
}
/**
 * Open (or create) the sql.js database backing dbPath and wrap it in the
 * better-sqlite3-compatible adapter.
 * @param {object} SQL - initialized sql.js module (from initSqlJs()).
 * @param {string} dbPath - absolute file path or ':memory:'.
 * @returns {Promise<object>} wrapped database handle.
 */
async function openDatabase(SQL, dbPath) {
  if (dbPath !== ':memory:') {
    // Ensure the parent directory exists before the first saveToDisk().
    fs.mkdirSync(path.dirname(dbPath), { recursive: true });
  }
  let rawDb;
  if (dbPath !== ':memory:' && fs.existsSync(dbPath)) {
    // Reload the previously persisted image.
    const fileBuffer = fs.readFileSync(dbPath);
    rawDb = new SQL.Database(fileBuffer);
  } else {
    rawDb = new SQL.Database();
  }
  const db = wrapSqlJsDatabase(rawDb, dbPath);
  db.pragma('foreign_keys = ON');
  // wrapSqlJsDatabase's pragma() already swallows unsupported-pragma errors
  // (e.g. WAL for in-memory databases), so the previous try/catch around
  // this call was dead code and has been removed.
  db.pragma('journal_mode = WAL');
  return db;
}
/**
 * Create the ECC state store: resolve the database path, open/create the
 * sql.js database, apply pending migrations, and expose the query API.
 * Async because sql.js requires asynchronous WASM initialization.
 * @param {{dbPath?: string, homeDir?: string}} [options]
 * @returns {Promise<object>} store exposing query methods, validators,
 *   getAppliedMigrations(), and close().
 */
async function createStateStore(options = {}) {
  const dbPath = resolveStateStorePath(options);
  const SQL = await initSqlJs();
  const db = await openDatabase(SQL, dbPath);
  const appliedMigrations = applyMigrations(db);
  const queryApi = createQueryApi(db);
  return {
    dbPath,
    // Flush the final image to disk and release the sql.js handle.
    close() {
      db.close();
    },
    getAppliedMigrations() {
      return getAppliedMigrations(db);
    },
    validateEntity,
    assertValidEntity,
    ...queryApi,
    // Underscore-prefixed fields are internal/test hooks, not public API.
    _database: db,
    _migrations: appliedMigrations,
  };
}
module.exports = {
  DEFAULT_STATE_STORE_RELATIVE_PATH,
  createStateStore,
  resolveStateStorePath,
};

View File

@@ -0,0 +1,178 @@
'use strict';
// Version 1 schema for the ECC state store. Every JSON-bearing column is
// guarded by a json_valid() CHECK constraint, and each table carries the
// indexes required by the read statements in queries.js. The whole script
// is executed through a single exec() call during migration v1.
const INITIAL_SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS schema_migrations (
  version INTEGER PRIMARY KEY,
  name TEXT NOT NULL,
  applied_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS sessions (
  id TEXT PRIMARY KEY,
  adapter_id TEXT NOT NULL,
  harness TEXT NOT NULL,
  state TEXT NOT NULL,
  repo_root TEXT,
  started_at TEXT,
  ended_at TEXT,
  snapshot TEXT NOT NULL CHECK (json_valid(snapshot))
);
CREATE INDEX IF NOT EXISTS idx_sessions_state_started_at
  ON sessions (state, started_at DESC);
CREATE INDEX IF NOT EXISTS idx_sessions_started_at
  ON sessions (started_at DESC);
CREATE TABLE IF NOT EXISTS skill_runs (
  id TEXT PRIMARY KEY,
  skill_id TEXT NOT NULL,
  skill_version TEXT NOT NULL,
  session_id TEXT NOT NULL,
  task_description TEXT NOT NULL,
  outcome TEXT NOT NULL,
  failure_reason TEXT,
  tokens_used INTEGER,
  duration_ms INTEGER,
  user_feedback TEXT,
  created_at TEXT NOT NULL,
  FOREIGN KEY (session_id) REFERENCES sessions (id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_skill_runs_session_id_created_at
  ON skill_runs (session_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_skill_runs_created_at
  ON skill_runs (created_at DESC);
CREATE INDEX IF NOT EXISTS idx_skill_runs_outcome_created_at
  ON skill_runs (outcome, created_at DESC);
CREATE TABLE IF NOT EXISTS skill_versions (
  skill_id TEXT NOT NULL,
  version TEXT NOT NULL,
  content_hash TEXT NOT NULL,
  amendment_reason TEXT,
  promoted_at TEXT,
  rolled_back_at TEXT,
  PRIMARY KEY (skill_id, version)
);
CREATE INDEX IF NOT EXISTS idx_skill_versions_promoted_at
  ON skill_versions (promoted_at DESC);
CREATE TABLE IF NOT EXISTS decisions (
  id TEXT PRIMARY KEY,
  session_id TEXT NOT NULL,
  title TEXT NOT NULL,
  rationale TEXT NOT NULL,
  alternatives TEXT NOT NULL CHECK (json_valid(alternatives)),
  supersedes TEXT,
  status TEXT NOT NULL,
  created_at TEXT NOT NULL,
  FOREIGN KEY (session_id) REFERENCES sessions (id) ON DELETE CASCADE,
  FOREIGN KEY (supersedes) REFERENCES decisions (id) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_decisions_session_id_created_at
  ON decisions (session_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_decisions_status_created_at
  ON decisions (status, created_at DESC);
CREATE TABLE IF NOT EXISTS install_state (
  target_id TEXT NOT NULL,
  target_root TEXT NOT NULL,
  profile TEXT,
  modules TEXT NOT NULL CHECK (json_valid(modules)),
  operations TEXT NOT NULL CHECK (json_valid(operations)),
  installed_at TEXT NOT NULL,
  source_version TEXT,
  PRIMARY KEY (target_id, target_root)
);
CREATE INDEX IF NOT EXISTS idx_install_state_installed_at
  ON install_state (installed_at DESC);
CREATE TABLE IF NOT EXISTS governance_events (
  id TEXT PRIMARY KEY,
  session_id TEXT,
  event_type TEXT NOT NULL,
  payload TEXT NOT NULL CHECK (json_valid(payload)),
  resolved_at TEXT,
  resolution TEXT,
  created_at TEXT NOT NULL,
  FOREIGN KEY (session_id) REFERENCES sessions (id) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_governance_events_resolved_at_created_at
  ON governance_events (resolved_at, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_governance_events_session_id_created_at
  ON governance_events (session_id, created_at DESC);
`;
// Ordered migration list. applyMigrations() runs each entry whose version is
// not yet recorded in schema_migrations.
const MIGRATIONS = [
  {
    version: 1,
    name: '001_initial_state_store',
    sql: INITIAL_SCHEMA_SQL,
  },
];
/**
 * Make sure the schema_migrations bookkeeping table exists.
 * The DDL is idempotent, so this is safe to run on every store open.
 * @param {object} db - better-sqlite3-compatible database handle.
 */
function ensureMigrationTable(db) {
  const createTableSql = `
    CREATE TABLE IF NOT EXISTS schema_migrations (
      version INTEGER PRIMARY KEY,
      name TEXT NOT NULL,
      applied_at TEXT NOT NULL
    );
  `;
  db.exec(createTableSql);
}
/**
 * List migrations recorded in schema_migrations, oldest first, mapped to
 * camelCase objects.
 * @param {object} db - better-sqlite3-compatible database handle.
 * @returns {Array<{version:number, name:string, appliedAt:string}>}
 */
function getAppliedMigrations(db) {
  ensureMigrationTable(db);
  const selectSql = `
      SELECT version, name, applied_at
      FROM schema_migrations
      ORDER BY version ASC
    `;
  const rows = db.prepare(selectSql).all();
  return rows.map(({ version, name, applied_at: appliedAtColumn }) => ({
    version,
    name,
    appliedAt: appliedAtColumn,
  }));
}
/**
 * Apply any entries of MIGRATIONS not yet recorded in schema_migrations,
 * inside a single transaction, then return the full applied list.
 * @param {object} db - better-sqlite3-compatible database handle.
 * @returns {Array<{version:number, name:string, appliedAt:string}>}
 */
function applyMigrations(db) {
  ensureMigrationTable(db);
  const appliedVersions = new Set(
    db.prepare('SELECT version FROM schema_migrations').all().map(row => row.version)
  );
  const insertMigration = db.prepare(`
    INSERT INTO schema_migrations (version, name, applied_at)
    VALUES (@version, @name, @applied_at)
  `);
  // All pending migrations commit atomically: a failure in any migration
  // rolls back every pending one, and (via the sql.js wrapper) nothing is
  // written to disk until COMMIT succeeds.
  const applyPending = db.transaction(() => {
    for (const migration of MIGRATIONS) {
      if (appliedVersions.has(migration.version)) {
        continue;
      }
      db.exec(migration.sql);
      insertMigration.run({
        version: migration.version,
        name: migration.name,
        applied_at: new Date().toISOString(),
      });
    }
  });
  applyPending();
  return getAppliedMigrations(db);
}
module.exports = {
  MIGRATIONS,
  applyMigrations,
  getAppliedMigrations,
};

View File

@@ -0,0 +1,697 @@
'use strict';
const { assertValidEntity } = require('./schema');
// Session `state` values treated as "active" by the status queries.
// NOTE(review): the prepared SQL below hard-codes the same three values
// rather than deriving them from this constant — keep the two in sync.
const ACTIVE_SESSION_STATES = ['active', 'running', 'idle'];
// Outcome spellings recognized as success / failure when classifying skill
// runs; anything else counts as "unknown".
const SUCCESS_OUTCOMES = new Set(['success', 'succeeded', 'passed']);
const FAILURE_OUTCOMES = new Set(['failure', 'failed', 'error']);
/**
 * Coerce a user-supplied limit to a positive integer.
 * @param {*} value - raw limit (string or number); null/undefined use fallback.
 * @param {number} fallback - value returned when no limit was supplied.
 * @returns {number} positive integer limit.
 * @throws {Error} when value is present but not a positive integer.
 */
function normalizeLimit(value, fallback) {
  if (value === undefined || value === null) {
    return fallback;
  }
  const parsed = Number.parseInt(value, 10);
  const isUsable = Number.isFinite(parsed) && parsed > 0;
  if (!isUsable) {
    throw new Error(`Invalid limit: ${value}`);
  }
  return parsed;
}
/**
 * Parse a JSON-encoded database column.
 * @param {*} value - raw column value; null/undefined/'' yield the fallback.
 * @param {*} fallback - value to return for empty columns.
 * @returns {*} parsed JSON value or the fallback.
 */
function parseJsonColumn(value, fallback) {
  const isEmpty = value === null || value === undefined || value === '';
  return isEmpty ? fallback : JSON.parse(value);
}
/**
 * JSON-serialize a value, wrapping any serialization failure (cycles,
 * BigInt, ...) in an error naming the offending field.
 * @param {*} value - value to serialize.
 * @param {string} label - field name used in the error message.
 * @returns {string|undefined} JSON text (undefined for undefined input).
 * @throws {Error} when JSON.stringify throws.
 */
function stringifyJson(value, label) {
  let serialized;
  try {
    serialized = JSON.stringify(value);
  } catch (error) {
    throw new Error(`Failed to serialize ${label}: ${error.message}`);
  }
  return serialized;
}
/**
 * Convert a sessions row to its camelCase API shape, parsing the snapshot
 * JSON and deriving workerCount from snapshot.workers.
 * @param {object} row - raw sessions row (snake_case columns).
 * @returns {object} camelCase session record.
 */
function mapSessionRow(row) {
  const snapshot = parseJsonColumn(row.snapshot, {});
  const workers = snapshot && snapshot.workers;
  return {
    id: row.id,
    adapterId: row.adapter_id,
    harness: row.harness,
    state: row.state,
    repoRoot: row.repo_root,
    startedAt: row.started_at,
    endedAt: row.ended_at,
    snapshot,
    workerCount: Array.isArray(workers) ? workers.length : 0,
  };
}
/**
 * Convert a skill_runs row to its camelCase API shape.
 * @param {object} row - raw skill_runs row (snake_case columns).
 * @returns {object} camelCase skill-run record.
 */
function mapSkillRunRow(row) {
  const {
    id,
    skill_id: skillId,
    skill_version: skillVersion,
    session_id: sessionId,
    task_description: taskDescription,
    outcome,
    failure_reason: failureReason,
    tokens_used: tokensUsed,
    duration_ms: durationMs,
    user_feedback: userFeedback,
    created_at: createdAt,
  } = row;
  return {
    id,
    skillId,
    skillVersion,
    sessionId,
    taskDescription,
    outcome,
    failureReason,
    tokensUsed,
    durationMs,
    userFeedback,
    createdAt,
  };
}
/**
 * Convert a skill_versions row to its camelCase API shape.
 * @param {object} row - raw skill_versions row (snake_case columns).
 * @returns {object} camelCase skill-version record.
 */
function mapSkillVersionRow(row) {
  const {
    skill_id: skillId,
    version,
    content_hash: contentHash,
    amendment_reason: amendmentReason,
    promoted_at: promotedAt,
    rolled_back_at: rolledBackAt,
  } = row;
  return { skillId, version, contentHash, amendmentReason, promotedAt, rolledBackAt };
}
/**
 * Convert a decisions row to its camelCase API shape, parsing the
 * alternatives JSON column (empty -> []).
 * @param {object} row - raw decisions row (snake_case columns).
 * @returns {object} camelCase decision record.
 */
function mapDecisionRow(row) {
  const { id, title, rationale, supersedes, status } = row;
  return {
    id,
    sessionId: row.session_id,
    title,
    rationale,
    alternatives: parseJsonColumn(row.alternatives, []),
    supersedes,
    status,
    createdAt: row.created_at,
  };
}
/**
 * Convert an install_state row to its camelCase API shape, deriving module
 * and operation counts plus a health status.
 * An installation is 'healthy' only when both source_version and
 * installed_at are present; otherwise it is flagged 'warning'.
 * @param {object} row - raw install_state row (snake_case columns).
 * @returns {object} camelCase installation record.
 */
function mapInstallStateRow(row) {
  const modules = parseJsonColumn(row.modules, []);
  const operations = parseJsonColumn(row.operations, []);
  const status = row.source_version && row.installed_at ? 'healthy' : 'warning';
  return {
    targetId: row.target_id,
    targetRoot: row.target_root,
    profile: row.profile,
    modules,
    operations,
    installedAt: row.installed_at,
    sourceVersion: row.source_version,
    moduleCount: Array.isArray(modules) ? modules.length : 0,
    operationCount: Array.isArray(operations) ? operations.length : 0,
    status,
  };
}
/**
 * Convert a governance_events row to its camelCase API shape, parsing the
 * payload JSON column (empty -> null).
 * @param {object} row - raw governance_events row (snake_case columns).
 * @returns {object} camelCase governance-event record.
 */
function mapGovernanceEventRow(row) {
  const { id, resolution } = row;
  return {
    id,
    sessionId: row.session_id,
    eventType: row.event_type,
    payload: parseJsonColumn(row.payload, null),
    resolvedAt: row.resolved_at,
    resolution,
    createdAt: row.created_at,
  };
}
/**
 * Bucket a raw outcome string into 'success', 'failure', or 'unknown'.
 * Matching is case-insensitive; null/undefined normalize to ''.
 * @param {*} outcome - raw outcome value from a skill run.
 * @returns {'success'|'failure'|'unknown'}
 */
function classifyOutcome(outcome) {
  const normalized = String(outcome || '').toLowerCase();
  if (SUCCESS_OUTCOMES.has(normalized)) {
    return 'success';
  }
  return FAILURE_OUTCOMES.has(normalized) ? 'failure' : 'unknown';
}
/**
 * Express numerator/denominator as a percentage rounded to one decimal.
 * @param {number} numerator
 * @param {number} denominator
 * @returns {number|null} percentage, or null when the denominator is 0.
 */
function toPercent(numerator, denominator) {
  if (denominator === 0) {
    return null;
  }
  const ratio = (numerator / denominator) * 100;
  return Number(ratio.toFixed(1));
}
/**
 * Tally success/failure/unknown outcomes over a window of skill runs.
 * Rates are computed over runs with a recognized outcome only
 * ("knownCount"), so unrecognized spellings never dilute the percentages.
 * @param {Array<{outcome: *}>} skillRuns
 * @returns {{totalCount:number, knownCount:number, successCount:number,
 *   failureCount:number, unknownCount:number, successRate:(number|null),
 *   failureRate:(number|null)}}
 */
function summarizeSkillRuns(skillRuns) {
  const summary = {
    totalCount: skillRuns.length,
    knownCount: 0,
    successCount: 0,
    failureCount: 0,
    unknownCount: 0,
    successRate: null,
    failureRate: null,
  };
  for (const skillRun of skillRuns) {
    const classification = classifyOutcome(skillRun.outcome);
    if (classification === 'success') {
      summary.successCount += 1;
      summary.knownCount += 1;
    } else if (classification === 'failure') {
      summary.failureCount += 1;
      summary.knownCount += 1;
    } else {
      summary.unknownCount += 1;
    }
  }
  // toPercent returns null when knownCount is 0, so an all-unknown window
  // reports null rates instead of 0%.
  summary.successRate = toPercent(summary.successCount, summary.knownCount);
  summary.failureRate = toPercent(summary.failureCount, summary.knownCount);
  return summary;
}
/**
 * Aggregate per-installation health into an overall install-health report.
 * No installations -> 'missing'; any warning -> 'warning'; else 'healthy'.
 * @param {Array<{status: string}>} installations - mapped install records.
 * @returns {{status:string, totalCount:number, healthyCount:number,
 *   warningCount:number, installations:Array}}
 */
function summarizeInstallHealth(installations) {
  if (installations.length === 0) {
    return {
      status: 'missing',
      totalCount: 0,
      healthyCount: 0,
      warningCount: 0,
      installations: [],
    };
  }
  let healthyCount = 0;
  let warningCount = 0;
  for (const installation of installations) {
    if (installation.status === 'healthy') {
      healthyCount += 1;
    } else {
      warningCount += 1;
    }
  }
  return {
    status: warningCount > 0 ? 'warning' : 'healthy',
    totalCount: installations.length,
    healthyCount,
    warningCount,
    installations,
  };
}
/**
 * Fill optional session fields with explicit defaults before validation.
 * @param {object} session - caller-supplied session record.
 * @returns {object} session with nullable fields defaulted.
 */
function normalizeSessionInput(session) {
  const { id, adapterId, harness, state } = session;
  return {
    id,
    adapterId,
    harness,
    state,
    repoRoot: session.repoRoot ?? null,
    startedAt: session.startedAt ?? null,
    endedAt: session.endedAt ?? null,
    snapshot: session.snapshot ?? {},
  };
}
/**
 * Fill optional skill-run fields with defaults; createdAt defaults to the
 * current ISO timestamp when falsy.
 * @param {object} skillRun - caller-supplied skill-run record.
 * @returns {object} skill run with nullable fields defaulted.
 */
function normalizeSkillRunInput(skillRun) {
  const { id, skillId, skillVersion, sessionId, taskDescription, outcome } = skillRun;
  return {
    id,
    skillId,
    skillVersion,
    sessionId,
    taskDescription,
    outcome,
    failureReason: skillRun.failureReason ?? null,
    tokensUsed: skillRun.tokensUsed ?? null,
    durationMs: skillRun.durationMs ?? null,
    userFeedback: skillRun.userFeedback ?? null,
    createdAt: skillRun.createdAt || new Date().toISOString(),
  };
}
/**
 * Fill optional skill-version fields with explicit null defaults.
 * @param {object} skillVersion - caller-supplied skill-version record.
 * @returns {object} skill version with nullable fields defaulted.
 */
function normalizeSkillVersionInput(skillVersion) {
  const { skillId, version, contentHash } = skillVersion;
  return {
    skillId,
    version,
    contentHash,
    amendmentReason: skillVersion.amendmentReason ?? null,
    promotedAt: skillVersion.promotedAt ?? null,
    rolledBackAt: skillVersion.rolledBackAt ?? null,
  };
}
/**
 * Fill optional decision fields with defaults; alternatives default to [],
 * createdAt to the current ISO timestamp when falsy.
 * @param {object} decision - caller-supplied decision record.
 * @returns {object} decision with nullable fields defaulted.
 */
function normalizeDecisionInput(decision) {
  const { id, sessionId, title, rationale, status } = decision;
  return {
    id,
    sessionId,
    title,
    rationale,
    alternatives: decision.alternatives ?? [],
    supersedes: decision.supersedes ?? null,
    status,
    createdAt: decision.createdAt || new Date().toISOString(),
  };
}
/**
 * Fill optional install-state fields with defaults; modules/operations
 * default to [], installedAt to the current ISO timestamp when falsy.
 * @param {object} installState - caller-supplied install-state record.
 * @returns {object} install state with nullable fields defaulted.
 */
function normalizeInstallStateInput(installState) {
  const { targetId, targetRoot } = installState;
  return {
    targetId,
    targetRoot,
    profile: installState.profile ?? null,
    modules: installState.modules ?? [],
    operations: installState.operations ?? [],
    installedAt: installState.installedAt || new Date().toISOString(),
    sourceVersion: installState.sourceVersion ?? null,
  };
}
/**
 * Fill optional governance-event fields with defaults; createdAt defaults
 * to the current ISO timestamp when falsy.
 * @param {object} governanceEvent - caller-supplied governance event.
 * @returns {object} governance event with nullable fields defaulted.
 */
function normalizeGovernanceEventInput(governanceEvent) {
  const { id, eventType } = governanceEvent;
  return {
    id,
    sessionId: governanceEvent.sessionId ?? null,
    eventType,
    payload: governanceEvent.payload ?? null,
    resolvedAt: governanceEvent.resolvedAt ?? null,
    resolution: governanceEvent.resolution ?? null,
    createdAt: governanceEvent.createdAt || new Date().toISOString(),
  };
}
/**
 * Build the read/write API over a prepared database handle.
 * All SQL statements are prepared once up front; every write method
 * normalizes its input and validates it against the state-store JSON schema
 * (assertValidEntity) before touching the database. JSON-bearing columns
 * are stored as serialized text.
 * @param {object} db - better-sqlite3-compatible database handle.
 * @returns {object} query API spread into the store by createStateStore.
 */
function createQueryApi(db) {
  // --- read statements ---
  const listRecentSessionsStatement = db.prepare(`
    SELECT *
    FROM sessions
    ORDER BY COALESCE(started_at, ended_at, '') DESC, id DESC
    LIMIT ?
  `);
  const countSessionsStatement = db.prepare(`
    SELECT COUNT(*) AS total_count
    FROM sessions
  `);
  const getSessionStatement = db.prepare(`
    SELECT *
    FROM sessions
    WHERE id = ?
  `);
  const getSessionSkillRunsStatement = db.prepare(`
    SELECT *
    FROM skill_runs
    WHERE session_id = ?
    ORDER BY created_at DESC, id DESC
  `);
  const getSessionDecisionsStatement = db.prepare(`
    SELECT *
    FROM decisions
    WHERE session_id = ?
    ORDER BY created_at DESC, id DESC
  `);
  // "Active" means no ended_at and a state in the active set; the state list
  // here mirrors ACTIVE_SESSION_STATES above.
  const listActiveSessionsStatement = db.prepare(`
    SELECT *
    FROM sessions
    WHERE ended_at IS NULL
      AND state IN ('active', 'running', 'idle')
    ORDER BY COALESCE(started_at, ended_at, '') DESC, id DESC
    LIMIT ?
  `);
  const countActiveSessionsStatement = db.prepare(`
    SELECT COUNT(*) AS total_count
    FROM sessions
    WHERE ended_at IS NULL
      AND state IN ('active', 'running', 'idle')
  `);
  const listRecentSkillRunsStatement = db.prepare(`
    SELECT *
    FROM skill_runs
    ORDER BY created_at DESC, id DESC
    LIMIT ?
  `);
  const listInstallStateStatement = db.prepare(`
    SELECT *
    FROM install_state
    ORDER BY installed_at DESC, target_id ASC
  `);
  const countPendingGovernanceStatement = db.prepare(`
    SELECT COUNT(*) AS total_count
    FROM governance_events
    WHERE resolved_at IS NULL
  `);
  const listPendingGovernanceStatement = db.prepare(`
    SELECT *
    FROM governance_events
    WHERE resolved_at IS NULL
    ORDER BY created_at DESC, id DESC
    LIMIT ?
  `);
  const getSkillVersionStatement = db.prepare(`
    SELECT *
    FROM skill_versions
    WHERE skill_id = ? AND version = ?
  `);
  // --- write statements (all idempotent upserts keyed on the primary key) ---
  const upsertSessionStatement = db.prepare(`
    INSERT INTO sessions (
      id,
      adapter_id,
      harness,
      state,
      repo_root,
      started_at,
      ended_at,
      snapshot
    ) VALUES (
      @id,
      @adapter_id,
      @harness,
      @state,
      @repo_root,
      @started_at,
      @ended_at,
      @snapshot
    )
    ON CONFLICT(id) DO UPDATE SET
      adapter_id = excluded.adapter_id,
      harness = excluded.harness,
      state = excluded.state,
      repo_root = excluded.repo_root,
      started_at = excluded.started_at,
      ended_at = excluded.ended_at,
      snapshot = excluded.snapshot
  `);
  const insertSkillRunStatement = db.prepare(`
    INSERT INTO skill_runs (
      id,
      skill_id,
      skill_version,
      session_id,
      task_description,
      outcome,
      failure_reason,
      tokens_used,
      duration_ms,
      user_feedback,
      created_at
    ) VALUES (
      @id,
      @skill_id,
      @skill_version,
      @session_id,
      @task_description,
      @outcome,
      @failure_reason,
      @tokens_used,
      @duration_ms,
      @user_feedback,
      @created_at
    )
    ON CONFLICT(id) DO UPDATE SET
      skill_id = excluded.skill_id,
      skill_version = excluded.skill_version,
      session_id = excluded.session_id,
      task_description = excluded.task_description,
      outcome = excluded.outcome,
      failure_reason = excluded.failure_reason,
      tokens_used = excluded.tokens_used,
      duration_ms = excluded.duration_ms,
      user_feedback = excluded.user_feedback,
      created_at = excluded.created_at
  `);
  const upsertSkillVersionStatement = db.prepare(`
    INSERT INTO skill_versions (
      skill_id,
      version,
      content_hash,
      amendment_reason,
      promoted_at,
      rolled_back_at
    ) VALUES (
      @skill_id,
      @version,
      @content_hash,
      @amendment_reason,
      @promoted_at,
      @rolled_back_at
    )
    ON CONFLICT(skill_id, version) DO UPDATE SET
      content_hash = excluded.content_hash,
      amendment_reason = excluded.amendment_reason,
      promoted_at = excluded.promoted_at,
      rolled_back_at = excluded.rolled_back_at
  `);
  const insertDecisionStatement = db.prepare(`
    INSERT INTO decisions (
      id,
      session_id,
      title,
      rationale,
      alternatives,
      supersedes,
      status,
      created_at
    ) VALUES (
      @id,
      @session_id,
      @title,
      @rationale,
      @alternatives,
      @supersedes,
      @status,
      @created_at
    )
    ON CONFLICT(id) DO UPDATE SET
      session_id = excluded.session_id,
      title = excluded.title,
      rationale = excluded.rationale,
      alternatives = excluded.alternatives,
      supersedes = excluded.supersedes,
      status = excluded.status,
      created_at = excluded.created_at
  `);
  const upsertInstallStateStatement = db.prepare(`
    INSERT INTO install_state (
      target_id,
      target_root,
      profile,
      modules,
      operations,
      installed_at,
      source_version
    ) VALUES (
      @target_id,
      @target_root,
      @profile,
      @modules,
      @operations,
      @installed_at,
      @source_version
    )
    ON CONFLICT(target_id, target_root) DO UPDATE SET
      profile = excluded.profile,
      modules = excluded.modules,
      operations = excluded.operations,
      installed_at = excluded.installed_at,
      source_version = excluded.source_version
  `);
  const insertGovernanceEventStatement = db.prepare(`
    INSERT INTO governance_events (
      id,
      session_id,
      event_type,
      payload,
      resolved_at,
      resolution,
      created_at
    ) VALUES (
      @id,
      @session_id,
      @event_type,
      @payload,
      @resolved_at,
      @resolution,
      @created_at
    )
    ON CONFLICT(id) DO UPDATE SET
      session_id = excluded.session_id,
      event_type = excluded.event_type,
      payload = excluded.payload,
      resolved_at = excluded.resolved_at,
      resolution = excluded.resolution,
      created_at = excluded.created_at
  `);
  // Fetch one session by id, mapped to camelCase, or null when absent.
  function getSessionById(id) {
    const row = getSessionStatement.get(id);
    return row ? mapSessionRow(row) : null;
  }
  // List newest-first sessions (default limit 10) plus the total count.
  function listRecentSessions(options = {}) {
    const limit = normalizeLimit(options.limit, 10);
    return {
      totalCount: countSessionsStatement.get().total_count,
      sessions: listRecentSessionsStatement.all(limit).map(mapSessionRow),
    };
  }
  // Full session detail: the session, a copied worker list from its
  // snapshot, and its skill runs and decisions. Null when the id is unknown.
  function getSessionDetail(id) {
    const session = getSessionById(id);
    if (!session) {
      return null;
    }
    const workers = Array.isArray(session.snapshot && session.snapshot.workers)
      ? session.snapshot.workers.map(worker => ({ ...worker }))
      : [];
    return {
      session,
      workers,
      skillRuns: getSessionSkillRunsStatement.all(id).map(mapSkillRunRow),
      decisions: getSessionDecisionsStatement.all(id).map(mapDecisionRow),
    };
  }
  // Dashboard-style status report: active sessions, a skill-run outcome
  // summary over a recent window, install health, and pending governance.
  function getStatus(options = {}) {
    const activeLimit = normalizeLimit(options.activeLimit, 5);
    const recentSkillRunLimit = normalizeLimit(options.recentSkillRunLimit, 20);
    const pendingLimit = normalizeLimit(options.pendingLimit, 5);
    const activeSessions = listActiveSessionsStatement.all(activeLimit).map(mapSessionRow);
    const recentSkillRuns = listRecentSkillRunsStatement.all(recentSkillRunLimit).map(mapSkillRunRow);
    const installations = listInstallStateStatement.all().map(mapInstallStateRow);
    const pendingGovernanceEvents = listPendingGovernanceStatement.all(pendingLimit).map(mapGovernanceEventRow);
    return {
      generatedAt: new Date().toISOString(),
      activeSessions: {
        activeCount: countActiveSessionsStatement.get().total_count,
        sessions: activeSessions,
      },
      skillRuns: {
        windowSize: recentSkillRunLimit,
        summary: summarizeSkillRuns(recentSkillRuns),
        recent: recentSkillRuns,
      },
      installHealth: summarizeInstallHealth(installations),
      governance: {
        pendingCount: countPendingGovernanceStatement.get().total_count,
        events: pendingGovernanceEvents,
      },
    };
  }
  return {
    getSessionById,
    getSessionDetail,
    getStatus,
    // Insert-or-replace a decision; returns the normalized input.
    insertDecision(decision) {
      const normalized = normalizeDecisionInput(decision);
      assertValidEntity('decision', normalized);
      insertDecisionStatement.run({
        id: normalized.id,
        session_id: normalized.sessionId,
        title: normalized.title,
        rationale: normalized.rationale,
        alternatives: stringifyJson(normalized.alternatives, 'decision.alternatives'),
        supersedes: normalized.supersedes,
        status: normalized.status,
        created_at: normalized.createdAt,
      });
      return normalized;
    },
    // Insert-or-replace a governance event; returns the normalized input.
    insertGovernanceEvent(governanceEvent) {
      const normalized = normalizeGovernanceEventInput(governanceEvent);
      assertValidEntity('governanceEvent', normalized);
      insertGovernanceEventStatement.run({
        id: normalized.id,
        session_id: normalized.sessionId,
        event_type: normalized.eventType,
        payload: stringifyJson(normalized.payload, 'governanceEvent.payload'),
        resolved_at: normalized.resolvedAt,
        resolution: normalized.resolution,
        created_at: normalized.createdAt,
      });
      return normalized;
    },
    // Insert-or-replace a skill run; returns the normalized input.
    insertSkillRun(skillRun) {
      const normalized = normalizeSkillRunInput(skillRun);
      assertValidEntity('skillRun', normalized);
      insertSkillRunStatement.run({
        id: normalized.id,
        skill_id: normalized.skillId,
        skill_version: normalized.skillVersion,
        session_id: normalized.sessionId,
        task_description: normalized.taskDescription,
        outcome: normalized.outcome,
        failure_reason: normalized.failureReason,
        tokens_used: normalized.tokensUsed,
        duration_ms: normalized.durationMs,
        user_feedback: normalized.userFeedback,
        created_at: normalized.createdAt,
      });
      return normalized;
    },
    listRecentSessions,
    // Upsert install state keyed on (target_id, target_root).
    upsertInstallState(installState) {
      const normalized = normalizeInstallStateInput(installState);
      assertValidEntity('installState', normalized);
      upsertInstallStateStatement.run({
        target_id: normalized.targetId,
        target_root: normalized.targetRoot,
        profile: normalized.profile,
        modules: stringifyJson(normalized.modules, 'installState.modules'),
        operations: stringifyJson(normalized.operations, 'installState.operations'),
        installed_at: normalized.installedAt,
        source_version: normalized.sourceVersion,
      });
      return normalized;
    },
    // Upsert a session; returns the freshly-read, mapped row (not the input).
    upsertSession(session) {
      const normalized = normalizeSessionInput(session);
      assertValidEntity('session', normalized);
      upsertSessionStatement.run({
        id: normalized.id,
        adapter_id: normalized.adapterId,
        harness: normalized.harness,
        state: normalized.state,
        repo_root: normalized.repoRoot,
        started_at: normalized.startedAt,
        ended_at: normalized.endedAt,
        snapshot: stringifyJson(normalized.snapshot, 'session.snapshot'),
      });
      return getSessionById(normalized.id);
    },
    // Upsert a skill version keyed on (skill_id, version); returns the
    // re-read row (null only if the write failed to land).
    upsertSkillVersion(skillVersion) {
      const normalized = normalizeSkillVersionInput(skillVersion);
      assertValidEntity('skillVersion', normalized);
      upsertSkillVersionStatement.run({
        skill_id: normalized.skillId,
        version: normalized.version,
        content_hash: normalized.contentHash,
        amendment_reason: normalized.amendmentReason,
        promoted_at: normalized.promotedAt,
        rolled_back_at: normalized.rolledBackAt,
      });
      const row = getSkillVersionStatement.get(normalized.skillId, normalized.version);
      return row ? mapSkillVersionRow(row) : null;
    },
  };
}
module.exports = {
  ACTIVE_SESSION_STATES,
  FAILURE_OUTCOMES,
  SUCCESS_OUTCOMES,
  createQueryApi,
};

View File

@@ -0,0 +1,92 @@
'use strict';
const fs = require('fs');
const path = require('path');
const Ajv = require('ajv');
// Location of the JSON Schema document describing all state-store entities.
const SCHEMA_PATH = path.join(__dirname, '..', '..', '..', 'schemas', 'state-store.schema.json');
// Maps public entity names to their key under $defs in the schema document.
const ENTITY_DEFINITIONS = {
  session: 'session',
  skillRun: 'skillRun',
  skillVersion: 'skillVersion',
  decision: 'decision',
  installState: 'installState',
  governanceEvent: 'governanceEvent',
};
// Module-level caches: the parsed schema, the Ajv instance, and one compiled
// validator per entity name. All are populated lazily on first use.
let cachedSchema = null;
let cachedAjv = null;
const cachedValidators = new Map();
/**
 * Load and cache the state-store JSON Schema document.
 * @returns {object} parsed schema (same object on every call).
 */
function readSchema() {
  if (!cachedSchema) {
    cachedSchema = JSON.parse(fs.readFileSync(SCHEMA_PATH, 'utf8'));
  }
  return cachedSchema;
}
/**
 * Return the shared Ajv instance, creating it on first use.
 * strict mode is disabled so the schema's own metadata keywords are allowed.
 * @returns {object} configured Ajv instance.
 */
function getAjv() {
  if (!cachedAjv) {
    cachedAjv = new Ajv({
      allErrors: true,
      strict: false,
    });
  }
  return cachedAjv;
}
/**
 * Compile (once) and cache the Ajv validator for a named entity.
 * The compiled schema embeds the document's full $defs block so that $ref
 * pointers between entity definitions still resolve.
 * @param {string} entityName - key of ENTITY_DEFINITIONS.
 * @returns {Function} Ajv validate function for the entity.
 * @throws {Error} when entityName has no definition in the schema document.
 */
function getEntityValidator(entityName) {
  if (cachedValidators.has(entityName)) {
    return cachedValidators.get(entityName);
  }
  const schema = readSchema();
  const definitionName = ENTITY_DEFINITIONS[entityName];
  if (!definitionName || !schema.$defs || !schema.$defs[definitionName]) {
    throw new Error(`Unknown state-store schema entity: ${entityName}`);
  }
  const validatorSchema = {
    $schema: schema.$schema,
    ...schema.$defs[definitionName],
    $defs: schema.$defs,
  };
  const validator = getAjv().compile(validatorSchema);
  cachedValidators.set(entityName, validator);
  return validator;
}
/**
 * Render Ajv validation errors as a single '; '-separated string.
 * Empty instance paths render as '/' (the document root).
 * @param {Array<{instancePath?: string, message?: string}>} [errors]
 * @returns {string} human-readable error summary ('' when no errors).
 */
function formatValidationErrors(errors = []) {
  const parts = [];
  for (const error of errors) {
    parts.push(`${error.instancePath || '/'} ${error.message}`);
  }
  return parts.join('; ');
}
/**
 * Validate a payload against a named entity schema.
 * @param {string} entityName - key of ENTITY_DEFINITIONS.
 * @param {*} payload - candidate entity object.
 * @returns {{valid: boolean, errors: Array}} validation outcome.
 */
function validateEntity(entityName, payload) {
  const validator = getEntityValidator(entityName);
  const valid = validator(payload);
  const errors = validator.errors || [];
  return { valid, errors };
}
/**
 * Validate a payload against a named entity schema and throw on failure.
 * @param {string} entityName - key of ENTITY_DEFINITIONS.
 * @param {*} payload - candidate entity object.
 * @param {string} [label] - optional context appended to the error message.
 * @throws {Error} listing every validation failure.
 */
function assertValidEntity(entityName, payload, label) {
  const result = validateEntity(entityName, payload);
  if (!result.valid) {
    throw new Error(`Invalid ${entityName}${label ? ` (${label})` : ''}: ${formatValidationErrors(result.errors)}`);
  }
}
module.exports = {
  assertValidEntity,
  formatValidationErrors,
  readSchema,
  validateEntity,
};

View File

@@ -31,6 +31,21 @@ EOF
}
mkdir -p "$(dirname "$handoff_file")" "$(dirname "$status_file")"
if [[ ! -r "$task_file" ]]; then
write_status "failed" "- Error: task file is missing or unreadable (\`$task_file\`)"
{
echo "# Handoff"
echo
echo "- Failed: $(timestamp)"
echo "- Branch: \`$(git rev-parse --abbrev-ref HEAD)\`"
echo "- Worktree: \`$(pwd)\`"
echo
echo "Task file is missing or unreadable: \`$task_file\`"
} > "$handoff_file"
exit 1
fi
write_status "running" "- Task file: \`$task_file\`"
prompt_file="$(mktemp)"

177
scripts/sessions-cli.js Normal file
View File

@@ -0,0 +1,177 @@
#!/usr/bin/env node
'use strict';
const { createStateStore } = require('./lib/state-store');
/**
 * Print CLI usage and terminate the process.
 * @param {number} [exitCode=0] - process exit code.
 */
function showHelp(exitCode = 0) {
  const usageText = `
Usage: node scripts/sessions-cli.js [<session-id>] [--db <path>] [--json] [--limit <n>]
List recent ECC sessions from the SQLite state store or inspect a single session
with worker, skill-run, and decision detail.
`;
  console.log(usageText);
  process.exit(exitCode);
}
/**
 * Parse CLI arguments into an options object.
 * The first non-flag argument is taken as the session id; `limit` is left
 * as the raw string and normalized later by the store.
 * Fix: `--db` and `--limit` previously swallowed a missing value silently
 * (and could consume a following flag, e.g. `--db --json` set dbPath to
 * '--json'); both now raise a clear error.
 * @param {string[]} argv - full process.argv.
 * @returns {{dbPath:(string|null), help:boolean, json:boolean,
 *   limit:(string|number), sessionId:(string|null)}}
 * @throws {Error} on unknown arguments or a flag missing its value.
 */
function parseArgs(argv) {
  const args = argv.slice(2);
  const parsed = {
    dbPath: null,
    help: false,
    json: false,
    limit: 10,
    sessionId: null,
  };
  // Read the value following a flag, rejecting absent values and values
  // that are themselves flags.
  const takeValue = (flag, index) => {
    const value = args[index + 1];
    if (value === undefined || value.startsWith('--')) {
      throw new Error(`Missing value for ${flag}`);
    }
    return value;
  };
  for (let index = 0; index < args.length; index += 1) {
    const arg = args[index];
    if (arg === '--db') {
      parsed.dbPath = takeValue('--db', index);
      index += 1;
    } else if (arg === '--json') {
      parsed.json = true;
    } else if (arg === '--limit') {
      parsed.limit = takeValue('--limit', index);
      index += 1;
    } else if (arg === '--help' || arg === '-h') {
      parsed.help = true;
    } else if (!arg.startsWith('--') && !parsed.sessionId) {
      parsed.sessionId = arg;
    } else {
      throw new Error(`Unknown argument: ${arg}`);
    }
  }
  return parsed;
}
/**
 * Render the recent-sessions listing to stdout.
 * @param {{sessions: Array, totalCount: number}} payload - from
 *   listRecentSessions().
 */
function printSessionList(payload) {
  console.log('Recent sessions:\n');
  const { sessions, totalCount } = payload;
  if (sessions.length === 0) {
    console.log('No sessions found.');
    return;
  }
  sessions.forEach((session) => {
    console.log(`- ${session.id} [${session.harness}/${session.adapterId}] ${session.state}`);
    console.log(`  Repo: ${session.repoRoot || '(unknown)'}`);
    console.log(`  Started: ${session.startedAt || '(unknown)'}`);
    console.log(`  Ended: ${session.endedAt || '(active)'}`);
    console.log(`  Workers: ${session.workerCount}`);
  });
  console.log(`\nTotal sessions: ${totalCount}`);
}
/**
 * Render the worker roster for a single session.
 *
 * @param {Array<object>} workers - Worker rows from the session detail payload.
 */
function printWorkers(workers) {
  console.log(`Workers: ${workers.length}`);
  if (!workers.length) {
    console.log('  - none');
    return;
  }
  workers.forEach((worker) => {
    const name = worker.id || worker.label || '(unknown)';
    console.log(`  - ${name} ${worker.state || 'unknown'}`);
    console.log(`    Branch: ${worker.branch || '(unknown)'}`);
    console.log(`    Worktree: ${worker.worktree || '(unknown)'}`);
  });
}
/**
 * Render the skill runs recorded for a session.
 *
 * @param {Array<object>} skillRuns - Skill-run rows from the session detail payload.
 */
function printSkillRuns(skillRuns) {
  console.log(`Skill runs: ${skillRuns.length}`);
  if (!skillRuns.length) {
    console.log('  - none');
    return;
  }
  skillRuns.forEach((skillRun) => {
    console.log(`  - ${skillRun.id} ${skillRun.outcome} ${skillRun.skillId}@${skillRun.skillVersion}`);
    console.log(`    Task: ${skillRun.taskDescription}`);
    // ?? keeps a legitimate 0 ms duration visible; only null/undefined fall back.
    console.log(`    Duration: ${skillRun.durationMs ?? '(unknown)'} ms`);
  });
}
/**
 * Render the decisions recorded for a session.
 *
 * @param {Array<object>} decisions - Decision rows from the session detail payload.
 */
function printDecisions(decisions) {
  console.log(`Decisions: ${decisions.length}`);
  if (!decisions.length) {
    console.log('  - none');
    return;
  }
  decisions.forEach((decision) => {
    console.log(`  - ${decision.id} ${decision.status}`);
    console.log(`    Title: ${decision.title}`);
    const alternatives = decision.alternatives.join(', ') || '(none)';
    console.log(`    Alternatives: ${alternatives}`);
  });
}
/**
 * Render the full detail view for one session: header fields followed by
 * workers, skill runs, and decisions, each separated by a blank line.
 *
 * @param {{session: object, workers: Array, skillRuns: Array, decisions: Array}} payload
 */
function printSessionDetail(payload) {
  const { session } = payload;
  const header = [
    ['Session', session.id],
    ['Harness', session.harness],
    ['Adapter', session.adapterId],
    ['State', session.state],
    ['Repo', session.repoRoot || '(unknown)'],
    ['Started', session.startedAt || '(unknown)'],
    ['Ended', session.endedAt || '(active)'],
  ];
  for (const [label, value] of header) {
    console.log(`${label}: ${value}`);
  }
  console.log();
  printWorkers(payload.workers);
  console.log();
  printSkillRuns(payload.skillRuns);
  console.log();
  printDecisions(payload.decisions);
}
/**
 * CLI entry point: open the state store, then either list recent sessions
 * (no positional argument) or print the detail view for one session id.
 * Errors are reported on stderr with exit code 1; the store is always closed.
 */
async function main() {
  let store = null;
  try {
    const options = parseArgs(process.argv);
    if (options.help) {
      showHelp(0);
    }
    store = await createStateStore({
      dbPath: options.dbPath,
      homeDir: process.env.HOME,
    });
    const emitJson = (value) => console.log(JSON.stringify(value, null, 2));
    if (!options.sessionId) {
      const payload = store.listRecentSessions({ limit: options.limit });
      if (options.json) {
        emitJson(payload);
      } else {
        printSessionList(payload);
      }
      return;
    }
    const detail = store.getSessionDetail(options.sessionId);
    if (!detail) {
      throw new Error(`Session not found: ${options.sessionId}`);
    }
    if (options.json) {
      emitJson(detail);
    } else {
      printSessionDetail(detail);
    }
  } catch (error) {
    console.error(`Error: ${error.message}`);
    process.exit(1);
  } finally {
    if (store) {
      store.close();
    }
  }
}
// Run as a CLI when executed directly; export internals for unit tests.
if (require.main === module) {
  main();
}
module.exports = {
  main,
  parseArgs,
};

176
scripts/status.js Normal file
View File

@@ -0,0 +1,176 @@
#!/usr/bin/env node
'use strict';
const { createStateStore } = require('./lib/state-store');
/**
 * Print usage information for the status CLI and terminate the process.
 *
 * @param {number} [exitCode=0] - Exit code passed to process.exit.
 */
function showHelp(exitCode = 0) {
  const usage = `
Usage: node scripts/status.js [--db <path>] [--json] [--limit <n>]
Query the ECC SQLite state store for active sessions, recent skill runs,
install health, and pending governance events.
`;
  console.log(usage);
  process.exit(exitCode);
}
/**
 * Parse CLI arguments for the status command.
 *
 * Recognized flags: --db <path>, --json, --limit <n>, --help/-h.
 *
 * @param {string[]} argv - Raw process.argv (the first two entries are skipped).
 * @returns {{dbPath: ?string, json: boolean, help: boolean, limit: ?number}}
 * @throws {Error} On unknown flags or an invalid --limit value.
 */
function parseArgs(argv) {
  const args = argv.slice(2);
  const parsed = {
    dbPath: null,
    json: false,
    help: false,
    limit: 5,
  };
  for (let index = 0; index < args.length; index += 1) {
    const arg = args[index];
    if (arg === '--db') {
      parsed.dbPath = args[index + 1] || null;
      index += 1;
    } else if (arg === '--json') {
      parsed.json = true;
    } else if (arg === '--limit') {
      const value = args[index + 1];
      if (value === undefined || value === '') {
        // Preserve the legacy missing-value behavior: fall back to null.
        parsed.limit = null;
      } else {
        // Coerce to a positive integer so the store never receives a raw
        // string (previously the value was passed through unparsed).
        const limit = Number.parseInt(value, 10);
        if (!Number.isInteger(limit) || limit <= 0) {
          throw new Error(`Invalid value for --limit: ${value}`);
        }
        parsed.limit = limit;
      }
      index += 1;
    } else if (arg === '--help' || arg === '-h') {
      parsed.help = true;
    } else {
      throw new Error(`Unknown argument: ${arg}`);
    }
  }
  return parsed;
}
/**
 * Render the active-sessions section of the status report.
 *
 * @param {{activeCount: number, sessions: Array<object>}} section
 */
function printActiveSessions(section) {
  console.log(`Active sessions: ${section.activeCount}`);
  if (!section.sessions.length) {
    console.log('  - none');
    return;
  }
  section.sessions.forEach((session) => {
    console.log(`  - ${session.id} [${session.harness}/${session.adapterId}] ${session.state}`);
    console.log(`    Repo: ${session.repoRoot || '(unknown)'}`);
    console.log(`    Started: ${session.startedAt || '(unknown)'}`);
    console.log(`    Workers: ${session.workerCount}`);
  });
}
/**
 * Render the skill-run summary section of the status report: outcome counts,
 * success/failure rates, and up to five most-recent runs.
 *
 * @param {{summary: object, windowSize: number, recent: Array<object>}} section
 */
function printSkillRuns(section) {
  const { summary, windowSize, recent } = section;
  // A null rate means "no data in the window"; render it as n/a.
  const formatRate = (rate) => (rate === null ? 'n/a' : `${rate}%`);
  console.log(`Skill runs (last ${windowSize}):`);
  console.log(`  Success: ${summary.successCount}`);
  console.log(`  Failure: ${summary.failureCount}`);
  console.log(`  Unknown: ${summary.unknownCount}`);
  console.log(`  Success rate: ${formatRate(summary.successRate)}`);
  console.log(`  Failure rate: ${formatRate(summary.failureRate)}`);
  if (!recent.length) {
    console.log('  Recent runs: none');
    return;
  }
  console.log('  Recent runs:');
  recent.slice(0, 5).forEach((skillRun) => {
    console.log(`    - ${skillRun.id} ${skillRun.outcome} ${skillRun.skillId}@${skillRun.skillVersion}`);
  });
}
/**
 * Render the install-health section of the status report, listing up to
 * five recorded installation targets.
 *
 * @param {{status: string, totalCount: number, healthyCount: number, warningCount: number, installations: Array<object>}} section
 */
function printInstallHealth(section) {
  console.log(`Install health: ${section.status}`);
  console.log(`  Targets recorded: ${section.totalCount}`);
  console.log(`  Healthy: ${section.healthyCount}`);
  console.log(`  Warning: ${section.warningCount}`);
  const { installations } = section;
  if (!installations.length) {
    console.log('  Installations: none');
    return;
  }
  console.log('  Installations:');
  installations.slice(0, 5).forEach((installation) => {
    console.log(`    - ${installation.targetId} ${installation.status}`);
    console.log(`      Root: ${installation.targetRoot}`);
    console.log(`      Profile: ${installation.profile || '(custom)'}`);
    console.log(`      Modules: ${installation.moduleCount}`);
    console.log(`      Source version: ${installation.sourceVersion || '(unknown)'}`);
  });
}
/**
 * Render the pending-governance section of the status report.
 *
 * @param {{pendingCount: number, events: Array<object>}} section
 */
function printGovernance(section) {
  console.log(`Pending governance events: ${section.pendingCount}`);
  if (!section.events.length) {
    console.log('  - none');
    return;
  }
  section.events.forEach((event) => {
    console.log(`  - ${event.id} ${event.eventType}`);
    console.log(`    Session: ${event.sessionId || '(none)'}`);
    console.log(`    Created: ${event.createdAt}`);
  });
}
/**
 * Render the full human-readable status report: header, then the four
 * sections separated by blank lines.
 *
 * @param {object} payload - Status payload augmented with dbPath.
 */
function printHuman(payload) {
  console.log('ECC status\n');
  console.log(`Database: ${payload.dbPath}\n`);
  const sections = [
    () => printActiveSessions(payload.activeSessions),
    () => printSkillRuns(payload.skillRuns),
    () => printInstallHealth(payload.installHealth),
    () => printGovernance(payload.governance),
  ];
  sections.forEach((render, index) => {
    if (index > 0) {
      console.log();
    }
    render();
  });
}
/**
 * CLI entry point: open the state store, gather the status snapshot, and
 * print it as JSON or in human-readable form. Errors go to stderr with exit
 * code 1; the store is always closed.
 */
async function main() {
  let store = null;
  try {
    const options = parseArgs(process.argv);
    if (options.help) {
      showHelp(0);
    }
    store = await createStateStore({
      dbPath: options.dbPath,
      homeDir: process.env.HOME,
    });
    const status = store.getStatus({
      activeLimit: options.limit,
      recentSkillRunLimit: 20,
      pendingLimit: options.limit,
    });
    const payload = { dbPath: store.dbPath, ...status };
    if (options.json) {
      console.log(JSON.stringify(payload, null, 2));
    } else {
      printHuman(payload);
    }
  } catch (error) {
    console.error(`Error: ${error.message}`);
    process.exit(1);
  } finally {
    if (store) {
      store.close();
    }
  }
}
// Run as a CLI when executed directly; export internals for unit tests.
if (require.main === module) {
  main();
}
module.exports = {
  main,
  parseArgs,
};

View File

@@ -5,9 +5,16 @@ const fs = require('fs');
const os = require('os');
const path = require('path');
const {
getFallbackSessionRecordingPath,
persistCanonicalSnapshot
} = require('../../scripts/lib/session-adapters/canonical-session');
const { createClaudeHistoryAdapter } = require('../../scripts/lib/session-adapters/claude-history');
const { createDmuxTmuxAdapter } = require('../../scripts/lib/session-adapters/dmux-tmux');
const { createAdapterRegistry } = require('../../scripts/lib/session-adapters/registry');
const {
createAdapterRegistry,
inspectSessionTarget
} = require('../../scripts/lib/session-adapters/registry');
console.log('=== Testing session-adapters ===\n');
@@ -41,74 +48,233 @@ function withHome(homeDir, fn) {
}
test('dmux adapter normalizes orchestration snapshots into canonical form', () => {
const adapter = createDmuxTmuxAdapter({
collectSessionSnapshotImpl: () => ({
sessionName: 'workflow-visual-proof',
coordinationDir: '/tmp/.claude/orchestration/workflow-visual-proof',
repoRoot: '/tmp/repo',
targetType: 'plan',
sessionActive: true,
paneCount: 1,
workerCount: 1,
workerStates: { running: 1 },
panes: [{
paneId: '%95',
windowIndex: 1,
paneIndex: 0,
title: 'seed-check',
currentCommand: 'codex',
currentPath: '/tmp/worktree',
active: false,
dead: false,
pid: 1234
}],
workers: [{
workerSlug: 'seed-check',
workerDir: '/tmp/.claude/orchestration/workflow-visual-proof/seed-check',
status: {
state: 'running',
updated: '2026-03-13T00:00:00Z',
branch: 'feature/seed-check',
worktree: '/tmp/worktree',
taskFile: '/tmp/task.md',
handoffFile: '/tmp/handoff.md'
},
task: {
objective: 'Inspect seeded files.',
seedPaths: ['scripts/orchestrate-worktrees.js']
},
handoff: {
summary: ['Pending'],
validation: [],
remainingRisks: ['No screenshot yet']
},
files: {
status: '/tmp/status.md',
task: '/tmp/task.md',
handoff: '/tmp/handoff.md'
},
pane: {
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));
try {
const adapter = createDmuxTmuxAdapter({
collectSessionSnapshotImpl: () => ({
sessionName: 'workflow-visual-proof',
coordinationDir: '/tmp/.claude/orchestration/workflow-visual-proof',
repoRoot: '/tmp/repo',
targetType: 'plan',
sessionActive: true,
paneCount: 1,
workerCount: 1,
workerStates: { running: 1 },
panes: [{
paneId: '%95',
title: 'seed-check'
}
}]
})
});
windowIndex: 1,
paneIndex: 0,
title: 'seed-check',
currentCommand: 'codex',
currentPath: '/tmp/worktree',
active: false,
dead: false,
pid: 1234
}],
workers: [{
workerSlug: 'seed-check',
workerDir: '/tmp/.claude/orchestration/workflow-visual-proof/seed-check',
status: {
state: 'running',
updated: '2026-03-13T00:00:00Z',
branch: 'feature/seed-check',
worktree: '/tmp/worktree',
taskFile: '/tmp/task.md',
handoffFile: '/tmp/handoff.md'
},
task: {
objective: 'Inspect seeded files.',
seedPaths: ['scripts/orchestrate-worktrees.js']
},
handoff: {
summary: ['Pending'],
validation: [],
remainingRisks: ['No screenshot yet']
},
files: {
status: '/tmp/status.md',
task: '/tmp/task.md',
handoff: '/tmp/handoff.md'
},
pane: {
paneId: '%95',
title: 'seed-check'
}
}]
}),
recordingDir
});
const snapshot = adapter.open('workflow-visual-proof').getSnapshot();
const snapshot = adapter.open('workflow-visual-proof').getSnapshot();
const recordingPath = getFallbackSessionRecordingPath(snapshot, { recordingDir });
const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
assert.strictEqual(snapshot.schemaVersion, 'ecc.session.v1');
assert.strictEqual(snapshot.adapterId, 'dmux-tmux');
assert.strictEqual(snapshot.session.id, 'workflow-visual-proof');
assert.strictEqual(snapshot.session.kind, 'orchestrated');
assert.strictEqual(snapshot.session.sourceTarget.type, 'session');
assert.strictEqual(snapshot.aggregates.workerCount, 1);
assert.strictEqual(snapshot.workers[0].runtime.kind, 'tmux-pane');
assert.strictEqual(snapshot.workers[0].outputs.remainingRisks[0], 'No screenshot yet');
assert.strictEqual(snapshot.schemaVersion, 'ecc.session.v1');
assert.strictEqual(snapshot.adapterId, 'dmux-tmux');
assert.strictEqual(snapshot.session.id, 'workflow-visual-proof');
assert.strictEqual(snapshot.session.kind, 'orchestrated');
assert.strictEqual(snapshot.session.state, 'active');
assert.strictEqual(snapshot.session.sourceTarget.type, 'session');
assert.strictEqual(snapshot.aggregates.workerCount, 1);
assert.strictEqual(snapshot.workers[0].runtime.kind, 'tmux-pane');
assert.strictEqual(snapshot.workers[0].outputs.remainingRisks[0], 'No screenshot yet');
assert.strictEqual(persisted.latest.session.state, 'active');
assert.strictEqual(persisted.latest.adapterId, 'dmux-tmux');
assert.strictEqual(persisted.history.length, 1);
} finally {
fs.rmSync(recordingDir, { recursive: true, force: true });
}
});
test('dmux adapter marks finished sessions as completed and records history', () => {
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));
try {
const adapter = createDmuxTmuxAdapter({
collectSessionSnapshotImpl: () => ({
sessionName: 'workflow-visual-proof',
coordinationDir: '/tmp/.claude/orchestration/workflow-visual-proof',
repoRoot: '/tmp/repo',
targetType: 'session',
sessionActive: false,
paneCount: 0,
workerCount: 2,
workerStates: { completed: 2 },
panes: [],
workers: [{
workerSlug: 'seed-check',
workerDir: '/tmp/.claude/orchestration/workflow-visual-proof/seed-check',
status: {
state: 'completed',
updated: '2026-03-13T00:00:00Z',
branch: 'feature/seed-check',
worktree: '/tmp/worktree-a',
taskFile: '/tmp/task-a.md',
handoffFile: '/tmp/handoff-a.md'
},
task: {
objective: 'Inspect seeded files.',
seedPaths: ['scripts/orchestrate-worktrees.js']
},
handoff: {
summary: ['Finished'],
validation: ['Reviewed outputs'],
remainingRisks: []
},
files: {
status: '/tmp/status-a.md',
task: '/tmp/task-a.md',
handoff: '/tmp/handoff-a.md'
},
pane: null
}, {
workerSlug: 'proof',
workerDir: '/tmp/.claude/orchestration/workflow-visual-proof/proof',
status: {
state: 'completed',
updated: '2026-03-13T00:10:00Z',
branch: 'feature/proof',
worktree: '/tmp/worktree-b',
taskFile: '/tmp/task-b.md',
handoffFile: '/tmp/handoff-b.md'
},
task: {
objective: 'Capture proof.',
seedPaths: ['README.md']
},
handoff: {
summary: ['Delivered proof'],
validation: ['Checked screenshots'],
remainingRisks: []
},
files: {
status: '/tmp/status-b.md',
task: '/tmp/task-b.md',
handoff: '/tmp/handoff-b.md'
},
pane: null
}]
}),
recordingDir
});
const snapshot = adapter.open('workflow-visual-proof').getSnapshot();
const recordingPath = getFallbackSessionRecordingPath(snapshot, { recordingDir });
const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
assert.strictEqual(snapshot.session.state, 'completed');
assert.strictEqual(snapshot.aggregates.states.completed, 2);
assert.strictEqual(persisted.latest.session.state, 'completed');
assert.strictEqual(persisted.history.length, 1);
} finally {
fs.rmSync(recordingDir, { recursive: true, force: true });
}
});
test('fallback recording does not append duplicate history entries for unchanged snapshots', () => {
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));
try {
const adapter = createDmuxTmuxAdapter({
collectSessionSnapshotImpl: () => ({
sessionName: 'workflow-visual-proof',
coordinationDir: '/tmp/.claude/orchestration/workflow-visual-proof',
repoRoot: '/tmp/repo',
targetType: 'session',
sessionActive: true,
paneCount: 1,
workerCount: 1,
workerStates: { running: 1 },
panes: [],
workers: [{
workerSlug: 'seed-check',
workerDir: '/tmp/.claude/orchestration/workflow-visual-proof/seed-check',
status: {
state: 'running',
updated: '2026-03-13T00:00:00Z',
branch: 'feature/seed-check',
worktree: '/tmp/worktree',
taskFile: '/tmp/task.md',
handoffFile: '/tmp/handoff.md'
},
task: {
objective: 'Inspect seeded files.',
seedPaths: ['scripts/orchestrate-worktrees.js']
},
handoff: {
summary: ['Pending'],
validation: [],
remainingRisks: []
},
files: {
status: '/tmp/status.md',
task: '/tmp/task.md',
handoff: '/tmp/handoff.md'
},
pane: null
}]
}),
recordingDir
});
const handle = adapter.open('workflow-visual-proof');
const firstSnapshot = handle.getSnapshot();
const secondSnapshot = handle.getSnapshot();
const recordingPath = getFallbackSessionRecordingPath(firstSnapshot, { recordingDir });
const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
assert.deepStrictEqual(secondSnapshot, firstSnapshot);
assert.strictEqual(persisted.history.length, 1);
assert.deepStrictEqual(persisted.latest, secondSnapshot);
} finally {
fs.rmSync(recordingDir, { recursive: true, force: true });
}
});
test('claude-history adapter loads the latest recorded session', () => {
const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-adapter-home-'));
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-recordings-'));
const sessionsDir = path.join(homeDir, '.claude', 'sessions');
fs.mkdirSync(sessionsDir, { recursive: true });
@@ -140,8 +306,10 @@ test('claude-history adapter loads the latest recorded session', () => {
try {
withHome(homeDir, () => {
const adapter = createClaudeHistoryAdapter();
const adapter = createClaudeHistoryAdapter({ recordingDir });
const snapshot = adapter.open('claude:latest').getSnapshot();
const recordingPath = getFallbackSessionRecordingPath(snapshot, { recordingDir });
const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
assert.strictEqual(snapshot.schemaVersion, 'ecc.session.v1');
assert.strictEqual(snapshot.adapterId, 'claude-history');
@@ -151,11 +319,15 @@ test('claude-history adapter loads the latest recorded session', () => {
assert.strictEqual(snapshot.workers[0].branch, 'feat/session-adapter');
assert.strictEqual(snapshot.workers[0].worktree, '/tmp/ecc-worktree');
assert.strictEqual(snapshot.workers[0].runtime.kind, 'claude-session');
assert.deepStrictEqual(snapshot.workers[0].intent.seedPaths, ['scripts/lib/orchestration-session.js']);
assert.strictEqual(snapshot.workers[0].artifacts.sessionFile, sessionPath);
assert.ok(snapshot.workers[0].outputs.summary.includes('Build snapshot prototype'));
assert.strictEqual(persisted.latest.adapterId, 'claude-history');
assert.strictEqual(persisted.history.length, 1);
});
} finally {
fs.rmSync(homeDir, { recursive: true, force: true });
fs.rmSync(recordingDir, { recursive: true, force: true });
}
});
@@ -264,6 +436,41 @@ test('adapter registry resolves structured target types into the correct adapter
}
});
test('default registry forwards a nested state-store writer to adapters', () => {
const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-registry-home-'));
const sessionsDir = path.join(homeDir, '.claude', 'sessions');
fs.mkdirSync(sessionsDir, { recursive: true });
fs.writeFileSync(
path.join(sessionsDir, '2026-03-13-z9y8x7w6-session.tmp'),
'# History Session\n\n**Branch:** feat/history\n'
);
const stateStore = {
sessions: {
persisted: [],
persistCanonicalSessionSnapshot(snapshot, metadata) {
this.persisted.push({ snapshot, metadata });
}
}
};
try {
withHome(homeDir, () => {
const snapshot = inspectSessionTarget('claude:latest', {
cwd: process.cwd(),
stateStore
});
assert.strictEqual(snapshot.adapterId, 'claude-history');
assert.strictEqual(stateStore.sessions.persisted.length, 1);
assert.strictEqual(stateStore.sessions.persisted[0].snapshot.adapterId, 'claude-history');
assert.strictEqual(stateStore.sessions.persisted[0].metadata.sessionId, snapshot.session.id);
});
} finally {
fs.rmSync(homeDir, { recursive: true, force: true });
}
});
test('adapter registry lists adapter metadata and target types', () => {
const registry = createAdapterRegistry();
const adapters = registry.listAdapters();
@@ -281,5 +488,66 @@ test('adapter registry lists adapter metadata and target types', () => {
);
});
test('persistence only falls back when the state-store module is missing', () => {
const snapshot = {
schemaVersion: 'ecc.session.v1',
adapterId: 'claude-history',
session: {
id: 'a1b2c3d4',
kind: 'history',
state: 'recorded',
repoRoot: null,
sourceTarget: {
type: 'claude-history',
value: 'latest'
}
},
workers: [{
id: 'a1b2c3d4',
label: 'Session Review',
state: 'recorded',
branch: null,
worktree: null,
runtime: {
kind: 'claude-session',
command: 'claude',
pid: null,
active: false,
dead: true
},
intent: {
objective: 'Session Review',
seedPaths: []
},
outputs: {
summary: [],
validation: [],
remainingRisks: []
},
artifacts: {
sessionFile: '/tmp/session.tmp',
context: null
}
}],
aggregates: {
workerCount: 1,
states: {
recorded: 1
}
}
};
const loadError = new Error('state-store bootstrap failed');
loadError.code = 'ERR_STATE_STORE_BOOT';
assert.throws(() => {
persistCanonicalSnapshot(snapshot, {
loadStateStoreImpl() {
throw loadError;
}
});
}, /state-store bootstrap failed/);
});
console.log(`\n=== Results: ${passed} passed, ${failed} failed ===`);
if (failed > 0) process.exit(1);

View File

@@ -0,0 +1,489 @@
/**
* Tests for the SQLite-backed ECC state store and CLI commands.
*/
const assert = require('assert');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawnSync } = require('child_process');
const {
createStateStore,
resolveStateStorePath,
} = require('../../scripts/lib/state-store');
const ECC_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'ecc.js');
const STATUS_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'status.js');
const SESSIONS_SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'sessions-cli.js');
/**
 * Run one test case and report the outcome with a check/cross marker.
 *
 * @param {string} name - Label printed next to the result marker.
 * @param {Function} fn - Test body; may be async. A throw means failure.
 * @returns {Promise<boolean>} true on success, false when the body threw.
 */
async function test(name, fn) {
  let succeeded;
  try {
    await fn();
    console.log(`  \u2713 ${name}`);
    succeeded = true;
  } catch (error) {
    console.log(`  \u2717 ${name}`);
    console.log(`    Error: ${error.message}`);
    succeeded = false;
  }
  return succeeded;
}
/**
 * Create a unique temporary directory whose basename starts with `prefix`.
 *
 * @param {string} prefix - Leading part of the directory name.
 * @returns {string} Absolute path of the new directory.
 */
function createTempDir(prefix) {
  const template = path.join(os.tmpdir(), prefix);
  return fs.mkdtempSync(template);
}
/**
 * Remove a directory tree created by a test; missing paths are ignored.
 *
 * @param {string} dirPath - Directory to delete recursively.
 */
function cleanupTempDir(dirPath) {
  fs.rmSync(dirPath, { force: true, recursive: true });
}
/**
 * Spawn a Node script synchronously and capture its output as UTF-8 text.
 *
 * @param {string} scriptPath - Path to the script to execute with `node`.
 * @param {string[]} [args=[]] - Extra CLI arguments for the script.
 * @param {{cwd?: string, env?: object}} [options={}] - Working directory and env overrides.
 * @returns {object} The spawnSync result (status, stdout, stderr, ...).
 */
function runNode(scriptPath, args = [], options = {}) {
  return spawnSync('node', [scriptPath, ...args], {
    encoding: 'utf8',
    cwd: options.cwd || process.cwd(),
    // Large --json payloads can exceed the 1MB default and fail with ENOBUFS;
    // match the 10MB buffer the sibling CLI test runners use.
    maxBuffer: 10 * 1024 * 1024,
    env: {
      ...process.env,
      ...(options.env || {}),
    },
  });
}
/**
 * Parse a CLI's stdout as JSON, tolerating surrounding whitespace/newlines.
 *
 * @param {string} stdout - Raw captured output.
 * @returns {*} The parsed JSON value.
 */
function parseJson(stdout) {
  const trimmed = stdout.trim();
  return JSON.parse(trimmed);
}
/**
 * Populate a fresh state store at `dbPath` with a deterministic fixture:
 * two sessions (one active, one recorded), four skill runs covering the
 * success/failed/unknown outcomes, one skill version, one decision, one
 * install state, and two governance events (one pending, one resolved).
 * The store is closed before returning so CLI subprocesses can reopen it.
 *
 * @param {string} dbPath - Path for the SQLite database file.
 */
async function seedStore(dbPath) {
  const store = await createStateStore({ dbPath });
  // Active orchestrated session with two workers (one active, one idle).
  store.upsertSession({
    id: 'session-active',
    adapterId: 'dmux-tmux',
    harness: 'claude',
    state: 'active',
    repoRoot: '/tmp/ecc-repo',
    startedAt: '2026-03-15T08:00:00.000Z',
    endedAt: null,
    snapshot: {
      schemaVersion: 'ecc.session.v1',
      adapterId: 'dmux-tmux',
      session: {
        id: 'session-active',
        kind: 'orchestrated',
        state: 'active',
        repoRoot: '/tmp/ecc-repo',
      },
      workers: [
        {
          id: 'worker-1',
          label: 'Worker 1',
          state: 'active',
          branch: 'feat/state-store',
          worktree: '/tmp/ecc-repo/.worktrees/worker-1',
        },
        {
          id: 'worker-2',
          label: 'Worker 2',
          state: 'idle',
          branch: 'feat/state-store',
          worktree: '/tmp/ecc-repo/.worktrees/worker-2',
        },
      ],
      aggregates: {
        workerCount: 2,
        states: {
          active: 1,
          idle: 1,
        },
      },
    },
  });
  // Finished history session with a single recorded worker.
  store.upsertSession({
    id: 'session-recorded',
    adapterId: 'claude-history',
    harness: 'claude',
    state: 'recorded',
    repoRoot: '/tmp/ecc-repo',
    startedAt: '2026-03-14T18:00:00.000Z',
    endedAt: '2026-03-14T19:00:00.000Z',
    snapshot: {
      schemaVersion: 'ecc.session.v1',
      adapterId: 'claude-history',
      session: {
        id: 'session-recorded',
        kind: 'history',
        state: 'recorded',
        repoRoot: '/tmp/ecc-repo',
      },
      workers: [
        {
          id: 'worker-hist',
          label: 'History Worker',
          state: 'recorded',
          branch: 'main',
          worktree: '/tmp/ecc-repo',
        },
      ],
      aggregates: {
        workerCount: 1,
        states: {
          recorded: 1,
        },
      },
    },
  });
  // Four skill runs: 2 success, 1 failed, 1 unknown — drives the rate math.
  store.insertSkillRun({
    id: 'skill-run-1',
    skillId: 'tdd-workflow',
    skillVersion: '1.0.0',
    sessionId: 'session-active',
    taskDescription: 'Write store tests',
    outcome: 'success',
    failureReason: null,
    tokensUsed: 1200,
    durationMs: 3500,
    userFeedback: 'useful',
    createdAt: '2026-03-15T08:05:00.000Z',
  });
  store.insertSkillRun({
    id: 'skill-run-2',
    skillId: 'security-review',
    skillVersion: '1.0.0',
    sessionId: 'session-active',
    taskDescription: 'Review state-store design',
    outcome: 'failed',
    failureReason: 'timeout',
    tokensUsed: 800,
    durationMs: 1800,
    userFeedback: null,
    createdAt: '2026-03-15T08:06:00.000Z',
  });
  store.insertSkillRun({
    id: 'skill-run-3',
    skillId: 'code-reviewer',
    skillVersion: '1.0.0',
    sessionId: 'session-recorded',
    taskDescription: 'Inspect CLI formatting',
    outcome: 'success',
    failureReason: null,
    tokensUsed: 500,
    durationMs: 900,
    userFeedback: 'clear',
    createdAt: '2026-03-15T08:07:00.000Z',
  });
  store.insertSkillRun({
    id: 'skill-run-4',
    skillId: 'planner',
    skillVersion: '1.0.0',
    sessionId: 'session-recorded',
    taskDescription: 'Outline ECC 2.0 work',
    outcome: 'unknown',
    failureReason: null,
    tokensUsed: 300,
    durationMs: 500,
    userFeedback: null,
    createdAt: '2026-03-15T08:08:00.000Z',
  });
  store.upsertSkillVersion({
    skillId: 'tdd-workflow',
    version: '1.0.0',
    contentHash: 'abc123',
    amendmentReason: 'initial',
    promotedAt: '2026-03-10T00:00:00.000Z',
    rolledBackAt: null,
  });
  store.insertDecision({
    id: 'decision-1',
    sessionId: 'session-active',
    title: 'Use SQLite for durable state',
    rationale: 'Need queryable local state for ECC control plane',
    alternatives: ['json-files', 'memory-only'],
    supersedes: null,
    status: 'active',
    createdAt: '2026-03-15T08:09:00.000Z',
  });
  store.upsertInstallState({
    targetId: 'claude-home',
    targetRoot: '/tmp/home/.claude',
    profile: 'developer',
    modules: ['rules-core', 'orchestration'],
    operations: [
      {
        kind: 'copy-file',
        destinationPath: '/tmp/home/.claude/agents/planner.md',
      },
    ],
    installedAt: '2026-03-15T07:00:00.000Z',
    sourceVersion: '1.8.0',
  });
  // gov-1 stays unresolved (pending); gov-2 is already resolved.
  store.insertGovernanceEvent({
    id: 'gov-1',
    sessionId: 'session-active',
    eventType: 'policy-review-required',
    payload: {
      severity: 'warning',
      owner: 'security-reviewer',
    },
    resolvedAt: null,
    resolution: null,
    createdAt: '2026-03-15T08:10:00.000Z',
  });
  store.insertGovernanceEvent({
    id: 'gov-2',
    sessionId: 'session-recorded',
    eventType: 'decision-accepted',
    payload: {
      severity: 'info',
    },
    resolvedAt: '2026-03-15T08:11:00.000Z',
    resolution: 'accepted',
    createdAt: '2026-03-15T08:09:30.000Z',
  });
  store.close();
}
/**
 * Execute the state-store test suite sequentially, tallying pass/fail
 * counts, and exit the process with a non-zero code on any failure.
 */
async function runTests() {
  console.log('\n=== Testing state-store ===\n');
  let passed = 0;
  let failed = 0;
  // Migration bootstrap: same schema version after a second open (idempotent).
  if (await test('creates the default state.db path and applies migrations idempotently', async () => {
    const homeDir = createTempDir('ecc-state-home-');
    try {
      const expectedPath = path.join(homeDir, '.claude', 'ecc', 'state.db');
      assert.strictEqual(resolveStateStorePath({ homeDir }), expectedPath);
      const firstStore = await createStateStore({ homeDir });
      const firstMigrations = firstStore.getAppliedMigrations();
      firstStore.close();
      assert.strictEqual(firstMigrations.length, 1);
      assert.strictEqual(firstMigrations[0].version, 1);
      assert.ok(fs.existsSync(expectedPath));
      const secondStore = await createStateStore({ homeDir });
      const secondMigrations = secondStore.getAppliedMigrations();
      secondStore.close();
      assert.strictEqual(secondMigrations.length, 1);
      assert.strictEqual(secondMigrations[0].version, 1);
    } finally {
      cleanupTempDir(homeDir);
    }
  })) passed += 1; else failed += 1;
  // ':memory:' must not be treated as a relative file path.
  if (await test('preserves SQLite special database names like :memory:', async () => {
    const tempDir = createTempDir('ecc-state-memory-');
    const previousCwd = process.cwd();
    try {
      process.chdir(tempDir);
      assert.strictEqual(resolveStateStorePath({ dbPath: ':memory:' }), ':memory:');
      const store = await createStateStore({ dbPath: ':memory:' });
      assert.strictEqual(store.dbPath, ':memory:');
      assert.strictEqual(store.getAppliedMigrations().length, 1);
      store.close();
      assert.ok(!fs.existsSync(path.join(tempDir, ':memory:')));
    } finally {
      process.chdir(previousCwd);
      cleanupTempDir(tempDir);
    }
  })) passed += 1; else failed += 1;
  // Read path: list + per-session detail over the seeded fixture.
  if (await test('stores sessions and returns detailed session views with workers, skill runs, and decisions', async () => {
    const testDir = createTempDir('ecc-state-db-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      await seedStore(dbPath);
      const store = await createStateStore({ dbPath });
      const listResult = store.listRecentSessions({ limit: 10 });
      const detail = store.getSessionDetail('session-active');
      store.close();
      assert.strictEqual(listResult.totalCount, 2);
      assert.strictEqual(listResult.sessions[0].id, 'session-active');
      assert.strictEqual(detail.session.id, 'session-active');
      assert.strictEqual(detail.workers.length, 2);
      assert.strictEqual(detail.skillRuns.length, 2);
      assert.strictEqual(detail.decisions.length, 1);
      assert.deepStrictEqual(detail.decisions[0].alternatives, ['json-files', 'memory-only']);
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  // Aggregated status snapshot over the same fixture.
  if (await test('builds a status snapshot with active sessions, skill rates, install health, and pending governance', async () => {
    const testDir = createTempDir('ecc-state-db-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      await seedStore(dbPath);
      const store = await createStateStore({ dbPath });
      const status = store.getStatus();
      store.close();
      assert.strictEqual(status.activeSessions.activeCount, 1);
      assert.strictEqual(status.activeSessions.sessions[0].id, 'session-active');
      assert.strictEqual(status.skillRuns.summary.totalCount, 4);
      assert.strictEqual(status.skillRuns.summary.successCount, 2);
      assert.strictEqual(status.skillRuns.summary.failureCount, 1);
      assert.strictEqual(status.skillRuns.summary.unknownCount, 1);
      assert.strictEqual(status.installHealth.status, 'healthy');
      assert.strictEqual(status.installHealth.totalCount, 1);
      assert.strictEqual(status.governance.pendingCount, 1);
      assert.strictEqual(status.governance.events[0].id, 'gov-1');
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  // Validation path: malformed payloads must be rejected before writing.
  if (await test('validates entity payloads before writing to the database', async () => {
    const testDir = createTempDir('ecc-state-db-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      const store = await createStateStore({ dbPath });
      assert.throws(() => {
        store.upsertSession({
          id: '',
          adapterId: 'dmux-tmux',
          harness: 'claude',
          state: 'active',
          repoRoot: '/tmp/repo',
          startedAt: '2026-03-15T08:00:00.000Z',
          endedAt: null,
          snapshot: {},
        });
      }, /Invalid session/);
      assert.throws(() => {
        store.insertDecision({
          id: 'decision-invalid',
          sessionId: 'missing-session',
          title: 'Reject non-array alternatives',
          rationale: 'alternatives must be an array',
          alternatives: { unexpected: true },
          supersedes: null,
          status: 'active',
          createdAt: '2026-03-15T08:15:00.000Z',
        });
      }, /Invalid decision/);
      assert.throws(() => {
        store.upsertInstallState({
          targetId: 'claude-home',
          targetRoot: '/tmp/home/.claude',
          profile: 'developer',
          modules: 'rules-core',
          operations: [],
          installedAt: '2026-03-15T07:00:00.000Z',
          sourceVersion: '1.8.0',
        });
      }, /Invalid installState/);
      store.close();
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  // End-to-end: status.js subprocess in both output modes.
  if (await test('status CLI supports human-readable and --json output', async () => {
    const testDir = createTempDir('ecc-state-cli-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      await seedStore(dbPath);
      const jsonResult = runNode(STATUS_SCRIPT, ['--db', dbPath, '--json']);
      assert.strictEqual(jsonResult.status, 0, jsonResult.stderr);
      const jsonPayload = parseJson(jsonResult.stdout);
      assert.strictEqual(jsonPayload.activeSessions.activeCount, 1);
      assert.strictEqual(jsonPayload.governance.pendingCount, 1);
      const humanResult = runNode(STATUS_SCRIPT, ['--db', dbPath]);
      assert.strictEqual(humanResult.status, 0, humanResult.stderr);
      assert.match(humanResult.stdout, /Active sessions: 1/);
      assert.match(humanResult.stdout, /Skill runs \(last 20\):/);
      assert.match(humanResult.stdout, /Install health: healthy/);
      assert.match(humanResult.stdout, /Pending governance events: 1/);
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  // End-to-end: sessions-cli.js list + detail views in both output modes.
  if (await test('sessions CLI supports list and detail views in human-readable and --json output', async () => {
    const testDir = createTempDir('ecc-state-cli-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      await seedStore(dbPath);
      const listJsonResult = runNode(SESSIONS_SCRIPT, ['--db', dbPath, '--json']);
      assert.strictEqual(listJsonResult.status, 0, listJsonResult.stderr);
      const listPayload = parseJson(listJsonResult.stdout);
      assert.strictEqual(listPayload.totalCount, 2);
      assert.strictEqual(listPayload.sessions[0].id, 'session-active');
      const detailJsonResult = runNode(SESSIONS_SCRIPT, ['session-active', '--db', dbPath, '--json']);
      assert.strictEqual(detailJsonResult.status, 0, detailJsonResult.stderr);
      const detailPayload = parseJson(detailJsonResult.stdout);
      assert.strictEqual(detailPayload.session.id, 'session-active');
      assert.strictEqual(detailPayload.workers.length, 2);
      assert.strictEqual(detailPayload.skillRuns.length, 2);
      assert.strictEqual(detailPayload.decisions.length, 1);
      const detailHumanResult = runNode(SESSIONS_SCRIPT, ['session-active', '--db', dbPath]);
      assert.strictEqual(detailHumanResult.status, 0, detailHumanResult.stderr);
      assert.match(detailHumanResult.stdout, /Session: session-active/);
      assert.match(detailHumanResult.stdout, /Workers: 2/);
      assert.match(detailHumanResult.stdout, /Skill runs: 2/);
      assert.match(detailHumanResult.stdout, /Decisions: 1/);
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  // End-to-end: the umbrella ecc.js CLI delegates to both subcommands.
  if (await test('ecc CLI delegates the new status and sessions subcommands', async () => {
    const testDir = createTempDir('ecc-state-cli-');
    const dbPath = path.join(testDir, 'state.db');
    try {
      await seedStore(dbPath);
      const statusResult = runNode(ECC_SCRIPT, ['status', '--db', dbPath, '--json']);
      assert.strictEqual(statusResult.status, 0, statusResult.stderr);
      const statusPayload = parseJson(statusResult.stdout);
      assert.strictEqual(statusPayload.activeSessions.activeCount, 1);
      const sessionsResult = runNode(ECC_SCRIPT, ['sessions', 'session-active', '--db', dbPath, '--json']);
      assert.strictEqual(sessionsResult.status, 0, sessionsResult.stderr);
      const sessionsPayload = parseJson(sessionsResult.stdout);
      assert.strictEqual(sessionsPayload.session.id, 'session-active');
      assert.strictEqual(sessionsPayload.skillRuns.length, 2);
    } finally {
      cleanupTempDir(testDir);
    }
  })) passed += 1; else failed += 1;
  console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
  process.exit(failed > 0 ? 1 : 0);
}
// Entry point. runTests() is async; without a rejection handler an exception
// thrown before its internal process.exit() would surface as an unhandled
// rejection rather than a reported test failure — catch it, log it, and exit
// non-zero so CI sees the run as failed.
runTests().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -14,6 +14,7 @@ function runCli(args, options = {}) {
return spawnSync('node', [SCRIPT, ...args], {
encoding: 'utf8',
cwd: options.cwd || process.cwd(),
maxBuffer: 10 * 1024 * 1024,
env: {
...process.env,
...(options.env || {}),

View File

@@ -126,8 +126,8 @@ function runTests() {
const result = run(['--target', 'cursor', 'typescript'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'typescript', 'testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'common-coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'rules', 'typescript-testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'agents', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.cursor', 'hooks.json')));
@@ -163,10 +163,10 @@ function runTests() {
const result = run(['--target', 'antigravity', 'typescript'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'typescript', 'testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'agents', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common-coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'typescript-testing.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'workflows', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'skills', 'architect.md')));
const statePath = path.join(projectDir, '.agent', 'ecc-install-state.json');
const state = readJson(statePath);
@@ -176,7 +176,7 @@ function runTests() {
assert.deepStrictEqual(state.resolution.selectedModules, ['rules-core', 'agents-core', 'commands-core']);
assert.ok(
state.operations.some(operation => (
operation.destinationPath.endsWith(path.join('.agent', 'commands', 'plan.md'))
operation.destinationPath.endsWith(path.join('.agent', 'workflows', 'plan.md'))
)),
'Should record manifest command file copy operation'
);
@@ -266,9 +266,9 @@ function runTests() {
const result = run(['--target', 'antigravity', '--profile', 'core'], { cwd: projectDir, homeDir });
assert.strictEqual(result.code, 0, result.stderr);
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common', 'coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'agents', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'commands', 'plan.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'rules', 'common-coding-style.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'skills', 'architect.md')));
assert.ok(fs.existsSync(path.join(projectDir, '.agent', 'workflows', 'plan.md')));
assert.ok(!fs.existsSync(path.join(projectDir, '.agent', 'skills', 'tdd-workflow', 'SKILL.md')));
const state = readJson(path.join(projectDir, '.agent', 'ecc-install-state.json'));

View File

@@ -8,6 +8,8 @@ const os = require('os');
const path = require('path');
const { execFileSync } = require('child_process');
const { getFallbackSessionRecordingPath } = require('../../scripts/lib/session-adapters/canonical-session');
const SCRIPT = path.join(__dirname, '..', '..', 'scripts', 'session-inspect.js');
function run(args = [], options = {}) {
@@ -67,6 +69,7 @@ function runTests() {
if (test('prints canonical JSON for claude history targets', () => {
const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-inspect-home-'));
const recordingDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-session-inspect-recordings-'));
const sessionsDir = path.join(homeDir, '.claude', 'sessions');
fs.mkdirSync(sessionsDir, { recursive: true });
@@ -77,16 +80,24 @@ function runTests() {
);
const result = run(['claude:latest'], {
env: { HOME: homeDir }
env: {
HOME: homeDir,
ECC_SESSION_RECORDING_DIR: recordingDir
}
});
assert.strictEqual(result.code, 0, result.stderr);
const payload = JSON.parse(result.stdout);
const recordingPath = getFallbackSessionRecordingPath(payload, { recordingDir });
const persisted = JSON.parse(fs.readFileSync(recordingPath, 'utf8'));
assert.strictEqual(payload.adapterId, 'claude-history');
assert.strictEqual(payload.session.kind, 'history');
assert.strictEqual(payload.workers[0].branch, 'feat/session-inspect');
assert.strictEqual(persisted.latest.adapterId, 'claude-history');
assert.strictEqual(persisted.history.length, 1);
} finally {
fs.rmSync(homeDir, { recursive: true, force: true });
fs.rmSync(recordingDir, { recursive: true, force: true });
}
})) passed++; else failed++;