feat: sync GitHub queue into work items

This commit is contained in:
Affaan Mustafa 2026-05-11 12:44:38 -04:00 committed by Affaan Mustafa
parent 9887ba6123
commit fd820d6306
4 changed files with 344 additions and 2 deletions

View File

@ -94,7 +94,7 @@ This repo is the raw code only. The guides explain everything.
- **Media and launch tooling** — `manim-video`, `remotion-video-creation`, and upgraded social publishing surfaces make technical explainers and launch content part of the same system.
- **Framework and product surface growth** — `nestjs-patterns`, richer Codex/OpenCode install surfaces, and expanded cross-harness packaging keep the repo usable beyond Claude Code alone.
- **ECC 2.0 alpha is in-tree** — the Rust control-plane prototype in `ecc2/` now builds locally and exposes `dashboard`, `start`, `sessions`, `status`, `stop`, `resume`, and `daemon` commands. It is usable as an alpha, not yet a general release.
- **Operator status snapshots**`ecc status --markdown --write status.md` turns the local state store into a portable handoff covering readiness, active sessions, skill-run health, install health, pending governance events, and linked work items from Linear/GitHub/handoffs. Use `ecc work-items upsert ...` to add or update those linked work items from the CLI, and `ecc status --exit-code` to fail automation when readiness needs attention.
- **Operator status snapshots** — `ecc status --markdown --write status.md` turns the local state store into a portable handoff covering readiness, active sessions, skill-run health, install health, pending governance events, and linked work items from Linear/GitHub/handoffs. Use `ecc work-items upsert ...` for manual entries, `ecc work-items sync-github --repo owner/repo` for PR/issue queue state, and `ecc status --exit-code` to fail automation when readiness needs attention.
- **Ecosystem hardening** — AgentShield, ECC Tools cost controls, billing portal work, and website refreshes continue to ship around the core plugin instead of drifting into separate silos.
### v1.9.0 — Selective Install & Language Expansion (Mar 2026)

View File

@ -118,6 +118,7 @@ Examples:
ecc sessions
ecc sessions session-active --json
ecc work-items upsert linear-ecc-20 --source linear --source-id ECC-20 --title "Review control-plane contract" --status blocked
ecc work-items sync-github --repo affaan-m/everything-claude-code
ecc session-inspect claude:latest
ecc loop-status --json
ecc uninstall --target antigravity --dry-run

View File

@ -2,10 +2,12 @@
'use strict';
const os = require('os');
const { spawnSync } = require('child_process');
const { createStateStore } = require('./lib/state-store');
const VALUE_FLAGS = new Set([
'--db',
'--github-repo',
'--id',
'--limit',
'--metadata-json',
@ -29,6 +31,7 @@ Usage:
node scripts/work-items.js show <id> [--db <path>] [--json]
node scripts/work-items.js upsert [<id>] --title <title> [options] [--json]
node scripts/work-items.js close <id> [--status done] [--db <path>] [--json]
node scripts/work-items.js sync-github --repo <owner/repo> [--db <path>] [--json]
Track Linear, GitHub, handoff, and manual roadmap items in the ECC SQLite state
store so "ecc status" can include linked work and blocked operator follow-up.
@ -42,7 +45,8 @@ Options:
--url <url> Optional source URL
--owner <owner> Optional owner label
--repo-root <path> Optional repo root to associate with this item
--repo <path> Alias for --repo-root
--repo <path> GitHub repo for sync-github, otherwise alias for --repo-root
--github-repo <owner/repo> Explicit GitHub repo for sync-github
--session-id <id> Optional ECC session id
--session <id> Alias for --session-id
--metadata-json <json> Optional JSON metadata payload
@ -54,11 +58,13 @@ Options:
function assignOption(options, flag, value) {
if (flag === '--db') options.dbPath = value;
else if (flag === '--github-repo') options.githubRepo = value;
else if (flag === '--id') options.id = value;
else if (flag === '--limit') options.limit = value;
else if (flag === '--metadata-json') options.metadataJson = value;
else if (flag === '--owner') options.owner = value;
else if (flag === '--priority') options.priority = value;
else if (flag === '--repo' && options.command === 'sync-github') options.githubRepo = value;
else if (flag === '--repo' || flag === '--repo-root') options.repoRoot = value;
else if (flag === '--session' || flag === '--session-id') options.sessionId = value;
else if (flag === '--source') options.source = value;
@ -131,6 +137,192 @@ function normalizeLimit(value) {
return parsed;
}
// Run the GitHub CLI (or the test shim named by ECC_GH_SHIM) with `args` and
// parse its stdout as JSON. Throws a descriptive Error when the process
// cannot be spawned, exits non-zero, or emits unparseable output.
function runGhJson(args) {
  const shim = process.env.ECC_GH_SHIM;
  let command;
  let commandArgs;
  let label;
  if (shim) {
    // Route through `node <shim>` so tests can fake gh output deterministically.
    command = process.execPath;
    commandArgs = [shim, ...args];
    label = `node ${shim} ${args.join(' ')}`;
  } else {
    command = 'gh';
    commandArgs = args;
    label = `gh ${args.join(' ')}`;
  }
  const result = spawnSync(command, commandArgs, {
    encoding: 'utf8',
    maxBuffer: 10 * 1024 * 1024,
  });
  if (result.error) {
    throw new Error(`Failed to run gh: ${result.error.message}`);
  }
  if (result.status !== 0) {
    const detail = (result.stderr || result.stdout || '').trim();
    throw new Error(`${label} failed: ${detail}`);
  }
  try {
    // gh prints nothing for some empty queues; treat that as an empty list.
    return JSON.parse(result.stdout || '[]');
  } catch (error) {
    throw new Error(`${label} returned invalid JSON: ${error.message}`);
  }
}
// Lower-case a value, collapse every run of non-alphanumeric characters into
// a single hyphen, and trim hyphens from both ends. Returns 'unknown' when
// nothing usable remains (null, empty, punctuation-only input).
function slugifyWorkItemSegment(value) {
  const slug = String(value || '')
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
  return slug || 'unknown';
}
// Deterministic work-item id for a GitHub PR or issue, e.g.
// "github-owner-repo-pr-42". Stable ids let repeated syncs upsert in place.
function githubWorkItemId(repo, type, number) {
  const repoSlug = slugifyWorkItemSegment(repo);
  return ['github', repoSlug, type, number].join('-');
}
// Map an open PR to a work-item status: drafts and PRs with a DIRTY merge
// state (conflicts) are 'blocked'; everything else awaits review.
function githubPrStatus(pr) {
  const blocked = Boolean(pr.isDraft) || pr.mergeStateStatus === 'DIRTY';
  return blocked ? 'blocked' : 'needs-review';
}
// Extract the author login from a gh JSON item, or null when the item or its
// author is missing (e.g. deleted accounts) or the login is empty.
// `|| null` (not `??`) preserves the original's null for an empty-string login.
function githubAuthorLogin(item) {
  return item?.author?.login || null;
}
// Build an upsertWorkItem payload for an open GitHub pull request.
// `options` may carry repoRoot/sessionId from the CLI invocation.
// Status is computed once and priority is derived from it, so the two can
// never disagree (the original duplicated the draft/DIRTY predicate).
function buildGithubPrWorkItem(repo, pr, options = {}) {
  const status = githubPrStatus(pr);
  return {
    id: githubWorkItemId(repo, 'pr', pr.number),
    source: 'github-pr',
    sourceId: String(pr.number),
    title: `PR #${pr.number}: ${pr.title}`,
    status,
    // Blocked PRs (drafts / merge conflicts) need operator attention first.
    priority: status === 'blocked' ? 'high' : 'normal',
    url: pr.url || null,
    owner: githubAuthorLogin(pr),
    repoRoot: options.repoRoot || process.cwd(),
    sessionId: options.sessionId || null,
    metadata: {
      repo,
      type: 'pull_request',
      mergeStateStatus: pr.mergeStateStatus || null,
      isDraft: Boolean(pr.isDraft),
      headRefName: pr.headRefName || null,
      sourceUpdatedAt: pr.updatedAt || null,
      // Marker used by closeStaleGithubItems to find sync-managed rows.
      syncedBy: 'ecc-work-items-sync-github',
    },
  };
}
// Build an upsertWorkItem payload for an open GitHub issue. Open issues are
// always 'needs-review' at normal priority; label objects or bare strings
// from gh are normalized to a flat list of names.
function buildGithubIssueWorkItem(repo, issue, options = {}) {
  const labelNames = Array.isArray(issue.labels)
    ? issue.labels.map(label => label.name || label).filter(Boolean)
    : [];
  const metadata = {
    repo,
    type: 'issue',
    labels: labelNames,
    sourceUpdatedAt: issue.updatedAt || null,
    // Marker used by closeStaleGithubItems to find sync-managed rows.
    syncedBy: 'ecc-work-items-sync-github',
  };
  return {
    id: githubWorkItemId(repo, 'issue', issue.number),
    source: 'github-issue',
    sourceId: String(issue.number),
    title: `Issue #${issue.number}: ${issue.title}`,
    status: 'needs-review',
    priority: 'normal',
    url: issue.url || null,
    owner: githubAuthorLogin(issue),
    repoRoot: options.repoRoot || process.cwd(),
    sessionId: options.sessionId || null,
    metadata,
  };
}
// Close sync-managed work items for `repo` whose ids are no longer in the
// live GitHub queue (`activeIds`). Returns the items transitioned to 'closed'.
// Only rows stamped syncedBy === 'ecc-work-items-sync-github' are touched,
// so manual/Linear items are never auto-closed.
function closeStaleGithubItems(store, repo, activeIds, options = {}) {
  const payload = store.listWorkItems({ limit: options.limit || 10000 });
  // One timestamp for the whole batch so updatedAt and sourceClosedAt agree
  // (the original called new Date() twice per item, which could differ).
  const closedAt = new Date().toISOString();
  const closed = [];
  for (const item of payload.items) {
    if (!item.metadata || item.metadata.syncedBy !== 'ecc-work-items-sync-github') {
      continue;
    }
    if (item.metadata.repo !== repo || activeIds.has(item.id)) {
      continue;
    }
    // Already-terminal items stay as-is.
    if (item.status === 'closed' || item.status === 'done') {
      continue;
    }
    closed.push(store.upsertWorkItem({
      ...item,
      status: 'closed',
      updatedAt: closedAt,
      metadata: {
        ...item.metadata,
        sourceClosedAt: closedAt,
      },
    }));
  }
  return closed;
}
// Pull the open PR and issue queues for a GitHub repo via the gh CLI, upsert
// each as a work item, then close previously-synced items that have since
// left the queue. Returns a summary payload for printing or --json output.
// Throws when options.githubRepo is missing.
function syncGithubWorkItems(store, options) {
  const repo = options.githubRepo;
  if (!repo) {
    throw new Error('Missing GitHub repo. Pass --repo <owner/repo>.');
  }
  const limit = normalizeLimit(options.limit);
  // Shared gh argument shape for both `pr list` and `issue list`.
  const listArgs = (kind, fields) => [
    kind,
    'list',
    '--repo',
    repo,
    '--state',
    'open',
    '--limit',
    String(limit),
    '--json',
    fields,
  ];
  const prs = runGhJson(listArgs('pr', 'number,title,author,url,updatedAt,mergeStateStatus,isDraft,headRefName'));
  const issues = runGhJson(listArgs('issue', 'number,title,author,url,updatedAt,labels'));
  const syncedAt = new Date().toISOString();
  const activeIds = new Set();
  const items = [];
  // Upsert one built payload, tracking its id so stale-closing can skip it.
  const persist = (payload) => {
    activeIds.add(payload.id);
    items.push(store.upsertWorkItem({
      ...payload,
      createdAt: undefined,
      updatedAt: syncedAt,
    }));
  };
  prs.forEach(pr => persist(buildGithubPrWorkItem(repo, pr, options)));
  issues.forEach(issue => persist(buildGithubIssueWorkItem(repo, issue, options)));
  // Scan wider than the fetch limit so stale rows beyond it still get closed.
  const closedItems = closeStaleGithubItems(store, repo, activeIds, { limit: Math.max(limit * 4, 1000) });
  return {
    repo,
    syncedAt,
    prCount: prs.length,
    issueCount: issues.length,
    closedCount: closedItems.length,
    items,
    closedItems,
  };
}
function buildUpsertPayload(options, existing = null) {
const id = resolveWorkItemId(options);
if (!id) {
@ -194,6 +386,20 @@ function printWorkItemList(payload) {
}
}
// Human-readable summary for `sync-github`: counts first, then one line per
// upserted or closed work item (or an explicit "none" marker).
function printGithubSyncResult(payload) {
  const changed = [...payload.items, ...payload.closedItems];
  console.log(`GitHub sync: ${payload.repo}`);
  console.log(` Open PRs: ${payload.prCount}`);
  console.log(` Open issues: ${payload.issueCount}`);
  console.log(` Closed stale items: ${payload.closedCount}`);
  if (changed.length === 0) {
    console.log(' Work items changed: none');
    return;
  }
  for (const item of changed) {
    console.log(` - ${item.id} ${item.status}: ${item.title}`);
  }
}
async function main() {
let store = null;
@ -269,6 +475,16 @@ async function main() {
return;
}
if (options.command === 'sync-github') {
const payload = syncGithubWorkItems(store, options);
if (options.json) {
console.log(JSON.stringify(payload, null, 2));
} else {
printGithubSyncResult(payload);
}
return;
}
throw new Error(`Unknown command: ${options.command}`);
} catch (error) {
console.error(`Error: ${error.message}`);
@ -286,6 +502,9 @@ if (require.main === module) {
// Public surface: builders and sync are exported for the unit tests; main()
// is the CLI entry point (invoked via the require.main guard above).
module.exports = {
buildUpsertPayload,
buildGithubIssueWorkItem,
buildGithubPrWorkItem,
main,
parseArgs,
syncGithubWorkItems,
};

View File

@ -49,6 +49,59 @@ function runNode(scriptPath, args = [], options = {}) {
});
}
// Write a fake `gh` CLI (a small Node script) into binDir and return its
// path. Tests point ECC_GH_SHIM at this file so sync-github exercises
// runGhJson without a real GitHub CLI or network access.
// The shim honours ECC_FAKE_GH_MODE: 'empty' yields empty queues; any other
// value yields two PRs (one DIRTY/conflicted, one CLEAN) and one labelled
// issue. Unexpected argv makes it exit 2, surfacing bad gh invocations.
function createGhShim(binDir) {
fs.mkdirSync(binDir, { recursive: true });
const shimJs = path.join(binDir, 'gh.js');
fs.writeFileSync(shimJs, `
const mode = process.env.ECC_FAKE_GH_MODE || 'open';
const args = process.argv.slice(2);
function write(payload) {
process.stdout.write(JSON.stringify(payload));
}
if (args[0] === 'pr' && args[1] === 'list') {
if (mode === 'empty') write([]);
else write([
{
number: 3,
title: 'Conflicting queue cleanup',
author: { login: 'contributor-a' },
url: 'https://github.com/affaan-m/everything-claude-code/pull/3',
updatedAt: '2026-05-11T10:00:00Z',
mergeStateStatus: 'DIRTY',
isDraft: false,
headRefName: 'fix/conflict'
},
{
number: 4,
title: 'Clean docs update',
author: { login: 'contributor-b' },
url: 'https://github.com/affaan-m/everything-claude-code/pull/4',
updatedAt: '2026-05-11T11:00:00Z',
mergeStateStatus: 'CLEAN',
isDraft: false,
headRefName: 'docs/clean'
}
]);
} else if (args[0] === 'issue' && args[1] === 'list') {
if (mode === 'empty') write([]);
else write([
{
number: 9,
title: 'Track release blocker',
author: { login: 'reporter' },
url: 'https://github.com/affaan-m/everything-claude-code/issues/9',
updatedAt: '2026-05-11T12:00:00Z',
labels: [{ name: 'release' }]
}
]);
} else {
process.stderr.write('unexpected gh args: ' + args.join(' '));
process.exit(2);
}
`, 'utf8');
return shimJs;
}
// Parse a CLI's stdout as JSON, tolerating surrounding whitespace/newlines.
function parseJson(stdout) {
  const trimmed = stdout.trim();
  return JSON.parse(trimmed);
}
@ -796,6 +849,75 @@ async function runTests() {
}
})) passed += 1; else failed += 1;
// End-to-end check of `work-items sync-github` against the fake gh shim:
// first sync ingests 2 PRs + 1 issue, a second sync against an empty queue
// closes them all, and `status --exit-code` flips from failing to clean.
if (await test('work-items CLI syncs GitHub PRs and issues into readiness', async () => {
const testDir = createTempDir('ecc-work-items-github-');
const dbPath = path.join(testDir, 'state.db');
const binDir = path.join(testDir, 'bin');
const repo = 'affaan-m/everything-claude-code';
try {
const env = {
ECC_GH_SHIM: createGhShim(binDir),
};
// First sync: shim's default mode returns 2 open PRs (one DIRTY) + 1 issue.
const syncResult = runNode(WORK_ITEMS_SCRIPT, [
'sync-github',
'--repo',
repo,
'--db',
dbPath,
'--limit',
'10',
'--json',
], { cwd: testDir, env });
assert.strictEqual(syncResult.status, 0, syncResult.stderr);
const syncPayload = parseJson(syncResult.stdout);
assert.strictEqual(syncPayload.repo, repo);
assert.strictEqual(syncPayload.prCount, 2);
assert.strictEqual(syncPayload.issueCount, 1);
assert.strictEqual(syncPayload.closedCount, 0);
assert.strictEqual(syncPayload.items.length, 3);
// The DIRTY PR (#3) must map to a deterministic id and 'blocked' status.
assert.strictEqual(syncPayload.items[0].id, 'github-affaan-m-everything-claude-code-pr-3');
assert.strictEqual(syncPayload.items[0].status, 'blocked');
assert.strictEqual(syncPayload.items[1].status, 'needs-review');
assert.strictEqual(syncPayload.items[2].metadata.labels[0], 'release');
// With 3 outstanding items, status --exit-code must fail with code 2.
const statusResult = runNode(STATUS_SCRIPT, ['--db', dbPath, '--json', '--exit-code']);
assert.strictEqual(statusResult.status, 2, statusResult.stderr);
const statusPayload = parseJson(statusResult.stdout);
assert.strictEqual(statusPayload.readiness.blockedWorkItems, 3);
// Second sync with ECC_FAKE_GH_MODE=empty: the queue has drained, so all
// three previously-synced items should be closed as stale.
const closeResult = runNode(WORK_ITEMS_SCRIPT, [
'sync-github',
'--repo',
repo,
'--db',
dbPath,
'--json',
], {
cwd: testDir,
env: {
...env,
ECC_FAKE_GH_MODE: 'empty',
},
});
assert.strictEqual(closeResult.status, 0, closeResult.stderr);
const closePayload = parseJson(closeResult.stdout);
assert.strictEqual(closePayload.prCount, 0);
assert.strictEqual(closePayload.issueCount, 0);
assert.strictEqual(closePayload.closedCount, 3);
assert.ok(closePayload.closedItems.every(item => item.status === 'closed'));
// Readiness is clean again once nothing is blocked.
const cleanStatusResult = runNode(STATUS_SCRIPT, ['--db', dbPath, '--json', '--exit-code']);
assert.strictEqual(cleanStatusResult.status, 0, cleanStatusResult.stderr);
const cleanStatusPayload = parseJson(cleanStatusResult.stdout);
assert.strictEqual(cleanStatusPayload.readiness.blockedWorkItems, 0);
assert.strictEqual(cleanStatusPayload.workItems.closedCount, 3);
} finally {
cleanupTempDir(testDir);
}
})) passed += 1; else failed += 1;
if (await test('sessions CLI supports list and detail views in human-readable and --json output', async () => {
const testDir = createTempDir('ecc-state-cli-');
const dbPath = path.join(testDir, 'state.db');