Add agent config revisions, issue-approval links, and robust migration reconciliation

Add agent_config_revisions table for tracking agent configuration changes with
rollback support. Add issue_approvals junction table linking issues to approvals.
New migrations (0005, 0006) for permissions column and new tables. Rework migration
client with statement-level idempotency checks (table/column/index/constraint
existence) so migrations can be safely retried against partially-migrated databases.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-19 13:02:14 -06:00
parent e0a878f4eb
commit 778b39d3b5
10 changed files with 6199 additions and 11 deletions

View File

@@ -1,6 +1,7 @@
import { createHash } from "node:crypto";
import { drizzle as drizzlePg } from "drizzle-orm/postgres-js";
import { migrate as migratePg } from "drizzle-orm/postgres-js/migrator";
import { readdir, readFile } from "node:fs/promises";
import { readFile, readdir } from "node:fs/promises";
import postgres from "postgres";
import * as schema from "./schema/index.js";
@@ -17,6 +18,17 @@ function quoteIdentifier(value: string): string {
return `"${value.replaceAll("\"", "\"\"")}"`;
}
/** Renders a value as a SQL string literal, doubling embedded single quotes. */
function quoteLiteral(value: string): string {
  const escaped = value.split("'").join("''");
  return "'" + escaped + "'";
}
/** Splits a drizzle migration file on its statement-breakpoint markers, dropping blanks. */
function splitMigrationStatements(content: string): string[] {
  const statements: string[] = [];
  for (const chunk of content.split("--> statement-breakpoint")) {
    const trimmed = chunk.trim();
    if (trimmed !== "") statements.push(trimmed);
  }
  return statements;
}
export type MigrationState =
| { status: "upToDate"; tableCount: number; availableMigrations: string[]; appliedMigrations: string[] }
| {
@@ -42,37 +54,222 @@ async function listMigrationFiles(): Promise<string[]> {
}
// Shape of drizzle's meta/_journal.json. Diff residue had left the `entries`
// property declared twice (old and new versions interleaved), which is a
// TypeScript duplicate-identifier error; only the new shape with the
// `when` folder timestamp (ms since epoch) is kept.
type MigrationJournalFile = {
  entries?: Array<{ tag?: string; when?: number }>;
};
async function listJournalMigrationFiles(): Promise<string[]> {
type JournalMigrationEntry = {
fileName: string;
folderMillis: number;
};
async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> {
try {
const raw = await readFile(MIGRATIONS_JOURNAL_JSON, "utf8");
const parsed = JSON.parse(raw) as MigrationJournalFile;
if (!Array.isArray(parsed.entries)) return [];
return parsed.entries
.map((entry) => (typeof entry?.tag === "string" ? `${entry.tag}.sql` : null))
.filter((name): name is string => typeof name === "string");
.map((entry) => {
if (typeof entry?.tag !== "string") return null;
if (typeof entry?.when !== "number" || !Number.isFinite(entry.when)) return null;
return { fileName: `${entry.tag}.sql`, folderMillis: entry.when };
})
.filter((entry): entry is JournalMigrationEntry => entry !== null);
} catch {
return [];
}
}
/** Journal migration file names in journal order, discarding folder timestamps. */
async function listJournalMigrationFiles(): Promise<string[]> {
  const journalEntries = await listJournalMigrationEntries();
  return journalEntries.map(({ fileName }) => fileName);
}
/** Loads a migration's SQL, resolved relative to this module so CWD does not matter. */
async function readMigrationFileContent(migrationFile: string): Promise<string> {
  const migrationUrl = new URL(`./migrations/${migrationFile}`, import.meta.url);
  return readFile(migrationUrl, "utf8");
}
/**
 * Builds a sha256(content) -> file-name map for the given migrations, so the
 * hashes drizzle stores in its history table can be matched back to files.
 * Files are read concurrently.
 */
async function mapHashesToMigrationFiles(migrationFiles: string[]): Promise<Map<string, string>> {
  const pairs = await Promise.all(
    migrationFiles.map(async (migrationFile): Promise<[string, string]> => {
      const content = await readMigrationFileContent(migrationFile);
      const digest = createHash("sha256").update(content).digest("hex");
      return [digest, migrationFile];
    }),
  );
  return new Map(pairs);
}
/**
 * Returns the column names of the drizzle migrations table in the given
 * schema. Identifiers are inlined as escaped string literals because the
 * query is sent through sql.unsafe() as a raw string.
 */
async function getMigrationTableColumnNames(
  sql: ReturnType<typeof postgres>,
  migrationTableSchema: string,
): Promise<Set<string>> {
  const query = `
    SELECT column_name
    FROM information_schema.columns
    WHERE table_schema = ${quoteLiteral(migrationTableSchema)}
      AND table_name = ${quoteLiteral(DRIZZLE_MIGRATIONS_TABLE)}
  `;
  const columns = await sql.unsafe<{ column_name: string }[]>(query);
  const names = columns.map((column) => column.column_name);
  return new Set(names);
}
/** True when a table with this name exists in the public schema. */
async function tableExists(
  sql: ReturnType<typeof postgres>,
  tableName: string,
): Promise<boolean> {
  const [row] = await sql<{ exists: boolean }[]>`
    SELECT EXISTS (
      SELECT 1
      FROM information_schema.tables
      WHERE table_schema = 'public'
        AND table_name = ${tableName}
    ) AS exists
  `;
  return row?.exists ?? false;
}
/** True when `tableName` has a column `columnName` in the public schema. */
async function columnExists(
  sql: ReturnType<typeof postgres>,
  tableName: string,
  columnName: string,
): Promise<boolean> {
  const [row] = await sql<{ exists: boolean }[]>`
    SELECT EXISTS (
      SELECT 1
      FROM information_schema.columns
      WHERE table_schema = 'public'
        AND table_name = ${tableName}
        AND column_name = ${columnName}
    ) AS exists
  `;
  return row?.exists ?? false;
}
/** True when an index with this name exists in the public schema (pg_class relkind 'i'). */
async function indexExists(
  sql: ReturnType<typeof postgres>,
  indexName: string,
): Promise<boolean> {
  const [row] = await sql<{ exists: boolean }[]>`
    SELECT EXISTS (
      SELECT 1
      FROM pg_class c
      JOIN pg_namespace n ON n.oid = c.relnamespace
      WHERE n.nspname = 'public'
        AND c.relkind = 'i'
        AND c.relname = ${indexName}
    ) AS exists
  `;
  return row?.exists ?? false;
}
/** True when a constraint with this name exists in the public schema (pg_constraint). */
async function constraintExists(
  sql: ReturnType<typeof postgres>,
  constraintName: string,
): Promise<boolean> {
  const [row] = await sql<{ exists: boolean }[]>`
    SELECT EXISTS (
      SELECT 1
      FROM pg_constraint c
      JOIN pg_namespace n ON n.oid = c.connamespace
      WHERE n.nspname = 'public'
        AND c.conname = ${constraintName}
    ) AS exists
  `;
  return row?.exists ?? false;
}
/**
 * Best-effort check whether a single migration statement's effect is already
 * present in the database. Recognizes CREATE TABLE, ALTER TABLE ... ADD
 * COLUMN, CREATE [UNIQUE] INDEX, and ALTER TABLE ... ADD CONSTRAINT; the
 * first matching shape wins. Anything else is reported as not applied so the
 * caller falls back to a real migration run instead of guessing.
 */
async function migrationStatementAlreadyApplied(
  sql: ReturnType<typeof postgres>,
  statement: string,
): Promise<boolean> {
  // Collapse whitespace so the shape-detection regexes stay simple.
  const normalized = statement.replace(/\s+/g, " ").trim();

  let match = normalized.match(/^CREATE TABLE(?: IF NOT EXISTS)? "([^"]+)"/i);
  if (match) return tableExists(sql, match[1]);

  match = normalized.match(/^ALTER TABLE "([^"]+)" ADD COLUMN(?: IF NOT EXISTS)? "([^"]+)"/i);
  if (match) return columnExists(sql, match[1], match[2]);

  match = normalized.match(/^CREATE (?:UNIQUE )?INDEX(?: IF NOT EXISTS)? "([^"]+)"/i);
  if (match) return indexExists(sql, match[1]);

  match = normalized.match(/^ALTER TABLE "([^"]+)" ADD CONSTRAINT "([^"]+)"/i);
  if (match) return constraintExists(sql, match[2]);

  // If we cannot reason about a statement safely, require manual migration.
  return false;
}
/**
 * True only when EVERY statement in the migration file is already reflected
 * in the database. An empty migration is treated as not applied.
 */
async function migrationContentAlreadyApplied(
  sql: ReturnType<typeof postgres>,
  migrationContent: string,
): Promise<boolean> {
  const statements = splitMigrationStatements(migrationContent);
  if (statements.length === 0) return false;
  // Sequential on purpose: bail at the first statement that is missing.
  for (const statement of statements) {
    if (!(await migrationStatementAlreadyApplied(sql, statement))) {
      return false;
    }
  }
  return true;
}
async function loadAppliedMigrations(
sql: ReturnType<typeof postgres>,
migrationTableSchema: string,
availableMigrations: string[],
): Promise<string[]> {
const qualifiedTable = `${quoteIdentifier(migrationTableSchema)}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
try {
const quotedSchema = quoteIdentifier(migrationTableSchema);
const qualifiedTable = `${quotedSchema}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
const columnNames = await getMigrationTableColumnNames(sql, migrationTableSchema);
if (columnNames.has("name")) {
const rows = await sql.unsafe<{ name: string }[]>(`SELECT name FROM ${qualifiedTable} ORDER BY id`);
return rows.map((row) => row.name).filter((name): name is string => Boolean(name));
} catch (error) {
if (!(error instanceof Error) || !error.message.includes('column "name" does not exist')) {
throw error;
}
if (columnNames.has("hash") && columnNames.has("created_at")) {
const journalEntries = await listJournalMigrationEntries();
if (journalEntries.length > 0) {
const lastDbRows = await sql.unsafe<{ created_at: string | number | null }[]>(
`SELECT created_at FROM ${qualifiedTable} ORDER BY created_at DESC LIMIT 1`,
);
const lastCreatedAt = Number(lastDbRows[0]?.created_at ?? -1);
if (Number.isFinite(lastCreatedAt) && lastCreatedAt >= 0) {
return journalEntries
.filter((entry) => availableMigrations.includes(entry.fileName))
.filter((entry) => entry.folderMillis <= lastCreatedAt)
.map((entry) => entry.fileName);
}
return [];
}
}
if (columnNames.has("hash")) {
const rows = await sql.unsafe<{ hash: string }[]>(`SELECT hash FROM ${qualifiedTable} ORDER BY id`);
const hashesToMigrationFiles = await mapHashesToMigrationFiles(availableMigrations);
const appliedFromHashes = rows
.map((row) => hashesToMigrationFiles.get(row.hash))
.filter((name): name is string => Boolean(name));
if (appliedFromHashes.length > 0) return appliedFromHashes;
}
const rows = await sql.unsafe<{ id: number }[]>(`SELECT id FROM ${qualifiedTable} ORDER BY id`);
const journalMigrationFiles = await listJournalMigrationFiles();
const appliedFromIds = rows
@@ -83,6 +280,106 @@ async function loadAppliedMigrations(
return availableMigrations.slice(0, Math.max(0, rows.length));
}
// Result of reconcilePendingMigrationHistory:
//  - repairedMigrations: pending migrations whose effects were already present
//    in the database and were therefore recorded into the history table.
//  - remainingMigrations: pending migrations that still need a real run.
export type MigrationHistoryReconcileResult = {
  repairedMigrations: string[];
  remainingMigrations: string[];
};
/**
 * Repairs drizzle migration history for migrations whose DDL effects already
 * exist in the database but were never recorded in the drizzle migrations
 * table (e.g. a partially-migrated or manually patched database).
 *
 * For each pending migration, in order:
 *  - verify every statement's effect is present (table/column/index/constraint
 *    probes); stop at the first migration that is not fully applied;
 *  - if a history row already exists (matched by hash or by name), bump its
 *    created_at forward to the journal folder timestamp when it is older;
 *  - otherwise insert a new history row using whichever of the hash/name/
 *    created_at columns this table actually has.
 *
 * @param url Postgres connection string; a dedicated single connection is used.
 * @returns Migrations recorded as applied plus those still outstanding.
 */
export async function reconcilePendingMigrationHistory(
  url: string,
): Promise<MigrationHistoryReconcileResult> {
  const state = await inspectMigrations(url);
  // Only the "pending-migrations" state is repairable; everything else is left alone.
  if (state.status !== "needsMigrations" || state.reason !== "pending-migrations") {
    return { repairedMigrations: [], remainingMigrations: [] };
  }
  const sql = postgres(url, { max: 1 });
  const repairedMigrations: string[] = [];
  try {
    const journalEntries = await listJournalMigrationEntries();
    const folderMillisByFile = new Map(journalEntries.map((entry) => [entry.fileName, entry.folderMillis]));
    const migrationTableSchema = await discoverMigrationTableSchema(sql);
    // No drizzle migrations table at all: nothing to reconcile against.
    if (!migrationTableSchema) {
      return { repairedMigrations, remainingMigrations: state.pendingMigrations };
    }
    const columnNames = await getMigrationTableColumnNames(sql, migrationTableSchema);
    const qualifiedTable = `${quoteIdentifier(migrationTableSchema)}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
    for (const migrationFile of state.pendingMigrations) {
      const migrationContent = await readMigrationFileContent(migrationFile);
      const alreadyApplied = await migrationContentAlreadyApplied(sql, migrationContent);
      // Migrations are ordered; the first not-fully-applied one ends the repair.
      if (!alreadyApplied) break;
      const hash = createHash("sha256").update(migrationContent).digest("hex");
      const folderMillis = folderMillisByFile.get(migrationFile) ?? Date.now();
      // NOTE(review): both lookups select/order by created_at without checking
      // columnNames.has("created_at") first — confirm every supported drizzle
      // table layout that has hash/name also has created_at.
      const existingByHash = columnNames.has("hash")
        ? await sql.unsafe<{ created_at: string | number | null }[]>(
            `SELECT created_at FROM ${qualifiedTable} WHERE hash = ${quoteLiteral(hash)} ORDER BY created_at DESC LIMIT 1`,
          )
        : [];
      const existingByName = columnNames.has("name")
        ? await sql.unsafe<{ created_at: string | number | null }[]>(
            `SELECT created_at FROM ${qualifiedTable} WHERE name = ${quoteLiteral(migrationFile)} ORDER BY created_at DESC LIMIT 1`,
          )
        : [];
      if (existingByHash.length > 0 || existingByName.length > 0) {
        // Row already recorded: only move created_at forward to the journal
        // timestamp so created_at ordering matches the journal order.
        if (columnNames.has("created_at")) {
          const existingHashCreatedAt = Number(existingByHash[0]?.created_at ?? -1);
          if (existingByHash.length > 0 && Number.isFinite(existingHashCreatedAt) && existingHashCreatedAt < folderMillis) {
            await sql.unsafe(
              `UPDATE ${qualifiedTable} SET created_at = ${quoteLiteral(String(folderMillis))} WHERE hash = ${quoteLiteral(hash)} AND created_at < ${quoteLiteral(String(folderMillis))}`,
            );
          }
          const existingNameCreatedAt = Number(existingByName[0]?.created_at ?? -1);
          if (existingByName.length > 0 && Number.isFinite(existingNameCreatedAt) && existingNameCreatedAt < folderMillis) {
            await sql.unsafe(
              `UPDATE ${qualifiedTable} SET created_at = ${quoteLiteral(String(folderMillis))} WHERE name = ${quoteLiteral(migrationFile)} AND created_at < ${quoteLiteral(String(folderMillis))}`,
            );
          }
        }
        repairedMigrations.push(migrationFile);
        continue;
      }
      // Insert a fresh history row using only the columns this table has.
      const insertColumns: string[] = [];
      const insertValues: string[] = [];
      if (columnNames.has("hash")) {
        insertColumns.push(quoteIdentifier("hash"));
        insertValues.push(quoteLiteral(hash));
      }
      if (columnNames.has("name")) {
        insertColumns.push(quoteIdentifier("name"));
        insertValues.push(quoteLiteral(migrationFile));
      }
      if (columnNames.has("created_at")) {
        insertColumns.push(quoteIdentifier("created_at"));
        insertValues.push(quoteLiteral(String(folderMillis)));
      }
      // Unrecognized table layout: stop rather than write a malformed row.
      if (insertColumns.length === 0) break;
      await sql.unsafe(
        `INSERT INTO ${qualifiedTable} (${insertColumns.join(", ")}) VALUES (${insertValues.join(", ")})`,
      );
      repairedMigrations.push(migrationFile);
    }
  } finally {
    await sql.end();
  }
  // Re-inspect so callers see the post-repair pending set.
  const refreshed = await inspectMigrations(url);
  return {
    repairedMigrations,
    remainingMigrations:
      refreshed.status === "needsMigrations" ? refreshed.pendingMigrations : [],
  };
}
async function discoverMigrationTableSchema(sql: ReturnType<typeof postgres>): Promise<string | null> {
const rows = await sql<{ schemaName: string }[]>`
SELECT n.nspname AS "schemaName"