Add secrets infrastructure: DB tables, shared types, env binding model, and migration improvements
Introduce company_secrets and company_secret_versions tables for encrypted secret storage. Add EnvBinding discriminated union (plain vs secret_ref) to replace raw string env values in adapter configs. Add hiddenAt column to issues for soft-hiding. Improve migration system with journal-ordered application and manual fallback when Drizzle migrator can't reconcile history.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
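The EnvBinding union itself is not visible in the hunks below; a minimal sketch of the shape the message describes, with field names assumed rather than taken from the diff, might look like:

// Sketch only: the discriminant values "plain" and "secret_ref" come from the commit message;
// the remaining field names are assumptions.
type EnvBinding =
  | { kind: "plain"; value: string }
  | { kind: "secret_ref"; secretName: string; version?: number };

// Adapter configs would then map env var names to bindings instead of raw strings.
type AdapterEnv = Record<string, EnvBinding>;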
@@ -54,12 +54,13 @@ async function listMigrationFiles(): Promise<string[]> {
 }
 
 type MigrationJournalFile = {
-  entries?: Array<{ tag?: string; when?: number }>;
+  entries?: Array<{ idx?: number; tag?: string; when?: number }>;
 };
 
 type JournalMigrationEntry = {
   fileName: string;
   folderMillis: number;
+  order: number;
 };
 
 async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> {
@@ -68,10 +69,11 @@ async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> {
     const parsed = JSON.parse(raw) as MigrationJournalFile;
     if (!Array.isArray(parsed.entries)) return [];
     return parsed.entries
-      .map((entry) => {
+      .map((entry, entryIndex) => {
         if (typeof entry?.tag !== "string") return null;
         if (typeof entry?.when !== "number" || !Number.isFinite(entry.when)) return null;
-        return { fileName: `${entry.tag}.sql`, folderMillis: entry.when };
+        const order = Number.isInteger(entry.idx) ? Number(entry.idx) : entryIndex;
+        return { fileName: `${entry.tag}.sql`, folderMillis: entry.when, order };
       })
       .filter((entry): entry is JournalMigrationEntry => entry !== null);
   } catch {
@@ -88,6 +90,175 @@ async function readMigrationFileContent(migrationFile: string): Promise<string>
   return readFile(new URL(`./migrations/${migrationFile}`, import.meta.url), "utf8");
 }
 
+async function orderMigrationsByJournal(migrationFiles: string[]): Promise<string[]> {
+  const journalEntries = await listJournalMigrationEntries();
+  const orderByFileName = new Map(journalEntries.map((entry) => [entry.fileName, entry.order]));
+  return [...migrationFiles].sort((left, right) => {
+    const leftOrder = orderByFileName.get(left);
+    const rightOrder = orderByFileName.get(right);
+    if (leftOrder === undefined && rightOrder === undefined) return left.localeCompare(right);
+    if (leftOrder === undefined) return 1;
+    if (rightOrder === undefined) return -1;
+    if (leftOrder === rightOrder) return left.localeCompare(right);
+    return leftOrder - rightOrder;
+  });
+}
+
+type SqlExecutor = Pick<ReturnType<typeof postgres>, "unsafe">;
+
+async function runInTransaction(sql: SqlExecutor, action: () => Promise<void>): Promise<void> {
+  await sql.unsafe("BEGIN");
+  try {
+    await action();
+    await sql.unsafe("COMMIT");
+  } catch (error) {
+    try {
+      await sql.unsafe("ROLLBACK");
+    } catch {
+      // Ignore rollback failures and surface the original error.
+    }
+    throw error;
+  }
+}
+
+async function latestMigrationCreatedAt(
+  sql: SqlExecutor,
+  qualifiedTable: string,
+): Promise<number | null> {
+  const rows = await sql.unsafe<{ created_at: string | number | null }[]>(
+    `SELECT created_at FROM ${qualifiedTable} ORDER BY created_at DESC NULLS LAST LIMIT 1`,
+  );
+  const value = Number(rows[0]?.created_at ?? Number.NaN);
+  return Number.isFinite(value) ? value : null;
+}
+
+function normalizeFolderMillis(value: number | null | undefined): number {
+  if (typeof value === "number" && Number.isFinite(value) && value >= 0) {
+    return Math.trunc(value);
+  }
+  return Date.now();
+}
+
+async function ensureMigrationJournalTable(
+  sql: ReturnType<typeof postgres>,
+): Promise<{ migrationTableSchema: string; columnNames: Set<string> }> {
+  let migrationTableSchema = await discoverMigrationTableSchema(sql);
+  if (!migrationTableSchema) {
+    const drizzleSchema = quoteIdentifier("drizzle");
+    const migrationTable = quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE);
+    await sql.unsafe(`CREATE SCHEMA IF NOT EXISTS ${drizzleSchema}`);
+    await sql.unsafe(
+      `CREATE TABLE IF NOT EXISTS ${drizzleSchema}.${migrationTable} (id SERIAL PRIMARY KEY, hash text NOT NULL, created_at bigint)`,
+    );
+    migrationTableSchema = (await discoverMigrationTableSchema(sql)) ?? "drizzle";
+  }
+
+  const columnNames = await getMigrationTableColumnNames(sql, migrationTableSchema);
+  return { migrationTableSchema, columnNames };
+}
+
+async function migrationHistoryEntryExists(
+  sql: SqlExecutor,
+  qualifiedTable: string,
+  columnNames: Set<string>,
+  migrationFile: string,
+  hash: string,
+): Promise<boolean> {
+  const predicates: string[] = [];
+  if (columnNames.has("hash")) predicates.push(`hash = ${quoteLiteral(hash)}`);
+  if (columnNames.has("name")) predicates.push(`name = ${quoteLiteral(migrationFile)}`);
+  if (predicates.length === 0) return false;
+
+  const rows = await sql.unsafe<{ one: number }[]>(
+    `SELECT 1 AS one FROM ${qualifiedTable} WHERE ${predicates.join(" OR ")} LIMIT 1`,
+  );
+  return rows.length > 0;
+}
+
+async function recordMigrationHistoryEntry(
+  sql: SqlExecutor,
+  qualifiedTable: string,
+  columnNames: Set<string>,
+  migrationFile: string,
+  hash: string,
+  folderMillis: number,
+): Promise<void> {
+  const insertColumns: string[] = [];
+  const insertValues: string[] = [];
+
+  if (columnNames.has("hash")) {
+    insertColumns.push(quoteIdentifier("hash"));
+    insertValues.push(quoteLiteral(hash));
+  }
+  if (columnNames.has("name")) {
+    insertColumns.push(quoteIdentifier("name"));
+    insertValues.push(quoteLiteral(migrationFile));
+  }
+  if (columnNames.has("created_at")) {
+    const latestCreatedAt = await latestMigrationCreatedAt(sql, qualifiedTable);
+    const createdAt = latestCreatedAt === null
+      ? normalizeFolderMillis(folderMillis)
+      : Math.max(latestCreatedAt + 1, normalizeFolderMillis(folderMillis));
+    insertColumns.push(quoteIdentifier("created_at"));
+    insertValues.push(quoteLiteral(String(createdAt)));
+  }
+
+  if (insertColumns.length === 0) return;
+
+  await sql.unsafe(
+    `INSERT INTO ${qualifiedTable} (${insertColumns.join(", ")}) VALUES (${insertValues.join(", ")})`,
+  );
+}
+
+async function applyPendingMigrationsManually(
+  url: string,
+  pendingMigrations: string[],
+): Promise<void> {
+  if (pendingMigrations.length === 0) return;
+
+  const orderedPendingMigrations = await orderMigrationsByJournal(pendingMigrations);
+  const journalEntries = await listJournalMigrationEntries();
+  const folderMillisByFileName = new Map(
+    journalEntries.map((entry) => [entry.fileName, normalizeFolderMillis(entry.folderMillis)]),
+  );
+
+  const sql = postgres(url, { max: 1 });
+  try {
+    const { migrationTableSchema, columnNames } = await ensureMigrationJournalTable(sql);
+    const qualifiedTable = `${quoteIdentifier(migrationTableSchema)}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
+
+    for (const migrationFile of orderedPendingMigrations) {
+      const migrationContent = await readMigrationFileContent(migrationFile);
+      const hash = createHash("sha256").update(migrationContent).digest("hex");
+      const existingEntry = await migrationHistoryEntryExists(
+        sql,
+        qualifiedTable,
+        columnNames,
+        migrationFile,
+        hash,
+      );
+      if (existingEntry) continue;
+
+      await runInTransaction(sql, async () => {
+        for (const statement of splitMigrationStatements(migrationContent)) {
+          await sql.unsafe(statement);
+        }
+
+        await recordMigrationHistoryEntry(
+          sql,
+          qualifiedTable,
+          columnNames,
+          migrationFile,
+          hash,
+          folderMillisByFileName.get(migrationFile) ?? Date.now(),
+        );
+      });
+    }
+  } finally {
+    await sql.end();
+  }
+}
+
 async function mapHashesToMigrationFiles(migrationFiles: string[]): Promise<Map<string, string>> {
   const mapped = new Map<string, string>();
 
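splitMigrationStatements, quoteIdentifier, and quoteLiteral are called above but defined outside this diff. Minimal sketches of what such helpers plausibly do, given the --> statement-breakpoint markers Drizzle writes into the migration files further down (assumptions, not the committed implementations):

// Split a Drizzle-generated .sql migration on its breakpoint markers, dropping empty fragments.
function splitMigrationStatements(content: string): string[] {
  return content
    .split("--> statement-breakpoint")
    .map((statement) => statement.trim())
    .filter((statement) => statement.length > 0);
}

// Standard PostgreSQL quoting: double the quote character, then wrap.
function quoteIdentifier(identifier: string): string {
  return `"${identifier.replaceAll('"', '""')}"`;
}

function quoteLiteral(value: string): string {
  return `'${value.replaceAll("'", "''")}'`;
}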
@@ -467,6 +638,9 @@ export async function inspectMigrations(url: string): Promise<MigrationState> {
 }
 
 export async function applyPendingMigrations(url: string): Promise<void> {
+  const initialState = await inspectMigrations(url);
+  if (initialState.status === "upToDate") return;
+
   const sql = postgres(url, { max: 1 });
 
   try {
@@ -475,6 +649,28 @@ export async function applyPendingMigrations(url: string): Promise<void> {
   } finally {
     await sql.end();
   }
+
+  let state = await inspectMigrations(url);
+  if (state.status === "upToDate") return;
+
+  const repair = await reconcilePendingMigrationHistory(url);
+  if (repair.repairedMigrations.length > 0) {
+    state = await inspectMigrations(url);
+    if (state.status === "upToDate") return;
+  }
+
+  if (state.status !== "needsMigrations" || state.reason !== "pending-migrations") {
+    throw new Error("Migrations are still pending after attempted apply; run inspectMigrations for details.");
+  }
+
+  await applyPendingMigrationsManually(url, state.pendingMigrations);
+
+  const finalState = await inspectMigrations(url);
+  if (finalState.status !== "upToDate") {
+    throw new Error(
+      `Failed to apply pending migrations: ${finalState.pendingMigrations.join(", ")}`,
+    );
+  }
 }
 
 export type MigrationBootstrapResult =
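MigrationState is defined earlier in the same module and not shown here; from the usages above and in the migrate script below, a compatible sketch (fields inferred, the real type likely carries more detail) would be roughly:

// Inferred shape only.
type MigrationState =
  | { status: "upToDate" }
  | {
      status: "needsMigrations";
      reason: string; // observed value: "pending-migrations"
      pendingMigrations: string[];
    };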
@@ -1,17 +1,21 @@
-import { migrate as migratePg } from "drizzle-orm/postgres-js/migrator";
-import postgres from "postgres";
-import { drizzle as drizzlePg } from "drizzle-orm/postgres-js";
+import { applyPendingMigrations, inspectMigrations } from "./client.js";
 
-const migrationsFolder = new URL("./migrations", import.meta.url).pathname;
 const url = process.env.DATABASE_URL;
 
 if (!url) {
   throw new Error("DATABASE_URL is required for db:migrate");
 }
 
-const sql = postgres(url, { max: 1 });
-const db = drizzlePg(sql);
-await migratePg(db, { migrationsFolder });
-await sql.end();
+const before = await inspectMigrations(url);
+if (before.status === "upToDate") {
+  console.log("No pending migrations");
+} else {
+  console.log(`Applying ${before.pendingMigrations.length} pending migration(s)...`);
+  await applyPendingMigrations(url);
 
-console.log("Migrations complete");
+  const after = await inspectMigrations(url);
+  if (after.status !== "upToDate") {
+    throw new Error(`Migrations incomplete: ${after.pendingMigrations.join(", ")}`);
+  }
+  console.log("Migrations complete");
+}

packages/db/src/migrations/0008_amused_zzzax.sql (new file, 1 line)
@@ -0,0 +1 @@
+ALTER TABLE "issues" ADD COLUMN "hidden_at" timestamp with time zone;
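A rough sketch of how the new hidden_at column might be used for soft-hiding via Drizzle, assuming a db instance, an id primary key on issues, and illustrative import paths:

import { eq, isNull } from "drizzle-orm";
import { db } from "../db.js"; // hypothetical drizzle instance
import { issues } from "../schema/issues.js";

// Soft-hide an issue by stamping hidden_at instead of deleting the row.
export async function hideIssue(issueId: string): Promise<void> {
  await db.update(issues).set({ hiddenAt: new Date() }).where(eq(issues.id, issueId));
}

// Default listings then filter hidden issues out.
export async function listVisibleIssues() {
  return db.select().from(issues).where(isNull(issues.hiddenAt));
}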

packages/db/src/migrations/0009_fast_jackal.sql (new file, 36 lines)
@@ -0,0 +1,36 @@
+CREATE TABLE "company_secret_versions" (
+  "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+  "secret_id" uuid NOT NULL,
+  "version" integer NOT NULL,
+  "material" jsonb NOT NULL,
+  "value_sha256" text NOT NULL,
+  "created_by_agent_id" uuid,
+  "created_by_user_id" text,
+  "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+  "revoked_at" timestamp with time zone
+);
+--> statement-breakpoint
+CREATE TABLE "company_secrets" (
+  "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+  "company_id" uuid NOT NULL,
+  "name" text NOT NULL,
+  "provider" text DEFAULT 'local_encrypted' NOT NULL,
+  "external_ref" text,
+  "latest_version" integer DEFAULT 1 NOT NULL,
+  "description" text,
+  "created_by_agent_id" uuid,
+  "created_by_user_id" text,
+  "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+  "updated_at" timestamp with time zone DEFAULT now() NOT NULL
+);
+--> statement-breakpoint
+ALTER TABLE "company_secret_versions" ADD CONSTRAINT "company_secret_versions_secret_id_company_secrets_id_fk" FOREIGN KEY ("secret_id") REFERENCES "public"."company_secrets"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "company_secret_versions" ADD CONSTRAINT "company_secret_versions_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "company_secrets" ADD CONSTRAINT "company_secrets_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "company_secrets" ADD CONSTRAINT "company_secrets_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
+CREATE INDEX "company_secret_versions_secret_idx" ON "company_secret_versions" USING btree ("secret_id","created_at");--> statement-breakpoint
+CREATE INDEX "company_secret_versions_value_sha256_idx" ON "company_secret_versions" USING btree ("value_sha256");--> statement-breakpoint
+CREATE UNIQUE INDEX "company_secret_versions_secret_version_uq" ON "company_secret_versions" USING btree ("secret_id","version");--> statement-breakpoint
+CREATE INDEX "company_secrets_company_idx" ON "company_secrets" USING btree ("company_id");--> statement-breakpoint
+CREATE INDEX "company_secrets_company_provider_idx" ON "company_secrets" USING btree ("company_id","provider");--> statement-breakpoint
+CREATE UNIQUE INDEX "company_secrets_company_name_uq" ON "company_secrets" USING btree ("company_id","name");

packages/db/src/migrations/meta/0008_snapshot.json (new file, 3264 lines; diff suppressed, too large)
packages/db/src/migrations/meta/0009_snapshot.json (new file, 3587 lines; diff suppressed, too large)
@@ -57,6 +57,20 @@
       "when": 1771545603000,
       "tag": "0007_new_quentin_quire",
       "breakpoints": true
-    }
+    },
+    {
+      "idx": 8,
+      "version": "7",
+      "when": 1771534160426,
+      "tag": "0008_amused_zzzax",
+      "breakpoints": true
+    },
+    {
+      "idx": 9,
+      "version": "7",
+      "when": 1771534211029,
+      "tag": "0009_fast_jackal",
+      "breakpoints": true
+    }
   ]
 }

packages/db/src/schema/company_secret_versions.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import { pgTable, uuid, text, timestamp, integer, jsonb, index, uniqueIndex } from "drizzle-orm/pg-core";
+import { agents } from "./agents.js";
+import { companySecrets } from "./company_secrets.js";
+
+export const companySecretVersions = pgTable(
+  "company_secret_versions",
+  {
+    id: uuid("id").primaryKey().defaultRandom(),
+    secretId: uuid("secret_id").notNull().references(() => companySecrets.id, { onDelete: "cascade" }),
+    version: integer("version").notNull(),
+    material: jsonb("material").$type<Record<string, unknown>>().notNull(),
+    valueSha256: text("value_sha256").notNull(),
+    createdByAgentId: uuid("created_by_agent_id").references(() => agents.id, { onDelete: "set null" }),
+    createdByUserId: text("created_by_user_id"),
+    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
+    revokedAt: timestamp("revoked_at", { withTimezone: true }),
+  },
+  (table) => ({
+    secretIdx: index("company_secret_versions_secret_idx").on(table.secretId, table.createdAt),
+    valueHashIdx: index("company_secret_versions_value_sha256_idx").on(table.valueSha256),
+    secretVersionUq: uniqueIndex("company_secret_versions_secret_version_uq").on(table.secretId, table.version),
+  }),
+);

packages/db/src/schema/company_secrets.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
+import { pgTable, uuid, text, timestamp, integer, index, uniqueIndex } from "drizzle-orm/pg-core";
+import { companies } from "./companies.js";
+import { agents } from "./agents.js";
+
+export const companySecrets = pgTable(
+  "company_secrets",
+  {
+    id: uuid("id").primaryKey().defaultRandom(),
+    companyId: uuid("company_id").notNull().references(() => companies.id),
+    name: text("name").notNull(),
+    provider: text("provider").notNull().default("local_encrypted"),
+    externalRef: text("external_ref"),
+    latestVersion: integer("latest_version").notNull().default(1),
+    description: text("description"),
+    createdByAgentId: uuid("created_by_agent_id").references(() => agents.id, { onDelete: "set null" }),
+    createdByUserId: text("created_by_user_id"),
+    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
+    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
+  },
+  (table) => ({
+    companyIdx: index("company_secrets_company_idx").on(table.companyId),
+    companyProviderIdx: index("company_secrets_company_provider_idx").on(table.companyId, table.provider),
+    companyNameUq: uniqueIndex("company_secrets_company_name_uq").on(table.companyId, table.name),
+  }),
+);
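A rough sketch of how these two tables might be written together when a secret is created, assuming a drizzle db instance; the encrypted material envelope and the encryption step itself are assumptions, since this diff only defines the storage shape:

import { createHash } from "node:crypto";
import { db } from "../db.js"; // hypothetical drizzle instance
import { companySecrets } from "./company_secrets.js";
import { companySecretVersions } from "./company_secret_versions.js";

// Placeholder for whatever the "local_encrypted" provider actually does (not shown in this diff).
function encryptToEnvelope(plaintext: string): Record<string, unknown> {
  return { alg: "placeholder", ciphertext: Buffer.from(plaintext).toString("base64") };
}

export async function createCompanySecret(companyId: string, name: string, plaintext: string) {
  // value_sha256 stores a digest of the plaintext, presumably so reuse can be detected without decrypting.
  const valueSha256 = createHash("sha256").update(plaintext).digest("hex");
  const material = encryptToEnvelope(plaintext);

  return db.transaction(async (tx) => {
    const [secret] = await tx.insert(companySecrets).values({ companyId, name }).returning();
    await tx.insert(companySecretVersions).values({
      secretId: secret.id,
      version: 1, // matches the latest_version default on company_secrets
      material,
      valueSha256,
    });
    return secret;
  });
}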
@@ -16,3 +16,5 @@ export { costEvents } from "./cost_events.js";
 export { approvals } from "./approvals.js";
 export { approvalComments } from "./approval_comments.js";
 export { activityLog } from "./activity_log.js";
+export { companySecrets } from "./company_secrets.js";
+export { companySecretVersions } from "./company_secret_versions.js";
@@ -35,6 +35,7 @@ export const issues = pgTable(
     startedAt: timestamp("started_at", { withTimezone: true }),
     completedAt: timestamp("completed_at", { withTimezone: true }),
     cancelledAt: timestamp("cancelled_at", { withTimezone: true }),
+    hiddenAt: timestamp("hidden_at", { withTimezone: true }),
     createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
     updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
   },