Add issue identifiers, activity run tracking, and migration inspection
Add issuePrefix/issueCounter to companies and issueNumber/identifier to issues for human-readable issue IDs (e.g. PAP-42). Add runId to activity_log for linking activity to heartbeat runs. Rework DB client to support migration state inspection and interactive pending migration prompts at startup. Add reopen option to issue comments validator. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,13 +1,178 @@
|
||||
import { readdir, readFile } from "node:fs/promises";
import { fileURLToPath } from "node:url";

import { drizzle as drizzlePg } from "drizzle-orm/postgres-js";
import { migrate as migratePg } from "drizzle-orm/postgres-js/migrator";
import postgres from "postgres";

import * as schema from "./schema/index.js";
|
||||
|
||||
const MIGRATIONS_FOLDER = new URL("./migrations", import.meta.url).pathname;
|
||||
const DRIZZLE_MIGRATIONS_TABLE = "__drizzle_migrations";
|
||||
const MIGRATIONS_JOURNAL_JSON = new URL("./migrations/meta/_journal.json", import.meta.url).pathname;
|
||||
|
||||
function isSafeIdentifier(value: string): boolean {
|
||||
return /^[A-Za-z_][A-Za-z0-9_]*$/.test(value);
|
||||
}
|
||||
|
||||
function quoteIdentifier(value: string): string {
|
||||
if (!isSafeIdentifier(value)) throw new Error(`Unsafe SQL identifier: ${value}`);
|
||||
return `"${value.replaceAll("\"", "\"\"")}"`;
|
||||
}
|
||||
|
||||
export type MigrationState =
|
||||
| { status: "upToDate"; tableCount: number; availableMigrations: string[]; appliedMigrations: string[] }
|
||||
| {
|
||||
status: "needsMigrations";
|
||||
tableCount: number;
|
||||
availableMigrations: string[];
|
||||
appliedMigrations: string[];
|
||||
pendingMigrations: string[];
|
||||
reason: "no-migration-journal-empty-db" | "no-migration-journal-non-empty-db" | "pending-migrations";
|
||||
};
|
||||
|
||||
export function createDb(url: string) {
|
||||
const sql = postgres(url);
|
||||
return drizzlePg(sql, { schema });
|
||||
}
|
||||
|
||||
async function listMigrationFiles(): Promise<string[]> {
|
||||
const entries = await readdir(MIGRATIONS_FOLDER, { withFileTypes: true });
|
||||
return entries
|
||||
.filter((entry) => entry.isFile() && entry.name.endsWith(".sql"))
|
||||
.map((entry) => entry.name)
|
||||
.sort((a, b) => a.localeCompare(b));
|
||||
}
|
||||
|
||||
/** Minimal shape of drizzle's migrations/meta/_journal.json that this module reads. */
type MigrationJournalFile = {
  // Each entry's `tag` is a migration file's basename without the .sql extension.
  entries?: Array<{ tag?: string }>;
};
|
||||
|
||||
async function listJournalMigrationFiles(): Promise<string[]> {
|
||||
try {
|
||||
const raw = await readFile(MIGRATIONS_JOURNAL_JSON, "utf8");
|
||||
const parsed = JSON.parse(raw) as MigrationJournalFile;
|
||||
if (!Array.isArray(parsed.entries)) return [];
|
||||
return parsed.entries
|
||||
.map((entry) => (typeof entry?.tag === "string" ? `${entry.tag}.sql` : null))
|
||||
.filter((name): name is string => typeof name === "string");
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Returns the migration file names already applied to the database, in id order.
 *
 * Tries the journal table's `name` column first; when that column is absent
 * (presumably a journal written by a client that records ids only — confirm),
 * falls back to mapping row ids onto the on-disk journal, and finally onto the
 * sorted list of available migration files.
 *
 * @param sql open postgres client
 * @param migrationTableSchema schema holding __drizzle_migrations (validated below)
 * @param availableMigrations sorted on-disk migration names, used as last-resort mapping
 */
async function loadAppliedMigrations(
  sql: ReturnType<typeof postgres>,
  migrationTableSchema: string,
  availableMigrations: string[],
): Promise<string[]> {
  // Both parts pass through quoteIdentifier, so interpolation into SQL text is safe.
  const qualifiedTable = `${quoteIdentifier(migrationTableSchema)}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
  try {
    // Preferred path: read applied names directly from the journal table.
    const rows = await sql.unsafe<{ name: string }[]>(`SELECT name FROM ${qualifiedTable} ORDER BY id`);
    return rows.map((row) => row.name).filter((name): name is string => Boolean(name));
  } catch (error) {
    // Swallow only the specific "no name column" failure; rethrow everything else.
    // NOTE(review): message sniffing is brittle — checking SQLSTATE 42703 would be
    // sturdier if the client exposes it.
    if (!(error instanceof Error) || !error.message.includes('column "name" does not exist')) {
      throw error;
    }
  }

  // Fallback 1: map journal row ids onto the tags listed in meta/_journal.json.
  const rows = await sql.unsafe<{ id: number }[]>(`SELECT id FROM ${qualifiedTable} ORDER BY id`);
  const journalMigrationFiles = await listJournalMigrationFiles();
  // assumes ids are 1-based and sequential — TODO confirm against drizzle's migrator
  const appliedFromIds = rows
    .map((row) => journalMigrationFiles[row.id - 1])
    .filter((name): name is string => Boolean(name));
  if (appliedFromIds.length > 0) return appliedFromIds;

  // Fallback 2: no usable journal metadata — assume the first N on-disk migrations
  // were applied, where N is the number of journal rows.
  return availableMigrations.slice(0, Math.max(0, rows.length));
}
|
||||
|
||||
async function discoverMigrationTableSchema(sql: ReturnType<typeof postgres>): Promise<string | null> {
|
||||
const rows = await sql<{ schemaName: string }[]>`
|
||||
SELECT n.nspname AS "schemaName"
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||
WHERE c.relname = ${DRIZZLE_MIGRATIONS_TABLE} AND c.relkind = 'r'
|
||||
`;
|
||||
|
||||
if (rows.length === 0) return null;
|
||||
|
||||
const drizzleSchema = rows.find(({ schemaName }) => schemaName === "drizzle");
|
||||
if (drizzleSchema) return drizzleSchema.schemaName;
|
||||
|
||||
const publicSchema = rows.find(({ schemaName }) => schemaName === "public");
|
||||
if (publicSchema) return publicSchema.schemaName;
|
||||
|
||||
return rows[0]?.schemaName ?? null;
|
||||
}
|
||||
|
||||
export async function inspectMigrations(url: string): Promise<MigrationState> {
|
||||
const sql = postgres(url, { max: 1 });
|
||||
|
||||
try {
|
||||
const availableMigrations = await listMigrationFiles();
|
||||
const tableCountResult = await sql<{ count: number }[]>`
|
||||
select count(*)::int as count
|
||||
from information_schema.tables
|
||||
where table_schema = 'public'
|
||||
and table_type = 'BASE TABLE'
|
||||
`;
|
||||
const tableCount = tableCountResult[0]?.count ?? 0;
|
||||
|
||||
const migrationTableSchema = await discoverMigrationTableSchema(sql);
|
||||
if (!migrationTableSchema) {
|
||||
if (tableCount > 0) {
|
||||
return {
|
||||
status: "needsMigrations",
|
||||
tableCount,
|
||||
availableMigrations,
|
||||
appliedMigrations: [],
|
||||
pendingMigrations: availableMigrations,
|
||||
reason: "no-migration-journal-non-empty-db",
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
status: "needsMigrations",
|
||||
tableCount,
|
||||
availableMigrations,
|
||||
appliedMigrations: [],
|
||||
pendingMigrations: availableMigrations,
|
||||
reason: "no-migration-journal-empty-db",
|
||||
};
|
||||
}
|
||||
|
||||
const appliedMigrations = await loadAppliedMigrations(sql, migrationTableSchema, availableMigrations);
|
||||
const pendingMigrations = availableMigrations.filter((name) => !appliedMigrations.includes(name));
|
||||
if (pendingMigrations.length === 0) {
|
||||
return {
|
||||
status: "upToDate",
|
||||
tableCount,
|
||||
availableMigrations,
|
||||
appliedMigrations,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
status: "needsMigrations",
|
||||
tableCount,
|
||||
availableMigrations,
|
||||
appliedMigrations,
|
||||
pendingMigrations,
|
||||
reason: "pending-migrations",
|
||||
};
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
}
|
||||
|
||||
export async function applyPendingMigrations(url: string): Promise<void> {
|
||||
const sql = postgres(url, { max: 1 });
|
||||
|
||||
try {
|
||||
const db = drizzlePg(sql);
|
||||
await migratePg(db, { migrationsFolder: MIGRATIONS_FOLDER });
|
||||
} finally {
|
||||
await sql.end();
|
||||
}
|
||||
}
|
||||
|
||||
export type MigrationBootstrapResult =
|
||||
| { migrated: true; reason: "migrated-empty-db"; tableCount: 0 }
|
||||
| { migrated: false; reason: "already-migrated"; tableCount: number }
|
||||
@@ -17,9 +182,7 @@ export async function migratePostgresIfEmpty(url: string): Promise<MigrationBoot
|
||||
const sql = postgres(url, { max: 1 });
|
||||
|
||||
try {
|
||||
const journal = await sql<{ regclass: string | null }[]>`
|
||||
select to_regclass('public.__drizzle_migrations') as regclass
|
||||
`;
|
||||
const migrationTableSchema = await discoverMigrationTableSchema(sql);
|
||||
|
||||
const tableCountResult = await sql<{ count: number }[]>`
|
||||
select count(*)::int as count
|
||||
@@ -30,7 +193,7 @@ export async function migratePostgresIfEmpty(url: string): Promise<MigrationBoot
|
||||
|
||||
const tableCount = tableCountResult[0]?.count ?? 0;
|
||||
|
||||
if (journal[0]?.regclass) {
|
||||
if (migrationTableSchema) {
|
||||
return { migrated: false, reason: "already-migrated", tableCount };
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
export {
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
inspectMigrations,
|
||||
applyPendingMigrations,
|
||||
type MigrationState,
|
||||
migratePostgresIfEmpty,
|
||||
type MigrationBootstrapResult,
|
||||
type Db,
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
-- Link each activity entry to the heartbeat run that produced it (nullable:
-- activity can also be recorded outside a run).
ALTER TABLE "activity_log" ADD COLUMN "run_id" uuid;--> statement-breakpoint
ALTER TABLE "activity_log" ADD CONSTRAINT "activity_log_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
-- Support "all activity for a run" and "all activity for an entity" lookups.
CREATE INDEX "activity_log_run_id_idx" ON "activity_log" USING btree ("run_id");--> statement-breakpoint
CREATE INDEX "activity_log_entity_type_id_idx" ON "activity_log" USING btree ("entity_type","entity_id");--> statement-breakpoint
-- NOTE(review): dropping agents.context_mode looks unrelated to run tracking;
-- confirm the column is truly unused before this migration ships.
ALTER TABLE "agents" DROP COLUMN "context_mode";
|
||||
28
packages/db/src/migrations/0004_issue_identifiers.sql
Normal file
28
packages/db/src/migrations/0004_issue_identifiers.sql
Normal file
@@ -0,0 +1,28 @@
|
||||
-- Add issue identifier columns to companies.
-- issue_prefix: short code used in human-readable issue IDs (e.g. PAP in PAP-42);
-- issue_counter: highest issue_number assigned so far for the company.
ALTER TABLE "companies" ADD COLUMN "issue_prefix" text NOT NULL DEFAULT 'PAP';--> statement-breakpoint
ALTER TABLE "companies" ADD COLUMN "issue_counter" integer NOT NULL DEFAULT 0;--> statement-breakpoint

-- Add issue identifier columns to issues. Left nullable: existing rows are
-- backfilled below, and new rows presumably get values from the application —
-- confirm there is no NOT NULL follow-up migration planned.
ALTER TABLE "issues" ADD COLUMN "issue_number" integer;--> statement-breakpoint
ALTER TABLE "issues" ADD COLUMN "identifier" text;--> statement-breakpoint

-- Backfill existing issues: assign sequential issue_number per company ordered by created_at.
-- NOTE(review): created_at ties make numbering order non-deterministic within a tie;
-- acceptable for a one-off backfill, but worth confirming.
WITH numbered AS (
  SELECT id, company_id, ROW_NUMBER() OVER (PARTITION BY company_id ORDER BY created_at ASC) AS rn
  FROM issues
)
UPDATE issues
SET issue_number = numbered.rn,
    identifier = (SELECT issue_prefix FROM companies WHERE companies.id = issues.company_id) || '-' || numbered.rn
FROM numbered
WHERE issues.id = numbered.id;--> statement-breakpoint

-- Sync each company's issue_counter to the max assigned number (0 for companies
-- with no issues).
UPDATE companies
SET issue_counter = COALESCE(
  (SELECT MAX(issue_number) FROM issues WHERE issues.company_id = companies.id),
  0
);--> statement-breakpoint

-- Create unique index on (company_id, identifier). Postgres unique indexes allow
-- multiple NULLs, so un-backfilled identifiers do not conflict.
CREATE UNIQUE INDEX "issues_company_identifier_idx" ON "issues" USING btree ("company_id","identifier");
|
||||
2502
packages/db/src/migrations/meta/0003_snapshot.json
Normal file
2502
packages/db/src/migrations/meta/0003_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -22,6 +22,20 @@
|
||||
"when": 1771349403162,
|
||||
"tag": "0002_big_zaladane",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"version": "7",
|
||||
"when": 1771456737635,
|
||||
"tag": "0003_shallow_quentin_quire",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 4,
|
||||
"version": "7",
|
||||
"when": 1771545600000,
|
||||
"tag": "0004_issue_identifiers",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import { pgTable, uuid, text, timestamp, jsonb, index } from "drizzle-orm/pg-core";
|
||||
import { companies } from "./companies.js";
|
||||
import { agents } from "./agents.js";
|
||||
import { heartbeatRuns } from "./heartbeat_runs.js";
|
||||
|
||||
export const activityLog = pgTable(
|
||||
"activity_log",
|
||||
@@ -13,10 +14,13 @@ export const activityLog = pgTable(
|
||||
entityType: text("entity_type").notNull(),
|
||||
entityId: text("entity_id").notNull(),
|
||||
agentId: uuid("agent_id").references(() => agents.id),
|
||||
runId: uuid("run_id").references(() => heartbeatRuns.id),
|
||||
details: jsonb("details").$type<Record<string, unknown>>(),
|
||||
createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
companyCreatedIdx: index("activity_log_company_created_idx").on(table.companyId, table.createdAt),
|
||||
runIdIdx: index("activity_log_run_id_idx").on(table.runId),
|
||||
entityIdx: index("activity_log_entity_type_id_idx").on(table.entityType, table.entityId),
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
import { pgTable, uuid, text, integer, timestamp } from "drizzle-orm/pg-core";
|
||||
import { pgTable, uuid, text, integer, timestamp, boolean } from "drizzle-orm/pg-core";
|
||||
|
||||
/** Drizzle table definition for companies (tenant root for agents, issues, budgets). */
export const companies = pgTable("companies", {
  id: uuid("id").primaryKey().defaultRandom(),
  name: text("name").notNull(),
  description: text("description"),
  status: text("status").notNull().default("active"),
  // Short code prefixed to human-readable issue identifiers (e.g. "PAP" in PAP-42).
  issuePrefix: text("issue_prefix").notNull().default("PAP"),
  // Per-company issue numbering counter; the migration backfill syncs it to the
  // max assigned issue_number.
  issueCounter: integer("issue_counter").notNull().default(0),
  budgetMonthlyCents: integer("budget_monthly_cents").notNull().default(0),
  spentMonthlyCents: integer("spent_monthly_cents").notNull().default(0),
  requireBoardApprovalForNewAgents: boolean("require_board_approval_for_new_agents")
    .notNull()
    .default(true),
  createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
  updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
});
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
timestamp,
|
||||
integer,
|
||||
index,
|
||||
uniqueIndex,
|
||||
} from "drizzle-orm/pg-core";
|
||||
import { agents } from "./agents.js";
|
||||
import { projects } from "./projects.js";
|
||||
@@ -27,6 +28,8 @@ export const issues = pgTable(
|
||||
assigneeAgentId: uuid("assignee_agent_id").references(() => agents.id),
|
||||
createdByAgentId: uuid("created_by_agent_id").references(() => agents.id),
|
||||
createdByUserId: text("created_by_user_id"),
|
||||
issueNumber: integer("issue_number"),
|
||||
identifier: text("identifier"),
|
||||
requestDepth: integer("request_depth").notNull().default(0),
|
||||
billingCode: text("billing_code"),
|
||||
startedAt: timestamp("started_at", { withTimezone: true }),
|
||||
@@ -44,5 +47,6 @@ export const issues = pgTable(
|
||||
),
|
||||
parentIdx: index("issues_company_parent_idx").on(table.companyId, table.parentId),
|
||||
projectIdx: index("issues_company_project_idx").on(table.companyId, table.projectId),
|
||||
identifierIdx: uniqueIndex("issues_company_identifier_idx").on(table.companyId, table.identifier),
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -7,6 +7,7 @@ export interface ActivityEvent {
|
||||
entityType: string;
|
||||
entityId: string;
|
||||
agentId: string | null;
|
||||
runId: string | null;
|
||||
details: Record<string, unknown> | null;
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
@@ -5,8 +5,11 @@ export interface Company {
|
||||
name: string;
|
||||
description: string | null;
|
||||
status: CompanyStatus;
|
||||
issuePrefix: string;
|
||||
issueCounter: number;
|
||||
budgetMonthlyCents: number;
|
||||
spentMonthlyCents: number;
|
||||
requireBoardApprovalForNewAgents: boolean;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
@@ -25,6 +25,8 @@ export interface Issue {
|
||||
assigneeAgentId: string | null;
|
||||
createdByAgentId: string | null;
|
||||
createdByUserId: string | null;
|
||||
issueNumber: number | null;
|
||||
identifier: string | null;
|
||||
requestDepth: number;
|
||||
billingCode: string | null;
|
||||
startedAt: Date | null;
|
||||
|
||||
@@ -31,6 +31,7 @@ export type CheckoutIssue = z.infer<typeof checkoutIssueSchema>;
|
||||
|
||||
/** Validator for the add-issue-comment payload. */
export const addIssueCommentSchema = z.object({
  // Comment text; must be non-empty.
  body: z.string().min(1),
  // presumably reopens a closed issue when true — confirm against the route handler
  reopen: z.boolean().optional(),
});

export type AddIssueComment = z.infer<typeof addIssueCommentSchema>;
|
||||
|
||||
Reference in New Issue
Block a user