Server: migration prompts, structured logging, heartbeat reaping, and issue-run tracking

Replace auto-migrate-if-empty with interactive migration flow that inspects
pending migrations and prompts before applying. Add pino-pretty for structured
console + file logging. Add reapOrphanedRuns to clean up stuck heartbeat runs
on startup and periodically. Track runId through auth middleware, activity logs,
and all mutation routes. Add issue-run cross-reference queries, live-run and
active-run endpoints for issues, issue identifier lookup, reopen-via-comment
flow, and done/cancelled -> todo status transitions.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-19 09:09:40 -06:00
parent 21b7bc8da0
commit a90063415e
14 changed files with 605 additions and 67 deletions

View File

@@ -15,12 +15,13 @@
"@paperclip/adapter-utils": "workspace:*",
"@paperclip/db": "workspace:*",
"@paperclip/shared": "workspace:*",
"dotenv": "^17.0.1",
"detect-port": "^2.1.0",
"dotenv": "^17.0.1",
"drizzle-orm": "^0.38.4",
"express": "^5.1.0",
"pino": "^9.6.0",
"pino-http": "^10.4.0",
"pino-pretty": "^13.1.3",
"ws": "^8.19.0",
"zod": "^3.24.2"
},

View File

@@ -1,10 +1,13 @@
import { existsSync, readFileSync, rmSync } from "node:fs";
import { createServer } from "node:http";
import { resolve } from "node:path";
import { createInterface } from "node:readline/promises";
import { stdin, stdout } from "node:process";
import {
createDb,
ensurePostgresDatabase,
migratePostgresIfEmpty,
inspectMigrations,
applyPendingMigrations,
} from "@paperclip/db";
import detectPort from "detect-port";
import { createApp } from "./app.js";
@@ -30,27 +33,80 @@ type EmbeddedPostgresCtor = new (opts: {
const config = loadConfig();
type MigrationSummary =
| "skipped"
| "already applied"
| "applied (empty database)"
| "applied (pending migrations)"
| "pending migrations skipped";
/**
 * Render a short, human-readable summary of pending migration names,
 * truncating after the first three (e.g. "a, b, c (+2 more)").
 */
function formatPendingMigrationSummary(migrations: string[]): string {
  const total = migrations.length;
  if (total === 0) {
    return "none";
  }
  if (total <= 3) {
    return migrations.join(", ");
  }
  const shown = migrations.slice(0, 3).join(", ");
  return `${shown} (+${total - 3} more)`;
}
/**
 * Ask the operator whether pending migrations should be applied now.
 * Auto-approves when not attached to an interactive TTY or when
 * PAPERCLIP_MIGRATION_AUTO_APPLY=true; otherwise defaults to "no".
 */
async function promptApplyMigrations(migrations: string[]): Promise<boolean> {
  const interactive = stdin.isTTY && stdout.isTTY;
  if (!interactive) return true;
  if (process.env.PAPERCLIP_MIGRATION_AUTO_APPLY === "true") return true;
  const rl = createInterface({ input: stdin, output: stdout });
  try {
    const raw = await rl.question(
      `Apply pending migrations (${formatPendingMigrationSummary(migrations)}) now? (y/N): `,
    );
    const answer = raw.trim().toLowerCase();
    return answer === "y" || answer === "yes";
  } finally {
    rl.close();
  }
}
/**
 * Inspect the migration state of a database and, when migrations are pending,
 * prompt before applying them (see promptApplyMigrations for the TTY /
 * auto-apply rules).
 *
 * @param connectionString - Postgres connection string to inspect/migrate.
 * @param label - Human-readable database label used in log messages.
 * @returns A MigrationSummary describing what (if anything) was done.
 */
async function ensureMigrations(connectionString: string, label: string): Promise<MigrationSummary> {
  const state = await inspectMigrations(connectionString);
  if (state.status === "upToDate") return "already applied";
  // Special case: tables exist but no migration journal was found. Warn so the
  // operator knows the schema may be out of sync before they answer the prompt.
  if (state.status === "needsMigrations" && state.reason === "no-migration-journal-non-empty-db") {
    logger.warn(
      { tableCount: state.tableCount },
      `${label} has existing tables but no migration journal. Run migrations manually to sync schema.`,
    );
  }
  // Shared prompt/apply path (previously duplicated in both branches).
  const apply = await promptApplyMigrations(state.pendingMigrations);
  if (!apply) {
    logger.warn(
      { pendingMigrations: state.pendingMigrations },
      `${label} has pending migrations; continuing without applying. Run pnpm db:migrate to apply before startup.`,
    );
    return "pending migrations skipped";
  }
  logger.info({ pendingMigrations: state.pendingMigrations }, `Applying ${state.pendingMigrations.length} pending migrations for ${label}`);
  await applyPendingMigrations(connectionString);
  return "applied (pending migrations)";
}
let db;
let embeddedPostgres: EmbeddedPostgresInstance | null = null;
let embeddedPostgresStartedByThisProcess = false;
let migrationSummary = "skipped";
let migrationSummary: MigrationSummary = "skipped";
let startupDbInfo:
| { mode: "external-postgres"; connectionString: string }
| { mode: "embedded-postgres"; dataDir: string; port: number };
if (config.databaseUrl) {
const migration = await migratePostgresIfEmpty(config.databaseUrl);
if (migration.migrated) {
logger.info("Empty PostgreSQL database detected; applied migrations");
migrationSummary = "applied (empty database)";
} else if (migration.reason === "not-empty-no-migration-journal") {
logger.warn(
{ tableCount: migration.tableCount },
"PostgreSQL has existing tables but no migration journal; skipped auto-migrate",
);
migrationSummary = "skipped (existing schema, no migration journal)";
} else {
migrationSummary = "already applied";
}
migrationSummary = await ensureMigrations(config.databaseUrl, "PostgreSQL");
db = createDb(config.databaseUrl);
logger.info("Using external PostgreSQL via DATABASE_URL/config");
@@ -131,19 +187,7 @@ if (config.databaseUrl) {
}
const embeddedConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
const migration = await migratePostgresIfEmpty(embeddedConnectionString);
if (migration.migrated) {
logger.info("Empty embedded PostgreSQL database detected; applied migrations");
migrationSummary = "applied (empty database)";
} else if (migration.reason === "not-empty-no-migration-journal") {
logger.warn(
{ tableCount: migration.tableCount },
"Embedded PostgreSQL has existing tables but no migration journal; skipped auto-migrate",
);
migrationSummary = "skipped (existing schema, no migration journal)";
} else {
migrationSummary = "already applied";
}
migrationSummary = await ensureMigrations(embeddedConnectionString, "Embedded PostgreSQL");
db = createDb(embeddedConnectionString);
logger.info("Embedded PostgreSQL ready");
@@ -163,6 +207,12 @@ setupLiveEventsWebSocketServer(server, db as any);
if (config.heartbeatSchedulerEnabled) {
const heartbeat = heartbeatService(db as any);
// Reap orphaned runs at startup (no threshold -- runningProcesses is empty)
void heartbeat.reapOrphanedRuns().catch((err) => {
logger.error({ err }, "startup reap of orphaned heartbeat runs failed");
});
setInterval(() => {
void heartbeat
.tickTimers(new Date())
@@ -174,6 +224,13 @@ if (config.heartbeatSchedulerEnabled) {
.catch((err) => {
logger.error({ err }, "heartbeat timer tick failed");
});
// Periodically reap orphaned runs (5-min staleness threshold)
void heartbeat
.reapOrphanedRuns({ staleThresholdMs: 5 * 60 * 1000 })
.catch((err) => {
logger.error({ err }, "periodic reap of orphaned heartbeat runs failed");
});
}, config.heartbeatSchedulerIntervalMs);
}

View File

@@ -13,8 +13,11 @@ export function actorMiddleware(db: Db): RequestHandler {
return async (req, _res, next) => {
req.actor = { type: "board", userId: "board" };
const runIdHeader = req.header("x-paperclip-run-id");
const authHeader = req.header("authorization");
if (!authHeader?.toLowerCase().startsWith("bearer ")) {
if (runIdHeader) req.actor.runId = runIdHeader;
next();
return;
}
@@ -60,6 +63,7 @@ export function actorMiddleware(db: Db): RequestHandler {
agentId: claims.sub,
companyId: claims.company_id,
keyId: undefined,
runId: runIdHeader || undefined,
};
next();
return;
@@ -75,6 +79,7 @@ export function actorMiddleware(db: Db): RequestHandler {
agentId: key.agentId,
companyId: key.companyId,
keyId: key.id,
runId: runIdHeader || undefined,
};
next();

View File

@@ -1,5 +1,41 @@
import path from "node:path";
import fs from "node:fs";
import pino from "pino";
import { pinoHttp } from "pino-http";
export const logger = pino();
export const httpLogger = pinoHttp({ logger });
const logDir = path.resolve(process.cwd(), ".paperclip", "logs");
fs.mkdirSync(logDir, { recursive: true });
const logFile = path.join(logDir, "server.log");
const sharedOpts = {
translateTime: "HH:MM:ss",
ignore: "pid,hostname",
};
export const logger = pino({
level: "debug",
}, pino.transport({
targets: [
{
target: "pino-pretty",
options: { ...sharedOpts, ignore: "pid,hostname,req,res", hideObject: true, colorize: true, destination: 1 },
level: "info",
},
{
target: "pino-pretty",
options: { ...sharedOpts, colorize: false, destination: logFile, mkdir: true },
level: "debug",
},
],
}));
export const httpLogger = pinoHttp({
logger,
customSuccessMessage(req, res) {
return `${req.method} ${req.url} ${res.statusCode}`;
},
customErrorMessage(req, res) {
return `${req.method} ${req.url} ${res.statusCode}`;
},
});

View File

@@ -4,6 +4,7 @@ import type { Db } from "@paperclip/db";
import { validate } from "../middleware/validate.js";
import { activityService } from "../services/activity.js";
import { assertBoard, assertCompanyAccess } from "./authz.js";
import { issueService } from "../services/index.js";
const createActivitySchema = z.object({
actorType: z.enum(["agent", "user", "system"]).optional().default("system"),
@@ -18,6 +19,7 @@ const createActivitySchema = z.object({
export function activityRoutes(db: Db) {
const router = Router();
const svc = activityService(db);
const issueSvc = issueService(db);
router.get("/companies/:companyId/activity", async (req, res) => {
const companyId = req.params.companyId as string;
@@ -43,5 +45,35 @@ export function activityRoutes(db: Db) {
res.status(201).json(event);
});
// Activity feed for a single issue, newest first (404s unknown ids; company-scoped).
router.get("/issues/:id/activity", async (req, res) => {
  const id = req.params.id as string;
  const issue = await issueSvc.getById(id);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  assertCompanyAccess(req, issue.companyId);
  const result = await svc.forIssue(id);
  res.json(result);
});
// Heartbeat runs cross-referenced to an issue via activity-log runId entries.
router.get("/issues/:id/runs", async (req, res) => {
  const id = req.params.id as string;
  const issue = await issueSvc.getById(id);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  assertCompanyAccess(req, issue.companyId);
  const result = await svc.runsForIssue(id);
  res.json(result);
});
// Issues touched by a heartbeat run (reverse of the route above).
// NOTE(review): unlike the issue routes, this performs no company-access check
// on the run id — confirm run ids are unguessable or add an authz check.
router.get("/heartbeat-runs/:runId/issues", async (req, res) => {
  const runId = req.params.runId as string;
  const result = await svc.issuesForRun(runId);
  res.json(result);
});
return router;
}

View File

@@ -1,5 +1,7 @@
import { Router } from "express";
import type { Db } from "@paperclip/db";
import { agents as agentsTable, heartbeatRuns } from "@paperclip/db";
import { and, desc, eq, inArray, sql } from "drizzle-orm";
import {
createAgentKeySchema,
createAgentSchema,
@@ -7,7 +9,7 @@ import {
updateAgentSchema,
} from "@paperclip/shared";
import { validate } from "../middleware/validate.js";
import { agentService, heartbeatService, logActivity } from "../services/index.js";
import { agentService, heartbeatService, issueService, logActivity } from "../services/index.js";
import { assertBoard, assertCompanyAccess, getActorInfo } from "./authz.js";
import { listAdapterModels } from "../adapters/index.js";
@@ -160,6 +162,7 @@ export function agentRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "agent.created",
entityType: "agent",
entityId: agent.id,
@@ -195,6 +198,7 @@ export function agentRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "agent.updated",
entityType: "agent",
entityId: agent.id,
@@ -349,6 +353,7 @@ export function agentRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "heartbeat.invoked",
entityType: "heartbeat_run",
entityId: run.id,
@@ -397,6 +402,7 @@ export function agentRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "heartbeat.invoked",
entityType: "heartbeat_run",
entityId: run.id,
@@ -472,5 +478,77 @@ export function agentRoutes(db: Db) {
res.json(result);
});
// Queued/running heartbeat runs whose contextSnapshot references this issue.
router.get("/issues/:id/live-runs", async (req, res) => {
  const id = req.params.id as string;
  const issueSvc = issueService(db);
  const issue = await issueSvc.getById(id);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  assertCompanyAccess(req, issue.companyId);
  const liveRuns = await db
    .select({
      id: heartbeatRuns.id,
      status: heartbeatRuns.status,
      invocationSource: heartbeatRuns.invocationSource,
      triggerDetail: heartbeatRuns.triggerDetail,
      startedAt: heartbeatRuns.startedAt,
      finishedAt: heartbeatRuns.finishedAt,
      createdAt: heartbeatRuns.createdAt,
      agentId: heartbeatRuns.agentId,
      agentName: agentsTable.name,
      adapterType: agentsTable.adapterType,
    })
    .from(heartbeatRuns)
    .innerJoin(agentsTable, eq(heartbeatRuns.agentId, agentsTable.id))
    .where(
      and(
        eq(heartbeatRuns.companyId, issue.companyId),
        inArray(heartbeatRuns.status, ["queued", "running"]),
        // Runs link to issues via the JSON contextSnapshot, not a foreign key.
        sql`${heartbeatRuns.contextSnapshot} ->> 'issueId' = ${id}`,
      ),
    )
    .orderBy(desc(heartbeatRuns.createdAt));
  res.json(liveRuns);
});
// The assignee's currently running heartbeat run for an in-progress issue, or
// null when there is no assignee, the issue is not in progress, the agent is
// gone, or the agent has no running run.
// NOTE(review): the run returned is the agent's latest running run generally —
// it is not verified to be about THIS issue; confirm that is intended.
router.get("/issues/:id/active-run", async (req, res) => {
  const id = req.params.id as string;
  const issueSvc = issueService(db);
  const issue = await issueSvc.getById(id);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  assertCompanyAccess(req, issue.companyId);
  if (!issue.assigneeAgentId || issue.status !== "in_progress") {
    res.json(null);
    return;
  }
  const agent = await svc.getById(issue.assigneeAgentId);
  if (!agent) {
    res.json(null);
    return;
  }
  const run = await heartbeat.getActiveRunForAgent(issue.assigneeAgentId);
  if (!run) {
    res.json(null);
    return;
  }
  // Flatten agent display fields onto the run payload for the client.
  res.json({
    ...run,
    agentId: agent.id,
    agentName: agent.name,
    adapterType: agent.adapterType,
  });
});
return router;
}

View File

@@ -19,6 +19,7 @@ export function getActorInfo(req: Request) {
actorType: "agent" as const,
actorId: req.actor.agentId ?? "unknown-agent",
agentId: req.actor.agentId ?? null,
runId: req.actor.runId ?? null,
};
}
@@ -26,5 +27,6 @@ export function getActorInfo(req: Request) {
actorType: "user" as const,
actorId: req.actor.userId ?? "board",
agentId: null,
runId: req.actor.runId ?? null,
};
}

View File

@@ -29,13 +29,14 @@ export function issueRoutes(db: Db) {
router.get("/issues/:id", async (req, res) => {
const id = req.params.id as string;
const issue = await svc.getById(id);
const isIdentifier = /^[A-Z]+-\d+$/i.test(id);
const issue = isIdentifier ? await svc.getByIdentifier(id) : await svc.getById(id);
if (!issue) {
res.status(404).json({ error: "Issue not found" });
return;
}
assertCompanyAccess(req, issue.companyId);
const ancestors = await svc.getAncestors(id);
const ancestors = await svc.getAncestors(issue.id);
res.json({ ...issue, ancestors });
});
@@ -55,6 +56,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.created",
entityType: "issue",
entityId: issue.id,
@@ -100,6 +102,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.updated",
entityType: "issue",
entityId: issue.id,
@@ -118,6 +121,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.comment_added",
entityType: "issue",
entityId: issue.id,
@@ -142,16 +146,20 @@ export function issueRoutes(db: Db) {
const assigneeChanged =
req.body.assigneeAgentId !== undefined && req.body.assigneeAgentId !== existing.assigneeAgentId;
if (assigneeChanged && issue.assigneeAgentId) {
const reopened =
(existing.status === "done" || existing.status === "cancelled") &&
issue.status !== "done" && issue.status !== "cancelled";
if ((assigneeChanged || reopened) && issue.assigneeAgentId) {
void heartbeat
.wakeup(issue.assigneeAgentId, {
source: "assignment",
source: reopened ? "automation" : "assignment",
triggerDetail: "system",
reason: "issue_assigned",
reason: reopened ? "issue_reopened" : "issue_assigned",
payload: { issueId: issue.id, mutation: "update" },
requestedByActorType: actor.actorType,
requestedByActorId: actor.actorId,
contextSnapshot: { issueId: issue.id, source: "issue.update" },
contextSnapshot: { issueId: issue.id, source: reopened ? "issue.reopen" : "issue.update" },
})
.catch((err) => logger.warn({ err, issueId: issue.id }, "failed to wake assignee on issue update"));
}
@@ -180,6 +188,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.deleted",
entityType: "issue",
entityId: issue.id,
@@ -210,6 +219,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.checked_out",
entityType: "issue",
entityId: issue.id,
@@ -252,6 +262,7 @@ export function issueRoutes(db: Db) {
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.released",
entityType: "issue",
entityId: released.id,
@@ -282,19 +293,54 @@ export function issueRoutes(db: Db) {
assertCompanyAccess(req, issue.companyId);
const actor = getActorInfo(req);
const reopenRequested = req.body.reopen === true;
const isClosed = issue.status === "done" || issue.status === "cancelled";
let reopened = false;
let reopenFromStatus: string | null = null;
let currentIssue = issue;
if (reopenRequested && isClosed) {
const reopenedIssue = await svc.update(id, { status: "todo" });
if (!reopenedIssue) {
res.status(404).json({ error: "Issue not found" });
return;
}
reopened = true;
reopenFromStatus = issue.status;
currentIssue = reopenedIssue;
await logActivity(db, {
companyId: currentIssue.companyId,
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.updated",
entityType: "issue",
entityId: currentIssue.id,
details: {
status: "todo",
reopened: true,
reopenedFrom: reopenFromStatus,
source: "comment",
},
});
}
const comment = await svc.addComment(id, req.body.body, {
agentId: actor.agentId ?? undefined,
userId: actor.actorType === "user" ? actor.actorId : undefined,
});
await logActivity(db, {
companyId: issue.companyId,
companyId: currentIssue.companyId,
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.comment_added",
entityType: "issue",
entityId: issue.id,
entityId: currentIssue.id,
details: { commentId: comment.id },
});
@@ -313,6 +359,32 @@ export function issueRoutes(db: Db) {
}
}).catch((err) => logger.warn({ err, issueId: id }, "failed to resolve @-mentions"));
if (reopened && currentIssue.assigneeAgentId) {
void heartbeat
.wakeup(currentIssue.assigneeAgentId, {
source: "automation",
triggerDetail: "system",
reason: "issue_reopened_via_comment",
payload: {
issueId: currentIssue.id,
commentId: comment.id,
reopenedFrom: reopenFromStatus,
mutation: "comment",
},
requestedByActorType: actor.actorType,
requestedByActorId: actor.actorId,
contextSnapshot: {
issueId: currentIssue.id,
taskId: currentIssue.id,
commentId: comment.id,
source: "issue.comment.reopen",
wakeReason: "issue_reopened_via_comment",
reopenedFrom: reopenFromStatus,
},
})
.catch((err) => logger.warn({ err, issueId: currentIssue.id }, "failed to wake assignee on issue reopen comment"));
}
res.status(201).json(comment);
});

View File

@@ -10,6 +10,7 @@ export interface LogActivityInput {
entityType: string;
entityId: string;
agentId?: string | null;
runId?: string | null;
details?: Record<string, unknown> | null;
}
@@ -22,6 +23,7 @@ export async function logActivity(db: Db, input: LogActivityInput) {
entityType: input.entityType,
entityId: input.entityId,
agentId: input.agentId ?? null,
runId: input.runId ?? null,
details: input.details ?? null,
});
@@ -35,6 +37,7 @@ export async function logActivity(db: Db, input: LogActivityInput) {
entityType: input.entityType,
entityId: input.entityId,
agentId: input.agentId ?? null,
runId: input.runId ?? null,
details: input.details ?? null,
},
});

View File

@@ -1,6 +1,6 @@
import { and, desc, eq } from "drizzle-orm";
import { and, desc, eq, isNotNull, sql } from "drizzle-orm";
import type { Db } from "@paperclip/db";
import { activityLog } from "@paperclip/db";
import { activityLog, heartbeatRuns, issues } from "@paperclip/db";
export interface ActivityFilters {
companyId: string;
@@ -10,6 +10,7 @@ export interface ActivityFilters {
}
export function activityService(db: Db) {
const issueIdAsText = sql<string>`${issues.id}::text`;
return {
list: (filters: ActivityFilters) => {
const conditions = [eq(activityLog.companyId, filters.companyId)];
@@ -27,6 +28,58 @@ export function activityService(db: Db) {
return db.select().from(activityLog).where(and(...conditions)).orderBy(desc(activityLog.createdAt));
},
// All activity for a single issue, newest first.
// NOTE(review): matches on entityType/entityId only — assumes issue activity
// is always logged with entityType "issue"; verify against logActivity callers.
forIssue: (issueId: string) =>
  db
    .select()
    .from(activityLog)
    .where(
      and(
        eq(activityLog.entityType, "issue"),
        eq(activityLog.entityId, issueId),
      ),
    )
    .orderBy(desc(activityLog.createdAt)),
// Distinct heartbeat runs that touched an issue, derived from activity-log
// cross-references (activityLog.runId -> heartbeatRuns.id). DISTINCT ON
// (runId) with ORDER BY runId, createdAt DESC keeps one row per run.
runsForIssue: (issueId: string) =>
  db
    .selectDistinctOn([activityLog.runId], {
      runId: activityLog.runId,
      status: heartbeatRuns.status,
      agentId: heartbeatRuns.agentId,
      startedAt: heartbeatRuns.startedAt,
      finishedAt: heartbeatRuns.finishedAt,
      createdAt: heartbeatRuns.createdAt,
      invocationSource: heartbeatRuns.invocationSource,
    })
    .from(activityLog)
    .innerJoin(heartbeatRuns, eq(activityLog.runId, heartbeatRuns.id))
    .where(
      and(
        eq(activityLog.entityType, "issue"),
        eq(activityLog.entityId, issueId),
        isNotNull(activityLog.runId),
      ),
    )
    .orderBy(activityLog.runId, desc(heartbeatRuns.createdAt)),
// Distinct issues touched by a heartbeat run. activityLog.entityId is text, so
// the join compares against issues.id cast to text (issueIdAsText, above).
issuesForRun: (runId: string) =>
  db
    .selectDistinctOn([issueIdAsText], {
      issueId: issues.id,
      title: issues.title,
      status: issues.status,
      priority: issues.priority,
    })
    .from(activityLog)
    .innerJoin(issues, eq(activityLog.entityId, issueIdAsText))
    .where(
      and(
        eq(activityLog.runId, runId),
        eq(activityLog.entityType, "issue"),
      ),
    )
    .orderBy(issueIdAsText),
create: (data: typeof activityLog.$inferInsert) =>
db
.insert(activityLog)

View File

@@ -34,6 +34,10 @@ interface WakeupOptions {
contextSnapshot?: Record<string, unknown>;
}
/**
 * Return the value unchanged when it is a string containing non-whitespace
 * content; otherwise return null. Note: the original (untrimmed) string is
 * returned — trimming is only used for the emptiness check.
 */
function readNonEmptyString(value: unknown): string | null {
  if (typeof value !== "string") return null;
  return value.trim().length > 0 ? value : null;
}
export function heartbeatService(db: Db) {
const runLogStore = getRunLogStore();
@@ -216,6 +220,56 @@ export function heartbeatService(db: Db) {
}
}
/**
 * Mark heartbeat runs as failed when their backing process no longer exists.
 *
 * A run is "orphaned" when it is still queued/running in the DB but has no
 * entry in the in-memory runningProcesses map (e.g. after a server restart,
 * when the map starts empty).
 *
 * @param opts.staleThresholdMs - When > 0, only reap runs whose updatedAt is
 *   at least this many ms old, guarding against racing a run that was just
 *   created. Defaults to 0 (reap immediately — the startup case).
 * @returns Count and ids of the runs that were reaped.
 */
async function reapOrphanedRuns(opts?: { staleThresholdMs?: number }) {
  const staleThresholdMs = opts?.staleThresholdMs ?? 0;
  const now = new Date();
  // Find all runs in "queued" or "running" state
  const activeRuns = await db
    .select()
    .from(heartbeatRuns)
    .where(inArray(heartbeatRuns.status, ["queued", "running"]));
  const reaped: string[] = [];
  for (const run of activeRuns) {
    // A live in-process entry means the run is not orphaned.
    if (runningProcesses.has(run.id)) continue;
    // Apply staleness threshold to avoid false positives
    if (staleThresholdMs > 0) {
      // A missing updatedAt falls back to epoch 0, i.e. always stale enough.
      const refTime = run.updatedAt ? new Date(run.updatedAt).getTime() : 0;
      if (now.getTime() - refTime < staleThresholdMs) continue;
    }
    // Fail both the run and its originating wakeup request so neither is stuck.
    await setRunStatus(run.id, "failed", {
      error: "Process lost -- server may have restarted",
      errorCode: "process_lost",
      finishedAt: now,
    });
    await setWakeupStatus(run.wakeupRequestId, "failed", {
      finishedAt: now,
      error: "Process lost -- server may have restarted",
    });
    // Record a terminal lifecycle event in the run's log.
    // NOTE(review): the sequence argument is hard-coded to 1 — confirm
    // appendRunEvent tolerates/recomputes sequence for runs that already
    // emitted events before being orphaned.
    const updatedRun = await getRun(run.id);
    if (updatedRun) {
      await appendRunEvent(updatedRun, 1, {
        eventType: "lifecycle",
        stream: "system",
        level: "error",
        message: "Process lost -- server may have restarted",
      });
    }
    await finalizeAgentStatus(run.agentId, "failed");
    runningProcesses.delete(run.id);
    reaped.push(run.id);
  }
  if (reaped.length > 0) {
    logger.warn({ reapedCount: reaped.length, runIds: reaped }, "reaped orphaned heartbeat runs");
  }
  return { reaped: reaped.length, runIds: reaped };
}
async function updateRuntimeState(
agent: typeof agents.$inferSelect,
run: typeof heartbeatRuns.$inferSelect,
@@ -543,7 +597,26 @@ export function heartbeatService(db: Db) {
async function enqueueWakeup(agentId: string, opts: WakeupOptions = {}) {
const source = opts.source ?? "on_demand";
const triggerDetail = opts.triggerDetail ?? null;
const contextSnapshot = opts.contextSnapshot ?? {};
const contextSnapshot: Record<string, unknown> = { ...(opts.contextSnapshot ?? {}) };
const reason = opts.reason ?? null;
const payload = opts.payload ?? null;
const issueIdFromPayload = readNonEmptyString(payload?.["issueId"]);
if (!readNonEmptyString(contextSnapshot["wakeReason"]) && reason) {
contextSnapshot.wakeReason = reason;
}
if (!readNonEmptyString(contextSnapshot["issueId"]) && issueIdFromPayload) {
contextSnapshot.issueId = issueIdFromPayload;
}
if (!readNonEmptyString(contextSnapshot["taskId"]) && issueIdFromPayload) {
contextSnapshot.taskId = issueIdFromPayload;
}
if (!readNonEmptyString(contextSnapshot["wakeSource"])) {
contextSnapshot.wakeSource = source;
}
if (!readNonEmptyString(contextSnapshot["wakeTriggerDetail"]) && triggerDetail) {
contextSnapshot.wakeTriggerDetail = triggerDetail;
}
const agent = await getAgent(agentId);
if (!agent) throw notFound("Agent not found");
@@ -560,7 +633,7 @@ export function heartbeatService(db: Db) {
source,
triggerDetail,
reason,
payload: opts.payload ?? null,
payload,
status: "skipped",
requestedByActorType: opts.requestedByActorType ?? null,
requestedByActorId: opts.requestedByActorId ?? null,
@@ -591,8 +664,8 @@ export function heartbeatService(db: Db) {
agentId,
source,
triggerDetail,
reason: opts.reason ?? null,
payload: opts.payload ?? null,
reason,
payload,
status: "coalesced",
coalescedCount: 1,
requestedByActorType: opts.requestedByActorType ?? null,
@@ -611,8 +684,8 @@ export function heartbeatService(db: Db) {
agentId,
source,
triggerDetail,
reason: opts.reason ?? null,
payload: opts.payload ?? null,
reason,
payload,
status: "queued",
requestedByActorType: opts.requestedByActorType ?? null,
requestedByActorId: opts.requestedByActorId ?? null,
@@ -757,6 +830,8 @@ export function heartbeatService(db: Db) {
wakeup: enqueueWakeup,
reapOrphanedRuns,
tickTimers: async (now = new Date()) => {
const allAgents = await db.select().from(agents);
let checked = 0;
@@ -860,5 +935,20 @@ export function heartbeatService(db: Db) {
return runs.length;
},
// Most recently started run (by startedAt) that is currently "running" for
// this agent, or null when there is none.
getActiveRunForAgent: async (agentId: string) => {
  const isActiveForAgent = and(
    eq(heartbeatRuns.agentId, agentId),
    eq(heartbeatRuns.status, "running"),
  );
  const rows = await db
    .select()
    .from(heartbeatRuns)
    .where(isActiveForAgent)
    .orderBy(desc(heartbeatRuns.startedAt))
    .limit(1);
  return rows[0] ?? null;
},
};
}

View File

@@ -1,6 +1,6 @@
import { and, asc, desc, eq, inArray, isNull, or, sql } from "drizzle-orm";
import type { Db } from "@paperclip/db";
import { agents, issues, issueComments } from "@paperclip/db";
import { agents, companies, issues, issueComments } from "@paperclip/db";
import { conflict, notFound, unprocessable } from "../errors.js";
const ISSUE_TRANSITIONS: Record<string, string[]> = {
@@ -9,8 +9,8 @@ const ISSUE_TRANSITIONS: Record<string, string[]> = {
in_progress: ["in_review", "blocked", "done", "cancelled"],
in_review: ["in_progress", "done", "cancelled"],
blocked: ["todo", "in_progress", "cancelled"],
done: [],
cancelled: [],
done: ["todo"],
cancelled: ["todo"],
};
function assertTransition(from: string, to: string) {
@@ -69,23 +69,38 @@ export function issueService(db: Db) {
.where(eq(issues.id, id))
.then((rows) => rows[0] ?? null),
create: (companyId: string, data: Omit<typeof issues.$inferInsert, "companyId">) => {
const values = { ...data, companyId } as typeof issues.$inferInsert;
if (values.status === "in_progress" && !values.startedAt) {
values.startedAt = new Date();
}
if (values.status === "done") {
values.completedAt = new Date();
}
if (values.status === "cancelled") {
values.cancelledAt = new Date();
}
// Look up an issue by its human-readable identifier (e.g. "ABC-12"),
// normalizing to upper case before comparing against the stored identifier.
getByIdentifier: async (identifier: string) => {
  const rows = await db
    .select()
    .from(issues)
    .where(eq(issues.identifier, identifier.toUpperCase()));
  return rows[0] ?? null;
},
return db
.insert(issues)
.values(values)
.returning()
.then((rows) => rows[0]);
/**
 * Create an issue inside a transaction, allocating the next per-company issue
 * number and identifier (e.g. "ABC-12") atomically via UPDATE ... RETURNING on
 * the company's counter so concurrent creates cannot collide.
 *
 * @throws notFound when the company does not exist.
 */
create: async (companyId: string, data: Omit<typeof issues.$inferInsert, "companyId">) => {
  return db.transaction(async (tx) => {
    const [company] = await tx
      .update(companies)
      .set({ issueCounter: sql`${companies.issueCounter} + 1` })
      .where(eq(companies.id, companyId))
      .returning({ issueCounter: companies.issueCounter, issuePrefix: companies.issuePrefix });
    // Guard: an unknown companyId previously surfaced as a TypeError on the
    // destructured undefined row; fail with an explicit not-found error instead.
    if (!company) throw notFound("Company not found");
    const issueNumber = company.issueCounter;
    const identifier = `${company.issuePrefix}-${issueNumber}`;
    const values = { ...data, companyId, issueNumber, identifier } as typeof issues.$inferInsert;
    // Stamp lifecycle timestamps for issues created directly in a non-default state.
    if (values.status === "in_progress" && !values.startedAt) {
      values.startedAt = new Date();
    }
    if (values.status === "done") {
      values.completedAt = new Date();
    }
    if (values.status === "cancelled") {
      values.cancelledAt = new Date();
    }
    const [issue] = await tx.insert(issues).values(values).returning();
    return issue;
  });
},
update: async (id: string, data: Partial<typeof issues.$inferInsert>) => {
@@ -110,6 +125,12 @@ export function issueService(db: Db) {
}
applyStatusSideEffects(data.status, patch);
if (data.status && data.status !== "done") {
patch.completedAt = null;
}
if (data.status && data.status !== "cancelled") {
patch.cancelledAt = null;
}
return db
.update(issues)

View File

@@ -9,6 +9,7 @@ declare global {
agentId?: string;
companyId?: string;
keyId?: string;
runId?: string;
};
}
}