Add workspace operation tracking and fix project properties JSX

This commit is contained in:
Dotta
2026-03-17 09:36:35 -05:00
parent e39ae5a400
commit 4da13984e2
19 changed files with 11537 additions and 30 deletions

View File

@@ -0,0 +1,29 @@
-- Audit table: one row per workspace lifecycle operation (worktree prepare,
-- provision, teardown, cleanup) with its status and captured output.
CREATE TABLE "workspace_operations" (
	"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"company_id" uuid NOT NULL,
	-- Nullable: an operation may run before the workspace row exists or
	-- outlive it; the two set-null FKs below preserve the audit trail.
	"execution_workspace_id" uuid,
	"heartbeat_run_id" uuid,
	"phase" text NOT NULL,
	"command" text,
	"cwd" text,
	"status" text DEFAULT 'running' NOT NULL,
	"exit_code" integer,
	-- Pointer to the externally stored log plus size/integrity metadata.
	"log_store" text,
	"log_ref" text,
	"log_bytes" bigint,
	"log_sha256" text,
	"log_compressed" boolean DEFAULT false NOT NULL,
	-- Inline output tails for quick display without fetching the full log.
	"stdout_excerpt" text,
	"stderr_excerpt" text,
	"metadata" jsonb,
	"started_at" timestamp with time zone DEFAULT now() NOT NULL,
	"finished_at" timestamp with time zone,
	"created_at" timestamp with time zone DEFAULT now() NOT NULL,
	"updated_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "workspace_operations" ADD CONSTRAINT "workspace_operations_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workspace_operations" ADD CONSTRAINT "workspace_operations_execution_workspace_id_execution_workspaces_id_fk" FOREIGN KEY ("execution_workspace_id") REFERENCES "public"."execution_workspaces"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workspace_operations" ADD CONSTRAINT "workspace_operations_heartbeat_run_id_heartbeat_runs_id_fk" FOREIGN KEY ("heartbeat_run_id") REFERENCES "public"."heartbeat_runs"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
-- Read paths: operations for a heartbeat run and for an execution workspace,
-- always scoped by company and ordered by start time.
CREATE INDEX "workspace_operations_company_run_started_idx" ON "workspace_operations" USING btree ("company_id","heartbeat_run_id","started_at");--> statement-breakpoint
CREATE INDEX "workspace_operations_company_workspace_started_idx" ON "workspace_operations" USING btree ("company_id","execution_workspace_id","started_at");

File diff suppressed because it is too large Load Diff

View File

@@ -260,6 +260,13 @@
"when": 1773756213455,
"tag": "0036_cheerful_nitro",
"breakpoints": true
},
{
"idx": 37,
"version": "7",
"when": 1773756922363,
"tag": "0037_friendly_eddie_brock",
"breakpoints": true
}
]
}
}

View File

@@ -18,6 +18,7 @@ export { agentWakeupRequests } from "./agent_wakeup_requests.js";
export { projects } from "./projects.js";
export { projectWorkspaces } from "./project_workspaces.js";
export { executionWorkspaces } from "./execution_workspaces.js";
export { workspaceOperations } from "./workspace_operations.js";
export { workspaceRuntimeServices } from "./workspace_runtime_services.js";
export { projectGoals } from "./project_goals.js";
export { goals } from "./goals.js";

View File

@@ -0,0 +1,57 @@
import {
bigint,
boolean,
index,
integer,
jsonb,
pgTable,
text,
timestamp,
uuid,
} from "drizzle-orm/pg-core";
import { companies } from "./companies.js";
import { executionWorkspaces } from "./execution_workspaces.js";
import { heartbeatRuns } from "./heartbeat_runs.js";
// Drizzle schema for workspace_operations: one row per workspace lifecycle
// operation (worktree prepare/cleanup, provision, teardown) with its status,
// inline output excerpts, and pointers into the external log store.
export const workspaceOperations = pgTable(
  "workspace_operations",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    companyId: uuid("company_id").notNull().references(() => companies.id),
    // Nullable FKs: operations may be recorded before the workspace row is
    // persisted or after it is deleted; set-null keeps the audit trail.
    executionWorkspaceId: uuid("execution_workspace_id").references(() => executionWorkspaces.id, {
      onDelete: "set null",
    }),
    heartbeatRunId: uuid("heartbeat_run_id").references(() => heartbeatRuns.id, {
      onDelete: "set null",
    }),
    // Lifecycle phase (free-form text; values defined in the shared types).
    phase: text("phase").notNull(),
    command: text("command"),
    cwd: text("cwd"),
    // Rows start as "running" and are updated to a terminal status later.
    status: text("status").notNull().default("running"),
    exitCode: integer("exit_code"),
    // Pointer into the log store plus size/integrity metadata.
    logStore: text("log_store"),
    logRef: text("log_ref"),
    logBytes: bigint("log_bytes", { mode: "number" }),
    logSha256: text("log_sha256"),
    logCompressed: boolean("log_compressed").notNull().default(false),
    // Inline output tails for quick display without fetching the full log.
    stdoutExcerpt: text("stdout_excerpt"),
    stderrExcerpt: text("stderr_excerpt"),
    metadata: jsonb("metadata").$type<Record<string, unknown>>(),
    startedAt: timestamp("started_at", { withTimezone: true }).notNull().defaultNow(),
    finishedAt: timestamp("finished_at", { withTimezone: true }),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => ({
    // Query paths: operations per heartbeat run and per workspace, scoped by
    // company and ordered by start time.
    companyRunStartedIdx: index("workspace_operations_company_run_started_idx").on(
      table.companyId,
      table.heartbeatRunId,
      table.startedAt,
    ),
    companyWorkspaceStartedIdx: index("workspace_operations_company_workspace_started_idx").on(
      table.companyId,
      table.executionWorkspaceId,
      table.startedAt,
    ),
  }),
);

View File

@@ -138,6 +138,9 @@ export type {
ProjectWorkspace,
ExecutionWorkspace,
WorkspaceRuntimeService,
WorkspaceOperation,
WorkspaceOperationPhase,
WorkspaceOperationStatus,
ExecutionWorkspaceStrategyType,
ExecutionWorkspaceMode,
ExecutionWorkspaceProviderType,

View File

@@ -24,6 +24,11 @@ export type {
ProjectExecutionWorkspaceDefaultMode,
IssueExecutionWorkspaceSettings,
} from "./workspace-runtime.js";
export type {
WorkspaceOperation,
WorkspaceOperationPhase,
WorkspaceOperationStatus,
} from "./workspace-operation.js";
export type {
IssueWorkProduct,
IssueWorkProductType,

View File

@@ -0,0 +1,31 @@
/** Lifecycle phases captured while preparing or tearing down a workspace. */
export type WorkspaceOperationPhase =
  | "worktree_prepare"
  | "workspace_provision"
  | "workspace_teardown"
  | "worktree_cleanup";

/** Operation state; rows remain "running" until a terminal status is set. */
export type WorkspaceOperationStatus = "running" | "succeeded" | "failed" | "skipped";

/**
 * One recorded workspace lifecycle operation, including where its full log
 * lives (logStore/logRef) and inline stdout/stderr excerpts for display.
 */
export interface WorkspaceOperation {
  id: string;
  companyId: string;
  // Null when the operation ran before the workspace row existed or after
  // the workspace was removed.
  executionWorkspaceId: string | null;
  heartbeatRunId: string | null;
  phase: WorkspaceOperationPhase;
  // Display form of the executed command, when one was run.
  command: string | null;
  cwd: string | null;
  status: WorkspaceOperationStatus;
  exitCode: number | null;
  // Pointer into the log store plus size/integrity metadata for the full log.
  logStore: string | null;
  logRef: string | null;
  logBytes: number | null;
  logSha256: string | null;
  logCompressed: boolean;
  stdoutExcerpt: string | null;
  stderrExcerpt: string | null;
  metadata: Record<string, unknown> | null;
  startedAt: Date;
  finishedAt: Date | null;
  createdAt: Date;
  updatedAt: Date;
}

View File

@@ -13,6 +13,8 @@ import {
stopRuntimeServicesForExecutionWorkspace,
type RealizedExecutionWorkspace,
} from "../services/workspace-runtime.ts";
import type { WorkspaceOperation } from "@paperclipai/shared";
import type { WorkspaceOperationRecorder } from "../services/workspace-operations.ts";
const execFileAsync = promisify(execFile);
const leasedRunIds = new Set<string>();
@@ -50,6 +52,68 @@ function buildWorkspace(cwd: string): RealizedExecutionWorkspace {
};
}
// In-memory stand-in for the real workspace operation recorder: it executes
// the wrapped command, captures what would have been persisted into
// `operations`, and fabricates a WorkspaceOperation row so tests can assert
// on recorder interactions without a database.
function createWorkspaceOperationRecorderDouble() {
  type RecordedResult = {
    status?: string;
    exitCode?: number | null;
    stdout?: string | null;
    stderr?: string | null;
    system?: string | null;
    metadata?: Record<string, unknown> | null;
  };
  type RecordedOperation = {
    phase: string;
    command: string | null;
    cwd: string | null;
    metadata: Record<string, unknown> | null;
    result: RecordedResult;
  };
  const operations: RecordedOperation[] = [];
  // Mirrors the service recorder: a workspace id can be attached late and is
  // then reflected on subsequently recorded operations.
  let attachedWorkspaceId: string | null = null;

  const recorder: WorkspaceOperationRecorder = {
    attachExecutionWorkspaceId: async (nextExecutionWorkspaceId) => {
      attachedWorkspaceId = nextExecutionWorkspaceId;
    },
    recordOperation: async (input) => {
      // Run the wrapped command first, then capture the observed call.
      const result = await input.run();
      const capturedMetadata: Record<string, unknown> = { ...(input.metadata ?? {}) };
      if (attachedWorkspaceId) {
        capturedMetadata.executionWorkspaceId = attachedWorkspaceId;
      }
      operations.push({
        phase: input.phase,
        command: input.command ?? null,
        cwd: input.cwd ?? null,
        metadata: capturedMetadata,
        result,
      });
      // Fabricate a plausible persisted row for the caller.
      return {
        id: `op-${operations.length}`,
        companyId: "company-1",
        executionWorkspaceId: attachedWorkspaceId,
        heartbeatRunId: "run-1",
        phase: input.phase,
        command: input.command ?? null,
        cwd: input.cwd ?? null,
        status: (result.status ?? "succeeded") as WorkspaceOperation["status"],
        exitCode: result.exitCode ?? null,
        logStore: "local_file",
        logRef: `op-${operations.length}.ndjson`,
        logBytes: 0,
        logSha256: null,
        logCompressed: false,
        stdoutExcerpt: result.stdout ?? null,
        stderrExcerpt: result.stderr ?? null,
        metadata: input.metadata ?? null,
        startedAt: new Date(),
        finishedAt: new Date(),
        createdAt: new Date(),
        updatedAt: new Date(),
      };
    },
  };
  return { recorder, operations };
}
afterEach(async () => {
await Promise.all(
Array.from(leasedRunIds).map(async (runId) => {
@@ -218,6 +282,64 @@ describe("realizeExecutionWorkspace", () => {
await expect(fs.readFile(path.join(reused.cwd, ".paperclip-provision-created"), "utf8")).resolves.toBe("false\n");
});
// Exercises the recorder hook in realizeExecutionWorkspace: both the git
// worktree setup and the configured provision command should be captured as
// discrete operations, in order, with command text and metadata.
it("records worktree setup and provision operations when a recorder is provided", async () => {
  const repoRoot = await createTempRepo();
  const { recorder, operations } = createWorkspaceOperationRecorderDouble();
  // Commit a provision script so the git_worktree strategy has a real
  // provisionCommand to execute inside the newly created worktree.
  await fs.mkdir(path.join(repoRoot, "scripts"), { recursive: true });
  await fs.writeFile(
    path.join(repoRoot, "scripts", "provision.sh"),
    [
      "#!/usr/bin/env bash",
      "set -euo pipefail",
      "printf 'provisioned\\n'",
    ].join("\n"),
    "utf8",
  );
  await runGit(repoRoot, ["add", "scripts/provision.sh"]);
  await runGit(repoRoot, ["commit", "-m", "Add recorder provision script"]);
  await realizeExecutionWorkspace({
    base: {
      baseCwd: repoRoot,
      source: "project_primary",
      projectId: "project-1",
      workspaceId: "workspace-1",
      repoUrl: null,
      repoRef: "HEAD",
    },
    config: {
      workspaceStrategy: {
        type: "git_worktree",
        branchTemplate: "{{issue.identifier}}-{{slug}}",
        provisionCommand: "bash ./scripts/provision.sh",
      },
    },
    issue: {
      id: "issue-1",
      identifier: "PAP-540",
      title: "Record workspace operations",
    },
    agent: {
      id: "agent-1",
      name: "Codex Coder",
      companyId: "company-1",
    },
    recorder,
  });
  // Phases must be recorded in execution order: worktree first, then provision.
  expect(operations.map((operation) => operation.phase)).toEqual([
    "worktree_prepare",
    "workspace_provision",
  ]);
  expect(operations[0]?.command).toContain("git worktree add");
  // Branch name is rendered from the "{{issue.identifier}}-{{slug}}" template.
  expect(operations[0]?.metadata).toMatchObject({
    branchName: "PAP-540-record-workspace-operations",
    created: true,
  });
  expect(operations[1]?.command).toBe("bash ./scripts/provision.sh");
});
it("reuses an existing branch without resetting it when recreating a missing worktree", async () => {
const repoRoot = await createTempRepo();
const branchName = "PAP-450-recreate-missing-worktree";
@@ -389,6 +511,74 @@ describe("realizeExecutionWorkspace", () => {
stdout: expect.stringContaining(workspace.branchName!),
});
});
// Exercises the recorder hook on the teardown path: cleaning up a
// runtime-created workspace should record the cleanup command plus the two
// git worktree cleanup steps (worktree removal, then branch deletion).
it("records teardown and cleanup operations when a recorder is provided", async () => {
  const repoRoot = await createTempRepo();
  const { recorder, operations } = createWorkspaceOperationRecorderDouble();
  // First create a real worktree-backed workspace (no recorder needed here).
  const workspace = await realizeExecutionWorkspace({
    base: {
      baseCwd: repoRoot,
      source: "project_primary",
      projectId: "project-1",
      workspaceId: "workspace-1",
      repoUrl: null,
      repoRef: "HEAD",
    },
    config: {
      workspaceStrategy: {
        type: "git_worktree",
        branchTemplate: "{{issue.identifier}}-{{slug}}",
      },
    },
    issue: {
      id: "issue-1",
      identifier: "PAP-541",
      title: "Cleanup recorder",
    },
    agent: {
      id: "agent-1",
      name: "Codex Coder",
      companyId: "company-1",
    },
  });
  await cleanupExecutionWorkspaceArtifacts({
    workspace: {
      id: "execution-workspace-1",
      cwd: workspace.cwd,
      providerType: "git_worktree",
      providerRef: workspace.worktreePath,
      branchName: workspace.branchName,
      repoUrl: workspace.repoUrl,
      baseRef: workspace.repoRef,
      projectId: workspace.projectId,
      projectWorkspaceId: workspace.workspaceId,
      sourceIssueId: "issue-1",
      // NOTE(review): createdByRuntime appears to gate the destructive
      // worktree/branch cleanup below — confirm in
      // cleanupExecutionWorkspaceArtifacts.
      metadata: {
        createdByRuntime: true,
      },
    },
    projectWorkspace: {
      cwd: repoRoot,
      cleanupCommand: "printf 'cleanup ok\\n'",
    },
    recorder,
  });
  // The cleanup command runs first, then the two git cleanup actions.
  expect(operations.map((operation) => operation.phase)).toEqual([
    "workspace_teardown",
    "worktree_cleanup",
    "worktree_cleanup",
  ]);
  expect(operations[0]?.command).toBe("printf 'cleanup ok\\n'");
  expect(operations[1]?.metadata).toMatchObject({
    cleanupAction: "worktree_remove",
  });
  expect(operations[2]?.metadata).toMatchObject({
    cleanupAction: "branch_delete",
  });
});
});
describe("ensureRuntimeServicesForRun", () => {

View File

@@ -29,6 +29,7 @@ import {
issueService,
logActivity,
secretService,
workspaceOperationService,
} from "../services/index.js";
import { conflict, forbidden, notFound, unprocessable } from "../errors.js";
import { assertBoard, assertCompanyAccess, getActorInfo } from "./authz.js";
@@ -62,6 +63,7 @@ export function agentRoutes(db: Db) {
const heartbeat = heartbeatService(db);
const issueApprovalsSvc = issueApprovalService(db);
const secretsSvc = secretService(db);
const workspaceOperations = workspaceOperationService(db);
const strictSecretsMode = process.env.PAPERCLIP_SECRETS_STRICT_MODE === "true";
function canCreateAgents(agent: { role: string; permissions: Record<string, unknown> | null | undefined }) {
@@ -1560,6 +1562,40 @@ export function agentRoutes(db: Db) {
res.json(result);
});
router.get("/heartbeat-runs/:runId/workspace-operations", async (req, res) => {
const runId = req.params.runId as string;
const run = await heartbeat.getRun(runId);
if (!run) {
res.status(404).json({ error: "Heartbeat run not found" });
return;
}
assertCompanyAccess(req, run.companyId);
const context = asRecord(run.contextSnapshot);
const executionWorkspaceId = asNonEmptyString(context?.executionWorkspaceId);
const operations = await workspaceOperations.listForRun(runId, executionWorkspaceId);
res.json(redactCurrentUserValue(operations));
});
// Stream one page of a workspace operation's captured log.
router.get("/workspace-operations/:operationId/log", async (req, res) => {
  const operationId = req.params.operationId as string;
  const operation = await workspaceOperations.getById(operationId);
  if (!operation) {
    res.status(404).json({ error: "Workspace operation not found" });
    return;
  }
  assertCompanyAccess(req, operation.companyId);
  // Query params arrive as strings; coerce and clamp them so a negative or
  // non-numeric offset cannot underflow the read, and a non-positive or
  // oversized limit cannot bypass the 256 KB page cap (previously an
  // arbitrary limitBytes was passed straight through to the file read).
  const DEFAULT_LIMIT_BYTES = 256000;
  const rawOffset = Number(req.query.offset ?? 0);
  const rawLimit = Number(req.query.limitBytes ?? DEFAULT_LIMIT_BYTES);
  const offset = Number.isFinite(rawOffset) ? Math.max(0, Math.floor(rawOffset)) : 0;
  const limitBytes =
    Number.isFinite(rawLimit) && rawLimit > 0
      ? Math.min(Math.floor(rawLimit), DEFAULT_LIMIT_BYTES)
      : DEFAULT_LIMIT_BYTES;
  const result = await workspaceOperations.readLog(operationId, { offset, limitBytes });
  res.json(result);
});
router.get("/issues/:issueId/live-runs", async (req, res) => {
const rawId = req.params.issueId as string;
const issueSvc = issueService(db);

View File

@@ -4,7 +4,7 @@ import type { Db } from "@paperclipai/db";
import { issues, projects, projectWorkspaces } from "@paperclipai/db";
import { updateExecutionWorkspaceSchema } from "@paperclipai/shared";
import { validate } from "../middleware/validate.js";
import { executionWorkspaceService, logActivity } from "../services/index.js";
import { executionWorkspaceService, logActivity, workspaceOperationService } from "../services/index.js";
import { parseProjectExecutionWorkspacePolicy } from "../services/execution-workspace-policy.js";
import {
cleanupExecutionWorkspaceArtifacts,
@@ -17,6 +17,7 @@ const TERMINAL_ISSUE_STATUSES = new Set(["done", "cancelled"]);
export function executionWorkspaceRoutes(db: Db) {
const router = Router();
const svc = executionWorkspaceService(db);
const workspaceOperationsSvc = workspaceOperationService(db);
router.get("/companies/:companyId/execution-workspaces", async (req, res) => {
const companyId = req.params.companyId as string;
@@ -121,6 +122,10 @@ export function executionWorkspaceRoutes(db: Db) {
workspace: existing,
projectWorkspace,
teardownCommand: projectPolicy?.workspaceStrategy?.teardownCommand ?? null,
recorder: workspaceOperationsSvc.createRecorder({
companyId: existing.companyId,
executionWorkspaceId: existing.id,
}),
});
cleanupWarnings = cleanupResult.warnings;
const cleanupPatch: Record<string, unknown> = {

View File

@@ -38,6 +38,7 @@ import {
} from "./workspace-runtime.js";
import { issueService } from "./issues.js";
import { executionWorkspaceService } from "./execution-workspaces.js";
import { workspaceOperationService } from "./workspace-operations.js";
import {
buildExecutionWorkspaceAdapterConfig,
gateProjectExecutionWorkspacePolicy,
@@ -705,6 +706,7 @@ export function heartbeatService(db: Db) {
const secretsSvc = secretService(db);
const issuesSvc = issueService(db);
const executionWorkspacesSvc = executionWorkspaceService(db);
const workspaceOperationsSvc = workspaceOperationService(db);
const activeRunExecutions = new Set<string>();
const budgetHooks = {
cancelWorkForScope: cancelBudgetScopeWork,
@@ -1732,6 +1734,13 @@ export function heartbeatService(db: Db) {
.where(and(eq(issues.id, issueId), eq(issues.companyId, agent.companyId)))
.then((rows) => rows[0] ?? null)
: null;
const existingExecutionWorkspace =
issueRef?.executionWorkspaceId ? await executionWorkspacesSvc.getById(issueRef.executionWorkspaceId) : null;
const workspaceOperationRecorder = workspaceOperationsSvc.createRecorder({
companyId: agent.companyId,
heartbeatRunId: run.id,
executionWorkspaceId: existingExecutionWorkspace?.id ?? null,
});
const executionWorkspace = await realizeExecutionWorkspace({
base: {
baseCwd: resolvedWorkspace.cwd,
@@ -1748,9 +1757,8 @@ export function heartbeatService(db: Db) {
name: agent.name,
companyId: agent.companyId,
},
recorder: workspaceOperationRecorder,
});
const existingExecutionWorkspace =
issueRef?.executionWorkspaceId ? await executionWorkspacesSvc.getById(issueRef.executionWorkspaceId) : null;
const resolvedProjectId = executionWorkspace.projectId ?? issueRef?.projectId ?? executionProjectId ?? null;
const resolvedProjectWorkspaceId = issueRef?.projectWorkspaceId ?? resolvedWorkspace.workspaceId ?? null;
const shouldReuseExisting =
@@ -1804,12 +1812,23 @@ export function heartbeatService(db: Db) {
},
})
: null;
await workspaceOperationRecorder.attachExecutionWorkspaceId(persistedExecutionWorkspace?.id ?? null);
if (issueId && persistedExecutionWorkspace && issueRef?.executionWorkspaceId !== persistedExecutionWorkspace.id) {
await issuesSvc.update(issueId, {
executionWorkspaceId: persistedExecutionWorkspace.id,
...(resolvedProjectWorkspaceId ? { projectWorkspaceId: resolvedProjectWorkspaceId } : {}),
});
}
if (persistedExecutionWorkspace) {
context.executionWorkspaceId = persistedExecutionWorkspace.id;
await db
.update(heartbeatRuns)
.set({
contextSnapshot: context,
updatedAt: new Date(),
})
.where(eq(heartbeatRuns.id, run.id));
}
const runtimeSessionResolution = resolveRuntimeSessionParamsForWorkspace({
agentId: agent.id,
previousSessionParams,

View File

@@ -19,6 +19,7 @@ export { accessService } from "./access.js";
export { instanceSettingsService } from "./instance-settings.js";
export { companyPortabilityService } from "./company-portability.js";
export { executionWorkspaceService } from "./execution-workspaces.js";
export { workspaceOperationService } from "./workspace-operations.js";
export { workProductService } from "./work-products.js";
export { logActivity, type LogActivityInput } from "./activity-log.js";
export { notifyHireApproved, type NotifyHireApprovedInput } from "./hire-hook.js";

View File

@@ -0,0 +1,156 @@
import { createReadStream, promises as fs } from "node:fs";
import path from "node:path";
import { createHash } from "node:crypto";
import { notFound } from "../errors.js";
import { resolvePaperclipInstanceRoot } from "../home-paths.js";
/** Supported log storage backends; currently only local files on disk. */
export type WorkspaceOperationLogStoreType = "local_file";

/** Opaque handle identifying one operation's log inside a store. */
export interface WorkspaceOperationLogHandle {
  store: WorkspaceOperationLogStoreType;
  // Store-relative reference; for local_file this is a path under the base dir.
  logRef: string;
}

/** Byte-range options for paginated log reads. */
export interface WorkspaceOperationLogReadOptions {
  offset?: number;
  limitBytes?: number;
}

/** One page of log content; nextOffset is absent once EOF was reached. */
export interface WorkspaceOperationLogReadResult {
  content: string;
  nextOffset?: number;
}

/** Size/integrity summary produced when a log is closed out. */
export interface WorkspaceOperationLogFinalizeSummary {
  bytes: number;
  sha256?: string;
  compressed: boolean;
}

/**
 * Append-only sink for one operation's output, plus random-access reads.
 * Lifecycle: begin -> append* -> finalize; read may be called after begin.
 */
export interface WorkspaceOperationLogStore {
  begin(input: { companyId: string; operationId: string }): Promise<WorkspaceOperationLogHandle>;
  append(
    handle: WorkspaceOperationLogHandle,
    event: { stream: "stdout" | "stderr" | "system"; chunk: string; ts: string },
  ): Promise<void>;
  finalize(handle: WorkspaceOperationLogHandle): Promise<WorkspaceOperationLogFinalizeSummary>;
  read(handle: WorkspaceOperationLogHandle, opts?: WorkspaceOperationLogReadOptions): Promise<WorkspaceOperationLogReadResult>;
}
// Replace anything outside [a-zA-Z0-9._-] so ids can never smuggle path
// separators into a filename.
function safeSegments(...segments: string[]) {
  return segments.map((segment) => segment.replace(/[^a-zA-Z0-9._-]/g, "_"));
}

// Resolve relativePath against basePath and verify the result stays inside
// basePath (defense in depth against traversal via a tampered logRef).
function resolveWithin(basePath: string, relativePath: string) {
  const resolved = path.resolve(basePath, relativePath);
  const base = path.resolve(basePath) + path.sep;
  if (!resolved.startsWith(base) && resolved !== path.resolve(basePath)) {
    throw new Error("Invalid log path");
  }
  return resolved;
}

/**
 * Local-filesystem implementation of WorkspaceOperationLogStore.
 * Logs live at <basePath>/<companyId>/<operationId>.ndjson, one JSON event
 * per line ({ts, stream, chunk}).
 */
function createLocalFileWorkspaceOperationLogStore(basePath: string): WorkspaceOperationLogStore {
  async function ensureDir(relativeDir: string) {
    const dir = resolveWithin(basePath, relativeDir);
    await fs.mkdir(dir, { recursive: true });
  }

  // Read [offset, offset+limitBytes) from the file, clamped to its size.
  async function readFileRange(filePath: string, offset: number, limitBytes: number): Promise<WorkspaceOperationLogReadResult> {
    const stat = await fs.stat(filePath).catch(() => null);
    if (!stat) throw notFound("Workspace operation log not found");
    const start = Math.max(0, Math.min(offset, stat.size));
    // A non-positive limit previously still returned one byte, because the
    // inclusive `end` index clamped to `start`; return an empty page instead.
    if (limitBytes <= 0) {
      return { content: "", nextOffset: start < stat.size ? start : undefined };
    }
    const end = Math.max(start, Math.min(start + limitBytes - 1, stat.size - 1));
    if (start > end) {
      return { content: "", nextOffset: start };
    }
    const chunks: Buffer[] = [];
    await new Promise<void>((resolve, reject) => {
      // createReadStream's `end` option is inclusive.
      const stream = createReadStream(filePath, { start, end });
      stream.on("data", (chunk) => {
        chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
      });
      stream.on("error", reject);
      stream.on("end", () => resolve());
    });
    // NOTE: a byte-range boundary can split a multi-byte UTF-8 character;
    // callers paginate by byte offset, not by character.
    const content = Buffer.concat(chunks).toString("utf8");
    const nextOffset = end + 1 < stat.size ? end + 1 : undefined;
    return { content, nextOffset };
  }

  // Stream the file through sha256 to avoid loading it fully into memory.
  async function sha256File(filePath: string): Promise<string> {
    return new Promise<string>((resolve, reject) => {
      const hash = createHash("sha256");
      const stream = createReadStream(filePath);
      stream.on("data", (chunk) => hash.update(chunk));
      stream.on("error", reject);
      stream.on("end", () => resolve(hash.digest("hex")));
    });
  }

  return {
    // Create (truncate) the log file up front so appends can assume it exists.
    async begin(input) {
      const [companyId] = safeSegments(input.companyId);
      const operationId = safeSegments(input.operationId)[0]!;
      const relDir = companyId;
      const relPath = path.join(relDir, `${operationId}.ndjson`);
      await ensureDir(relDir);
      const absPath = resolveWithin(basePath, relPath);
      await fs.writeFile(absPath, "", "utf8");
      return { store: "local_file", logRef: relPath };
    },
    // Append one NDJSON event line; silently ignores foreign store handles.
    async append(handle, event) {
      if (handle.store !== "local_file") return;
      const absPath = resolveWithin(basePath, handle.logRef);
      const line = JSON.stringify({
        ts: event.ts,
        stream: event.stream,
        chunk: event.chunk,
      });
      await fs.appendFile(absPath, `${line}\n`, "utf8");
    },
    // Compute the final size and checksum once writing is complete.
    async finalize(handle) {
      if (handle.store !== "local_file") {
        return { bytes: 0, compressed: false };
      }
      const absPath = resolveWithin(basePath, handle.logRef);
      const stat = await fs.stat(absPath).catch(() => null);
      if (!stat) throw notFound("Workspace operation log not found");
      const hash = await sha256File(absPath);
      return {
        bytes: stat.size,
        sha256: hash,
        compressed: false,
      };
    },
    async read(handle, opts) {
      if (handle.store !== "local_file") {
        throw notFound("Workspace operation log not found");
      }
      const absPath = resolveWithin(basePath, handle.logRef);
      const offset = opts?.offset ?? 0;
      const limitBytes = opts?.limitBytes ?? 256_000;
      return readFileRange(absPath, offset, limitBytes);
    },
  };
}
let cachedStore: WorkspaceOperationLogStore | null = null;

/**
 * Lazily build and memoize the process-wide log store. The base directory
 * comes from WORKSPACE_OPERATION_LOG_BASE_PATH when set, otherwise
 * <instance root>/data/workspace-operation-logs.
 */
export function getWorkspaceOperationLogStore() {
  if (cachedStore === null) {
    const configured = process.env.WORKSPACE_OPERATION_LOG_BASE_PATH;
    const basePath =
      configured ?? path.resolve(resolvePaperclipInstanceRoot(), "data", "workspace-operation-logs");
    cachedStore = createLocalFileWorkspaceOperationLogStore(basePath);
  }
  return cachedStore;
}

View File

@@ -0,0 +1,250 @@
import { randomUUID } from "node:crypto";
import type { Db } from "@paperclipai/db";
import { workspaceOperations } from "@paperclipai/db";
import type { WorkspaceOperation, WorkspaceOperationPhase, WorkspaceOperationStatus } from "@paperclipai/shared";
import { asc, desc, eq, inArray, isNull, or, and } from "drizzle-orm";
import { notFound } from "../errors.js";
import { redactCurrentUserText, redactCurrentUserValue } from "../log-redaction.js";
import { getWorkspaceOperationLogStore } from "./workspace-operation-log-store.js";
type WorkspaceOperationRow = typeof workspaceOperations.$inferSelect;

/**
 * Map a raw DB row onto the shared WorkspaceOperation shape, normalizing
 * every optional column to an explicit null.
 */
function toWorkspaceOperation(row: WorkspaceOperationRow): WorkspaceOperation {
  const orNull = <T,>(value: T | null | undefined): T | null => value ?? null;
  return {
    id: row.id,
    companyId: row.companyId,
    executionWorkspaceId: orNull(row.executionWorkspaceId),
    heartbeatRunId: orNull(row.heartbeatRunId),
    phase: row.phase as WorkspaceOperationPhase,
    command: orNull(row.command),
    cwd: orNull(row.cwd),
    status: row.status as WorkspaceOperationStatus,
    exitCode: orNull(row.exitCode),
    logStore: orNull(row.logStore),
    logRef: orNull(row.logRef),
    logBytes: orNull(row.logBytes),
    logSha256: orNull(row.logSha256),
    logCompressed: row.logCompressed,
    stdoutExcerpt: orNull(row.stdoutExcerpt),
    stderrExcerpt: orNull(row.stderrExcerpt),
    metadata: orNull(row.metadata as Record<string, unknown> | null),
    startedAt: row.startedAt,
    finishedAt: orNull(row.finishedAt),
    createdAt: row.createdAt,
    updatedAt: row.updatedAt,
  };
}
// Append chunk to the rolling excerpt, keeping only the trailing 4 KiB so
// inline excerpts stay bounded for chatty commands.
function appendExcerpt(current: string, chunk: string) {
  const MAX_EXCERPT_CHARS = 4096;
  const combined = current + chunk;
  if (combined.length <= MAX_EXCERPT_CHARS) return combined;
  return combined.slice(combined.length - MAX_EXCERPT_CHARS);
}
// Shallow-merge two metadata objects, with patch keys winning. Preserves the
// distinction between "no metadata at all" (null) and an empty object.
function combineMetadata(
  base: Record<string, unknown> | null | undefined,
  patch: Record<string, unknown> | null | undefined,
) {
  if (base == null && patch == null) return null;
  const merged: Record<string, unknown> = {};
  Object.assign(merged, base ?? {}, patch ?? {});
  return merged;
}
/**
 * Records workspace lifecycle operations around a command execution.
 * Implementations persist one row per operation and capture its output;
 * test doubles may record in memory instead.
 */
export interface WorkspaceOperationRecorder {
  // Attach (and, in persistent implementations, backfill) the execution
  // workspace id once the workspace row is known; null clears the reference.
  attachExecutionWorkspaceId(executionWorkspaceId: string | null): Promise<void>;
  /**
   * Execute `input.run()` and record one operation describing it. The result
   * returned by `run` drives the stored status, exit code, output excerpts,
   * and merged metadata.
   */
  recordOperation(input: {
    phase: WorkspaceOperationPhase;
    command?: string | null;
    cwd?: string | null;
    metadata?: Record<string, unknown> | null;
    run: () => Promise<{
      status?: WorkspaceOperationStatus;
      exitCode?: number | null;
      stdout?: string | null;
      stderr?: string | null;
      // Free-form system/diagnostic output, logged but not kept as an excerpt.
      system?: string | null;
      metadata?: Record<string, unknown> | null;
    }>;
  }): Promise<WorkspaceOperation>;
}
/**
 * Persistence and log-capture service for workspace lifecycle operations.
 * Exposes direct reads (getById, listForRun, listForExecutionWorkspace,
 * readLog) plus a `createRecorder` factory that wraps a command execution:
 * it inserts a "running" row, streams redacted output into the log store,
 * then finalizes the row as succeeded/failed.
 */
export function workspaceOperationService(db: Db) {
  const logStore = getWorkspaceOperationLogStore();

  // Load one operation by primary key, mapped to the shared shape.
  async function getById(id: string) {
    const row = await db
      .select()
      .from(workspaceOperations)
      .where(eq(workspaceOperations.id, id))
      .then((rows) => rows[0] ?? null);
    return row ? toWorkspaceOperation(row) : null;
  }

  return {
    getById,
    /**
     * Build a recorder scoped to a company and, optionally, a heartbeat run
     * and/or execution workspace. The workspace id may be attached later,
     * after the workspace row is persisted; attaching backfills all rows
     * this recorder has already created.
     */
    createRecorder(input: {
      companyId: string;
      heartbeatRunId?: string | null;
      executionWorkspaceId?: string | null;
    }): WorkspaceOperationRecorder {
      let executionWorkspaceId = input.executionWorkspaceId ?? null;
      // Ids created by this recorder, so attach can backfill them in bulk.
      const createdIds: string[] = [];
      return {
        async attachExecutionWorkspaceId(nextExecutionWorkspaceId) {
          executionWorkspaceId = nextExecutionWorkspaceId ?? null;
          if (!executionWorkspaceId || createdIds.length === 0) return;
          await db
            .update(workspaceOperations)
            .set({
              executionWorkspaceId,
              updatedAt: new Date(),
            })
            .where(inArray(workspaceOperations.id, createdIds));
        },
        async recordOperation(recordInput) {
          const startedAt = new Date();
          // Generate the id up front so the log file can be created before
          // the row exists.
          const id = randomUUID();
          const handle = await logStore.begin({
            companyId: input.companyId,
            operationId: id,
          });
          // Rolling output tails stored inline on the row for quick display.
          let stdoutExcerpt = "";
          let stderrExcerpt = "";
          const append = async (stream: "stdout" | "stderr" | "system", chunk: string | null | undefined) => {
            if (!chunk) return;
            // Redact current-user secrets before anything is persisted.
            const sanitizedChunk = redactCurrentUserText(chunk);
            if (stream === "stdout") stdoutExcerpt = appendExcerpt(stdoutExcerpt, sanitizedChunk);
            if (stream === "stderr") stderrExcerpt = appendExcerpt(stderrExcerpt, sanitizedChunk);
            await logStore.append(handle, {
              stream,
              chunk: sanitizedChunk,
              ts: new Date().toISOString(),
            });
          };
          // Insert the row in "running" state before executing the command,
          // so an operation is visible even if the process dies mid-run.
          await db.insert(workspaceOperations).values({
            id,
            companyId: input.companyId,
            executionWorkspaceId,
            heartbeatRunId: input.heartbeatRunId ?? null,
            phase: recordInput.phase,
            command: recordInput.command ?? null,
            cwd: recordInput.cwd ?? null,
            status: "running",
            logStore: handle.store,
            logRef: handle.logRef,
            metadata: redactCurrentUserValue(recordInput.metadata ?? null) as Record<string, unknown> | null,
            startedAt,
          });
          createdIds.push(id);
          try {
            const result = await recordInput.run();
            await append("system", result.system ?? null);
            await append("stdout", result.stdout ?? null);
            await append("stderr", result.stderr ?? null);
            const finalized = await logStore.finalize(handle);
            const finishedAt = new Date();
            const row = await db
              .update(workspaceOperations)
              .set({
                executionWorkspaceId,
                status: result.status ?? "succeeded",
                exitCode: result.exitCode ?? null,
                stdoutExcerpt: stdoutExcerpt || null,
                stderrExcerpt: stderrExcerpt || null,
                logBytes: finalized.bytes,
                logSha256: finalized.sha256,
                logCompressed: finalized.compressed,
                // Result metadata wins over the initial request metadata.
                metadata: redactCurrentUserValue(
                  combineMetadata(recordInput.metadata, result.metadata),
                ) as Record<string, unknown> | null,
                finishedAt,
                updatedAt: finishedAt,
              })
              .where(eq(workspaceOperations.id, id))
              .returning()
              .then((rows) => rows[0] ?? null);
            if (!row) throw notFound("Workspace operation not found");
            return toWorkspaceOperation(row);
          } catch (error) {
            // Mark the row failed but re-throw so callers still see the error.
            await append("stderr", error instanceof Error ? error.message : String(error));
            const finalized = await logStore.finalize(handle).catch(() => null);
            const finishedAt = new Date();
            await db
              .update(workspaceOperations)
              .set({
                executionWorkspaceId,
                status: "failed",
                stdoutExcerpt: stdoutExcerpt || null,
                stderrExcerpt: stderrExcerpt || null,
                logBytes: finalized?.bytes ?? null,
                logSha256: finalized?.sha256 ?? null,
                logCompressed: finalized?.compressed ?? false,
                finishedAt,
                updatedAt: finishedAt,
              })
              .where(eq(workspaceOperations.id, id));
            throw error;
          }
        },
      };
    },
    // Operations for a heartbeat run; when a workspace id is supplied, also
    // include run-less cleanup operations recorded against that workspace.
    listForRun: async (runId: string, executionWorkspaceId?: string | null) => {
      const conditions = [eq(workspaceOperations.heartbeatRunId, runId)];
      if (executionWorkspaceId) {
        const cleanupCondition = and(
          eq(workspaceOperations.executionWorkspaceId, executionWorkspaceId)!,
          isNull(workspaceOperations.heartbeatRunId),
        )!;
        if (cleanupCondition) conditions.push(cleanupCondition);
      }
      const rows = await db
        .select()
        .from(workspaceOperations)
        .where(conditions.length === 1 ? conditions[0]! : or(...conditions)!)
        .orderBy(asc(workspaceOperations.startedAt), asc(workspaceOperations.createdAt), asc(workspaceOperations.id));
      return rows.map(toWorkspaceOperation);
    },
    // Newest-first history of operations for one execution workspace.
    listForExecutionWorkspace: async (executionWorkspaceId: string) => {
      const rows = await db
        .select()
        .from(workspaceOperations)
        .where(eq(workspaceOperations.executionWorkspaceId, executionWorkspaceId))
        .orderBy(desc(workspaceOperations.startedAt), desc(workspaceOperations.createdAt));
      return rows.map(toWorkspaceOperation);
    },
    // Read a page of the raw NDJSON log, redacted, with pagination metadata.
    readLog: async (operationId: string, opts?: { offset?: number; limitBytes?: number }) => {
      const operation = await getById(operationId);
      if (!operation) throw notFound("Workspace operation not found");
      if (!operation.logStore || !operation.logRef) throw notFound("Workspace operation log not found");
      const result = await logStore.read(
        {
          store: operation.logStore as "local_file",
          logRef: operation.logRef,
        },
        opts,
      );
      return {
        operationId,
        store: operation.logStore,
        logRef: operation.logRef,
        ...result,
        // Redact on the way out as well, in case redaction rules changed
        // after the log was written.
        content: redactCurrentUserText(result.content),
      };
    },
  };
}
export { toWorkspaceOperation };

View File

@@ -10,6 +10,7 @@ import { workspaceRuntimeServices } from "@paperclipai/db";
import { and, desc, eq, inArray } from "drizzle-orm";
import { asNumber, asString, parseObject, renderTemplate } from "../adapters/utils.js";
import { resolveHomeAwarePath } from "../home-paths.js";
import type { WorkspaceOperationRecorder } from "./workspace-operations.js";
export interface ExecutionWorkspaceInput {
baseCwd: string;
@@ -221,12 +222,23 @@ function resolveConfiguredPath(value: string, baseDir: string): string {
return path.resolve(baseDir, value);
}
async function runGit(args: string[], cwd: string): Promise<string> {
// Render a command + argv as a single display string, JSON-quoting any part
// that contains characters outside a conservative safe set.
function formatCommandForDisplay(command: string, args: string[]) {
  const SAFE_PART = /^[A-Za-z0-9_./:-]+$/;
  const rendered: string[] = [];
  for (const part of [command, ...args]) {
    rendered.push(SAFE_PART.test(part) ? part : JSON.stringify(part));
  }
  return rendered.join(" ");
}
async function executeProcess(input: {
command: string;
args: string[];
cwd: string;
env?: NodeJS.ProcessEnv;
}): Promise<{ stdout: string; stderr: string; code: number | null }> {
const proc = await new Promise<{ stdout: string; stderr: string; code: number | null }>((resolve, reject) => {
const child = spawn("git", args, {
cwd,
const child = spawn(input.command, input.args, {
cwd: input.cwd,
stdio: ["ignore", "pipe", "pipe"],
env: process.env,
env: input.env ?? process.env,
});
let stdout = "";
let stderr = "";
@@ -239,6 +251,15 @@ async function runGit(args: string[], cwd: string): Promise<string> {
child.on("error", reject);
child.on("close", (code) => resolve({ stdout, stderr, code }));
});
return proc;
}
async function runGit(args: string[], cwd: string): Promise<string> {
const proc = await executeProcess({
command: "git",
args,
cwd,
});
if (proc.code !== 0) {
throw new Error(proc.stderr.trim() || proc.stdout.trim() || `git ${args.join(" ")} failed`);
}
@@ -307,22 +328,11 @@ async function runWorkspaceCommand(input: {
label: string;
}) {
const shell = process.env.SHELL?.trim() || "/bin/sh";
const proc = await new Promise<{ stdout: string; stderr: string; code: number | null }>((resolve, reject) => {
const child = spawn(shell, ["-c", input.command], {
cwd: input.cwd,
env: input.env,
stdio: ["ignore", "pipe", "pipe"],
});
let stdout = "";
let stderr = "";
child.stdout?.on("data", (chunk) => {
stdout += String(chunk);
});
child.stderr?.on("data", (chunk) => {
stderr += String(chunk);
});
child.on("error", reject);
child.on("close", (code) => resolve({ stdout, stderr, code }));
const proc = await executeProcess({
command: shell,
args: ["-c", input.command],
cwd: input.cwd,
env: input.env,
});
if (proc.code === 0) return;
@@ -334,6 +344,115 @@ async function runWorkspaceCommand(input: {
);
}
/**
 * Run a git command, optionally recording it as a workspace operation.
 *
 * Without a recorder this degrades to the plain runGit helper. With a recorder,
 * the process result (stdout/stderr/exit code) is reported through
 * recorder.recordOperation and a non-zero exit is re-raised afterwards so
 * callers see the same failure semantics either way.
 *
 * @returns trimmed stdout of the git command on success.
 * @throws Error describing the failure (stderr/stdout details when available).
 */
async function recordGitOperation(
  recorder: WorkspaceOperationRecorder | null | undefined,
  input: {
    phase: "worktree_prepare" | "worktree_cleanup";
    args: string[];
    cwd: string;
    metadata?: Record<string, unknown> | null;
    successMessage?: string | null;
    failureLabel?: string | null;
  },
): Promise<string> {
  // No recorder: fall through to the untracked git runner.
  if (!recorder) {
    return runGit(input.args, input.cwd);
  }
  // Captured inside the run callback so the outcome is visible after recording.
  let captured: { stdout: string; stderr: string; code: number | null } = {
    stdout: "",
    stderr: "",
    code: null,
  };
  await recorder.recordOperation({
    phase: input.phase,
    command: formatCommandForDisplay("git", input.args),
    cwd: input.cwd,
    metadata: input.metadata ?? null,
    run: async () => {
      captured = await executeProcess({
        command: "git",
        args: input.args,
        cwd: input.cwd,
      });
      const succeeded = captured.code === 0;
      return {
        status: succeeded ? "succeeded" : "failed",
        exitCode: captured.code,
        stdout: captured.stdout,
        stderr: captured.stderr,
        // Only surface the success message when the command actually succeeded.
        system: succeeded ? input.successMessage ?? null : null,
      };
    },
  });
  if (captured.code !== 0) {
    const label = input.failureLabel ?? `git ${input.args.join(" ")}`;
    const details = [captured.stderr.trim(), captured.stdout.trim()].filter(Boolean).join("\n");
    throw new Error(
      details.length > 0
        ? `${label} failed: ${details}`
        : `${label} failed with exit code ${captured.code ?? -1}`,
    );
  }
  return captured.stdout.trim();
}
/**
 * Run a shell workspace command, optionally recording it as a workspace operation.
 *
 * Without a recorder this delegates to runWorkspaceCommand (which throws on
 * failure). With a recorder, the command runs under $SHELL (default /bin/sh),
 * the result is reported through recorder.recordOperation, and a non-zero exit
 * is re-raised with the caller-supplied label so failure semantics match.
 *
 * @throws Error labelled with input.label (stderr/stdout details when available).
 */
async function recordWorkspaceCommandOperation(
  recorder: WorkspaceOperationRecorder | null | undefined,
  input: {
    phase: "workspace_provision" | "workspace_teardown";
    command: string;
    cwd: string;
    env: NodeJS.ProcessEnv;
    label: string;
    metadata?: Record<string, unknown> | null;
    successMessage?: string | null;
  },
) {
  // No recorder: run directly; runWorkspaceCommand raises on failure itself.
  if (!recorder) {
    await runWorkspaceCommand(input);
    return;
  }
  // Captured inside the run callback so the outcome is visible after recording.
  let outcome: { stdout: string; stderr: string; code: number | null } = {
    stdout: "",
    stderr: "",
    code: null,
  };
  await recorder.recordOperation({
    phase: input.phase,
    command: input.command,
    cwd: input.cwd,
    metadata: input.metadata ?? null,
    run: async () => {
      const shell = process.env.SHELL?.trim() || "/bin/sh";
      outcome = await executeProcess({
        command: shell,
        args: ["-c", input.command],
        cwd: input.cwd,
        env: input.env,
      });
      const succeeded = outcome.code === 0;
      return {
        status: succeeded ? "succeeded" : "failed",
        exitCode: outcome.code,
        stdout: outcome.stdout,
        stderr: outcome.stderr,
        // Only surface the success message when the command actually succeeded.
        system: succeeded ? input.successMessage ?? null : null,
      };
    },
  });
  if (outcome.code === 0) return;
  const details = [outcome.stderr.trim(), outcome.stdout.trim()].filter(Boolean).join("\n");
  throw new Error(
    details.length > 0
      ? `${input.label} failed: ${details}`
      : `${input.label} failed with exit code ${outcome.code ?? -1}`,
  );
}
async function provisionExecutionWorktree(input: {
strategy: Record<string, unknown>;
base: ExecutionWorkspaceInput;
@@ -343,11 +462,13 @@ async function provisionExecutionWorktree(input: {
issue: ExecutionWorkspaceIssueRef | null;
agent: ExecutionWorkspaceAgentRef;
created: boolean;
recorder?: WorkspaceOperationRecorder | null;
}) {
const provisionCommand = asString(input.strategy.provisionCommand, "").trim();
if (!provisionCommand) return;
await runWorkspaceCommand({
await recordWorkspaceCommandOperation(input.recorder, {
phase: "workspace_provision",
command: provisionCommand,
cwd: input.worktreePath,
env: buildWorkspaceCommandEnv({
@@ -360,6 +481,13 @@ async function provisionExecutionWorktree(input: {
created: input.created,
}),
label: `Execution workspace provision command "${provisionCommand}"`,
metadata: {
repoRoot: input.repoRoot,
worktreePath: input.worktreePath,
branchName: input.branchName,
created: input.created,
},
successMessage: `Provisioned workspace at ${input.worktreePath}\n`,
});
}
@@ -417,6 +545,7 @@ export async function realizeExecutionWorkspace(input: {
config: Record<string, unknown>;
issue: ExecutionWorkspaceIssueRef | null;
agent: ExecutionWorkspaceAgentRef;
recorder?: WorkspaceOperationRecorder | null;
}): Promise<RealizedExecutionWorkspace> {
const rawStrategy = parseObject(input.config.workspaceStrategy);
const strategyType = asString(rawStrategy.type, "project_primary");
@@ -454,6 +583,25 @@ export async function realizeExecutionWorkspace(input: {
if (existingWorktree) {
const existingGitDir = await runGit(["rev-parse", "--git-dir"], worktreePath).catch(() => null);
if (existingGitDir) {
if (input.recorder) {
await input.recorder.recordOperation({
phase: "worktree_prepare",
cwd: repoRoot,
metadata: {
repoRoot,
worktreePath,
branchName,
baseRef,
created: false,
reused: true,
},
run: async () => ({
status: "succeeded",
exitCode: 0,
system: `Reused existing git worktree at ${worktreePath}\n`,
}),
});
}
await provisionExecutionWorktree({
strategy: rawStrategy,
base: input.base,
@@ -463,6 +611,7 @@ export async function realizeExecutionWorkspace(input: {
issue: input.issue,
agent: input.agent,
created: false,
recorder: input.recorder ?? null,
});
return {
...input.base,
@@ -478,12 +627,39 @@ export async function realizeExecutionWorkspace(input: {
}
try {
await runGit(["worktree", "add", "-b", branchName, worktreePath, baseRef], repoRoot);
await recordGitOperation(input.recorder, {
phase: "worktree_prepare",
args: ["worktree", "add", "-b", branchName, worktreePath, baseRef],
cwd: repoRoot,
metadata: {
repoRoot,
worktreePath,
branchName,
baseRef,
created: true,
},
successMessage: `Created git worktree at ${worktreePath}\n`,
failureLabel: `git worktree add ${worktreePath}`,
});
} catch (error) {
if (!gitErrorIncludes(error, "already exists")) {
throw error;
}
await runGit(["worktree", "add", worktreePath, branchName], repoRoot);
await recordGitOperation(input.recorder, {
phase: "worktree_prepare",
args: ["worktree", "add", worktreePath, branchName],
cwd: repoRoot,
metadata: {
repoRoot,
worktreePath,
branchName,
baseRef,
created: false,
reusedExistingBranch: true,
},
successMessage: `Attached existing branch ${branchName} at ${worktreePath}\n`,
failureLabel: `git worktree add ${worktreePath}`,
});
}
await provisionExecutionWorktree({
strategy: rawStrategy,
@@ -494,6 +670,7 @@ export async function realizeExecutionWorkspace(input: {
issue: input.issue,
agent: input.agent,
created: true,
recorder: input.recorder ?? null,
});
return {
@@ -526,6 +703,7 @@ export async function cleanupExecutionWorkspaceArtifacts(input: {
cleanupCommand: string | null;
} | null;
teardownCommand?: string | null;
recorder?: WorkspaceOperationRecorder | null;
}) {
const warnings: string[] = [];
const workspacePath = input.workspace.providerRef ?? input.workspace.cwd;
@@ -543,11 +721,19 @@ export async function cleanupExecutionWorkspaceArtifacts(input: {
for (const command of cleanupCommands) {
try {
await runWorkspaceCommand({
await recordWorkspaceCommandOperation(input.recorder, {
phase: "workspace_teardown",
command,
cwd: workspacePath ?? input.projectWorkspace?.cwd ?? process.cwd(),
env: cleanupEnv,
label: `Execution workspace cleanup command "${command}"`,
metadata: {
workspaceId: input.workspace.id,
workspacePath,
branchName: input.workspace.branchName,
providerType: input.workspace.providerType,
},
successMessage: `Completed cleanup command "${command}"\n`,
});
} catch (err) {
warnings.push(err instanceof Error ? err.message : String(err));
@@ -565,7 +751,19 @@ export async function cleanupExecutionWorkspaceArtifacts(input: {
warnings.push(`Could not resolve git repo root for "${workspacePath}".`);
} else {
try {
await runGit(["worktree", "remove", "--force", workspacePath], repoRoot);
await recordGitOperation(input.recorder, {
phase: "worktree_cleanup",
args: ["worktree", "remove", "--force", workspacePath],
cwd: repoRoot,
metadata: {
workspaceId: input.workspace.id,
workspacePath,
branchName: input.workspace.branchName,
cleanupAction: "worktree_remove",
},
successMessage: `Removed git worktree ${workspacePath}\n`,
failureLabel: `git worktree remove ${workspacePath}`,
});
} catch (err) {
warnings.push(err instanceof Error ? err.message : String(err));
}
@@ -576,7 +774,19 @@ export async function cleanupExecutionWorkspaceArtifacts(input: {
warnings.push(`Could not resolve git repo root to delete branch "${input.workspace.branchName}".`);
} else {
try {
await runGit(["branch", "-d", input.workspace.branchName], repoRoot);
await recordGitOperation(input.recorder, {
phase: "worktree_cleanup",
args: ["branch", "-d", input.workspace.branchName],
cwd: repoRoot,
metadata: {
workspaceId: input.workspace.id,
workspacePath,
branchName: input.workspace.branchName,
cleanupAction: "branch_delete",
},
successMessage: `Deleted branch ${input.workspace.branchName}\n`,
failureLabel: `git branch -d ${input.workspace.branchName}`,
});
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
warnings.push(`Skipped deleting branch "${input.workspace.branchName}": ${message}`);
@@ -590,6 +800,22 @@ export async function cleanupExecutionWorkspaceArtifacts(input: {
warnings.push(`Refusing to remove shared project workspace "${workspacePath}".`);
} else {
await fs.rm(resolvedWorkspacePath, { recursive: true, force: true });
if (input.recorder) {
await input.recorder.recordOperation({
phase: "workspace_teardown",
cwd: projectWorkspaceCwd ?? process.cwd(),
metadata: {
workspaceId: input.workspace.id,
workspacePath: resolvedWorkspacePath,
cleanupAction: "remove_local_fs",
},
run: async () => ({
status: "succeeded",
exitCode: 0,
system: `Removed local workspace directory ${resolvedWorkspacePath}\n`,
}),
});
}
}
}

View File

@@ -2,6 +2,7 @@ import type {
HeartbeatRun,
HeartbeatRunEvent,
InstanceSchedulerHeartbeatAgent,
WorkspaceOperation,
} from "@paperclipai/shared";
import { api } from "./client";
@@ -42,6 +43,12 @@ export const heartbeatsApi = {
api.get<{ runId: string; store: string; logRef: string; content: string; nextOffset?: number }>(
`/heartbeat-runs/${runId}/log?offset=${encodeURIComponent(String(offset))}&limitBytes=${encodeURIComponent(String(limitBytes))}`,
),
workspaceOperations: (runId: string) =>
api.get<WorkspaceOperation[]>(`/heartbeat-runs/${runId}/workspace-operations`),
workspaceOperationLog: (operationId: string, offset = 0, limitBytes = 256000) =>
api.get<{ operationId: string; store: string; logRef: string; content: string; nextOffset?: number }>(
`/workspace-operations/${operationId}/log?offset=${encodeURIComponent(String(offset))}&limitBytes=${encodeURIComponent(String(limitBytes))}`,
),
cancel: (runId: string) => api.post<void>(`/heartbeat-runs/${runId}/cancel`, {}),
liveRunsForIssue: (issueId: string) =>
api.get<LiveRunForIssue[]>(`/issues/${issueId}/live-runs`),

View File

@@ -100,6 +100,7 @@ export const queryKeys = {
heartbeats: (companyId: string, agentId?: string) =>
["heartbeats", companyId, agentId] as const,
runDetail: (runId: string) => ["heartbeat-run", runId] as const,
runWorkspaceOperations: (runId: string) => ["heartbeat-run", runId, "workspace-operations"] as const,
liveRuns: (companyId: string) => ["live-runs", companyId] as const,
runIssues: (runId: string) => ["run-issues", runId] as const,
org: (companyId: string) => ["org", companyId] as const,

View File

@@ -68,6 +68,7 @@ import {
type HeartbeatRunEvent,
type AgentRuntimeState,
type LiveEvent,
type WorkspaceOperation,
} from "@paperclipai/shared";
import { redactHomePathUserSegments, redactHomePathUserSegmentsInValue } from "@paperclipai/adapter-utils";
import { agentRouteRef } from "../lib/utils";
@@ -238,6 +239,219 @@ function asNonEmptyString(value: unknown): string | null {
return trimmed.length > 0 ? trimmed : null;
}
/**
 * Parse newline-delimited JSON log content into RunLogChunk entries.
 * Blank lines, malformed JSON, and entries with an empty/missing chunk are
 * silently dropped. Unknown stream values fall back to "stdout"; a missing
 * timestamp falls back to "now".
 */
function parseStoredLogContent(content: string): RunLogChunk[] {
  const chunks: RunLogChunk[] = [];
  for (const line of content.split("\n")) {
    const candidate = line.trim();
    if (candidate.length === 0) continue;
    let raw: { ts?: unknown; stream?: unknown; chunk?: unknown };
    try {
      raw = JSON.parse(candidate) as { ts?: unknown; stream?: unknown; chunk?: unknown };
    } catch {
      // Ignore malformed log lines.
      continue;
    }
    const chunk = typeof raw.chunk === "string" ? raw.chunk : "";
    if (!chunk) continue;
    const stream =
      raw.stream === "stderr" || raw.stream === "system" ? raw.stream : "stdout";
    const ts = typeof raw.ts === "string" ? raw.ts : new Date().toISOString();
    chunks.push({ ts, stream, chunk });
  }
  return chunks;
}
/** Human-readable label for a workspace operation phase; unknown phases render as-is. */
function workspaceOperationPhaseLabel(phase: WorkspaceOperation["phase"]) {
  const labels: Record<string, string> = {
    worktree_prepare: "Worktree setup",
    workspace_provision: "Provision",
    workspace_teardown: "Teardown",
    worktree_cleanup: "Worktree cleanup",
  };
  return labels[phase] ?? phase;
}
/** Tailwind tone classes keyed by operation status; unknown statuses get a muted look. */
function workspaceOperationStatusTone(status: WorkspaceOperation["status"]) {
  const tones: Record<string, string> = {
    succeeded: "border-green-500/20 bg-green-500/10 text-green-700 dark:text-green-300",
    failed: "border-red-500/20 bg-red-500/10 text-red-700 dark:text-red-300",
    running: "border-cyan-500/20 bg-cyan-500/10 text-cyan-700 dark:text-cyan-300",
    skipped: "border-yellow-500/20 bg-yellow-500/10 text-yellow-700 dark:text-yellow-300",
  };
  return tones[status] ?? "border-border bg-muted/40 text-muted-foreground";
}
/** Small pill badge showing an operation's status, colored by workspaceOperationStatusTone. */
function WorkspaceOperationStatusBadge({ status }: { status: WorkspaceOperation["status"] }) {
  const badgeClass = cn(
    "inline-flex items-center rounded-full border px-2 py-0.5 text-[11px] font-medium capitalize",
    workspaceOperationStatusTone(status),
  );
  // Underscores in status values are shown as spaces (e.g. "not_started" -> "not started").
  return <span className={badgeClass}>{status.replace("_", " ")}</span>;
}
// Collapsible viewer for a single workspace operation's persisted log.
// The log is fetched lazily (only while open and a logRef exists) and polled
// every 2s while the operation is still running.
function WorkspaceOperationLogViewer({ operation }: { operation: WorkspaceOperation }) {
const [open, setOpen] = useState(false);
const { data: logData, isLoading, error } = useQuery({
queryKey: ["workspace-operation-log", operation.id],
queryFn: () => heartbeatsApi.workspaceOperationLog(operation.id),
// Don't fetch until the user expands the viewer and a log actually exists.
enabled: open && Boolean(operation.logRef),
// Poll while running so the log streams in; stop once the operation settles.
refetchInterval: open && operation.status === "running" ? 2000 : false,
});
// Parse the raw NDJSON payload into timestamped chunks; memoized on content.
const chunks = useMemo(
() => (logData?.content ? parseStoredLogContent(logData.content) : []),
[logData?.content],
);
return (
<div className="space-y-2">
<button
type="button"
className="text-[11px] text-muted-foreground underline underline-offset-2 hover:text-foreground"
onClick={() => setOpen((value) => !value)}
>
{open ? "Hide full log" : "Show full log"}
</button>
{open && (
<div className="rounded-md border border-border bg-background/70 p-2">
{isLoading && <div className="text-xs text-muted-foreground">Loading log...</div>}
{error && (
<div className="text-xs text-destructive">
{error instanceof Error ? error.message : "Failed to load workspace operation log"}
</div>
)}
{!isLoading && !error && chunks.length === 0 && (
<div className="text-xs text-muted-foreground">No persisted log lines.</div>
)}
{chunks.length > 0 && (
<div className="max-h-64 overflow-y-auto rounded bg-neutral-100 p-2 font-mono text-xs dark:bg-neutral-950">
{chunks.map((chunk, index) => (
<div key={`${chunk.ts}-${index}`} className="flex gap-2">
<span className="shrink-0 text-neutral-500">
{new Date(chunk.ts).toLocaleTimeString("en-US", { hour12: false })}
</span>
{/* Stream tag colored per channel: stderr red, system blue, stdout muted. */}
<span
className={cn(
"shrink-0 w-14",
chunk.stream === "stderr"
? "text-red-600 dark:text-red-300"
: chunk.stream === "system"
? "text-blue-600 dark:text-blue-300"
: "text-muted-foreground",
)}
>
[{chunk.stream}]
</span>
{/* Home-path user segments are redacted before rendering raw log text. */}
<span className="whitespace-pre-wrap break-all">{redactHomePathUserSegments(chunk.chunk)}</span>
</div>
))}
</div>
)}
</div>
)}
</div>
);
}
// Summary panel listing a run's workspace operations (worktree setup,
// provision, teardown, cleanup). Renders nothing when there are none.
// Each card shows phase, status, timing, command/cwd, selected metadata
// fields, stderr/stdout excerpts, and an expandable full-log viewer.
function WorkspaceOperationsSection({ operations }: { operations: WorkspaceOperation[] }) {
if (operations.length === 0) return null;
return (
<div className="rounded-lg border border-border bg-background/60 p-3 space-y-3">
<div className="text-xs font-medium text-muted-foreground">
Workspace ({operations.length})
</div>
<div className="space-y-3">
{operations.map((operation) => {
// metadata is free-form JSON persisted with the operation; asRecord guards its shape.
const metadata = asRecord(operation.metadata);
return (
<div key={operation.id} className="rounded-md border border-border/70 bg-background/70 p-3 space-y-2">
<div className="flex flex-wrap items-center gap-2">
<div className="text-sm font-medium">{workspaceOperationPhaseLabel(operation.phase)}</div>
<WorkspaceOperationStatusBadge status={operation.status} />
<div className="text-[11px] text-muted-foreground">
{relativeTime(operation.startedAt)}
{operation.finishedAt && ` to ${relativeTime(operation.finishedAt)}`}
</div>
</div>
{operation.command && (
<div className="text-xs break-all">
<span className="text-muted-foreground">Command: </span>
<span className="font-mono">{operation.command}</span>
</div>
)}
{operation.cwd && (
<div className="text-xs break-all">
<span className="text-muted-foreground">Working dir: </span>
<span className="font-mono">{operation.cwd}</span>
</div>
)}
{/* Metadata grid is only rendered when at least one known field is present.
The `as string` casts below are safe at runtime because each field is
gated by its own asNonEmptyString check first. */}
{(asNonEmptyString(metadata?.branchName)
|| asNonEmptyString(metadata?.baseRef)
|| asNonEmptyString(metadata?.worktreePath)
|| asNonEmptyString(metadata?.repoRoot)
|| asNonEmptyString(metadata?.cleanupAction)) && (
<div className="grid gap-1 text-xs sm:grid-cols-2">
{asNonEmptyString(metadata?.branchName) && (
<div><span className="text-muted-foreground">Branch: </span><span className="font-mono">{metadata?.branchName as string}</span></div>
)}
{asNonEmptyString(metadata?.baseRef) && (
<div><span className="text-muted-foreground">Base ref: </span><span className="font-mono">{metadata?.baseRef as string}</span></div>
)}
{asNonEmptyString(metadata?.worktreePath) && (
<div className="break-all"><span className="text-muted-foreground">Worktree: </span><span className="font-mono">{metadata?.worktreePath as string}</span></div>
)}
{asNonEmptyString(metadata?.repoRoot) && (
<div className="break-all"><span className="text-muted-foreground">Repo root: </span><span className="font-mono">{metadata?.repoRoot as string}</span></div>
)}
{asNonEmptyString(metadata?.cleanupAction) && (
<div><span className="text-muted-foreground">Cleanup: </span><span className="font-mono">{metadata?.cleanupAction as string}</span></div>
)}
</div>
)}
{typeof metadata?.created === "boolean" && (
<div className="text-xs text-muted-foreground">
{metadata.created ? "Created by this run" : "Reused existing workspace"}
</div>
)}
{/* Excerpts are pre-truncated server-side; home-path user segments are redacted. */}
{operation.stderrExcerpt && operation.stderrExcerpt.trim() && (
<div>
<div className="mb-1 text-xs text-red-700 dark:text-red-300">stderr excerpt</div>
<pre className="rounded-md bg-red-50 p-2 text-xs whitespace-pre-wrap break-all text-red-800 dark:bg-neutral-950 dark:text-red-100">
{redactHomePathUserSegments(operation.stderrExcerpt)}
</pre>
</div>
)}
{operation.stdoutExcerpt && operation.stdoutExcerpt.trim() && (
<div>
<div className="mb-1 text-xs text-muted-foreground">stdout excerpt</div>
<pre className="rounded-md bg-neutral-100 p-2 text-xs whitespace-pre-wrap break-all dark:bg-neutral-950">
{redactHomePathUserSegments(operation.stdoutExcerpt)}
</pre>
</div>
)}
{/* The full-log viewer is only offered when a persisted log reference exists. */}
{operation.logRef && <WorkspaceOperationLogViewer operation={operation} />}
</div>
);
})}
</div>
</div>
);
}
export function AgentDetail() {
const { companyPrefix, agentId, tab: urlTab, runId: urlRunId } = useParams<{
companyPrefix?: string;
@@ -1769,6 +1983,11 @@ function LogViewer({ run, adapterType }: { run: HeartbeatRun; adapterType: strin
distanceFromBottom: Number.POSITIVE_INFINITY,
});
const isLive = run.status === "running" || run.status === "queued";
const { data: workspaceOperations = [] } = useQuery({
queryKey: queryKeys.runWorkspaceOperations(run.id),
queryFn: () => heartbeatsApi.workspaceOperations(run.id),
refetchInterval: isLive ? 2000 : false,
});
function isRunLogUnavailable(err: unknown): boolean {
return err instanceof ApiError && err.status === 404;
@@ -2139,6 +2358,7 @@ function LogViewer({ run, adapterType }: { run: HeartbeatRun; adapterType: strin
return (
<div className="space-y-3">
<WorkspaceOperationsSection operations={workspaceOperations} />
{adapterInvokePayload && (
<div className="rounded-lg border border-border bg-background/60 p-3 space-y-2">
<div className="text-xs font-medium text-muted-foreground">Invocation</div>