Merge public-gh/master into paperclip-company-import-export
This commit is contained in:
19
server/src/__tests__/app-hmr-port.test.ts
Normal file
19
server/src/__tests__/app-hmr-port.test.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveViteHmrPort } from "../app.ts";
|
||||
|
||||
describe("resolveViteHmrPort", () => {
|
||||
it("uses serverPort + 10000 when the result stays in range", () => {
|
||||
expect(resolveViteHmrPort(3100)).toBe(13_100);
|
||||
expect(resolveViteHmrPort(55_535)).toBe(65_535);
|
||||
});
|
||||
|
||||
it("falls back below the server port when adding 10000 would overflow", () => {
|
||||
expect(resolveViteHmrPort(55_536)).toBe(45_536);
|
||||
expect(resolveViteHmrPort(63_000)).toBe(53_000);
|
||||
});
|
||||
|
||||
it("never returns a privileged or invalid port", () => {
|
||||
expect(resolveViteHmrPort(65_535)).toBe(55_535);
|
||||
expect(resolveViteHmrPort(9_000)).toBe(19_000);
|
||||
});
|
||||
});
|
||||
@@ -35,6 +35,11 @@ type CapturePayload = {
|
||||
paperclipEnvKeys: string[];
|
||||
};
|
||||
|
||||
type LogEntry = {
|
||||
stream: "stdout" | "stderr";
|
||||
chunk: string;
|
||||
};
|
||||
|
||||
describe("codex execute", () => {
|
||||
it("uses a worktree-isolated CODEX_HOME while preserving shared auth and config", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-codex-execute-"));
|
||||
@@ -62,6 +67,7 @@ describe("codex execute", () => {
|
||||
process.env.CODEX_HOME = sharedCodexHome;
|
||||
|
||||
try {
|
||||
const logs: LogEntry[] = [];
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
@@ -87,7 +93,9 @@ describe("codex execute", () => {
|
||||
},
|
||||
context: {},
|
||||
authToken: "run-jwt-token",
|
||||
onLog: async () => {},
|
||||
onLog: async (stream, chunk) => {
|
||||
logs.push({ stream, chunk });
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
@@ -116,6 +124,18 @@ describe("codex execute", () => {
|
||||
expect((await fs.lstat(isolatedConfig)).isFile()).toBe(true);
|
||||
expect(await fs.readFile(isolatedConfig, "utf8")).toBe('model = "codex-mini-latest"\n');
|
||||
expect((await fs.lstat(isolatedSkill)).isSymbolicLink()).toBe(true);
|
||||
expect(logs).toContainEqual(
|
||||
expect.objectContaining({
|
||||
stream: "stdout",
|
||||
chunk: expect.stringContaining("Using worktree-isolated Codex home"),
|
||||
}),
|
||||
);
|
||||
expect(logs).toContainEqual(
|
||||
expect.objectContaining({
|
||||
stream: "stdout",
|
||||
chunk: expect.stringContaining('Injected Codex skill "paperclip"'),
|
||||
}),
|
||||
);
|
||||
} finally {
|
||||
if (previousHome === undefined) delete process.env.HOME;
|
||||
else process.env.HOME = previousHome;
|
||||
|
||||
@@ -51,10 +51,10 @@ describe("codex local adapter skill injection", () => {
|
||||
await createPaperclipRepoSkill(oldRepo, "paperclip");
|
||||
await fs.symlink(path.join(oldRepo, "skills", "paperclip"), path.join(skillsHome, "paperclip"));
|
||||
|
||||
const logs: string[] = [];
|
||||
const logs: Array<{ stream: "stdout" | "stderr"; chunk: string }> = [];
|
||||
await ensureCodexSkillsInjected(
|
||||
async (_stream, chunk) => {
|
||||
logs.push(chunk);
|
||||
async (stream, chunk) => {
|
||||
logs.push({ stream, chunk });
|
||||
},
|
||||
{
|
||||
skillsHome,
|
||||
@@ -69,7 +69,12 @@ describe("codex local adapter skill injection", () => {
|
||||
expect(await fs.realpath(path.join(skillsHome, "paperclip"))).toBe(
|
||||
await fs.realpath(path.join(currentRepo, "skills", "paperclip")),
|
||||
);
|
||||
expect(logs.some((line) => line.includes("Repaired Codex skill"))).toBe(true);
|
||||
expect(logs).toContainEqual(
|
||||
expect.objectContaining({
|
||||
stream: "stdout",
|
||||
chunk: expect.stringContaining('Repaired Codex skill "paperclip"'),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it("preserves a custom Codex skill symlink outside Paperclip repo checkouts", async () => {
|
||||
|
||||
@@ -2,6 +2,7 @@ import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildExecutionWorkspaceAdapterConfig,
|
||||
defaultIssueExecutionWorkspaceSettingsForProject,
|
||||
gateProjectExecutionWorkspacePolicy,
|
||||
parseIssueExecutionWorkspaceSettings,
|
||||
parseProjectExecutionWorkspacePolicy,
|
||||
resolveExecutionWorkspaceMode,
|
||||
@@ -12,36 +13,36 @@ describe("execution workspace policy helpers", () => {
|
||||
expect(
|
||||
defaultIssueExecutionWorkspaceSettingsForProject({
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
defaultMode: "isolated_workspace",
|
||||
}),
|
||||
).toEqual({ mode: "isolated" });
|
||||
).toEqual({ mode: "isolated_workspace" });
|
||||
expect(
|
||||
defaultIssueExecutionWorkspaceSettingsForProject({
|
||||
enabled: true,
|
||||
defaultMode: "project_primary",
|
||||
defaultMode: "shared_workspace",
|
||||
}),
|
||||
).toEqual({ mode: "project_primary" });
|
||||
).toEqual({ mode: "shared_workspace" });
|
||||
expect(defaultIssueExecutionWorkspaceSettingsForProject(null)).toBeNull();
|
||||
});
|
||||
|
||||
it("prefers explicit issue mode over project policy and legacy overrides", () => {
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: { enabled: true, defaultMode: "project_primary" },
|
||||
issueSettings: { mode: "isolated" },
|
||||
projectPolicy: { enabled: true, defaultMode: "shared_workspace" },
|
||||
issueSettings: { mode: "isolated_workspace" },
|
||||
legacyUseProjectWorkspace: false,
|
||||
}),
|
||||
).toBe("isolated");
|
||||
).toBe("isolated_workspace");
|
||||
});
|
||||
|
||||
it("falls back to project policy before legacy project-workspace compatibility flag", () => {
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated" },
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated_workspace" },
|
||||
issueSettings: null,
|
||||
legacyUseProjectWorkspace: false,
|
||||
}),
|
||||
).toBe("isolated");
|
||||
).toBe("isolated_workspace");
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: null,
|
||||
@@ -58,7 +59,7 @@ describe("execution workspace policy helpers", () => {
|
||||
},
|
||||
projectPolicy: {
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
defaultMode: "isolated_workspace",
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
baseRef: "origin/main",
|
||||
@@ -69,7 +70,7 @@ describe("execution workspace policy helpers", () => {
|
||||
},
|
||||
},
|
||||
issueSettings: null,
|
||||
mode: "isolated",
|
||||
mode: "isolated_workspace",
|
||||
legacyUseProjectWorkspace: null,
|
||||
});
|
||||
|
||||
@@ -92,9 +93,9 @@ describe("execution workspace policy helpers", () => {
|
||||
expect(
|
||||
buildExecutionWorkspaceAdapterConfig({
|
||||
agentConfig: baseConfig,
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated" },
|
||||
issueSettings: { mode: "project_primary" },
|
||||
mode: "project_primary",
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated_workspace" },
|
||||
issueSettings: { mode: "shared_workspace" },
|
||||
mode: "shared_workspace",
|
||||
legacyUseProjectWorkspace: null,
|
||||
}).workspaceStrategy,
|
||||
).toBeUndefined();
|
||||
@@ -124,7 +125,7 @@ describe("execution workspace policy helpers", () => {
|
||||
}),
|
||||
).toEqual({
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
defaultMode: "isolated_workspace",
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
worktreeParentDir: ".paperclip/worktrees",
|
||||
@@ -137,7 +138,22 @@ describe("execution workspace policy helpers", () => {
|
||||
mode: "project_primary",
|
||||
}),
|
||||
).toEqual({
|
||||
mode: "project_primary",
|
||||
mode: "shared_workspace",
|
||||
});
|
||||
});
|
||||
|
||||
it("disables project execution workspace policy when the instance flag is off", () => {
|
||||
expect(
|
||||
gateProjectExecutionWorkspacePolicy(
|
||||
{ enabled: true, defaultMode: "isolated_workspace" },
|
||||
false,
|
||||
),
|
||||
).toBeNull();
|
||||
expect(
|
||||
gateProjectExecutionWorkspacePolicy(
|
||||
{ enabled: true, defaultMode: "isolated_workspace" },
|
||||
true,
|
||||
),
|
||||
).toEqual({ enabled: true, defaultMode: "isolated_workspace" });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@ import { describe, expect, it } from "vitest";
|
||||
import type { agents } from "@paperclipai/db";
|
||||
import { resolveDefaultAgentWorkspaceDir } from "../home-paths.js";
|
||||
import {
|
||||
prioritizeProjectWorkspaceCandidatesForRun,
|
||||
parseSessionCompactionPolicy,
|
||||
resolveRuntimeSessionParamsForWorkspace,
|
||||
shouldResetTaskSessionForWake,
|
||||
@@ -180,6 +181,42 @@ describe("shouldResetTaskSessionForWake", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("prioritizeProjectWorkspaceCandidatesForRun", () => {
|
||||
it("moves the explicitly selected workspace to the front", () => {
|
||||
const rows = [
|
||||
{ id: "workspace-1", cwd: "/tmp/one" },
|
||||
{ id: "workspace-2", cwd: "/tmp/two" },
|
||||
{ id: "workspace-3", cwd: "/tmp/three" },
|
||||
];
|
||||
|
||||
expect(
|
||||
prioritizeProjectWorkspaceCandidatesForRun(rows, "workspace-2").map((row) => row.id),
|
||||
).toEqual(["workspace-2", "workspace-1", "workspace-3"]);
|
||||
});
|
||||
|
||||
it("keeps the original order when no preferred workspace is selected", () => {
|
||||
const rows = [
|
||||
{ id: "workspace-1" },
|
||||
{ id: "workspace-2" },
|
||||
];
|
||||
|
||||
expect(
|
||||
prioritizeProjectWorkspaceCandidatesForRun(rows, null).map((row) => row.id),
|
||||
).toEqual(["workspace-1", "workspace-2"]);
|
||||
});
|
||||
|
||||
it("keeps the original order when the selected workspace is missing", () => {
|
||||
const rows = [
|
||||
{ id: "workspace-1" },
|
||||
{ id: "workspace-2" },
|
||||
];
|
||||
|
||||
expect(
|
||||
prioritizeProjectWorkspaceCandidatesForRun(rows, "workspace-9").map((row) => row.id),
|
||||
).toEqual(["workspace-1", "workspace-2"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseSessionCompactionPolicy", () => {
|
||||
it("disables Paperclip-managed rotation by default for codex and claude local", () => {
|
||||
expect(parseSessionCompactionPolicy(buildAgent("codex_local"))).toEqual({
|
||||
|
||||
99
server/src/__tests__/instance-settings-routes.test.ts
Normal file
99
server/src/__tests__/instance-settings-routes.test.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
import { instanceSettingsRoutes } from "../routes/instance-settings.js";
|
||||
|
||||
const mockInstanceSettingsService = vi.hoisted(() => ({
|
||||
getExperimental: vi.fn(),
|
||||
updateExperimental: vi.fn(),
|
||||
listCompanyIds: vi.fn(),
|
||||
}));
|
||||
const mockLogActivity = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("../services/index.js", () => ({
|
||||
instanceSettingsService: () => mockInstanceSettingsService,
|
||||
logActivity: mockLogActivity,
|
||||
}));
|
||||
|
||||
function createApp(actor: any) {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
req.actor = actor;
|
||||
next();
|
||||
});
|
||||
app.use("/api", instanceSettingsRoutes({} as any));
|
||||
app.use(errorHandler);
|
||||
return app;
|
||||
}
|
||||
|
||||
describe("instance settings routes", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockInstanceSettingsService.getExperimental.mockResolvedValue({
|
||||
enableIsolatedWorkspaces: false,
|
||||
});
|
||||
mockInstanceSettingsService.updateExperimental.mockResolvedValue({
|
||||
id: "instance-settings-1",
|
||||
experimental: {
|
||||
enableIsolatedWorkspaces: true,
|
||||
},
|
||||
});
|
||||
mockInstanceSettingsService.listCompanyIds.mockResolvedValue(["company-1", "company-2"]);
|
||||
});
|
||||
|
||||
it("allows local board users to read and update experimental settings", async () => {
|
||||
const app = createApp({
|
||||
type: "board",
|
||||
userId: "local-board",
|
||||
source: "local_implicit",
|
||||
isInstanceAdmin: true,
|
||||
});
|
||||
|
||||
const getRes = await request(app).get("/api/instance/settings/experimental");
|
||||
expect(getRes.status).toBe(200);
|
||||
expect(getRes.body).toEqual({ enableIsolatedWorkspaces: false });
|
||||
|
||||
const patchRes = await request(app)
|
||||
.patch("/api/instance/settings/experimental")
|
||||
.send({ enableIsolatedWorkspaces: true });
|
||||
|
||||
expect(patchRes.status).toBe(200);
|
||||
expect(mockInstanceSettingsService.updateExperimental).toHaveBeenCalledWith({
|
||||
enableIsolatedWorkspaces: true,
|
||||
});
|
||||
expect(mockLogActivity).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it("rejects non-admin board users", async () => {
|
||||
const app = createApp({
|
||||
type: "board",
|
||||
userId: "user-1",
|
||||
source: "session",
|
||||
isInstanceAdmin: false,
|
||||
companyIds: ["company-1"],
|
||||
});
|
||||
|
||||
const res = await request(app).get("/api/instance/settings/experimental");
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(mockInstanceSettingsService.getExperimental).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("rejects agent callers", async () => {
|
||||
const app = createApp({
|
||||
type: "agent",
|
||||
agentId: "agent-1",
|
||||
companyId: "company-1",
|
||||
source: "agent_key",
|
||||
});
|
||||
|
||||
const res = await request(app)
|
||||
.patch("/api/instance/settings/experimental")
|
||||
.send({ enableIsolatedWorkspaces: true });
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(mockInstanceSettingsService.updateExperimental).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
95
server/src/__tests__/work-products.test.ts
Normal file
95
server/src/__tests__/work-products.test.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import { workProductService } from "../services/work-products.ts";
|
||||
|
||||
function createWorkProductRow(overrides: Partial<Record<string, unknown>> = {}) {
|
||||
const now = new Date("2026-03-17T00:00:00.000Z");
|
||||
return {
|
||||
id: "work-product-1",
|
||||
companyId: "company-1",
|
||||
projectId: "project-1",
|
||||
issueId: "issue-1",
|
||||
executionWorkspaceId: null,
|
||||
runtimeServiceId: null,
|
||||
type: "pull_request",
|
||||
provider: "github",
|
||||
externalId: null,
|
||||
title: "PR 1",
|
||||
url: "https://example.com/pr/1",
|
||||
status: "open",
|
||||
reviewState: "draft",
|
||||
isPrimary: true,
|
||||
healthStatus: "unknown",
|
||||
summary: null,
|
||||
metadata: null,
|
||||
createdByRunId: null,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("workProductService", () => {
|
||||
it("uses a transaction when creating a new primary work product", async () => {
|
||||
const updatedWhere = vi.fn(async () => undefined);
|
||||
const updateSet = vi.fn(() => ({ where: updatedWhere }));
|
||||
const txUpdate = vi.fn(() => ({ set: updateSet }));
|
||||
|
||||
const insertedRow = createWorkProductRow();
|
||||
const insertReturning = vi.fn(async () => [insertedRow]);
|
||||
const insertValues = vi.fn(() => ({ returning: insertReturning }));
|
||||
const txInsert = vi.fn(() => ({ values: insertValues }));
|
||||
|
||||
const tx = {
|
||||
update: txUpdate,
|
||||
insert: txInsert,
|
||||
};
|
||||
const transaction = vi.fn(async (callback: (input: typeof tx) => Promise<unknown>) => await callback(tx));
|
||||
|
||||
const svc = workProductService({ transaction } as any);
|
||||
const result = await svc.createForIssue("issue-1", "company-1", {
|
||||
type: "pull_request",
|
||||
provider: "github",
|
||||
title: "PR 1",
|
||||
status: "open",
|
||||
reviewState: "draft",
|
||||
isPrimary: true,
|
||||
});
|
||||
|
||||
expect(transaction).toHaveBeenCalledTimes(1);
|
||||
expect(txUpdate).toHaveBeenCalledTimes(1);
|
||||
expect(txInsert).toHaveBeenCalledTimes(1);
|
||||
expect(result?.id).toBe("work-product-1");
|
||||
});
|
||||
|
||||
it("uses a transaction when promoting an existing work product to primary", async () => {
|
||||
const existingRow = createWorkProductRow({ isPrimary: false });
|
||||
|
||||
const selectWhere = vi.fn(async () => [existingRow]);
|
||||
const selectFrom = vi.fn(() => ({ where: selectWhere }));
|
||||
const txSelect = vi.fn(() => ({ from: selectFrom }));
|
||||
|
||||
const updateReturning = vi
|
||||
.fn()
|
||||
.mockResolvedValue([createWorkProductRow({ reviewState: "ready_for_review" })]);
|
||||
const updateWhere = vi.fn(() => ({ returning: updateReturning }));
|
||||
const updateSet = vi.fn(() => ({ where: updateWhere }));
|
||||
const txUpdate = vi.fn(() => ({ set: updateSet }));
|
||||
|
||||
const tx = {
|
||||
select: txSelect,
|
||||
update: txUpdate,
|
||||
};
|
||||
const transaction = vi.fn(async (callback: (input: typeof tx) => Promise<unknown>) => await callback(tx));
|
||||
|
||||
const svc = workProductService({ transaction } as any);
|
||||
const result = await svc.update("work-product-1", {
|
||||
isPrimary: true,
|
||||
reviewState: "ready_for_review",
|
||||
});
|
||||
|
||||
expect(transaction).toHaveBeenCalledTimes(1);
|
||||
expect(txSelect).toHaveBeenCalledTimes(1);
|
||||
expect(txUpdate).toHaveBeenCalledTimes(2);
|
||||
expect(result?.reviewState).toBe("ready_for_review");
|
||||
});
|
||||
});
|
||||
@@ -5,12 +5,16 @@ import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
cleanupExecutionWorkspaceArtifacts,
|
||||
ensureRuntimeServicesForRun,
|
||||
normalizeAdapterManagedRuntimeServices,
|
||||
realizeExecutionWorkspace,
|
||||
releaseRuntimeServicesForRun,
|
||||
stopRuntimeServicesForExecutionWorkspace,
|
||||
type RealizedExecutionWorkspace,
|
||||
} from "../services/workspace-runtime.ts";
|
||||
import type { WorkspaceOperation } from "@paperclipai/shared";
|
||||
import type { WorkspaceOperationRecorder } from "../services/workspace-operations.ts";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
const leasedRunIds = new Set<string>();
|
||||
@@ -48,6 +52,68 @@ function buildWorkspace(cwd: string): RealizedExecutionWorkspace {
|
||||
};
|
||||
}
|
||||
|
||||
function createWorkspaceOperationRecorderDouble() {
|
||||
const operations: Array<{
|
||||
phase: string;
|
||||
command: string | null;
|
||||
cwd: string | null;
|
||||
metadata: Record<string, unknown> | null;
|
||||
result: {
|
||||
status?: string;
|
||||
exitCode?: number | null;
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
system?: string | null;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
};
|
||||
}> = [];
|
||||
let executionWorkspaceId: string | null = null;
|
||||
|
||||
const recorder: WorkspaceOperationRecorder = {
|
||||
attachExecutionWorkspaceId: async (nextExecutionWorkspaceId) => {
|
||||
executionWorkspaceId = nextExecutionWorkspaceId;
|
||||
},
|
||||
recordOperation: async (input) => {
|
||||
const result = await input.run();
|
||||
operations.push({
|
||||
phase: input.phase,
|
||||
command: input.command ?? null,
|
||||
cwd: input.cwd ?? null,
|
||||
metadata: {
|
||||
...(input.metadata ?? {}),
|
||||
...(executionWorkspaceId ? { executionWorkspaceId } : {}),
|
||||
},
|
||||
result,
|
||||
});
|
||||
return {
|
||||
id: `op-${operations.length}`,
|
||||
companyId: "company-1",
|
||||
executionWorkspaceId,
|
||||
heartbeatRunId: "run-1",
|
||||
phase: input.phase,
|
||||
command: input.command ?? null,
|
||||
cwd: input.cwd ?? null,
|
||||
status: (result.status ?? "succeeded") as WorkspaceOperation["status"],
|
||||
exitCode: result.exitCode ?? null,
|
||||
logStore: "local_file",
|
||||
logRef: `op-${operations.length}.ndjson`,
|
||||
logBytes: 0,
|
||||
logSha256: null,
|
||||
logCompressed: false,
|
||||
stdoutExcerpt: result.stdout ?? null,
|
||||
stderrExcerpt: result.stderr ?? null,
|
||||
metadata: input.metadata ?? null,
|
||||
startedAt: new Date(),
|
||||
finishedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
return { recorder, operations };
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
Array.from(leasedRunIds).map(async (runId) => {
|
||||
@@ -55,6 +121,10 @@ afterEach(async () => {
|
||||
leasedRunIds.delete(runId);
|
||||
}),
|
||||
);
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
delete process.env.PAPERCLIP_HOME;
|
||||
delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
delete process.env.DATABASE_URL;
|
||||
});
|
||||
|
||||
describe("realizeExecutionWorkspace", () => {
|
||||
@@ -211,6 +281,304 @@ describe("realizeExecutionWorkspace", () => {
|
||||
|
||||
await expect(fs.readFile(path.join(reused.cwd, ".paperclip-provision-created"), "utf8")).resolves.toBe("false\n");
|
||||
});
|
||||
|
||||
it("records worktree setup and provision operations when a recorder is provided", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
const { recorder, operations } = createWorkspaceOperationRecorderDouble();
|
||||
|
||||
await fs.mkdir(path.join(repoRoot, "scripts"), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(repoRoot, "scripts", "provision.sh"),
|
||||
[
|
||||
"#!/usr/bin/env bash",
|
||||
"set -euo pipefail",
|
||||
"printf 'provisioned\\n'",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
await runGit(repoRoot, ["add", "scripts/provision.sh"]);
|
||||
await runGit(repoRoot, ["commit", "-m", "Add recorder provision script"]);
|
||||
|
||||
await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
provisionCommand: "bash ./scripts/provision.sh",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-540",
|
||||
title: "Record workspace operations",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
recorder,
|
||||
});
|
||||
|
||||
expect(operations.map((operation) => operation.phase)).toEqual([
|
||||
"worktree_prepare",
|
||||
"workspace_provision",
|
||||
]);
|
||||
expect(operations[0]?.command).toContain("git worktree add");
|
||||
expect(operations[0]?.metadata).toMatchObject({
|
||||
branchName: "PAP-540-record-workspace-operations",
|
||||
created: true,
|
||||
});
|
||||
expect(operations[1]?.command).toBe("bash ./scripts/provision.sh");
|
||||
});
|
||||
|
||||
it("reuses an existing branch without resetting it when recreating a missing worktree", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
const branchName = "PAP-450-recreate-missing-worktree";
|
||||
|
||||
await runGit(repoRoot, ["checkout", "-b", branchName]);
|
||||
await fs.writeFile(path.join(repoRoot, "feature.txt"), "preserve me\n", "utf8");
|
||||
await runGit(repoRoot, ["add", "feature.txt"]);
|
||||
await runGit(repoRoot, ["commit", "-m", "Add preserved feature"]);
|
||||
const expectedHead = (await execFileAsync("git", ["rev-parse", branchName], { cwd: repoRoot })).stdout.trim();
|
||||
await runGit(repoRoot, ["checkout", "main"]);
|
||||
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-450",
|
||||
title: "Recreate missing worktree",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
expect(workspace.branchName).toBe(branchName);
|
||||
await expect(fs.readFile(path.join(workspace.cwd, "feature.txt"), "utf8")).resolves.toBe("preserve me\n");
|
||||
const actualHead = (await execFileAsync("git", ["rev-parse", "HEAD"], { cwd: workspace.cwd })).stdout.trim();
|
||||
expect(actualHead).toBe(expectedHead);
|
||||
});
|
||||
|
||||
it("removes a created git worktree and branch during cleanup", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-449",
|
||||
title: "Cleanup workspace",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
const cleanup = await cleanupExecutionWorkspaceArtifacts({
|
||||
workspace: {
|
||||
id: "execution-workspace-1",
|
||||
cwd: workspace.cwd,
|
||||
providerType: "git_worktree",
|
||||
providerRef: workspace.worktreePath,
|
||||
branchName: workspace.branchName,
|
||||
repoUrl: workspace.repoUrl,
|
||||
baseRef: workspace.repoRef,
|
||||
projectId: workspace.projectId,
|
||||
projectWorkspaceId: workspace.workspaceId,
|
||||
sourceIssueId: "issue-1",
|
||||
metadata: {
|
||||
createdByRuntime: true,
|
||||
},
|
||||
},
|
||||
projectWorkspace: {
|
||||
cwd: repoRoot,
|
||||
cleanupCommand: null,
|
||||
},
|
||||
});
|
||||
|
||||
expect(cleanup.cleaned).toBe(true);
|
||||
expect(cleanup.warnings).toEqual([]);
|
||||
await expect(fs.stat(workspace.cwd)).rejects.toThrow();
|
||||
await expect(
|
||||
execFileAsync("git", ["branch", "--list", workspace.branchName!], { cwd: repoRoot }),
|
||||
).resolves.toMatchObject({
|
||||
stdout: "",
|
||||
});
|
||||
});
|
||||
|
||||
it("keeps an unmerged runtime-created branch and warns instead of force deleting it", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-451",
|
||||
title: "Keep unmerged branch",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
await fs.writeFile(path.join(workspace.cwd, "unmerged.txt"), "still here\n", "utf8");
|
||||
await runGit(workspace.cwd, ["add", "unmerged.txt"]);
|
||||
await runGit(workspace.cwd, ["commit", "-m", "Keep unmerged work"]);
|
||||
|
||||
const cleanup = await cleanupExecutionWorkspaceArtifacts({
|
||||
workspace: {
|
||||
id: "execution-workspace-1",
|
||||
cwd: workspace.cwd,
|
||||
providerType: "git_worktree",
|
||||
providerRef: workspace.worktreePath,
|
||||
branchName: workspace.branchName,
|
||||
repoUrl: workspace.repoUrl,
|
||||
baseRef: workspace.repoRef,
|
||||
projectId: workspace.projectId,
|
||||
projectWorkspaceId: workspace.workspaceId,
|
||||
sourceIssueId: "issue-1",
|
||||
metadata: {
|
||||
createdByRuntime: true,
|
||||
},
|
||||
},
|
||||
projectWorkspace: {
|
||||
cwd: repoRoot,
|
||||
cleanupCommand: null,
|
||||
},
|
||||
});
|
||||
|
||||
expect(cleanup.cleaned).toBe(true);
|
||||
expect(cleanup.warnings).toHaveLength(1);
|
||||
expect(cleanup.warnings[0]).toContain(`Skipped deleting branch "${workspace.branchName}"`);
|
||||
await expect(
|
||||
execFileAsync("git", ["branch", "--list", workspace.branchName!], { cwd: repoRoot }),
|
||||
).resolves.toMatchObject({
|
||||
stdout: expect.stringContaining(workspace.branchName!),
|
||||
});
|
||||
});
|
||||
|
||||
it("records teardown and cleanup operations when a recorder is provided", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
const { recorder, operations } = createWorkspaceOperationRecorderDouble();
|
||||
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-541",
|
||||
title: "Cleanup recorder",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
await cleanupExecutionWorkspaceArtifacts({
|
||||
workspace: {
|
||||
id: "execution-workspace-1",
|
||||
cwd: workspace.cwd,
|
||||
providerType: "git_worktree",
|
||||
providerRef: workspace.worktreePath,
|
||||
branchName: workspace.branchName,
|
||||
repoUrl: workspace.repoUrl,
|
||||
baseRef: workspace.repoRef,
|
||||
projectId: workspace.projectId,
|
||||
projectWorkspaceId: workspace.workspaceId,
|
||||
sourceIssueId: "issue-1",
|
||||
metadata: {
|
||||
createdByRuntime: true,
|
||||
},
|
||||
},
|
||||
projectWorkspace: {
|
||||
cwd: repoRoot,
|
||||
cleanupCommand: "printf 'cleanup ok\\n'",
|
||||
},
|
||||
recorder,
|
||||
});
|
||||
|
||||
expect(operations.map((operation) => operation.phase)).toEqual([
|
||||
"workspace_teardown",
|
||||
"worktree_cleanup",
|
||||
"worktree_cleanup",
|
||||
]);
|
||||
expect(operations[0]?.command).toBe("printf 'cleanup ok\\n'");
|
||||
expect(operations[1]?.metadata).toMatchObject({
|
||||
cleanupAction: "worktree_remove",
|
||||
});
|
||||
expect(operations[2]?.metadata).toMatchObject({
|
||||
cleanupAction: "branch_delete",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("ensureRuntimeServicesForRun", () => {
|
||||
@@ -312,6 +680,199 @@ describe("ensureRuntimeServicesForRun", () => {
|
||||
expect(third[0]?.reused).toBe(false);
|
||||
expect(third[0]?.id).not.toBe(first[0]?.id);
|
||||
});
|
||||
|
||||
it("does not leak parent Paperclip instance env into runtime service commands", async () => {
|
||||
const workspaceRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-runtime-env-"));
|
||||
const workspace = buildWorkspace(workspaceRoot);
|
||||
const envCapturePath = path.join(workspaceRoot, "captured-env.json");
|
||||
const serviceCommand = [
|
||||
"node -e",
|
||||
JSON.stringify(
|
||||
[
|
||||
"const fs = require('node:fs');",
|
||||
`fs.writeFileSync(${JSON.stringify(envCapturePath)}, JSON.stringify({`,
|
||||
"paperclipConfig: process.env.PAPERCLIP_CONFIG ?? null,",
|
||||
"paperclipHome: process.env.PAPERCLIP_HOME ?? null,",
|
||||
"paperclipInstanceId: process.env.PAPERCLIP_INSTANCE_ID ?? null,",
|
||||
"databaseUrl: process.env.DATABASE_URL ?? null,",
|
||||
"customEnv: process.env.RUNTIME_CUSTOM_ENV ?? null,",
|
||||
"port: process.env.PORT ?? null,",
|
||||
"}));",
|
||||
"require('node:http').createServer((req, res) => res.end('ok')).listen(Number(process.env.PORT), '127.0.0.1');",
|
||||
].join(" "),
|
||||
),
|
||||
].join(" ");
|
||||
|
||||
process.env.PAPERCLIP_CONFIG = "/tmp/base-paperclip-config.json";
|
||||
process.env.PAPERCLIP_HOME = "/tmp/base-paperclip-home";
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "base-instance";
|
||||
process.env.DATABASE_URL = "postgres://shared-db.example.com/paperclip";
|
||||
|
||||
const runId = "run-env";
|
||||
leasedRunIds.add(runId);
|
||||
|
||||
const services = await ensureRuntimeServicesForRun({
|
||||
runId,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
executionWorkspaceId: "execution-workspace-1",
|
||||
config: {
|
||||
workspaceRuntime: {
|
||||
services: [
|
||||
{
|
||||
name: "web",
|
||||
command: serviceCommand,
|
||||
port: { type: "auto" },
|
||||
readiness: {
|
||||
type: "http",
|
||||
urlTemplate: "http://127.0.0.1:{{port}}",
|
||||
timeoutSec: 10,
|
||||
intervalMs: 100,
|
||||
},
|
||||
lifecycle: "shared",
|
||||
reuseScope: "execution_workspace",
|
||||
stopPolicy: {
|
||||
type: "on_run_finish",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
adapterEnv: {
|
||||
RUNTIME_CUSTOM_ENV: "from-adapter",
|
||||
},
|
||||
});
|
||||
|
||||
expect(services).toHaveLength(1);
|
||||
const captured = JSON.parse(await fs.readFile(envCapturePath, "utf8")) as Record<string, string | null>;
|
||||
expect(captured.paperclipConfig).toBeNull();
|
||||
expect(captured.paperclipHome).toBeNull();
|
||||
expect(captured.paperclipInstanceId).toBeNull();
|
||||
expect(captured.databaseUrl).toBeNull();
|
||||
expect(captured.customEnv).toBe("from-adapter");
|
||||
expect(captured.port).toMatch(/^\d+$/);
|
||||
expect(services[0]?.executionWorkspaceId).toBe("execution-workspace-1");
|
||||
expect(services[0]?.scopeType).toBe("execution_workspace");
|
||||
expect(services[0]?.scopeId).toBe("execution-workspace-1");
|
||||
});
|
||||
|
||||
it("stops execution workspace runtime services by executionWorkspaceId", async () => {
|
||||
const workspaceRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-runtime-stop-"));
|
||||
const workspace = buildWorkspace(workspaceRoot);
|
||||
const runId = "run-stop";
|
||||
leasedRunIds.add(runId);
|
||||
|
||||
const services = await ensureRuntimeServicesForRun({
|
||||
runId,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
executionWorkspaceId: "execution-workspace-stop",
|
||||
config: {
|
||||
workspaceRuntime: {
|
||||
services: [
|
||||
{
|
||||
name: "web",
|
||||
command:
|
||||
"node -e \"require('node:http').createServer((req,res)=>res.end('ok')).listen(Number(process.env.PORT), '127.0.0.1')\"",
|
||||
port: { type: "auto" },
|
||||
readiness: {
|
||||
type: "http",
|
||||
urlTemplate: "http://127.0.0.1:{{port}}",
|
||||
timeoutSec: 10,
|
||||
intervalMs: 100,
|
||||
},
|
||||
lifecycle: "shared",
|
||||
reuseScope: "execution_workspace",
|
||||
stopPolicy: {
|
||||
type: "manual",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
adapterEnv: {},
|
||||
});
|
||||
|
||||
expect(services[0]?.url).toBeTruthy();
|
||||
await stopRuntimeServicesForExecutionWorkspace({
|
||||
executionWorkspaceId: "execution-workspace-stop",
|
||||
workspaceCwd: workspace.cwd,
|
||||
});
|
||||
await releaseRuntimeServicesForRun(runId);
|
||||
leasedRunIds.delete(runId);
|
||||
await new Promise((resolve) => setTimeout(resolve, 250));
|
||||
|
||||
await expect(fetch(services[0]!.url!)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("does not stop services in sibling directories when matching by workspace cwd", async () => {
|
||||
const workspaceParent = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-runtime-sibling-"));
|
||||
const targetWorkspaceRoot = path.join(workspaceParent, "project");
|
||||
const siblingWorkspaceRoot = path.join(workspaceParent, "project-extended", "service");
|
||||
await fs.mkdir(targetWorkspaceRoot, { recursive: true });
|
||||
await fs.mkdir(siblingWorkspaceRoot, { recursive: true });
|
||||
|
||||
const siblingWorkspace = buildWorkspace(siblingWorkspaceRoot);
|
||||
const runId = "run-sibling";
|
||||
leasedRunIds.add(runId);
|
||||
|
||||
const services = await ensureRuntimeServicesForRun({
|
||||
runId,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace: siblingWorkspace,
|
||||
executionWorkspaceId: "execution-workspace-sibling",
|
||||
config: {
|
||||
workspaceRuntime: {
|
||||
services: [
|
||||
{
|
||||
name: "web",
|
||||
command:
|
||||
"node -e \"require('node:http').createServer((req,res)=>res.end('ok')).listen(Number(process.env.PORT), '127.0.0.1')\"",
|
||||
port: { type: "auto" },
|
||||
readiness: {
|
||||
type: "http",
|
||||
urlTemplate: "http://127.0.0.1:{{port}}",
|
||||
timeoutSec: 10,
|
||||
intervalMs: 100,
|
||||
},
|
||||
lifecycle: "shared",
|
||||
reuseScope: "execution_workspace",
|
||||
stopPolicy: {
|
||||
type: "manual",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
adapterEnv: {},
|
||||
});
|
||||
|
||||
await stopRuntimeServicesForExecutionWorkspace({
|
||||
executionWorkspaceId: "execution-workspace-target",
|
||||
workspaceCwd: targetWorkspaceRoot,
|
||||
});
|
||||
|
||||
const response = await fetch(services[0]!.url!);
|
||||
expect(await response.text()).toBe("ok");
|
||||
|
||||
await releaseRuntimeServicesForRun(runId);
|
||||
leasedRunIds.delete(runId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeAdapterManagedRuntimeServices", () => {
|
||||
@@ -374,6 +935,7 @@ describe("normalizeAdapterManagedRuntimeServices", () => {
|
||||
companyId: "company-1",
|
||||
projectId: "project-1",
|
||||
projectWorkspaceId: "workspace-1",
|
||||
executionWorkspaceId: null,
|
||||
issueId: "issue-1",
|
||||
serviceName: "preview",
|
||||
provider: "adapter_managed",
|
||||
@@ -383,4 +945,33 @@ describe("normalizeAdapterManagedRuntimeServices", () => {
|
||||
});
|
||||
expect(first[0]?.id).toBe(second[0]?.id);
|
||||
});
|
||||
|
||||
it("prefers execution workspace ids over cwd for execution-scoped adapter services", () => {
|
||||
const workspace = buildWorkspace("/tmp/project");
|
||||
|
||||
const refs = normalizeAdapterManagedRuntimeServices({
|
||||
adapterType: "openclaw_gateway",
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Gateway Agent",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
executionWorkspaceId: "execution-workspace-1",
|
||||
reports: [
|
||||
{
|
||||
serviceName: "preview",
|
||||
scopeType: "execution_workspace",
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(refs[0]).toMatchObject({
|
||||
scopeType: "execution_workspace",
|
||||
scopeId: "execution-workspace-1",
|
||||
executionWorkspaceId: "execution-workspace-1",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -15,6 +15,7 @@ import { companySkillRoutes } from "./routes/company-skills.js";
|
||||
import { agentRoutes } from "./routes/agents.js";
|
||||
import { projectRoutes } from "./routes/projects.js";
|
||||
import { issueRoutes } from "./routes/issues.js";
|
||||
import { executionWorkspaceRoutes } from "./routes/execution-workspaces.js";
|
||||
import { goalRoutes } from "./routes/goals.js";
|
||||
import { approvalRoutes } from "./routes/approvals.js";
|
||||
import { secretRoutes } from "./routes/secrets.js";
|
||||
@@ -22,6 +23,7 @@ import { costRoutes } from "./routes/costs.js";
|
||||
import { activityRoutes } from "./routes/activity.js";
|
||||
import { dashboardRoutes } from "./routes/dashboard.js";
|
||||
import { sidebarBadgeRoutes } from "./routes/sidebar-badges.js";
|
||||
import { instanceSettingsRoutes } from "./routes/instance-settings.js";
|
||||
import { llmRoutes } from "./routes/llms.js";
|
||||
import { assetRoutes } from "./routes/assets.js";
|
||||
import { accessRoutes } from "./routes/access.js";
|
||||
@@ -47,6 +49,13 @@ import type { BetterAuthSessionResult } from "./auth/better-auth.js";
|
||||
|
||||
type UiMode = "none" | "static" | "vite-dev";
|
||||
|
||||
export function resolveViteHmrPort(serverPort: number): number {
|
||||
if (serverPort <= 55_535) {
|
||||
return serverPort + 10_000;
|
||||
}
|
||||
return Math.max(1_024, serverPort - 10_000);
|
||||
}
|
||||
|
||||
export async function createApp(
|
||||
db: Db,
|
||||
opts: {
|
||||
@@ -133,6 +142,7 @@ export async function createApp(
|
||||
api.use(assetRoutes(db, opts.storageService));
|
||||
api.use(projectRoutes(db));
|
||||
api.use(issueRoutes(db, opts.storageService));
|
||||
api.use(executionWorkspaceRoutes(db));
|
||||
api.use(goalRoutes(db));
|
||||
api.use(approvalRoutes(db));
|
||||
api.use(secretRoutes(db));
|
||||
@@ -140,6 +150,7 @@ export async function createApp(
|
||||
api.use(activityRoutes(db));
|
||||
api.use(dashboardRoutes(db));
|
||||
api.use(sidebarBadgeRoutes(db));
|
||||
api.use(instanceSettingsRoutes(db));
|
||||
const hostServicesDisposers = new Map<string, () => void>();
|
||||
const workerManager = createPluginWorkerManager();
|
||||
const pluginRegistry = pluginRegistryService(db);
|
||||
@@ -240,7 +251,7 @@ export async function createApp(
|
||||
|
||||
if (opts.uiMode === "vite-dev") {
|
||||
const uiRoot = path.resolve(__dirname, "../../ui");
|
||||
const hmrPort = opts.serverPort + 10000;
|
||||
const hmrPort = resolveViteHmrPort(opts.serverPort);
|
||||
const { createServer: createViteServer } = await import("vite");
|
||||
const vite = await createViteServer({
|
||||
root: uiRoot,
|
||||
|
||||
@@ -4,6 +4,7 @@ import path from "node:path";
|
||||
const DEFAULT_INSTANCE_ID = "default";
|
||||
const INSTANCE_ID_RE = /^[a-zA-Z0-9_-]+$/;
|
||||
const PATH_SEGMENT_RE = /^[a-zA-Z0-9_-]+$/;
|
||||
const FRIENDLY_PATH_SEGMENT_RE = /[^a-zA-Z0-9._-]+/g;
|
||||
|
||||
function expandHomePrefix(value: string): string {
|
||||
if (value === "~") return os.homedir();
|
||||
@@ -61,6 +62,34 @@ export function resolveDefaultAgentWorkspaceDir(agentId: string): string {
|
||||
return path.resolve(resolvePaperclipInstanceRoot(), "workspaces", trimmed);
|
||||
}
|
||||
|
||||
function sanitizeFriendlyPathSegment(value: string | null | undefined, fallback = "_default"): string {
|
||||
const trimmed = value?.trim() ?? "";
|
||||
if (!trimmed) return fallback;
|
||||
const sanitized = trimmed
|
||||
.replace(FRIENDLY_PATH_SEGMENT_RE, "-")
|
||||
.replace(/^-+|-+$/g, "");
|
||||
return sanitized || fallback;
|
||||
}
|
||||
|
||||
export function resolveManagedProjectWorkspaceDir(input: {
|
||||
companyId: string;
|
||||
projectId: string;
|
||||
repoName?: string | null;
|
||||
}): string {
|
||||
const companyId = input.companyId.trim();
|
||||
const projectId = input.projectId.trim();
|
||||
if (!companyId || !projectId) {
|
||||
throw new Error("Managed project workspace path requires companyId and projectId.");
|
||||
}
|
||||
return path.resolve(
|
||||
resolvePaperclipInstanceRoot(),
|
||||
"projects",
|
||||
sanitizeFriendlyPathSegment(companyId, "company"),
|
||||
sanitizeFriendlyPathSegment(projectId, "project"),
|
||||
sanitizeFriendlyPathSegment(input.repoName, "_default"),
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Resolve a user-supplied path to an absolute path, expanding a leading
 * "~" (home-directory) prefix first via expandHomePrefix.
 */
export function resolveHomeAwarePath(value: string): string {
  return path.resolve(expandHomePrefix(value));
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import { and, eq } from "drizzle-orm";
|
||||
import {
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
getPostgresDataDirectory,
|
||||
inspectMigrations,
|
||||
applyPendingMigrations,
|
||||
reconcilePendingMigrationHistory,
|
||||
@@ -320,45 +321,60 @@ export async function startServer(): Promise<StartedServer> {
|
||||
if (runningPid) {
|
||||
logger.warn(`Embedded PostgreSQL already running; reusing existing process (pid=${runningPid}, port=${port})`);
|
||||
} else {
|
||||
const detectedPort = await detectPort(configuredPort);
|
||||
if (detectedPort !== configuredPort) {
|
||||
logger.warn(`Embedded PostgreSQL port is in use; using next free port (requestedPort=${configuredPort}, selectedPort=${detectedPort})`);
|
||||
}
|
||||
port = detectedPort;
|
||||
logger.info(`Using embedded PostgreSQL because no DATABASE_URL set (dataDir=${dataDir}, port=${port})`);
|
||||
embeddedPostgres = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C"],
|
||||
onLog: appendEmbeddedPostgresLog,
|
||||
onError: appendEmbeddedPostgresLog,
|
||||
});
|
||||
|
||||
if (!clusterAlreadyInitialized) {
|
||||
const configuredAdminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${configuredPort}/postgres`;
|
||||
try {
|
||||
const actualDataDir = await getPostgresDataDirectory(configuredAdminConnectionString);
|
||||
if (
|
||||
typeof actualDataDir !== "string" ||
|
||||
resolve(actualDataDir) !== resolve(dataDir)
|
||||
) {
|
||||
throw new Error("reachable postgres does not use the expected embedded data directory");
|
||||
}
|
||||
await ensurePostgresDatabase(configuredAdminConnectionString, "paperclip");
|
||||
logger.warn(
|
||||
`Embedded PostgreSQL appears to already be reachable without a pid file; reusing existing server on configured port ${configuredPort}`,
|
||||
);
|
||||
} catch {
|
||||
const detectedPort = await detectPort(configuredPort);
|
||||
if (detectedPort !== configuredPort) {
|
||||
logger.warn(`Embedded PostgreSQL port is in use; using next free port (requestedPort=${configuredPort}, selectedPort=${detectedPort})`);
|
||||
}
|
||||
port = detectedPort;
|
||||
logger.info(`Using embedded PostgreSQL because no DATABASE_URL set (dataDir=${dataDir}, port=${port})`);
|
||||
embeddedPostgres = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C"],
|
||||
onLog: appendEmbeddedPostgresLog,
|
||||
onError: appendEmbeddedPostgresLog,
|
||||
});
|
||||
|
||||
if (!clusterAlreadyInitialized) {
|
||||
try {
|
||||
await embeddedPostgres.initialise();
|
||||
} catch (err) {
|
||||
logEmbeddedPostgresFailure("initialise", err);
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
logger.info(`Embedded PostgreSQL cluster already exists (${clusterVersionFile}); skipping init`);
|
||||
}
|
||||
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
logger.warn("Removing stale embedded PostgreSQL lock file");
|
||||
rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
try {
|
||||
await embeddedPostgres.initialise();
|
||||
await embeddedPostgres.start();
|
||||
} catch (err) {
|
||||
logEmbeddedPostgresFailure("initialise", err);
|
||||
logEmbeddedPostgresFailure("start", err);
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
logger.info(`Embedded PostgreSQL cluster already exists (${clusterVersionFile}); skipping init`);
|
||||
embeddedPostgresStartedByThisProcess = true;
|
||||
}
|
||||
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
logger.warn("Removing stale embedded PostgreSQL lock file");
|
||||
rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
try {
|
||||
await embeddedPostgres.start();
|
||||
} catch (err) {
|
||||
logEmbeddedPostgresFailure("start", err);
|
||||
throw err;
|
||||
}
|
||||
embeddedPostgresStartedByThisProcess = true;
|
||||
}
|
||||
|
||||
const embeddedAdminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
|
||||
@@ -36,6 +36,7 @@ import {
|
||||
issueService,
|
||||
logActivity,
|
||||
secretService,
|
||||
workspaceOperationService,
|
||||
} from "../services/index.js";
|
||||
import { conflict, forbidden, notFound, unprocessable } from "../errors.js";
|
||||
import { assertBoard, assertCompanyAccess, getActorInfo } from "./authz.js";
|
||||
@@ -70,6 +71,7 @@ export function agentRoutes(db: Db) {
|
||||
const issueApprovalsSvc = issueApprovalService(db);
|
||||
const secretsSvc = secretService(db);
|
||||
const companySkills = companySkillService(db);
|
||||
const workspaceOperations = workspaceOperationService(db);
|
||||
const strictSecretsMode = process.env.PAPERCLIP_SECRETS_STRICT_MODE === "true";
|
||||
|
||||
function canCreateAgents(agent: { role: string; permissions: Record<string, unknown> | null | undefined }) {
|
||||
@@ -1713,6 +1715,40 @@ export function agentRoutes(db: Db) {
|
||||
res.json(result);
|
||||
});
|
||||
|
||||
router.get("/heartbeat-runs/:runId/workspace-operations", async (req, res) => {
|
||||
const runId = req.params.runId as string;
|
||||
const run = await heartbeat.getRun(runId);
|
||||
if (!run) {
|
||||
res.status(404).json({ error: "Heartbeat run not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, run.companyId);
|
||||
|
||||
const context = asRecord(run.contextSnapshot);
|
||||
const executionWorkspaceId = asNonEmptyString(context?.executionWorkspaceId);
|
||||
const operations = await workspaceOperations.listForRun(runId, executionWorkspaceId);
|
||||
res.json(redactCurrentUserValue(operations));
|
||||
});
|
||||
|
||||
router.get("/workspace-operations/:operationId/log", async (req, res) => {
|
||||
const operationId = req.params.operationId as string;
|
||||
const operation = await workspaceOperations.getById(operationId);
|
||||
if (!operation) {
|
||||
res.status(404).json({ error: "Workspace operation not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, operation.companyId);
|
||||
|
||||
const offset = Number(req.query.offset ?? 0);
|
||||
const limitBytes = Number(req.query.limitBytes ?? 256000);
|
||||
const result = await workspaceOperations.readLog(operationId, {
|
||||
offset: Number.isFinite(offset) ? offset : 0,
|
||||
limitBytes: Number.isFinite(limitBytes) ? limitBytes : 256000,
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
});
|
||||
|
||||
router.get("/issues/:issueId/live-runs", async (req, res) => {
|
||||
const rawId = req.params.issueId as string;
|
||||
const issueSvc = issueService(db);
|
||||
|
||||
181
server/src/routes/execution-workspaces.ts
Normal file
181
server/src/routes/execution-workspaces.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { Router } from "express";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { issues, projects, projectWorkspaces } from "@paperclipai/db";
|
||||
import { updateExecutionWorkspaceSchema } from "@paperclipai/shared";
|
||||
import { validate } from "../middleware/validate.js";
|
||||
import { executionWorkspaceService, logActivity, workspaceOperationService } from "../services/index.js";
|
||||
import { parseProjectExecutionWorkspacePolicy } from "../services/execution-workspace-policy.js";
|
||||
import {
|
||||
cleanupExecutionWorkspaceArtifacts,
|
||||
stopRuntimeServicesForExecutionWorkspace,
|
||||
} from "../services/workspace-runtime.js";
|
||||
import { assertCompanyAccess, getActorInfo } from "./authz.js";
|
||||
|
||||
// Issue statuses that no longer block archiving the workspace they link to.
const TERMINAL_ISSUE_STATUSES = new Set(["done", "cancelled"]);

/**
 * Routes for listing, inspecting, and updating execution workspaces.
 *
 * The PATCH handler carries the archival flow: transitioning a workspace to
 * "archived" first verifies no linked issues are still open, then stops the
 * workspace's runtime services and runs artifact cleanup, recording warnings
 * and failures back onto the workspace row.
 */
export function executionWorkspaceRoutes(db: Db) {
  const router = Router();
  const svc = executionWorkspaceService(db);
  const workspaceOperationsSvc = workspaceOperationService(db);

  // List a company's execution workspaces with optional query-string filters.
  router.get("/companies/:companyId/execution-workspaces", async (req, res) => {
    const companyId = req.params.companyId as string;
    assertCompanyAccess(req, companyId);
    const workspaces = await svc.list(companyId, {
      projectId: req.query.projectId as string | undefined,
      projectWorkspaceId: req.query.projectWorkspaceId as string | undefined,
      issueId: req.query.issueId as string | undefined,
      status: req.query.status as string | undefined,
      reuseEligible: req.query.reuseEligible === "true",
    });
    res.json(workspaces);
  });

  // Fetch a single execution workspace; access is checked against its company.
  router.get("/execution-workspaces/:id", async (req, res) => {
    const id = req.params.id as string;
    const workspace = await svc.getById(id);
    if (!workspace) {
      res.status(404).json({ error: "Execution workspace not found" });
      return;
    }
    assertCompanyAccess(req, workspace.companyId);
    res.json(workspace);
  });

  // Update an execution workspace. An "archived" status change triggers the
  // full teardown flow; any other patch is applied directly.
  router.patch("/execution-workspaces/:id", validate(updateExecutionWorkspaceSchema), async (req, res) => {
    const id = req.params.id as string;
    const existing = await svc.getById(id);
    if (!existing) {
      res.status(404).json({ error: "Execution workspace not found" });
      return;
    }
    assertCompanyAccess(req, existing.companyId);
    // Coerce cleanupEligibleAt (wire format: string) into a Date for storage.
    const patch: Record<string, unknown> = {
      ...req.body,
      ...(req.body.cleanupEligibleAt ? { cleanupEligibleAt: new Date(req.body.cleanupEligibleAt) } : {}),
    };
    let workspace = existing;
    let cleanupWarnings: string[] = [];

    if (req.body.status === "archived" && existing.status !== "archived") {
      // Refuse to archive while any non-terminal issue still points here.
      const linkedIssues = await db
        .select({
          id: issues.id,
          status: issues.status,
        })
        .from(issues)
        .where(and(eq(issues.companyId, existing.companyId), eq(issues.executionWorkspaceId, existing.id)));
      const activeLinkedIssues = linkedIssues.filter((issue) => !TERMINAL_ISSUE_STATUSES.has(issue.status));

      if (activeLinkedIssues.length > 0) {
        res.status(409).json({
          error: `Cannot archive execution workspace while ${activeLinkedIssues.length} linked issue(s) are still open`,
        });
        return;
      }

      // Mark archived BEFORE cleanup so the state is persisted even if
      // teardown below fails (failure then flips it to cleanup_failed).
      const closedAt = new Date();
      const archivedWorkspace = await svc.update(id, {
        ...patch,
        status: "archived",
        closedAt,
        cleanupReason: null,
      });
      if (!archivedWorkspace) {
        res.status(404).json({ error: "Execution workspace not found" });
        return;
      }
      workspace = archivedWorkspace;

      try {
        // Stop any runtime services still bound to this workspace first.
        await stopRuntimeServicesForExecutionWorkspace({
          db,
          executionWorkspaceId: existing.id,
          workspaceCwd: existing.cwd,
        });
        // Look up the parent project workspace (for cwd/cleanup command), if any.
        const projectWorkspace = existing.projectWorkspaceId
          ? await db
              .select({
                cwd: projectWorkspaces.cwd,
                cleanupCommand: projectWorkspaces.cleanupCommand,
              })
              .from(projectWorkspaces)
              .where(
                and(
                  eq(projectWorkspaces.id, existing.projectWorkspaceId),
                  eq(projectWorkspaces.companyId, existing.companyId),
                ),
              )
              .then((rows) => rows[0] ?? null)
          : null;
        // Look up the project's execution-workspace policy (teardown command).
        const projectPolicy = existing.projectId
          ? await db
              .select({
                executionWorkspacePolicy: projects.executionWorkspacePolicy,
              })
              .from(projects)
              .where(and(eq(projects.id, existing.projectId), eq(projects.companyId, existing.companyId)))
              .then((rows) => parseProjectExecutionWorkspacePolicy(rows[0]?.executionWorkspacePolicy))
          : null;
        const cleanupResult = await cleanupExecutionWorkspaceArtifacts({
          workspace: existing,
          projectWorkspace,
          teardownCommand: projectPolicy?.workspaceStrategy?.teardownCommand ?? null,
          recorder: workspaceOperationsSvc.createRecorder({
            companyId: existing.companyId,
            executionWorkspaceId: existing.id,
          }),
        });
        cleanupWarnings = cleanupResult.warnings;
        // Persist warnings / partial-failure status only when something went wrong.
        const cleanupPatch: Record<string, unknown> = {
          closedAt,
          cleanupReason: cleanupWarnings.length > 0 ? cleanupWarnings.join(" | ") : null,
        };
        if (!cleanupResult.cleaned) {
          cleanupPatch.status = "cleanup_failed";
        }
        if (cleanupResult.warnings.length > 0 || !cleanupResult.cleaned) {
          workspace = (await svc.update(id, cleanupPatch)) ?? workspace;
        }
      } catch (error) {
        // Teardown threw outright: record cleanup_failed and surface a 500.
        const failureReason = error instanceof Error ? error.message : String(error);
        workspace =
          (await svc.update(id, {
            status: "cleanup_failed",
            closedAt,
            cleanupReason: failureReason,
          })) ?? workspace;
        res.status(500).json({
          error: `Failed to archive execution workspace: ${failureReason}`,
        });
        return;
      }
    } else {
      // Plain (non-archival) update path.
      const updatedWorkspace = await svc.update(id, patch);
      if (!updatedWorkspace) {
        res.status(404).json({ error: "Execution workspace not found" });
        return;
      }
      workspace = updatedWorkspace;
    }
    // Audit-log the update (including any cleanup warnings gathered above).
    const actor = getActorInfo(req);
    await logActivity(db, {
      companyId: existing.companyId,
      actorType: actor.actorType,
      actorId: actor.actorId,
      agentId: actor.agentId,
      runId: actor.runId,
      action: "execution_workspace.updated",
      entityType: "execution_workspace",
      entityId: workspace.id,
      details: {
        changedKeys: Object.keys(req.body).sort(),
        ...(cleanupWarnings.length > 0 ? { cleanupWarnings } : {}),
      },
    });
    res.json(workspace);
  });

  return router;
}
|
||||
@@ -13,3 +13,4 @@ export { dashboardRoutes } from "./dashboard.js";
|
||||
export { sidebarBadgeRoutes } from "./sidebar-badges.js";
|
||||
export { llmRoutes } from "./llms.js";
|
||||
export { accessRoutes } from "./access.js";
|
||||
export { instanceSettingsRoutes } from "./instance-settings.js";
|
||||
|
||||
59
server/src/routes/instance-settings.ts
Normal file
59
server/src/routes/instance-settings.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { Router, type Request } from "express";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { patchInstanceExperimentalSettingsSchema } from "@paperclipai/shared";
|
||||
import { forbidden } from "../errors.js";
|
||||
import { validate } from "../middleware/validate.js";
|
||||
import { instanceSettingsService, logActivity } from "../services/index.js";
|
||||
import { getActorInfo } from "./authz.js";
|
||||
|
||||
function assertCanManageInstanceSettings(req: Request) {
|
||||
if (req.actor.type !== "board") {
|
||||
throw forbidden("Board access required");
|
||||
}
|
||||
if (req.actor.source === "local_implicit" || req.actor.isInstanceAdmin) {
|
||||
return;
|
||||
}
|
||||
throw forbidden("Instance admin access required");
|
||||
}
|
||||
|
||||
/**
 * Routes for reading and patching the instance-wide experimental settings.
 *
 * Both endpoints are restricted by assertCanManageInstanceSettings. A patch
 * fans out an activity-log entry to every company on the instance so each
 * company's audit trail records the change.
 */
export function instanceSettingsRoutes(db: Db) {
  const router = Router();
  const svc = instanceSettingsService(db);

  // Read the current experimental settings.
  router.get("/instance/settings/experimental", async (req, res) => {
    assertCanManageInstanceSettings(req);
    res.json(await svc.getExperimental());
  });

  // Patch the experimental settings and audit the change per company.
  router.patch(
    "/instance/settings/experimental",
    validate(patchInstanceExperimentalSettingsSchema),
    async (req, res) => {
      assertCanManageInstanceSettings(req);
      const updated = await svc.updateExperimental(req.body);
      const actor = getActorInfo(req);
      const companyIds = await svc.listCompanyIds();
      // Instance settings are global, so log one activity entry per company
      // (in parallel) rather than against a single company.
      await Promise.all(
        companyIds.map((companyId) =>
          logActivity(db, {
            companyId,
            actorType: actor.actorType,
            actorId: actor.actorId,
            agentId: actor.agentId,
            runId: actor.runId,
            action: "instance.settings.experimental_updated",
            entityType: "instance_settings",
            entityId: updated.id,
            details: {
              experimental: updated.experimental,
              changedKeys: Object.keys(req.body).sort(),
            },
          }),
        ),
      );
      res.json(updated.experimental);
    },
  );

  return router;
}
|
||||
@@ -4,11 +4,13 @@ import type { Db } from "@paperclipai/db";
|
||||
import {
|
||||
addIssueCommentSchema,
|
||||
createIssueAttachmentMetadataSchema,
|
||||
createIssueWorkProductSchema,
|
||||
createIssueLabelSchema,
|
||||
checkoutIssueSchema,
|
||||
createIssueSchema,
|
||||
linkIssueApprovalSchema,
|
||||
issueDocumentKeySchema,
|
||||
updateIssueWorkProductSchema,
|
||||
upsertIssueDocumentSchema,
|
||||
updateIssueSchema,
|
||||
} from "@paperclipai/shared";
|
||||
@@ -17,6 +19,7 @@ import { validate } from "../middleware/validate.js";
|
||||
import {
|
||||
accessService,
|
||||
agentService,
|
||||
executionWorkspaceService,
|
||||
goalService,
|
||||
heartbeatService,
|
||||
issueApprovalService,
|
||||
@@ -24,6 +27,7 @@ import {
|
||||
documentService,
|
||||
logActivity,
|
||||
projectService,
|
||||
workProductService,
|
||||
} from "../services/index.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
import { forbidden, HttpError, unauthorized } from "../errors.js";
|
||||
@@ -42,6 +46,8 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
const projectsSvc = projectService(db);
|
||||
const goalsSvc = goalService(db);
|
||||
const issueApprovalsSvc = issueApprovalService(db);
|
||||
const executionWorkspacesSvc = executionWorkspaceService(db);
|
||||
const workProductsSvc = workProductService(db);
|
||||
const documentsSvc = documentService(db);
|
||||
const upload = multer({
|
||||
storage: multer.memoryStorage(),
|
||||
@@ -311,6 +317,10 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
const mentionedProjects = mentionedProjectIds.length > 0
|
||||
? await projectsSvc.listByIds(issue.companyId, mentionedProjectIds)
|
||||
: [];
|
||||
const currentExecutionWorkspace = issue.executionWorkspaceId
|
||||
? await executionWorkspacesSvc.getById(issue.executionWorkspaceId)
|
||||
: null;
|
||||
const workProducts = await workProductsSvc.listForIssue(issue.id);
|
||||
res.json({
|
||||
...issue,
|
||||
goalId: goal?.id ?? issue.goalId,
|
||||
@@ -319,6 +329,8 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
project: project ?? null,
|
||||
goal: goal ?? null,
|
||||
mentionedProjects,
|
||||
currentExecutionWorkspace,
|
||||
workProducts,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -395,6 +407,18 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
});
|
||||
});
|
||||
|
||||
router.get("/issues/:id/work-products", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const workProducts = await workProductsSvc.listForIssue(issue.id);
|
||||
res.json(workProducts);
|
||||
});
|
||||
|
||||
router.get("/issues/:id/documents", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
@@ -535,6 +559,93 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
router.post("/issues/:id/work-products", validate(createIssueWorkProductSchema), async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const product = await workProductsSvc.createForIssue(issue.id, issue.companyId, {
|
||||
...req.body,
|
||||
projectId: req.body.projectId ?? issue.projectId ?? null,
|
||||
});
|
||||
if (!product) {
|
||||
res.status(422).json({ error: "Invalid work product payload" });
|
||||
return;
|
||||
}
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.work_product_created",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: { workProductId: product.id, type: product.type, provider: product.provider },
|
||||
});
|
||||
res.status(201).json(product);
|
||||
});
|
||||
|
||||
router.patch("/work-products/:id", validate(updateIssueWorkProductSchema), async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const existing = await workProductsSvc.getById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: "Work product not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, existing.companyId);
|
||||
const product = await workProductsSvc.update(id, req.body);
|
||||
if (!product) {
|
||||
res.status(404).json({ error: "Work product not found" });
|
||||
return;
|
||||
}
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: existing.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.work_product_updated",
|
||||
entityType: "issue",
|
||||
entityId: existing.issueId,
|
||||
details: { workProductId: product.id, changedKeys: Object.keys(req.body).sort() },
|
||||
});
|
||||
res.json(product);
|
||||
});
|
||||
|
||||
router.delete("/work-products/:id", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const existing = await workProductsSvc.getById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: "Work product not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, existing.companyId);
|
||||
const removed = await workProductsSvc.remove(id);
|
||||
if (!removed) {
|
||||
res.status(404).json({ error: "Work product not found" });
|
||||
return;
|
||||
}
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: existing.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.work_product_deleted",
|
||||
entityType: "issue",
|
||||
entityId: existing.issueId,
|
||||
details: { workProductId: removed.id, type: removed.type },
|
||||
});
|
||||
res.json(removed);
|
||||
});
|
||||
|
||||
router.post("/issues/:id/read", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
|
||||
@@ -2,11 +2,12 @@ import type {
|
||||
ExecutionWorkspaceMode,
|
||||
ExecutionWorkspaceStrategy,
|
||||
IssueExecutionWorkspaceSettings,
|
||||
ProjectExecutionWorkspaceDefaultMode,
|
||||
ProjectExecutionWorkspacePolicy,
|
||||
} from "@paperclipai/shared";
|
||||
import { asString, parseObject } from "../adapters/utils.js";
|
||||
|
||||
type ParsedExecutionWorkspaceMode = Exclude<ExecutionWorkspaceMode, "inherit">;
|
||||
type ParsedExecutionWorkspaceMode = Exclude<ExecutionWorkspaceMode, "inherit" | "reuse_existing">;
|
||||
|
||||
function cloneRecord(value: Record<string, unknown> | null | undefined): Record<string, unknown> | null {
|
||||
if (!value) return null;
|
||||
@@ -16,7 +17,7 @@ function cloneRecord(value: Record<string, unknown> | null | undefined): Record<
|
||||
function parseExecutionWorkspaceStrategy(raw: unknown): ExecutionWorkspaceStrategy | null {
|
||||
const parsed = parseObject(raw);
|
||||
const type = asString(parsed.type, "");
|
||||
if (type !== "project_primary" && type !== "git_worktree") {
|
||||
if (type !== "project_primary" && type !== "git_worktree" && type !== "adapter_managed" && type !== "cloud_sandbox") {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
@@ -33,16 +34,31 @@ export function parseProjectExecutionWorkspacePolicy(raw: unknown): ProjectExecu
|
||||
const parsed = parseObject(raw);
|
||||
if (Object.keys(parsed).length === 0) return null;
|
||||
const enabled = typeof parsed.enabled === "boolean" ? parsed.enabled : false;
|
||||
const workspaceStrategy = parseExecutionWorkspaceStrategy(parsed.workspaceStrategy);
|
||||
const defaultMode = asString(parsed.defaultMode, "");
|
||||
const defaultProjectWorkspaceId =
|
||||
typeof parsed.defaultProjectWorkspaceId === "string" ? parsed.defaultProjectWorkspaceId : undefined;
|
||||
const allowIssueOverride =
|
||||
typeof parsed.allowIssueOverride === "boolean" ? parsed.allowIssueOverride : undefined;
|
||||
const normalizedDefaultMode = (() => {
|
||||
if (
|
||||
defaultMode === "shared_workspace" ||
|
||||
defaultMode === "isolated_workspace" ||
|
||||
defaultMode === "operator_branch" ||
|
||||
defaultMode === "adapter_default"
|
||||
) {
|
||||
return defaultMode as ProjectExecutionWorkspaceDefaultMode;
|
||||
}
|
||||
if (defaultMode === "project_primary") return "shared_workspace";
|
||||
if (defaultMode === "isolated") return "isolated_workspace";
|
||||
return undefined;
|
||||
})();
|
||||
return {
|
||||
enabled,
|
||||
...(defaultMode === "project_primary" || defaultMode === "isolated" ? { defaultMode } : {}),
|
||||
...(normalizedDefaultMode ? { defaultMode: normalizedDefaultMode } : {}),
|
||||
...(allowIssueOverride !== undefined ? { allowIssueOverride } : {}),
|
||||
...(parseExecutionWorkspaceStrategy(parsed.workspaceStrategy)
|
||||
? { workspaceStrategy: parseExecutionWorkspaceStrategy(parsed.workspaceStrategy) }
|
||||
: {}),
|
||||
...(defaultProjectWorkspaceId ? { defaultProjectWorkspaceId } : {}),
|
||||
...(workspaceStrategy ? { workspaceStrategy } : {}),
|
||||
...(parsed.workspaceRuntime && typeof parsed.workspaceRuntime === "object" && !Array.isArray(parsed.workspaceRuntime)
|
||||
? { workspaceRuntime: { ...(parsed.workspaceRuntime as Record<string, unknown>) } }
|
||||
: {}),
|
||||
@@ -52,23 +68,48 @@ export function parseProjectExecutionWorkspacePolicy(raw: unknown): ProjectExecu
|
||||
...(parsed.pullRequestPolicy && typeof parsed.pullRequestPolicy === "object" && !Array.isArray(parsed.pullRequestPolicy)
|
||||
? { pullRequestPolicy: { ...(parsed.pullRequestPolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
...(parsed.runtimePolicy && typeof parsed.runtimePolicy === "object" && !Array.isArray(parsed.runtimePolicy)
|
||||
? { runtimePolicy: { ...(parsed.runtimePolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
...(parsed.cleanupPolicy && typeof parsed.cleanupPolicy === "object" && !Array.isArray(parsed.cleanupPolicy)
|
||||
? { cleanupPolicy: { ...(parsed.cleanupPolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function gateProjectExecutionWorkspacePolicy(
|
||||
projectPolicy: ProjectExecutionWorkspacePolicy | null,
|
||||
isolatedWorkspacesEnabled: boolean,
|
||||
): ProjectExecutionWorkspacePolicy | null {
|
||||
if (!isolatedWorkspacesEnabled) return null;
|
||||
return projectPolicy;
|
||||
}
|
||||
|
||||
export function parseIssueExecutionWorkspaceSettings(raw: unknown): IssueExecutionWorkspaceSettings | null {
|
||||
const parsed = parseObject(raw);
|
||||
if (Object.keys(parsed).length === 0) return null;
|
||||
const workspaceStrategy = parseExecutionWorkspaceStrategy(parsed.workspaceStrategy);
|
||||
const mode = asString(parsed.mode, "");
|
||||
const normalizedMode = (() => {
|
||||
if (
|
||||
mode === "inherit" ||
|
||||
mode === "shared_workspace" ||
|
||||
mode === "isolated_workspace" ||
|
||||
mode === "operator_branch" ||
|
||||
mode === "reuse_existing" ||
|
||||
mode === "agent_default"
|
||||
) {
|
||||
return mode;
|
||||
}
|
||||
if (mode === "project_primary") return "shared_workspace";
|
||||
if (mode === "isolated") return "isolated_workspace";
|
||||
return "";
|
||||
})();
|
||||
return {
|
||||
...(mode === "inherit" || mode === "project_primary" || mode === "isolated" || mode === "agent_default"
|
||||
? { mode }
|
||||
: {}),
|
||||
...(parseExecutionWorkspaceStrategy(parsed.workspaceStrategy)
|
||||
? { workspaceStrategy: parseExecutionWorkspaceStrategy(parsed.workspaceStrategy) }
|
||||
...(normalizedMode
|
||||
? { mode: normalizedMode as IssueExecutionWorkspaceSettings["mode"] }
|
||||
: {}),
|
||||
...(workspaceStrategy ? { workspaceStrategy } : {}),
|
||||
...(parsed.workspaceRuntime && typeof parsed.workspaceRuntime === "object" && !Array.isArray(parsed.workspaceRuntime)
|
||||
? { workspaceRuntime: { ...(parsed.workspaceRuntime as Record<string, unknown>) } }
|
||||
: {}),
|
||||
@@ -80,7 +121,14 @@ export function defaultIssueExecutionWorkspaceSettingsForProject(
|
||||
): IssueExecutionWorkspaceSettings | null {
|
||||
if (!projectPolicy?.enabled) return null;
|
||||
return {
|
||||
mode: projectPolicy.defaultMode === "isolated" ? "isolated" : "project_primary",
|
||||
mode:
|
||||
projectPolicy.defaultMode === "isolated_workspace"
|
||||
? "isolated_workspace"
|
||||
: projectPolicy.defaultMode === "operator_branch"
|
||||
? "operator_branch"
|
||||
: projectPolicy.defaultMode === "adapter_default"
|
||||
? "agent_default"
|
||||
: "shared_workspace",
|
||||
};
|
||||
}
|
||||
|
||||
@@ -90,16 +138,19 @@ export function resolveExecutionWorkspaceMode(input: {
|
||||
legacyUseProjectWorkspace: boolean | null;
|
||||
}): ParsedExecutionWorkspaceMode {
|
||||
const issueMode = input.issueSettings?.mode;
|
||||
if (issueMode && issueMode !== "inherit") {
|
||||
if (issueMode && issueMode !== "inherit" && issueMode !== "reuse_existing") {
|
||||
return issueMode;
|
||||
}
|
||||
if (input.projectPolicy?.enabled) {
|
||||
return input.projectPolicy.defaultMode === "isolated" ? "isolated" : "project_primary";
|
||||
if (input.projectPolicy.defaultMode === "isolated_workspace") return "isolated_workspace";
|
||||
if (input.projectPolicy.defaultMode === "operator_branch") return "operator_branch";
|
||||
if (input.projectPolicy.defaultMode === "adapter_default") return "agent_default";
|
||||
return "shared_workspace";
|
||||
}
|
||||
if (input.legacyUseProjectWorkspace === false) {
|
||||
return "agent_default";
|
||||
}
|
||||
return "project_primary";
|
||||
return "shared_workspace";
|
||||
}
|
||||
|
||||
export function buildExecutionWorkspaceAdapterConfig(input: {
|
||||
@@ -119,7 +170,7 @@ export function buildExecutionWorkspaceAdapterConfig(input: {
|
||||
const hasWorkspaceControl = projectHasPolicy || issueHasWorkspaceOverrides || input.legacyUseProjectWorkspace === false;
|
||||
|
||||
if (hasWorkspaceControl) {
|
||||
if (input.mode === "isolated") {
|
||||
if (input.mode === "isolated_workspace") {
|
||||
const strategy =
|
||||
input.issueSettings?.workspaceStrategy ??
|
||||
input.projectPolicy?.workspaceStrategy ??
|
||||
|
||||
99
server/src/services/execution-workspaces.ts
Normal file
99
server/src/services/execution-workspaces.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import { and, desc, eq, inArray } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { executionWorkspaces } from "@paperclipai/db";
|
||||
import type { ExecutionWorkspace } from "@paperclipai/shared";
|
||||
|
||||
type ExecutionWorkspaceRow = typeof executionWorkspaces.$inferSelect;
|
||||
|
||||
function toExecutionWorkspace(row: ExecutionWorkspaceRow): ExecutionWorkspace {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
projectId: row.projectId,
|
||||
projectWorkspaceId: row.projectWorkspaceId ?? null,
|
||||
sourceIssueId: row.sourceIssueId ?? null,
|
||||
mode: row.mode as ExecutionWorkspace["mode"],
|
||||
strategyType: row.strategyType as ExecutionWorkspace["strategyType"],
|
||||
name: row.name,
|
||||
status: row.status as ExecutionWorkspace["status"],
|
||||
cwd: row.cwd ?? null,
|
||||
repoUrl: row.repoUrl ?? null,
|
||||
baseRef: row.baseRef ?? null,
|
||||
branchName: row.branchName ?? null,
|
||||
providerType: row.providerType as ExecutionWorkspace["providerType"],
|
||||
providerRef: row.providerRef ?? null,
|
||||
derivedFromExecutionWorkspaceId: row.derivedFromExecutionWorkspaceId ?? null,
|
||||
lastUsedAt: row.lastUsedAt,
|
||||
openedAt: row.openedAt,
|
||||
closedAt: row.closedAt ?? null,
|
||||
cleanupEligibleAt: row.cleanupEligibleAt ?? null,
|
||||
cleanupReason: row.cleanupReason ?? null,
|
||||
metadata: (row.metadata as Record<string, unknown> | null) ?? null,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
export function executionWorkspaceService(db: Db) {
|
||||
return {
|
||||
list: async (companyId: string, filters?: {
|
||||
projectId?: string;
|
||||
projectWorkspaceId?: string;
|
||||
issueId?: string;
|
||||
status?: string;
|
||||
reuseEligible?: boolean;
|
||||
}) => {
|
||||
const conditions = [eq(executionWorkspaces.companyId, companyId)];
|
||||
if (filters?.projectId) conditions.push(eq(executionWorkspaces.projectId, filters.projectId));
|
||||
if (filters?.projectWorkspaceId) {
|
||||
conditions.push(eq(executionWorkspaces.projectWorkspaceId, filters.projectWorkspaceId));
|
||||
}
|
||||
if (filters?.issueId) conditions.push(eq(executionWorkspaces.sourceIssueId, filters.issueId));
|
||||
if (filters?.status) {
|
||||
const statuses = filters.status.split(",").map((value) => value.trim()).filter(Boolean);
|
||||
if (statuses.length === 1) conditions.push(eq(executionWorkspaces.status, statuses[0]!));
|
||||
else if (statuses.length > 1) conditions.push(inArray(executionWorkspaces.status, statuses));
|
||||
}
|
||||
if (filters?.reuseEligible) {
|
||||
conditions.push(inArray(executionWorkspaces.status, ["active", "idle", "in_review"]));
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(executionWorkspaces)
|
||||
.where(and(...conditions))
|
||||
.orderBy(desc(executionWorkspaces.lastUsedAt), desc(executionWorkspaces.createdAt));
|
||||
return rows.map(toExecutionWorkspace);
|
||||
},
|
||||
|
||||
getById: async (id: string) => {
|
||||
const row = await db
|
||||
.select()
|
||||
.from(executionWorkspaces)
|
||||
.where(eq(executionWorkspaces.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toExecutionWorkspace(row) : null;
|
||||
},
|
||||
|
||||
create: async (data: typeof executionWorkspaces.$inferInsert) => {
|
||||
const row = await db
|
||||
.insert(executionWorkspaces)
|
||||
.values(data)
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toExecutionWorkspace(row) : null;
|
||||
},
|
||||
|
||||
update: async (id: string, patch: Partial<typeof executionWorkspaces.$inferInsert>) => {
|
||||
const row = await db
|
||||
.update(executionWorkspaces)
|
||||
.set({ ...patch, updatedAt: new Date() })
|
||||
.where(eq(executionWorkspaces.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toExecutionWorkspace(row) : null;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { toExecutionWorkspace };
|
||||
@@ -1,5 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { execFile as execFileCallback } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
import { and, asc, desc, eq, gt, inArray, sql } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import type { BillingType } from "@paperclipai/shared";
|
||||
@@ -26,22 +28,28 @@ import { costService } from "./costs.js";
|
||||
import { companySkillService } from "./company-skills.js";
|
||||
import { budgetService, type BudgetEnforcementScope } from "./budgets.js";
|
||||
import { secretService } from "./secrets.js";
|
||||
import { resolveDefaultAgentWorkspaceDir } from "../home-paths.js";
|
||||
import { resolveDefaultAgentWorkspaceDir, resolveManagedProjectWorkspaceDir } from "../home-paths.js";
|
||||
import { summarizeHeartbeatRunResultJson } from "./heartbeat-run-summary.js";
|
||||
import {
|
||||
buildWorkspaceReadyComment,
|
||||
cleanupExecutionWorkspaceArtifacts,
|
||||
ensureRuntimeServicesForRun,
|
||||
persistAdapterManagedRuntimeServices,
|
||||
realizeExecutionWorkspace,
|
||||
releaseRuntimeServicesForRun,
|
||||
sanitizeRuntimeServiceBaseEnv,
|
||||
} from "./workspace-runtime.js";
|
||||
import { issueService } from "./issues.js";
|
||||
import { executionWorkspaceService } from "./execution-workspaces.js";
|
||||
import { workspaceOperationService } from "./workspace-operations.js";
|
||||
import {
|
||||
buildExecutionWorkspaceAdapterConfig,
|
||||
gateProjectExecutionWorkspacePolicy,
|
||||
parseIssueExecutionWorkspaceSettings,
|
||||
parseProjectExecutionWorkspacePolicy,
|
||||
resolveExecutionWorkspaceMode,
|
||||
} from "./execution-workspace-policy.js";
|
||||
import { instanceSettingsService } from "./instance-settings.js";
|
||||
import { redactCurrentUserText, redactCurrentUserValue } from "../log-redaction.js";
|
||||
import {
|
||||
hasSessionCompactionThresholds,
|
||||
@@ -55,6 +63,80 @@ const HEARTBEAT_MAX_CONCURRENT_RUNS_MAX = 10;
|
||||
const DEFERRED_WAKE_CONTEXT_KEY = "_paperclipWakeContext";
|
||||
const startLocksByAgent = new Map<string, Promise<void>>();
|
||||
const REPO_ONLY_CWD_SENTINEL = "/__paperclip_repo_only__";
|
||||
const MANAGED_WORKSPACE_GIT_CLONE_TIMEOUT_MS = 10 * 60 * 1000;
|
||||
const execFile = promisify(execFileCallback);
|
||||
const SESSIONED_LOCAL_ADAPTERS = new Set([
|
||||
"claude_local",
|
||||
"codex_local",
|
||||
"cursor",
|
||||
"gemini_local",
|
||||
"opencode_local",
|
||||
"pi_local",
|
||||
]);
|
||||
|
||||
function deriveRepoNameFromRepoUrl(repoUrl: string | null): string | null {
|
||||
const trimmed = repoUrl?.trim() ?? "";
|
||||
if (!trimmed) return null;
|
||||
try {
|
||||
const parsed = new URL(trimmed);
|
||||
const cleanedPath = parsed.pathname.replace(/\/+$/, "");
|
||||
const repoName = cleanedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "") ?? "";
|
||||
return repoName || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureManagedProjectWorkspace(input: {
|
||||
companyId: string;
|
||||
projectId: string;
|
||||
repoUrl: string | null;
|
||||
}): Promise<{ cwd: string; warning: string | null }> {
|
||||
const cwd = resolveManagedProjectWorkspaceDir({
|
||||
companyId: input.companyId,
|
||||
projectId: input.projectId,
|
||||
repoName: deriveRepoNameFromRepoUrl(input.repoUrl),
|
||||
});
|
||||
await fs.mkdir(path.dirname(cwd), { recursive: true });
|
||||
const stats = await fs.stat(cwd).catch(() => null);
|
||||
|
||||
if (!input.repoUrl) {
|
||||
if (!stats) {
|
||||
await fs.mkdir(cwd, { recursive: true });
|
||||
}
|
||||
return { cwd, warning: null };
|
||||
}
|
||||
|
||||
const gitDirExists = await fs
|
||||
.stat(path.resolve(cwd, ".git"))
|
||||
.then((entry) => entry.isDirectory())
|
||||
.catch(() => false);
|
||||
if (gitDirExists) {
|
||||
return { cwd, warning: null };
|
||||
}
|
||||
|
||||
if (stats) {
|
||||
const entries = await fs.readdir(cwd).catch(() => []);
|
||||
if (entries.length > 0) {
|
||||
return {
|
||||
cwd,
|
||||
warning: `Managed workspace path "${cwd}" already exists but is not a git checkout. Using it as-is.`,
|
||||
};
|
||||
}
|
||||
await fs.rm(cwd, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
try {
|
||||
await execFile("git", ["clone", input.repoUrl, cwd], {
|
||||
env: sanitizeRuntimeServiceBaseEnv(process.env),
|
||||
timeout: MANAGED_WORKSPACE_GIT_CLONE_TIMEOUT_MS,
|
||||
});
|
||||
return { cwd, warning: null };
|
||||
} catch (error) {
|
||||
const reason = error instanceof Error ? error.message : String(error);
|
||||
throw new Error(`Failed to prepare managed checkout for "${input.repoUrl}" at "${cwd}": ${reason}`);
|
||||
}
|
||||
}
|
||||
|
||||
const heartbeatRunListColumns = {
|
||||
id: heartbeatRuns.id,
|
||||
@@ -159,6 +241,20 @@ export type ResolvedWorkspaceForRun = {
|
||||
warnings: string[];
|
||||
};
|
||||
|
||||
type ProjectWorkspaceCandidate = {
|
||||
id: string;
|
||||
};
|
||||
|
||||
export function prioritizeProjectWorkspaceCandidatesForRun<T extends ProjectWorkspaceCandidate>(
|
||||
rows: T[],
|
||||
preferredWorkspaceId: string | null | undefined,
|
||||
): T[] {
|
||||
if (!preferredWorkspaceId) return rows;
|
||||
const preferredIndex = rows.findIndex((row) => row.id === preferredWorkspaceId);
|
||||
if (preferredIndex <= 0) return rows;
|
||||
return [rows[preferredIndex]!, ...rows.slice(0, preferredIndex), ...rows.slice(preferredIndex + 1)];
|
||||
}
|
||||
|
||||
function readNonEmptyString(value: unknown): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value : null;
|
||||
}
|
||||
@@ -592,10 +688,14 @@ function resolveNextSessionState(input: {
|
||||
}
|
||||
|
||||
export function heartbeatService(db: Db) {
|
||||
const instanceSettings = instanceSettingsService(db);
|
||||
|
||||
const runLogStore = getRunLogStore();
|
||||
const secretsSvc = secretService(db);
|
||||
const companySkills = companySkillService(db);
|
||||
const issuesSvc = issueService(db);
|
||||
const executionWorkspacesSvc = executionWorkspaceService(db);
|
||||
const workspaceOperationsSvc = workspaceOperationService(db);
|
||||
const activeRunExecutions = new Set<string>();
|
||||
const budgetHooks = {
|
||||
cancelWorkForScope: cancelBudgetScopeWork,
|
||||
@@ -850,18 +950,25 @@ export function heartbeatService(db: Db) {
|
||||
): Promise<ResolvedWorkspaceForRun> {
|
||||
const issueId = readNonEmptyString(context.issueId);
|
||||
const contextProjectId = readNonEmptyString(context.projectId);
|
||||
const issueProjectId = issueId
|
||||
const contextProjectWorkspaceId = readNonEmptyString(context.projectWorkspaceId);
|
||||
const issueProjectRef = issueId
|
||||
? await db
|
||||
.select({ projectId: issues.projectId })
|
||||
.select({
|
||||
projectId: issues.projectId,
|
||||
projectWorkspaceId: issues.projectWorkspaceId,
|
||||
})
|
||||
.from(issues)
|
||||
.where(and(eq(issues.id, issueId), eq(issues.companyId, agent.companyId)))
|
||||
.then((rows) => rows[0]?.projectId ?? null)
|
||||
.then((rows) => rows[0] ?? null)
|
||||
: null;
|
||||
const issueProjectId = issueProjectRef?.projectId ?? null;
|
||||
const preferredProjectWorkspaceId =
|
||||
issueProjectRef?.projectWorkspaceId ?? contextProjectWorkspaceId ?? null;
|
||||
const resolvedProjectId = issueProjectId ?? contextProjectId;
|
||||
const useProjectWorkspace = opts?.useProjectWorkspace !== false;
|
||||
const workspaceProjectId = useProjectWorkspace ? resolvedProjectId : null;
|
||||
|
||||
const projectWorkspaceRows = workspaceProjectId
|
||||
const unorderedProjectWorkspaceRows = workspaceProjectId
|
||||
? await db
|
||||
.select()
|
||||
.from(projectWorkspaces)
|
||||
@@ -873,6 +980,10 @@ export function heartbeatService(db: Db) {
|
||||
)
|
||||
.orderBy(asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id))
|
||||
: [];
|
||||
const projectWorkspaceRows = prioritizeProjectWorkspaceCandidatesForRun(
|
||||
unorderedProjectWorkspaceRows,
|
||||
preferredProjectWorkspaceId,
|
||||
);
|
||||
|
||||
const workspaceHints = projectWorkspaceRows.map((workspace) => ({
|
||||
workspaceId: workspace.id,
|
||||
@@ -882,12 +993,34 @@ export function heartbeatService(db: Db) {
|
||||
}));
|
||||
|
||||
if (projectWorkspaceRows.length > 0) {
|
||||
const preferredWorkspace = preferredProjectWorkspaceId
|
||||
? projectWorkspaceRows.find((workspace) => workspace.id === preferredProjectWorkspaceId) ?? null
|
||||
: null;
|
||||
const missingProjectCwds: string[] = [];
|
||||
let hasConfiguredProjectCwd = false;
|
||||
let preferredWorkspaceWarning: string | null = null;
|
||||
if (preferredProjectWorkspaceId && !preferredWorkspace) {
|
||||
preferredWorkspaceWarning =
|
||||
`Selected project workspace "${preferredProjectWorkspaceId}" is not available on this project.`;
|
||||
}
|
||||
for (const workspace of projectWorkspaceRows) {
|
||||
const projectCwd = readNonEmptyString(workspace.cwd);
|
||||
let projectCwd = readNonEmptyString(workspace.cwd);
|
||||
let managedWorkspaceWarning: string | null = null;
|
||||
if (!projectCwd || projectCwd === REPO_ONLY_CWD_SENTINEL) {
|
||||
continue;
|
||||
try {
|
||||
const managedWorkspace = await ensureManagedProjectWorkspace({
|
||||
companyId: agent.companyId,
|
||||
projectId: workspaceProjectId ?? resolvedProjectId ?? workspace.projectId,
|
||||
repoUrl: readNonEmptyString(workspace.repoUrl),
|
||||
});
|
||||
projectCwd = managedWorkspace.cwd;
|
||||
managedWorkspaceWarning = managedWorkspace.warning;
|
||||
} catch (error) {
|
||||
if (preferredWorkspace?.id === workspace.id) {
|
||||
preferredWorkspaceWarning = error instanceof Error ? error.message : String(error);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
hasConfiguredProjectCwd = true;
|
||||
const projectCwdExists = await fs
|
||||
@@ -903,15 +1036,24 @@ export function heartbeatService(db: Db) {
|
||||
repoUrl: workspace.repoUrl,
|
||||
repoRef: workspace.repoRef,
|
||||
workspaceHints,
|
||||
warnings: [],
|
||||
warnings: [preferredWorkspaceWarning, managedWorkspaceWarning].filter(
|
||||
(value): value is string => Boolean(value),
|
||||
),
|
||||
};
|
||||
}
|
||||
if (preferredWorkspace?.id === workspace.id) {
|
||||
preferredWorkspaceWarning =
|
||||
`Selected project workspace path "${projectCwd}" is not available yet.`;
|
||||
}
|
||||
missingProjectCwds.push(projectCwd);
|
||||
}
|
||||
|
||||
const fallbackCwd = resolveDefaultAgentWorkspaceDir(agent.id);
|
||||
await fs.mkdir(fallbackCwd, { recursive: true });
|
||||
const warnings: string[] = [];
|
||||
if (preferredWorkspaceWarning) {
|
||||
warnings.push(preferredWorkspaceWarning);
|
||||
}
|
||||
if (missingProjectCwds.length > 0) {
|
||||
const firstMissing = missingProjectCwds[0];
|
||||
const extraMissingCount = Math.max(0, missingProjectCwds.length - 1);
|
||||
@@ -937,6 +1079,24 @@ export function heartbeatService(db: Db) {
|
||||
};
|
||||
}
|
||||
|
||||
if (workspaceProjectId) {
|
||||
const managedWorkspace = await ensureManagedProjectWorkspace({
|
||||
companyId: agent.companyId,
|
||||
projectId: workspaceProjectId,
|
||||
repoUrl: null,
|
||||
});
|
||||
return {
|
||||
cwd: managedWorkspace.cwd,
|
||||
source: "project_primary" as const,
|
||||
projectId: resolvedProjectId,
|
||||
workspaceId: null,
|
||||
repoUrl: null,
|
||||
repoRef: null,
|
||||
workspaceHints,
|
||||
warnings: managedWorkspace.warning ? [managedWorkspace.warning] : [],
|
||||
};
|
||||
}
|
||||
|
||||
const sessionCwd = readNonEmptyString(previousSessionParams?.cwd);
|
||||
if (sessionCwd) {
|
||||
const sessionCwdExists = await fs
|
||||
@@ -1475,10 +1635,16 @@ export function heartbeatService(db: Db) {
|
||||
const taskKey = deriveTaskKey(context, null);
|
||||
const sessionCodec = getAdapterSessionCodec(agent.adapterType);
|
||||
const issueId = readNonEmptyString(context.issueId);
|
||||
const issueAssigneeConfig = issueId
|
||||
const issueContext = issueId
|
||||
? await db
|
||||
.select({
|
||||
id: issues.id,
|
||||
identifier: issues.identifier,
|
||||
title: issues.title,
|
||||
projectId: issues.projectId,
|
||||
projectWorkspaceId: issues.projectWorkspaceId,
|
||||
executionWorkspaceId: issues.executionWorkspaceId,
|
||||
executionWorkspacePreference: issues.executionWorkspacePreference,
|
||||
assigneeAgentId: issues.assigneeAgentId,
|
||||
assigneeAdapterOverrides: issues.assigneeAdapterOverrides,
|
||||
executionWorkspaceSettings: issues.executionWorkspaceSettings,
|
||||
@@ -1488,22 +1654,27 @@ export function heartbeatService(db: Db) {
|
||||
.then((rows) => rows[0] ?? null)
|
||||
: null;
|
||||
const issueAssigneeOverrides =
|
||||
issueAssigneeConfig && issueAssigneeConfig.assigneeAgentId === agent.id
|
||||
issueContext && issueContext.assigneeAgentId === agent.id
|
||||
? parseIssueAssigneeAdapterOverrides(
|
||||
issueAssigneeConfig.assigneeAdapterOverrides,
|
||||
issueContext.assigneeAdapterOverrides,
|
||||
)
|
||||
: null;
|
||||
const issueExecutionWorkspaceSettings = parseIssueExecutionWorkspaceSettings(
|
||||
issueAssigneeConfig?.executionWorkspaceSettings,
|
||||
);
|
||||
const isolatedWorkspacesEnabled = (await instanceSettings.getExperimental()).enableIsolatedWorkspaces;
|
||||
const issueExecutionWorkspaceSettings = isolatedWorkspacesEnabled
|
||||
? parseIssueExecutionWorkspaceSettings(issueContext?.executionWorkspaceSettings)
|
||||
: null;
|
||||
const contextProjectId = readNonEmptyString(context.projectId);
|
||||
const executionProjectId = issueAssigneeConfig?.projectId ?? contextProjectId;
|
||||
const executionProjectId = issueContext?.projectId ?? contextProjectId;
|
||||
const projectExecutionWorkspacePolicy = executionProjectId
|
||||
? await db
|
||||
.select({ executionWorkspacePolicy: projects.executionWorkspacePolicy })
|
||||
.from(projects)
|
||||
.where(and(eq(projects.id, executionProjectId), eq(projects.companyId, agent.companyId)))
|
||||
.then((rows) => parseProjectExecutionWorkspacePolicy(rows[0]?.executionWorkspacePolicy))
|
||||
.then((rows) =>
|
||||
gateProjectExecutionWorkspacePolicy(
|
||||
parseProjectExecutionWorkspacePolicy(rows[0]?.executionWorkspacePolicy),
|
||||
isolatedWorkspacesEnabled,
|
||||
))
|
||||
: null;
|
||||
const taskSession = taskKey
|
||||
? await getTaskSession(agent.companyId, agent.id, agent.adapterType, taskKey)
|
||||
@@ -1545,17 +1716,24 @@ export function heartbeatService(db: Db) {
|
||||
...resolvedConfig,
|
||||
paperclipRuntimeSkills: runtimeSkillEntries,
|
||||
};
|
||||
const issueRef = issueId
|
||||
? await db
|
||||
.select({
|
||||
id: issues.id,
|
||||
identifier: issues.identifier,
|
||||
title: issues.title,
|
||||
})
|
||||
.from(issues)
|
||||
.where(and(eq(issues.id, issueId), eq(issues.companyId, agent.companyId)))
|
||||
.then((rows) => rows[0] ?? null)
|
||||
const issueRef = issueContext
|
||||
? {
|
||||
id: issueContext.id,
|
||||
identifier: issueContext.identifier,
|
||||
title: issueContext.title,
|
||||
projectId: issueContext.projectId,
|
||||
projectWorkspaceId: issueContext.projectWorkspaceId,
|
||||
executionWorkspaceId: issueContext.executionWorkspaceId,
|
||||
executionWorkspacePreference: issueContext.executionWorkspacePreference,
|
||||
}
|
||||
: null;
|
||||
const existingExecutionWorkspace =
|
||||
issueRef?.executionWorkspaceId ? await executionWorkspacesSvc.getById(issueRef.executionWorkspaceId) : null;
|
||||
const workspaceOperationRecorder = workspaceOperationsSvc.createRecorder({
|
||||
companyId: agent.companyId,
|
||||
heartbeatRunId: run.id,
|
||||
executionWorkspaceId: existingExecutionWorkspace?.id ?? null,
|
||||
});
|
||||
const executionWorkspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: resolvedWorkspace.cwd,
|
||||
@@ -1565,14 +1743,139 @@ export function heartbeatService(db: Db) {
|
||||
repoUrl: resolvedWorkspace.repoUrl,
|
||||
repoRef: resolvedWorkspace.repoRef,
|
||||
},
|
||||
config: resolvedConfig,
|
||||
config: runtimeConfig,
|
||||
issue: issueRef,
|
||||
agent: {
|
||||
id: agent.id,
|
||||
name: agent.name,
|
||||
companyId: agent.companyId,
|
||||
},
|
||||
recorder: workspaceOperationRecorder,
|
||||
});
|
||||
const resolvedProjectId = executionWorkspace.projectId ?? issueRef?.projectId ?? executionProjectId ?? null;
|
||||
const resolvedProjectWorkspaceId = issueRef?.projectWorkspaceId ?? resolvedWorkspace.workspaceId ?? null;
|
||||
const shouldReuseExisting =
|
||||
issueRef?.executionWorkspacePreference === "reuse_existing" &&
|
||||
existingExecutionWorkspace &&
|
||||
existingExecutionWorkspace.status !== "archived";
|
||||
let persistedExecutionWorkspace = null;
|
||||
try {
|
||||
persistedExecutionWorkspace = shouldReuseExisting && existingExecutionWorkspace
|
||||
? await executionWorkspacesSvc.update(existingExecutionWorkspace.id, {
|
||||
cwd: executionWorkspace.cwd,
|
||||
repoUrl: executionWorkspace.repoUrl,
|
||||
baseRef: executionWorkspace.repoRef,
|
||||
branchName: executionWorkspace.branchName,
|
||||
providerType: executionWorkspace.strategy === "git_worktree" ? "git_worktree" : "local_fs",
|
||||
providerRef: executionWorkspace.worktreePath,
|
||||
status: "active",
|
||||
lastUsedAt: new Date(),
|
||||
metadata: {
|
||||
...(existingExecutionWorkspace.metadata ?? {}),
|
||||
source: executionWorkspace.source,
|
||||
createdByRuntime: executionWorkspace.created,
|
||||
},
|
||||
})
|
||||
: resolvedProjectId
|
||||
? await executionWorkspacesSvc.create({
|
||||
companyId: agent.companyId,
|
||||
projectId: resolvedProjectId,
|
||||
projectWorkspaceId: resolvedProjectWorkspaceId,
|
||||
sourceIssueId: issueRef?.id ?? null,
|
||||
mode:
|
||||
executionWorkspaceMode === "isolated_workspace"
|
||||
? "isolated_workspace"
|
||||
: executionWorkspaceMode === "operator_branch"
|
||||
? "operator_branch"
|
||||
: executionWorkspaceMode === "agent_default"
|
||||
? "adapter_managed"
|
||||
: "shared_workspace",
|
||||
strategyType: executionWorkspace.strategy === "git_worktree" ? "git_worktree" : "project_primary",
|
||||
name: executionWorkspace.branchName ?? issueRef?.identifier ?? `workspace-${agent.id.slice(0, 8)}`,
|
||||
status: "active",
|
||||
cwd: executionWorkspace.cwd,
|
||||
repoUrl: executionWorkspace.repoUrl,
|
||||
baseRef: executionWorkspace.repoRef,
|
||||
branchName: executionWorkspace.branchName,
|
||||
providerType: executionWorkspace.strategy === "git_worktree" ? "git_worktree" : "local_fs",
|
||||
providerRef: executionWorkspace.worktreePath,
|
||||
lastUsedAt: new Date(),
|
||||
openedAt: new Date(),
|
||||
metadata: {
|
||||
source: executionWorkspace.source,
|
||||
createdByRuntime: executionWorkspace.created,
|
||||
},
|
||||
})
|
||||
: null;
|
||||
} catch (error) {
|
||||
if (executionWorkspace.created) {
|
||||
try {
|
||||
await cleanupExecutionWorkspaceArtifacts({
|
||||
workspace: {
|
||||
id: existingExecutionWorkspace?.id ?? `transient-${run.id}`,
|
||||
cwd: executionWorkspace.cwd,
|
||||
providerType: executionWorkspace.strategy === "git_worktree" ? "git_worktree" : "local_fs",
|
||||
providerRef: executionWorkspace.worktreePath,
|
||||
branchName: executionWorkspace.branchName,
|
||||
repoUrl: executionWorkspace.repoUrl,
|
||||
baseRef: executionWorkspace.repoRef,
|
||||
projectId: resolvedProjectId,
|
||||
projectWorkspaceId: resolvedProjectWorkspaceId,
|
||||
sourceIssueId: issueRef?.id ?? null,
|
||||
metadata: {
|
||||
createdByRuntime: true,
|
||||
source: executionWorkspace.source,
|
||||
},
|
||||
},
|
||||
projectWorkspace: {
|
||||
cwd: resolvedWorkspace.cwd,
|
||||
cleanupCommand: null,
|
||||
},
|
||||
teardownCommand: projectExecutionWorkspacePolicy?.workspaceStrategy?.teardownCommand ?? null,
|
||||
recorder: workspaceOperationRecorder,
|
||||
});
|
||||
} catch (cleanupError) {
|
||||
logger.warn(
|
||||
{
|
||||
runId: run.id,
|
||||
issueId,
|
||||
executionWorkspaceCwd: executionWorkspace.cwd,
|
||||
cleanupError: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
|
||||
},
|
||||
"Failed to cleanup realized execution workspace after persistence failure",
|
||||
);
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
await workspaceOperationRecorder.attachExecutionWorkspaceId(persistedExecutionWorkspace?.id ?? null);
|
||||
if (
|
||||
existingExecutionWorkspace &&
|
||||
persistedExecutionWorkspace &&
|
||||
existingExecutionWorkspace.id !== persistedExecutionWorkspace.id &&
|
||||
existingExecutionWorkspace.status === "active"
|
||||
) {
|
||||
await executionWorkspacesSvc.update(existingExecutionWorkspace.id, {
|
||||
status: "idle",
|
||||
cleanupReason: null,
|
||||
});
|
||||
}
|
||||
if (issueId && persistedExecutionWorkspace && issueRef?.executionWorkspaceId !== persistedExecutionWorkspace.id) {
|
||||
await issuesSvc.update(issueId, {
|
||||
executionWorkspaceId: persistedExecutionWorkspace.id,
|
||||
...(resolvedProjectWorkspaceId ? { projectWorkspaceId: resolvedProjectWorkspaceId } : {}),
|
||||
});
|
||||
}
|
||||
if (persistedExecutionWorkspace) {
|
||||
context.executionWorkspaceId = persistedExecutionWorkspace.id;
|
||||
await db
|
||||
.update(heartbeatRuns)
|
||||
.set({
|
||||
contextSnapshot: context,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(heartbeatRuns.id, run.id));
|
||||
}
|
||||
const runtimeSessionResolution = resolveRuntimeSessionParamsForWorkspace({
|
||||
agentId: agent.id,
|
||||
previousSessionParams,
|
||||
@@ -1776,6 +2079,7 @@ export function heartbeatService(db: Db) {
|
||||
},
|
||||
issue: issueRef,
|
||||
workspace: executionWorkspace,
|
||||
executionWorkspaceId: persistedExecutionWorkspace?.id ?? issueRef?.executionWorkspaceId ?? null,
|
||||
config: resolvedConfig,
|
||||
adapterEnv,
|
||||
onLog,
|
||||
|
||||
@@ -17,7 +17,11 @@ export { heartbeatService } from "./heartbeat.js";
|
||||
export { dashboardService } from "./dashboard.js";
|
||||
export { sidebarBadgeService } from "./sidebar-badges.js";
|
||||
export { accessService } from "./access.js";
|
||||
export { instanceSettingsService } from "./instance-settings.js";
|
||||
export { companyPortabilityService } from "./company-portability.js";
|
||||
export { executionWorkspaceService } from "./execution-workspaces.js";
|
||||
export { workspaceOperationService } from "./workspace-operations.js";
|
||||
export { workProductService } from "./work-products.js";
|
||||
export { logActivity, type LogActivityInput } from "./activity-log.js";
|
||||
export { notifyHireApproved, type NotifyHireApprovedInput } from "./hire-hook.js";
|
||||
export { publishLiveEvent, subscribeCompanyLiveEvents } from "./live-events.js";
|
||||
|
||||
95
server/src/services/instance-settings.ts
Normal file
95
server/src/services/instance-settings.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { companies, instanceSettings } from "@paperclipai/db";
|
||||
import {
|
||||
instanceExperimentalSettingsSchema,
|
||||
type InstanceExperimentalSettings,
|
||||
type InstanceSettings,
|
||||
type PatchInstanceExperimentalSettings,
|
||||
} from "@paperclipai/shared";
|
||||
import { eq } from "drizzle-orm";
|
||||
|
||||
const DEFAULT_SINGLETON_KEY = "default";
|
||||
|
||||
function normalizeExperimentalSettings(raw: unknown): InstanceExperimentalSettings {
|
||||
const parsed = instanceExperimentalSettingsSchema.safeParse(raw ?? {});
|
||||
if (parsed.success) {
|
||||
return {
|
||||
enableIsolatedWorkspaces: parsed.data.enableIsolatedWorkspaces ?? false,
|
||||
};
|
||||
}
|
||||
return {
|
||||
enableIsolatedWorkspaces: false,
|
||||
};
|
||||
}
|
||||
|
||||
function toInstanceSettings(row: typeof instanceSettings.$inferSelect): InstanceSettings {
|
||||
return {
|
||||
id: row.id,
|
||||
experimental: normalizeExperimentalSettings(row.experimental),
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
export function instanceSettingsService(db: Db) {
|
||||
async function getOrCreateRow() {
|
||||
const existing = await db
|
||||
.select()
|
||||
.from(instanceSettings)
|
||||
.where(eq(instanceSettings.singletonKey, DEFAULT_SINGLETON_KEY))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (existing) return existing;
|
||||
|
||||
const now = new Date();
|
||||
const [created] = await db
|
||||
.insert(instanceSettings)
|
||||
.values({
|
||||
singletonKey: DEFAULT_SINGLETON_KEY,
|
||||
experimental: {},
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [instanceSettings.singletonKey],
|
||||
set: {
|
||||
updatedAt: now,
|
||||
},
|
||||
})
|
||||
.returning();
|
||||
|
||||
return created;
|
||||
}
|
||||
|
||||
return {
|
||||
get: async (): Promise<InstanceSettings> => toInstanceSettings(await getOrCreateRow()),
|
||||
|
||||
getExperimental: async (): Promise<InstanceExperimentalSettings> => {
|
||||
const row = await getOrCreateRow();
|
||||
return normalizeExperimentalSettings(row.experimental);
|
||||
},
|
||||
|
||||
updateExperimental: async (patch: PatchInstanceExperimentalSettings): Promise<InstanceSettings> => {
|
||||
const current = await getOrCreateRow();
|
||||
const nextExperimental = normalizeExperimentalSettings({
|
||||
...normalizeExperimentalSettings(current.experimental),
|
||||
...patch,
|
||||
});
|
||||
const now = new Date();
|
||||
const [updated] = await db
|
||||
.update(instanceSettings)
|
||||
.set({
|
||||
experimental: { ...nextExperimental },
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(instanceSettings.id, current.id))
|
||||
.returning();
|
||||
return toInstanceSettings(updated ?? current);
|
||||
},
|
||||
|
||||
listCompanyIds: async (): Promise<string[]> =>
|
||||
db
|
||||
.select({ id: companies.id })
|
||||
.from(companies)
|
||||
.then((rows) => rows.map((row) => row.id)),
|
||||
};
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
documents,
|
||||
goals,
|
||||
heartbeatRuns,
|
||||
executionWorkspaces,
|
||||
issueAttachments,
|
||||
issueLabels,
|
||||
issueComments,
|
||||
@@ -22,8 +23,10 @@ import { extractProjectMentionIds } from "@paperclipai/shared";
|
||||
import { conflict, notFound, unprocessable } from "../errors.js";
|
||||
import {
|
||||
defaultIssueExecutionWorkspaceSettingsForProject,
|
||||
gateProjectExecutionWorkspacePolicy,
|
||||
parseProjectExecutionWorkspacePolicy,
|
||||
} from "./execution-workspace-policy.js";
|
||||
import { instanceSettingsService } from "./instance-settings.js";
|
||||
import { redactCurrentUserText } from "../log-redaction.js";
|
||||
import { resolveIssueGoalId, resolveNextIssueGoalId } from "./issue-goal-fallback.js";
|
||||
import { getDefaultCompanyGoal } from "./goals.js";
|
||||
@@ -315,6 +318,8 @@ function withActiveRuns(
|
||||
}
|
||||
|
||||
export function issueService(db: Db) {
|
||||
const instanceSettings = instanceSettingsService(db);
|
||||
|
||||
async function assertAssignableAgent(companyId: string, agentId: string) {
|
||||
const assignee = await db
|
||||
.select({
|
||||
@@ -356,6 +361,40 @@ export function issueService(db: Db) {
|
||||
}
|
||||
}
|
||||
|
||||
async function assertValidProjectWorkspace(companyId: string, projectId: string | null | undefined, projectWorkspaceId: string) {
|
||||
const workspace = await db
|
||||
.select({
|
||||
id: projectWorkspaces.id,
|
||||
companyId: projectWorkspaces.companyId,
|
||||
projectId: projectWorkspaces.projectId,
|
||||
})
|
||||
.from(projectWorkspaces)
|
||||
.where(eq(projectWorkspaces.id, projectWorkspaceId))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!workspace) throw notFound("Project workspace not found");
|
||||
if (workspace.companyId !== companyId) throw unprocessable("Project workspace must belong to same company");
|
||||
if (projectId && workspace.projectId !== projectId) {
|
||||
throw unprocessable("Project workspace must belong to the selected project");
|
||||
}
|
||||
}
|
||||
|
||||
async function assertValidExecutionWorkspace(companyId: string, projectId: string | null | undefined, executionWorkspaceId: string) {
|
||||
const workspace = await db
|
||||
.select({
|
||||
id: executionWorkspaces.id,
|
||||
companyId: executionWorkspaces.companyId,
|
||||
projectId: executionWorkspaces.projectId,
|
||||
})
|
||||
.from(executionWorkspaces)
|
||||
.where(eq(executionWorkspaces.id, executionWorkspaceId))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!workspace) throw notFound("Execution workspace not found");
|
||||
if (workspace.companyId !== companyId) throw unprocessable("Execution workspace must belong to same company");
|
||||
if (projectId && workspace.projectId !== projectId) {
|
||||
throw unprocessable("Execution workspace must belong to the selected project");
|
||||
}
|
||||
}
|
||||
|
||||
async function assertValidLabelIds(companyId: string, labelIds: string[], dbOrTx: any = db) {
|
||||
if (labelIds.length === 0) return;
|
||||
const existing = await dbOrTx
|
||||
@@ -641,6 +680,12 @@ export function issueService(db: Db) {
|
||||
data: Omit<typeof issues.$inferInsert, "companyId"> & { labelIds?: string[] },
|
||||
) => {
|
||||
const { labelIds: inputLabelIds, ...issueData } = data;
|
||||
const isolatedWorkspacesEnabled = (await instanceSettings.getExperimental()).enableIsolatedWorkspaces;
|
||||
if (!isolatedWorkspacesEnabled) {
|
||||
delete issueData.executionWorkspaceId;
|
||||
delete issueData.executionWorkspacePreference;
|
||||
delete issueData.executionWorkspaceSettings;
|
||||
}
|
||||
if (data.assigneeAgentId && data.assigneeUserId) {
|
||||
throw unprocessable("Issue can only have one assignee");
|
||||
}
|
||||
@@ -650,6 +695,12 @@ export function issueService(db: Db) {
|
||||
if (data.assigneeUserId) {
|
||||
await assertAssignableUser(companyId, data.assigneeUserId);
|
||||
}
|
||||
if (data.projectWorkspaceId) {
|
||||
await assertValidProjectWorkspace(companyId, data.projectId, data.projectWorkspaceId);
|
||||
}
|
||||
if (data.executionWorkspaceId) {
|
||||
await assertValidExecutionWorkspace(companyId, data.projectId, data.executionWorkspaceId);
|
||||
}
|
||||
if (data.status === "in_progress" && !data.assigneeAgentId && !data.assigneeUserId) {
|
||||
throw unprocessable("in_progress issues require an assignee");
|
||||
}
|
||||
@@ -665,9 +716,32 @@ export function issueService(db: Db) {
|
||||
.then((rows) => rows[0] ?? null);
|
||||
executionWorkspaceSettings =
|
||||
defaultIssueExecutionWorkspaceSettingsForProject(
|
||||
parseProjectExecutionWorkspacePolicy(project?.executionWorkspacePolicy),
|
||||
gateProjectExecutionWorkspacePolicy(
|
||||
parseProjectExecutionWorkspacePolicy(project?.executionWorkspacePolicy),
|
||||
isolatedWorkspacesEnabled,
|
||||
),
|
||||
) as Record<string, unknown> | null;
|
||||
}
|
||||
let projectWorkspaceId = issueData.projectWorkspaceId ?? null;
|
||||
if (!projectWorkspaceId && issueData.projectId) {
|
||||
const project = await tx
|
||||
.select({
|
||||
executionWorkspacePolicy: projects.executionWorkspacePolicy,
|
||||
})
|
||||
.from(projects)
|
||||
.where(and(eq(projects.id, issueData.projectId), eq(projects.companyId, companyId)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
const projectPolicy = parseProjectExecutionWorkspacePolicy(project?.executionWorkspacePolicy);
|
||||
projectWorkspaceId = projectPolicy?.defaultProjectWorkspaceId ?? null;
|
||||
if (!projectWorkspaceId) {
|
||||
projectWorkspaceId = await tx
|
||||
.select({ id: projectWorkspaces.id })
|
||||
.from(projectWorkspaces)
|
||||
.where(and(eq(projectWorkspaces.projectId, issueData.projectId), eq(projectWorkspaces.companyId, companyId)))
|
||||
.orderBy(desc(projectWorkspaces.isPrimary), asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id))
|
||||
.then((rows) => rows[0]?.id ?? null);
|
||||
}
|
||||
}
|
||||
const [company] = await tx
|
||||
.update(companies)
|
||||
.set({ issueCounter: sql`${companies.issueCounter} + 1` })
|
||||
@@ -684,6 +758,7 @@ export function issueService(db: Db) {
|
||||
goalId: issueData.goalId,
|
||||
defaultGoalId: defaultCompanyGoal?.id ?? null,
|
||||
}),
|
||||
...(projectWorkspaceId ? { projectWorkspaceId } : {}),
|
||||
...(executionWorkspaceSettings ? { executionWorkspaceSettings } : {}),
|
||||
companyId,
|
||||
issueNumber,
|
||||
@@ -717,6 +792,12 @@ export function issueService(db: Db) {
|
||||
if (!existing) return null;
|
||||
|
||||
const { labelIds: nextLabelIds, ...issueData } = data;
|
||||
const isolatedWorkspacesEnabled = (await instanceSettings.getExperimental()).enableIsolatedWorkspaces;
|
||||
if (!isolatedWorkspacesEnabled) {
|
||||
delete issueData.executionWorkspaceId;
|
||||
delete issueData.executionWorkspacePreference;
|
||||
delete issueData.executionWorkspaceSettings;
|
||||
}
|
||||
|
||||
if (issueData.status) {
|
||||
assertTransition(existing.status, issueData.status);
|
||||
@@ -744,6 +825,17 @@ export function issueService(db: Db) {
|
||||
if (issueData.assigneeUserId) {
|
||||
await assertAssignableUser(existing.companyId, issueData.assigneeUserId);
|
||||
}
|
||||
const nextProjectId = issueData.projectId !== undefined ? issueData.projectId : existing.projectId;
|
||||
const nextProjectWorkspaceId =
|
||||
issueData.projectWorkspaceId !== undefined ? issueData.projectWorkspaceId : existing.projectWorkspaceId;
|
||||
const nextExecutionWorkspaceId =
|
||||
issueData.executionWorkspaceId !== undefined ? issueData.executionWorkspaceId : existing.executionWorkspaceId;
|
||||
if (nextProjectWorkspaceId) {
|
||||
await assertValidProjectWorkspace(existing.companyId, nextProjectId, nextProjectWorkspaceId);
|
||||
}
|
||||
if (nextExecutionWorkspaceId) {
|
||||
await assertValidExecutionWorkspace(existing.companyId, nextProjectId, nextExecutionWorkspaceId);
|
||||
}
|
||||
|
||||
applyStatusSideEffects(issueData.status, patch);
|
||||
if (issueData.status && issueData.status !== "done") {
|
||||
|
||||
@@ -718,17 +718,16 @@ export function buildHostServices(
|
||||
const project = await projects.getById(params.projectId);
|
||||
if (!inCompany(project, companyId)) return null;
|
||||
const row = project.primaryWorkspace;
|
||||
if (!row) return null;
|
||||
const path = sanitizeWorkspacePath(row.cwd);
|
||||
const name = sanitizeWorkspaceName(row.name, path);
|
||||
const path = sanitizeWorkspacePath(project.codebase.effectiveLocalFolder);
|
||||
const name = sanitizeWorkspaceName(row?.name ?? project.name, path);
|
||||
return {
|
||||
id: row.id,
|
||||
projectId: row.projectId,
|
||||
id: row?.id ?? `${project.id}:managed`,
|
||||
projectId: project.id,
|
||||
name,
|
||||
path,
|
||||
isPrimary: row.isPrimary,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
isPrimary: true,
|
||||
createdAt: (row?.createdAt ?? project.createdAt).toISOString(),
|
||||
updatedAt: (row?.updatedAt ?? project.updatedAt).toISOString(),
|
||||
};
|
||||
},
|
||||
|
||||
@@ -742,17 +741,16 @@ export function buildHostServices(
|
||||
const project = await projects.getById(projectId);
|
||||
if (!inCompany(project, companyId)) return null;
|
||||
const row = project.primaryWorkspace;
|
||||
if (!row) return null;
|
||||
const path = sanitizeWorkspacePath(row.cwd);
|
||||
const name = sanitizeWorkspaceName(row.name, path);
|
||||
const path = sanitizeWorkspacePath(project.codebase.effectiveLocalFolder);
|
||||
const name = sanitizeWorkspaceName(row?.name ?? project.name, path);
|
||||
return {
|
||||
id: row.id,
|
||||
projectId: row.projectId,
|
||||
id: row?.id ?? `${project.id}:managed`,
|
||||
projectId: project.id,
|
||||
name,
|
||||
path,
|
||||
isPrimary: row.isPrimary,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
isPrimary: true,
|
||||
createdAt: (row?.createdAt ?? project.createdAt).toISOString(),
|
||||
updatedAt: (row?.updatedAt ?? project.updatedAt).toISOString(),
|
||||
};
|
||||
},
|
||||
},
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
deriveProjectUrlKey,
|
||||
isUuidLike,
|
||||
normalizeProjectUrlKey,
|
||||
type ProjectCodebase,
|
||||
type ProjectExecutionWorkspacePolicy,
|
||||
type ProjectGoalRef,
|
||||
type ProjectWorkspace,
|
||||
@@ -13,6 +14,7 @@ import {
|
||||
} from "@paperclipai/shared";
|
||||
import { listWorkspaceRuntimeServicesForProjectWorkspaces } from "./workspace-runtime.js";
|
||||
import { parseProjectExecutionWorkspacePolicy } from "./execution-workspace-policy.js";
|
||||
import { resolveManagedProjectWorkspaceDir } from "../home-paths.js";
|
||||
|
||||
type ProjectRow = typeof projects.$inferSelect;
|
||||
type ProjectWorkspaceRow = typeof projectWorkspaces.$inferSelect;
|
||||
@@ -20,9 +22,17 @@ type WorkspaceRuntimeServiceRow = typeof workspaceRuntimeServices.$inferSelect;
|
||||
const REPO_ONLY_CWD_SENTINEL = "/__paperclip_repo_only__";
|
||||
type CreateWorkspaceInput = {
|
||||
name?: string | null;
|
||||
sourceType?: string | null;
|
||||
cwd?: string | null;
|
||||
repoUrl?: string | null;
|
||||
repoRef?: string | null;
|
||||
defaultRef?: string | null;
|
||||
visibility?: string | null;
|
||||
setupCommand?: string | null;
|
||||
cleanupCommand?: string | null;
|
||||
remoteProvider?: string | null;
|
||||
remoteWorkspaceRef?: string | null;
|
||||
sharedWorkspaceKey?: string | null;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
isPrimary?: boolean;
|
||||
};
|
||||
@@ -33,6 +43,7 @@ interface ProjectWithGoals extends Omit<ProjectRow, "executionWorkspacePolicy">
|
||||
goalIds: string[];
|
||||
goals: ProjectGoalRef[];
|
||||
executionWorkspacePolicy: ProjectExecutionWorkspacePolicy | null;
|
||||
codebase: ProjectCodebase;
|
||||
workspaces: ProjectWorkspace[];
|
||||
primaryWorkspace: ProjectWorkspace | null;
|
||||
}
|
||||
@@ -91,6 +102,7 @@ function toRuntimeService(row: WorkspaceRuntimeServiceRow): WorkspaceRuntimeServ
|
||||
companyId: row.companyId,
|
||||
projectId: row.projectId ?? null,
|
||||
projectWorkspaceId: row.projectWorkspaceId ?? null,
|
||||
executionWorkspaceId: row.executionWorkspaceId ?? null,
|
||||
issueId: row.issueId ?? null,
|
||||
scopeType: row.scopeType as WorkspaceRuntimeService["scopeType"],
|
||||
scopeId: row.scopeId ?? null,
|
||||
@@ -125,9 +137,17 @@ function toWorkspace(
|
||||
companyId: row.companyId,
|
||||
projectId: row.projectId,
|
||||
name: row.name,
|
||||
cwd: row.cwd,
|
||||
sourceType: row.sourceType as ProjectWorkspace["sourceType"],
|
||||
cwd: normalizeWorkspaceCwd(row.cwd),
|
||||
repoUrl: row.repoUrl ?? null,
|
||||
repoRef: row.repoRef ?? null,
|
||||
defaultRef: row.defaultRef ?? row.repoRef ?? null,
|
||||
visibility: row.visibility as ProjectWorkspace["visibility"],
|
||||
setupCommand: row.setupCommand ?? null,
|
||||
cleanupCommand: row.cleanupCommand ?? null,
|
||||
remoteProvider: row.remoteProvider ?? null,
|
||||
remoteWorkspaceRef: row.remoteWorkspaceRef ?? null,
|
||||
sharedWorkspaceKey: row.sharedWorkspaceKey ?? null,
|
||||
metadata: (row.metadata as Record<string, unknown> | null) ?? null,
|
||||
isPrimary: row.isPrimary,
|
||||
runtimeServices,
|
||||
@@ -136,6 +156,48 @@ function toWorkspace(
|
||||
};
|
||||
}
|
||||
|
||||
function deriveRepoNameFromRepoUrl(repoUrl: string | null): string | null {
|
||||
const raw = readNonEmptyString(repoUrl);
|
||||
if (!raw) return null;
|
||||
try {
|
||||
const parsed = new URL(raw);
|
||||
const cleanedPath = parsed.pathname.replace(/\/+$/, "");
|
||||
const repoName = cleanedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "") ?? "";
|
||||
return repoName || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function deriveProjectCodebase(input: {
|
||||
companyId: string;
|
||||
projectId: string;
|
||||
primaryWorkspace: ProjectWorkspace | null;
|
||||
fallbackWorkspaces: ProjectWorkspace[];
|
||||
}): ProjectCodebase {
|
||||
const primaryWorkspace = input.primaryWorkspace ?? input.fallbackWorkspaces[0] ?? null;
|
||||
const repoUrl = primaryWorkspace?.repoUrl ?? null;
|
||||
const repoName = deriveRepoNameFromRepoUrl(repoUrl);
|
||||
const localFolder = primaryWorkspace?.cwd ?? null;
|
||||
const managedFolder = resolveManagedProjectWorkspaceDir({
|
||||
companyId: input.companyId,
|
||||
projectId: input.projectId,
|
||||
repoName,
|
||||
});
|
||||
|
||||
return {
|
||||
workspaceId: primaryWorkspace?.id ?? null,
|
||||
repoUrl,
|
||||
repoRef: primaryWorkspace?.repoRef ?? null,
|
||||
defaultRef: primaryWorkspace?.defaultRef ?? null,
|
||||
repoName,
|
||||
localFolder,
|
||||
managedFolder,
|
||||
effectiveLocalFolder: localFolder ?? managedFolder,
|
||||
origin: localFolder ? "local_folder" : "managed_checkout",
|
||||
};
|
||||
}
|
||||
|
||||
function pickPrimaryWorkspace(
|
||||
rows: ProjectWorkspaceRow[],
|
||||
runtimeServicesByWorkspaceId?: Map<string, WorkspaceRuntimeService[]>,
|
||||
@@ -186,10 +248,17 @@ async function attachWorkspaces(db: Db, rows: ProjectWithGoals[]): Promise<Proje
|
||||
sharedRuntimeServicesByWorkspaceId.get(workspace.id) ?? [],
|
||||
),
|
||||
);
|
||||
const primaryWorkspace = pickPrimaryWorkspace(projectWorkspaceRows, sharedRuntimeServicesByWorkspaceId);
|
||||
return {
|
||||
...row,
|
||||
codebase: deriveProjectCodebase({
|
||||
companyId: row.companyId,
|
||||
projectId: row.id,
|
||||
primaryWorkspace,
|
||||
fallbackWorkspaces: workspaces,
|
||||
}),
|
||||
workspaces,
|
||||
primaryWorkspace: pickPrimaryWorkspace(projectWorkspaceRows, sharedRuntimeServicesByWorkspaceId),
|
||||
primaryWorkspace,
|
||||
};
|
||||
});
|
||||
}
|
||||
@@ -491,7 +560,13 @@ export function projectService(db: Db) {
|
||||
|
||||
const cwd = normalizeWorkspaceCwd(data.cwd);
|
||||
const repoUrl = readNonEmptyString(data.repoUrl);
|
||||
if (!cwd && !repoUrl) return null;
|
||||
const sourceType = readNonEmptyString(data.sourceType) ?? (repoUrl ? "git_repo" : cwd ? "local_path" : "remote_managed");
|
||||
const remoteWorkspaceRef = readNonEmptyString(data.remoteWorkspaceRef);
|
||||
if (sourceType === "remote_managed") {
|
||||
if (!remoteWorkspaceRef && !repoUrl) return null;
|
||||
} else if (!cwd && !repoUrl) {
|
||||
return null;
|
||||
}
|
||||
const name = deriveWorkspaceName({
|
||||
name: data.name,
|
||||
cwd,
|
||||
@@ -525,9 +600,17 @@ export function projectService(db: Db) {
|
||||
companyId: project.companyId,
|
||||
projectId,
|
||||
name,
|
||||
sourceType,
|
||||
cwd: cwd ?? null,
|
||||
repoUrl: repoUrl ?? null,
|
||||
repoRef: readNonEmptyString(data.repoRef),
|
||||
defaultRef: readNonEmptyString(data.defaultRef) ?? readNonEmptyString(data.repoRef),
|
||||
visibility: readNonEmptyString(data.visibility) ?? "default",
|
||||
setupCommand: readNonEmptyString(data.setupCommand),
|
||||
cleanupCommand: readNonEmptyString(data.cleanupCommand),
|
||||
remoteProvider: readNonEmptyString(data.remoteProvider),
|
||||
remoteWorkspaceRef,
|
||||
sharedWorkspaceKey: readNonEmptyString(data.sharedWorkspaceKey),
|
||||
metadata: (data.metadata as Record<string, unknown> | null | undefined) ?? null,
|
||||
isPrimary: shouldBePrimary,
|
||||
})
|
||||
@@ -564,7 +647,19 @@ export function projectService(db: Db) {
|
||||
data.repoUrl !== undefined
|
||||
? readNonEmptyString(data.repoUrl)
|
||||
: readNonEmptyString(existing.repoUrl);
|
||||
if (!nextCwd && !nextRepoUrl) return null;
|
||||
const nextSourceType =
|
||||
data.sourceType !== undefined
|
||||
? readNonEmptyString(data.sourceType)
|
||||
: readNonEmptyString(existing.sourceType);
|
||||
const nextRemoteWorkspaceRef =
|
||||
data.remoteWorkspaceRef !== undefined
|
||||
? readNonEmptyString(data.remoteWorkspaceRef)
|
||||
: readNonEmptyString(existing.remoteWorkspaceRef);
|
||||
if (nextSourceType === "remote_managed") {
|
||||
if (!nextRemoteWorkspaceRef && !nextRepoUrl) return null;
|
||||
} else if (!nextCwd && !nextRepoUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const patch: Partial<typeof projectWorkspaces.$inferInsert> = {
|
||||
updatedAt: new Date(),
|
||||
@@ -576,6 +671,16 @@ export function projectService(db: Db) {
|
||||
if (data.cwd !== undefined) patch.cwd = nextCwd ?? null;
|
||||
if (data.repoUrl !== undefined) patch.repoUrl = nextRepoUrl ?? null;
|
||||
if (data.repoRef !== undefined) patch.repoRef = readNonEmptyString(data.repoRef);
|
||||
if (data.sourceType !== undefined && nextSourceType) patch.sourceType = nextSourceType;
|
||||
if (data.defaultRef !== undefined) patch.defaultRef = readNonEmptyString(data.defaultRef);
|
||||
if (data.visibility !== undefined && readNonEmptyString(data.visibility)) {
|
||||
patch.visibility = readNonEmptyString(data.visibility)!;
|
||||
}
|
||||
if (data.setupCommand !== undefined) patch.setupCommand = readNonEmptyString(data.setupCommand);
|
||||
if (data.cleanupCommand !== undefined) patch.cleanupCommand = readNonEmptyString(data.cleanupCommand);
|
||||
if (data.remoteProvider !== undefined) patch.remoteProvider = readNonEmptyString(data.remoteProvider);
|
||||
if (data.remoteWorkspaceRef !== undefined) patch.remoteWorkspaceRef = nextRemoteWorkspaceRef;
|
||||
if (data.sharedWorkspaceKey !== undefined) patch.sharedWorkspaceKey = readNonEmptyString(data.sharedWorkspaceKey);
|
||||
if (data.metadata !== undefined) patch.metadata = data.metadata;
|
||||
|
||||
const updated = await db.transaction(async (tx) => {
|
||||
|
||||
123
server/src/services/work-products.ts
Normal file
123
server/src/services/work-products.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { and, desc, eq } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { issueWorkProducts } from "@paperclipai/db";
|
||||
import type { IssueWorkProduct } from "@paperclipai/shared";
|
||||
|
||||
type IssueWorkProductRow = typeof issueWorkProducts.$inferSelect;
|
||||
|
||||
function toIssueWorkProduct(row: IssueWorkProductRow): IssueWorkProduct {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
projectId: row.projectId ?? null,
|
||||
issueId: row.issueId,
|
||||
executionWorkspaceId: row.executionWorkspaceId ?? null,
|
||||
runtimeServiceId: row.runtimeServiceId ?? null,
|
||||
type: row.type as IssueWorkProduct["type"],
|
||||
provider: row.provider,
|
||||
externalId: row.externalId ?? null,
|
||||
title: row.title,
|
||||
url: row.url ?? null,
|
||||
status: row.status,
|
||||
reviewState: row.reviewState as IssueWorkProduct["reviewState"],
|
||||
isPrimary: row.isPrimary,
|
||||
healthStatus: row.healthStatus as IssueWorkProduct["healthStatus"],
|
||||
summary: row.summary ?? null,
|
||||
metadata: (row.metadata as Record<string, unknown> | null) ?? null,
|
||||
createdByRunId: row.createdByRunId ?? null,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
export function workProductService(db: Db) {
|
||||
return {
|
||||
listForIssue: async (issueId: string) => {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(issueWorkProducts)
|
||||
.where(eq(issueWorkProducts.issueId, issueId))
|
||||
.orderBy(desc(issueWorkProducts.isPrimary), desc(issueWorkProducts.updatedAt));
|
||||
return rows.map(toIssueWorkProduct);
|
||||
},
|
||||
|
||||
getById: async (id: string) => {
|
||||
const row = await db
|
||||
.select()
|
||||
.from(issueWorkProducts)
|
||||
.where(eq(issueWorkProducts.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toIssueWorkProduct(row) : null;
|
||||
},
|
||||
|
||||
createForIssue: async (issueId: string, companyId: string, data: Omit<typeof issueWorkProducts.$inferInsert, "issueId" | "companyId">) => {
|
||||
const row = await db.transaction(async (tx) => {
|
||||
if (data.isPrimary) {
|
||||
await tx
|
||||
.update(issueWorkProducts)
|
||||
.set({ isPrimary: false, updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(issueWorkProducts.companyId, companyId),
|
||||
eq(issueWorkProducts.issueId, issueId),
|
||||
eq(issueWorkProducts.type, data.type),
|
||||
),
|
||||
);
|
||||
}
|
||||
return await tx
|
||||
.insert(issueWorkProducts)
|
||||
.values({
|
||||
...data,
|
||||
companyId,
|
||||
issueId,
|
||||
})
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
});
|
||||
return row ? toIssueWorkProduct(row) : null;
|
||||
},
|
||||
|
||||
update: async (id: string, patch: Partial<typeof issueWorkProducts.$inferInsert>) => {
|
||||
const row = await db.transaction(async (tx) => {
|
||||
const existing = await tx
|
||||
.select()
|
||||
.from(issueWorkProducts)
|
||||
.where(eq(issueWorkProducts.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!existing) return null;
|
||||
|
||||
if (patch.isPrimary === true) {
|
||||
await tx
|
||||
.update(issueWorkProducts)
|
||||
.set({ isPrimary: false, updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(issueWorkProducts.companyId, existing.companyId),
|
||||
eq(issueWorkProducts.issueId, existing.issueId),
|
||||
eq(issueWorkProducts.type, existing.type),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
return await tx
|
||||
.update(issueWorkProducts)
|
||||
.set({ ...patch, updatedAt: new Date() })
|
||||
.where(eq(issueWorkProducts.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
});
|
||||
return row ? toIssueWorkProduct(row) : null;
|
||||
},
|
||||
|
||||
remove: async (id: string) => {
|
||||
const row = await db
|
||||
.delete(issueWorkProducts)
|
||||
.where(eq(issueWorkProducts.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toIssueWorkProduct(row) : null;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { toIssueWorkProduct };
|
||||
156
server/src/services/workspace-operation-log-store.ts
Normal file
156
server/src/services/workspace-operation-log-store.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
import { createReadStream, promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { createHash } from "node:crypto";
|
||||
import { notFound } from "../errors.js";
|
||||
import { resolvePaperclipInstanceRoot } from "../home-paths.js";
|
||||
|
||||
export type WorkspaceOperationLogStoreType = "local_file";
|
||||
|
||||
export interface WorkspaceOperationLogHandle {
|
||||
store: WorkspaceOperationLogStoreType;
|
||||
logRef: string;
|
||||
}
|
||||
|
||||
export interface WorkspaceOperationLogReadOptions {
|
||||
offset?: number;
|
||||
limitBytes?: number;
|
||||
}
|
||||
|
||||
export interface WorkspaceOperationLogReadResult {
|
||||
content: string;
|
||||
nextOffset?: number;
|
||||
}
|
||||
|
||||
export interface WorkspaceOperationLogFinalizeSummary {
|
||||
bytes: number;
|
||||
sha256?: string;
|
||||
compressed: boolean;
|
||||
}
|
||||
|
||||
export interface WorkspaceOperationLogStore {
|
||||
begin(input: { companyId: string; operationId: string }): Promise<WorkspaceOperationLogHandle>;
|
||||
append(
|
||||
handle: WorkspaceOperationLogHandle,
|
||||
event: { stream: "stdout" | "stderr" | "system"; chunk: string; ts: string },
|
||||
): Promise<void>;
|
||||
finalize(handle: WorkspaceOperationLogHandle): Promise<WorkspaceOperationLogFinalizeSummary>;
|
||||
read(handle: WorkspaceOperationLogHandle, opts?: WorkspaceOperationLogReadOptions): Promise<WorkspaceOperationLogReadResult>;
|
||||
}
|
||||
|
||||
function safeSegments(...segments: string[]) {
|
||||
return segments.map((segment) => segment.replace(/[^a-zA-Z0-9._-]/g, "_"));
|
||||
}
|
||||
|
||||
function resolveWithin(basePath: string, relativePath: string) {
|
||||
const resolved = path.resolve(basePath, relativePath);
|
||||
const base = path.resolve(basePath) + path.sep;
|
||||
if (!resolved.startsWith(base) && resolved !== path.resolve(basePath)) {
|
||||
throw new Error("Invalid log path");
|
||||
}
|
||||
return resolved;
|
||||
}
|
||||
|
||||
function createLocalFileWorkspaceOperationLogStore(basePath: string): WorkspaceOperationLogStore {
|
||||
async function ensureDir(relativeDir: string) {
|
||||
const dir = resolveWithin(basePath, relativeDir);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
}
|
||||
|
||||
async function readFileRange(filePath: string, offset: number, limitBytes: number): Promise<WorkspaceOperationLogReadResult> {
|
||||
const stat = await fs.stat(filePath).catch(() => null);
|
||||
if (!stat) throw notFound("Workspace operation log not found");
|
||||
|
||||
const start = Math.max(0, Math.min(offset, stat.size));
|
||||
const end = Math.max(start, Math.min(start + limitBytes - 1, stat.size - 1));
|
||||
|
||||
if (start > end) {
|
||||
return { content: "", nextOffset: start };
|
||||
}
|
||||
|
||||
const chunks: Buffer[] = [];
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const stream = createReadStream(filePath, { start, end });
|
||||
stream.on("data", (chunk) => {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
});
|
||||
stream.on("error", reject);
|
||||
stream.on("end", () => resolve());
|
||||
});
|
||||
|
||||
const content = Buffer.concat(chunks).toString("utf8");
|
||||
const nextOffset = end + 1 < stat.size ? end + 1 : undefined;
|
||||
return { content, nextOffset };
|
||||
}
|
||||
|
||||
async function sha256File(filePath: string): Promise<string> {
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
const hash = createHash("sha256");
|
||||
const stream = createReadStream(filePath);
|
||||
stream.on("data", (chunk) => hash.update(chunk));
|
||||
stream.on("error", reject);
|
||||
stream.on("end", () => resolve(hash.digest("hex")));
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
async begin(input) {
|
||||
const [companyId] = safeSegments(input.companyId);
|
||||
const operationId = safeSegments(input.operationId)[0]!;
|
||||
const relDir = companyId;
|
||||
const relPath = path.join(relDir, `${operationId}.ndjson`);
|
||||
await ensureDir(relDir);
|
||||
|
||||
const absPath = resolveWithin(basePath, relPath);
|
||||
await fs.writeFile(absPath, "", "utf8");
|
||||
|
||||
return { store: "local_file", logRef: relPath };
|
||||
},
|
||||
|
||||
async append(handle, event) {
|
||||
if (handle.store !== "local_file") return;
|
||||
const absPath = resolveWithin(basePath, handle.logRef);
|
||||
const line = JSON.stringify({
|
||||
ts: event.ts,
|
||||
stream: event.stream,
|
||||
chunk: event.chunk,
|
||||
});
|
||||
await fs.appendFile(absPath, `${line}\n`, "utf8");
|
||||
},
|
||||
|
||||
async finalize(handle) {
|
||||
if (handle.store !== "local_file") {
|
||||
return { bytes: 0, compressed: false };
|
||||
}
|
||||
const absPath = resolveWithin(basePath, handle.logRef);
|
||||
const stat = await fs.stat(absPath).catch(() => null);
|
||||
if (!stat) throw notFound("Workspace operation log not found");
|
||||
|
||||
const hash = await sha256File(absPath);
|
||||
return {
|
||||
bytes: stat.size,
|
||||
sha256: hash,
|
||||
compressed: false,
|
||||
};
|
||||
},
|
||||
|
||||
async read(handle, opts) {
|
||||
if (handle.store !== "local_file") {
|
||||
throw notFound("Workspace operation log not found");
|
||||
}
|
||||
const absPath = resolveWithin(basePath, handle.logRef);
|
||||
const offset = opts?.offset ?? 0;
|
||||
const limitBytes = opts?.limitBytes ?? 256_000;
|
||||
return readFileRange(absPath, offset, limitBytes);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
let cachedStore: WorkspaceOperationLogStore | null = null;
|
||||
|
||||
export function getWorkspaceOperationLogStore() {
|
||||
if (cachedStore) return cachedStore;
|
||||
const basePath = process.env.WORKSPACE_OPERATION_LOG_BASE_PATH
|
||||
?? path.resolve(resolvePaperclipInstanceRoot(), "data", "workspace-operation-logs");
|
||||
cachedStore = createLocalFileWorkspaceOperationLogStore(basePath);
|
||||
return cachedStore;
|
||||
}
|
||||
250
server/src/services/workspace-operations.ts
Normal file
250
server/src/services/workspace-operations.ts
Normal file
@@ -0,0 +1,250 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { workspaceOperations } from "@paperclipai/db";
|
||||
import type { WorkspaceOperation, WorkspaceOperationPhase, WorkspaceOperationStatus } from "@paperclipai/shared";
|
||||
import { asc, desc, eq, inArray, isNull, or, and } from "drizzle-orm";
|
||||
import { notFound } from "../errors.js";
|
||||
import { redactCurrentUserText, redactCurrentUserValue } from "../log-redaction.js";
|
||||
import { getWorkspaceOperationLogStore } from "./workspace-operation-log-store.js";
|
||||
|
||||
type WorkspaceOperationRow = typeof workspaceOperations.$inferSelect;
|
||||
|
||||
function toWorkspaceOperation(row: WorkspaceOperationRow): WorkspaceOperation {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
executionWorkspaceId: row.executionWorkspaceId ?? null,
|
||||
heartbeatRunId: row.heartbeatRunId ?? null,
|
||||
phase: row.phase as WorkspaceOperationPhase,
|
||||
command: row.command ?? null,
|
||||
cwd: row.cwd ?? null,
|
||||
status: row.status as WorkspaceOperationStatus,
|
||||
exitCode: row.exitCode ?? null,
|
||||
logStore: row.logStore ?? null,
|
||||
logRef: row.logRef ?? null,
|
||||
logBytes: row.logBytes ?? null,
|
||||
logSha256: row.logSha256 ?? null,
|
||||
logCompressed: row.logCompressed,
|
||||
stdoutExcerpt: row.stdoutExcerpt ?? null,
|
||||
stderrExcerpt: row.stderrExcerpt ?? null,
|
||||
metadata: (row.metadata as Record<string, unknown> | null) ?? null,
|
||||
startedAt: row.startedAt,
|
||||
finishedAt: row.finishedAt ?? null,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
function appendExcerpt(current: string, chunk: string) {
|
||||
return `${current}${chunk}`.slice(-4096);
|
||||
}
|
||||
|
||||
function combineMetadata(
|
||||
base: Record<string, unknown> | null | undefined,
|
||||
patch: Record<string, unknown> | null | undefined,
|
||||
) {
|
||||
if (!base && !patch) return null;
|
||||
return {
|
||||
...(base ?? {}),
|
||||
...(patch ?? {}),
|
||||
};
|
||||
}
|
||||
|
||||
export interface WorkspaceOperationRecorder {
|
||||
attachExecutionWorkspaceId(executionWorkspaceId: string | null): Promise<void>;
|
||||
recordOperation(input: {
|
||||
phase: WorkspaceOperationPhase;
|
||||
command?: string | null;
|
||||
cwd?: string | null;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
run: () => Promise<{
|
||||
status?: WorkspaceOperationStatus;
|
||||
exitCode?: number | null;
|
||||
stdout?: string | null;
|
||||
stderr?: string | null;
|
||||
system?: string | null;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
}>;
|
||||
}): Promise<WorkspaceOperation>;
|
||||
}
|
||||
|
||||
export function workspaceOperationService(db: Db) {
|
||||
const logStore = getWorkspaceOperationLogStore();
|
||||
|
||||
async function getById(id: string) {
|
||||
const row = await db
|
||||
.select()
|
||||
.from(workspaceOperations)
|
||||
.where(eq(workspaceOperations.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? toWorkspaceOperation(row) : null;
|
||||
}
|
||||
|
||||
return {
|
||||
getById,
|
||||
|
||||
createRecorder(input: {
|
||||
companyId: string;
|
||||
heartbeatRunId?: string | null;
|
||||
executionWorkspaceId?: string | null;
|
||||
}): WorkspaceOperationRecorder {
|
||||
let executionWorkspaceId = input.executionWorkspaceId ?? null;
|
||||
const createdIds: string[] = [];
|
||||
|
||||
return {
|
||||
async attachExecutionWorkspaceId(nextExecutionWorkspaceId) {
|
||||
executionWorkspaceId = nextExecutionWorkspaceId ?? null;
|
||||
if (!executionWorkspaceId || createdIds.length === 0) return;
|
||||
await db
|
||||
.update(workspaceOperations)
|
||||
.set({
|
||||
executionWorkspaceId,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(inArray(workspaceOperations.id, createdIds));
|
||||
},
|
||||
|
||||
async recordOperation(recordInput) {
|
||||
const startedAt = new Date();
|
||||
const id = randomUUID();
|
||||
const handle = await logStore.begin({
|
||||
companyId: input.companyId,
|
||||
operationId: id,
|
||||
});
|
||||
|
||||
let stdoutExcerpt = "";
|
||||
let stderrExcerpt = "";
|
||||
const append = async (stream: "stdout" | "stderr" | "system", chunk: string | null | undefined) => {
|
||||
if (!chunk) return;
|
||||
const sanitizedChunk = redactCurrentUserText(chunk);
|
||||
if (stream === "stdout") stdoutExcerpt = appendExcerpt(stdoutExcerpt, sanitizedChunk);
|
||||
if (stream === "stderr") stderrExcerpt = appendExcerpt(stderrExcerpt, sanitizedChunk);
|
||||
await logStore.append(handle, {
|
||||
stream,
|
||||
chunk: sanitizedChunk,
|
||||
ts: new Date().toISOString(),
|
||||
});
|
||||
};
|
||||
|
||||
await db.insert(workspaceOperations).values({
|
||||
id,
|
||||
companyId: input.companyId,
|
||||
executionWorkspaceId,
|
||||
heartbeatRunId: input.heartbeatRunId ?? null,
|
||||
phase: recordInput.phase,
|
||||
command: recordInput.command ?? null,
|
||||
cwd: recordInput.cwd ?? null,
|
||||
status: "running",
|
||||
logStore: handle.store,
|
||||
logRef: handle.logRef,
|
||||
metadata: redactCurrentUserValue(recordInput.metadata ?? null) as Record<string, unknown> | null,
|
||||
startedAt,
|
||||
});
|
||||
createdIds.push(id);
|
||||
|
||||
try {
|
||||
const result = await recordInput.run();
|
||||
await append("system", result.system ?? null);
|
||||
await append("stdout", result.stdout ?? null);
|
||||
await append("stderr", result.stderr ?? null);
|
||||
const finalized = await logStore.finalize(handle);
|
||||
const finishedAt = new Date();
|
||||
const row = await db
|
||||
.update(workspaceOperations)
|
||||
.set({
|
||||
executionWorkspaceId,
|
||||
status: result.status ?? "succeeded",
|
||||
exitCode: result.exitCode ?? null,
|
||||
stdoutExcerpt: stdoutExcerpt || null,
|
||||
stderrExcerpt: stderrExcerpt || null,
|
||||
logBytes: finalized.bytes,
|
||||
logSha256: finalized.sha256,
|
||||
logCompressed: finalized.compressed,
|
||||
metadata: redactCurrentUserValue(
|
||||
combineMetadata(recordInput.metadata, result.metadata),
|
||||
) as Record<string, unknown> | null,
|
||||
finishedAt,
|
||||
updatedAt: finishedAt,
|
||||
})
|
||||
.where(eq(workspaceOperations.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!row) throw notFound("Workspace operation not found");
|
||||
return toWorkspaceOperation(row);
|
||||
} catch (error) {
|
||||
await append("stderr", error instanceof Error ? error.message : String(error));
|
||||
const finalized = await logStore.finalize(handle).catch(() => null);
|
||||
const finishedAt = new Date();
|
||||
await db
|
||||
.update(workspaceOperations)
|
||||
.set({
|
||||
executionWorkspaceId,
|
||||
status: "failed",
|
||||
stdoutExcerpt: stdoutExcerpt || null,
|
||||
stderrExcerpt: stderrExcerpt || null,
|
||||
logBytes: finalized?.bytes ?? null,
|
||||
logSha256: finalized?.sha256 ?? null,
|
||||
logCompressed: finalized?.compressed ?? false,
|
||||
finishedAt,
|
||||
updatedAt: finishedAt,
|
||||
})
|
||||
.where(eq(workspaceOperations.id, id));
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
listForRun: async (runId: string, executionWorkspaceId?: string | null) => {
|
||||
const conditions = [eq(workspaceOperations.heartbeatRunId, runId)];
|
||||
if (executionWorkspaceId) {
|
||||
const cleanupCondition = and(
|
||||
eq(workspaceOperations.executionWorkspaceId, executionWorkspaceId)!,
|
||||
isNull(workspaceOperations.heartbeatRunId),
|
||||
)!;
|
||||
if (cleanupCondition) conditions.push(cleanupCondition);
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(workspaceOperations)
|
||||
.where(conditions.length === 1 ? conditions[0]! : or(...conditions)!)
|
||||
.orderBy(asc(workspaceOperations.startedAt), asc(workspaceOperations.createdAt), asc(workspaceOperations.id));
|
||||
|
||||
return rows.map(toWorkspaceOperation);
|
||||
},
|
||||
|
||||
listForExecutionWorkspace: async (executionWorkspaceId: string) => {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(workspaceOperations)
|
||||
.where(eq(workspaceOperations.executionWorkspaceId, executionWorkspaceId))
|
||||
.orderBy(desc(workspaceOperations.startedAt), desc(workspaceOperations.createdAt));
|
||||
return rows.map(toWorkspaceOperation);
|
||||
},
|
||||
|
||||
readLog: async (operationId: string, opts?: { offset?: number; limitBytes?: number }) => {
|
||||
const operation = await getById(operationId);
|
||||
if (!operation) throw notFound("Workspace operation not found");
|
||||
if (!operation.logStore || !operation.logRef) throw notFound("Workspace operation log not found");
|
||||
|
||||
const result = await logStore.read(
|
||||
{
|
||||
store: operation.logStore as "local_file",
|
||||
logRef: operation.logRef,
|
||||
},
|
||||
opts,
|
||||
);
|
||||
|
||||
return {
|
||||
operationId,
|
||||
store: operation.logStore,
|
||||
logRef: operation.logRef,
|
||||
...result,
|
||||
content: redactCurrentUserText(result.content),
|
||||
};
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export { toWorkspaceOperation };
|
||||
@@ -10,6 +10,7 @@ import { workspaceRuntimeServices } from "@paperclipai/db";
|
||||
import { and, desc, eq, inArray } from "drizzle-orm";
|
||||
import { asNumber, asString, parseObject, renderTemplate } from "../adapters/utils.js";
|
||||
import { resolveHomeAwarePath } from "../home-paths.js";
|
||||
import type { WorkspaceOperationRecorder } from "./workspace-operations.js";
|
||||
|
||||
export interface ExecutionWorkspaceInput {
|
||||
baseCwd: string;
|
||||
@@ -46,6 +47,7 @@ export interface RuntimeServiceRef {
|
||||
companyId: string;
|
||||
projectId: string | null;
|
||||
projectWorkspaceId: string | null;
|
||||
executionWorkspaceId: string | null;
|
||||
issueId: string | null;
|
||||
serviceName: string;
|
||||
status: "starting" | "running" | "stopped" | "failed";
|
||||
@@ -92,6 +94,17 @@ function stableStringify(value: unknown): string {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
|
||||
export function sanitizeRuntimeServiceBaseEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||
const env: NodeJS.ProcessEnv = { ...baseEnv };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (key.startsWith("PAPERCLIP_")) {
|
||||
delete env[key];
|
||||
}
|
||||
}
|
||||
delete env.DATABASE_URL;
|
||||
return env;
|
||||
}
|
||||
|
||||
function stableRuntimeServiceId(input: {
|
||||
adapterType: string;
|
||||
runId: string;
|
||||
@@ -126,6 +139,7 @@ function toRuntimeServiceRef(record: RuntimeServiceRecord, overrides?: Partial<R
|
||||
companyId: record.companyId,
|
||||
projectId: record.projectId,
|
||||
projectWorkspaceId: record.projectWorkspaceId,
|
||||
executionWorkspaceId: record.executionWorkspaceId,
|
||||
issueId: record.issueId,
|
||||
serviceName: record.serviceName,
|
||||
status: record.status,
|
||||
@@ -208,12 +222,23 @@ function resolveConfiguredPath(value: string, baseDir: string): string {
|
||||
return path.resolve(baseDir, value);
|
||||
}
|
||||
|
||||
async function runGit(args: string[], cwd: string): Promise<string> {
|
||||
function formatCommandForDisplay(command: string, args: string[]) {
|
||||
return [command, ...args]
|
||||
.map((part) => (/^[A-Za-z0-9_./:-]+$/.test(part) ? part : JSON.stringify(part)))
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
async function executeProcess(input: {
|
||||
command: string;
|
||||
args: string[];
|
||||
cwd: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): Promise<{ stdout: string; stderr: string; code: number | null }> {
|
||||
const proc = await new Promise<{ stdout: string; stderr: string; code: number | null }>((resolve, reject) => {
|
||||
const child = spawn("git", args, {
|
||||
cwd,
|
||||
const child = spawn(input.command, input.args, {
|
||||
cwd: input.cwd,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
env: process.env,
|
||||
env: input.env ?? process.env,
|
||||
});
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
@@ -226,16 +251,45 @@ async function runGit(args: string[], cwd: string): Promise<string> {
|
||||
child.on("error", reject);
|
||||
child.on("close", (code) => resolve({ stdout, stderr, code }));
|
||||
});
|
||||
return proc;
|
||||
}
|
||||
|
||||
async function runGit(args: string[], cwd: string): Promise<string> {
|
||||
const proc = await executeProcess({
|
||||
command: "git",
|
||||
args,
|
||||
cwd,
|
||||
});
|
||||
if (proc.code !== 0) {
|
||||
throw new Error(proc.stderr.trim() || proc.stdout.trim() || `git ${args.join(" ")} failed`);
|
||||
}
|
||||
return proc.stdout.trim();
|
||||
}
|
||||
|
||||
function gitErrorIncludes(error: unknown, needle: string) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return message.toLowerCase().includes(needle.toLowerCase());
|
||||
}
|
||||
|
||||
async function directoryExists(value: string) {
|
||||
return fs.stat(value).then((stats) => stats.isDirectory()).catch(() => false);
|
||||
}
|
||||
|
||||
function terminateChildProcess(child: ChildProcess) {
|
||||
if (!child.pid) return;
|
||||
if (process.platform !== "win32") {
|
||||
try {
|
||||
process.kill(-child.pid, "SIGTERM");
|
||||
return;
|
||||
} catch {
|
||||
// Fall through to the direct child kill.
|
||||
}
|
||||
}
|
||||
if (!child.killed) {
|
||||
child.kill("SIGTERM");
|
||||
}
|
||||
}
|
||||
|
||||
function buildWorkspaceCommandEnv(input: {
|
||||
base: ExecutionWorkspaceInput;
|
||||
repoRoot: string;
|
||||
@@ -274,22 +328,11 @@ async function runWorkspaceCommand(input: {
|
||||
label: string;
|
||||
}) {
|
||||
const shell = process.env.SHELL?.trim() || "/bin/sh";
|
||||
const proc = await new Promise<{ stdout: string; stderr: string; code: number | null }>((resolve, reject) => {
|
||||
const child = spawn(shell, ["-c", input.command], {
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
child.stdout?.on("data", (chunk) => {
|
||||
stdout += String(chunk);
|
||||
});
|
||||
child.stderr?.on("data", (chunk) => {
|
||||
stderr += String(chunk);
|
||||
});
|
||||
child.on("error", reject);
|
||||
child.on("close", (code) => resolve({ stdout, stderr, code }));
|
||||
const proc = await executeProcess({
|
||||
command: shell,
|
||||
args: ["-c", input.command],
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
});
|
||||
if (proc.code === 0) return;
|
||||
|
||||
@@ -301,6 +344,115 @@ async function runWorkspaceCommand(input: {
|
||||
);
|
||||
}
|
||||
|
||||
async function recordGitOperation(
|
||||
recorder: WorkspaceOperationRecorder | null | undefined,
|
||||
input: {
|
||||
phase: "worktree_prepare" | "worktree_cleanup";
|
||||
args: string[];
|
||||
cwd: string;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
successMessage?: string | null;
|
||||
failureLabel?: string | null;
|
||||
},
|
||||
): Promise<string> {
|
||||
if (!recorder) {
|
||||
return runGit(input.args, input.cwd);
|
||||
}
|
||||
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
let code: number | null = null;
|
||||
await recorder.recordOperation({
|
||||
phase: input.phase,
|
||||
command: formatCommandForDisplay("git", input.args),
|
||||
cwd: input.cwd,
|
||||
metadata: input.metadata ?? null,
|
||||
run: async () => {
|
||||
const result = await executeProcess({
|
||||
command: "git",
|
||||
args: input.args,
|
||||
cwd: input.cwd,
|
||||
});
|
||||
stdout = result.stdout;
|
||||
stderr = result.stderr;
|
||||
code = result.code;
|
||||
return {
|
||||
status: result.code === 0 ? "succeeded" : "failed",
|
||||
exitCode: result.code,
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
system: result.code === 0 ? input.successMessage ?? null : null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
if (code !== 0) {
|
||||
const details = [stderr.trim(), stdout.trim()].filter(Boolean).join("\n");
|
||||
throw new Error(
|
||||
details.length > 0
|
||||
? `${input.failureLabel ?? `git ${input.args.join(" ")}`} failed: ${details}`
|
||||
: `${input.failureLabel ?? `git ${input.args.join(" ")}`} failed with exit code ${code ?? -1}`,
|
||||
);
|
||||
}
|
||||
return stdout.trim();
|
||||
}
|
||||
|
||||
async function recordWorkspaceCommandOperation(
|
||||
recorder: WorkspaceOperationRecorder | null | undefined,
|
||||
input: {
|
||||
phase: "workspace_provision" | "workspace_teardown";
|
||||
command: string;
|
||||
cwd: string;
|
||||
env: NodeJS.ProcessEnv;
|
||||
label: string;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
successMessage?: string | null;
|
||||
},
|
||||
) {
|
||||
if (!recorder) {
|
||||
await runWorkspaceCommand(input);
|
||||
return;
|
||||
}
|
||||
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
let code: number | null = null;
|
||||
await recorder.recordOperation({
|
||||
phase: input.phase,
|
||||
command: input.command,
|
||||
cwd: input.cwd,
|
||||
metadata: input.metadata ?? null,
|
||||
run: async () => {
|
||||
const shell = process.env.SHELL?.trim() || "/bin/sh";
|
||||
const result = await executeProcess({
|
||||
command: shell,
|
||||
args: ["-c", input.command],
|
||||
cwd: input.cwd,
|
||||
env: input.env,
|
||||
});
|
||||
stdout = result.stdout;
|
||||
stderr = result.stderr;
|
||||
code = result.code;
|
||||
return {
|
||||
status: result.code === 0 ? "succeeded" : "failed",
|
||||
exitCode: result.code,
|
||||
stdout: result.stdout,
|
||||
stderr: result.stderr,
|
||||
system: result.code === 0 ? input.successMessage ?? null : null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
if (code === 0) return;
|
||||
|
||||
const details = [stderr.trim(), stdout.trim()].filter(Boolean).join("\n");
|
||||
throw new Error(
|
||||
details.length > 0
|
||||
? `${input.label} failed: ${details}`
|
||||
: `${input.label} failed with exit code ${code ?? -1}`,
|
||||
);
|
||||
}
|
||||
|
||||
async function provisionExecutionWorktree(input: {
|
||||
strategy: Record<string, unknown>;
|
||||
base: ExecutionWorkspaceInput;
|
||||
@@ -310,11 +462,13 @@ async function provisionExecutionWorktree(input: {
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
created: boolean;
|
||||
recorder?: WorkspaceOperationRecorder | null;
|
||||
}) {
|
||||
const provisionCommand = asString(input.strategy.provisionCommand, "").trim();
|
||||
if (!provisionCommand) return;
|
||||
|
||||
await runWorkspaceCommand({
|
||||
await recordWorkspaceCommandOperation(input.recorder, {
|
||||
phase: "workspace_provision",
|
||||
command: provisionCommand,
|
||||
cwd: input.worktreePath,
|
||||
env: buildWorkspaceCommandEnv({
|
||||
@@ -327,14 +481,71 @@ async function provisionExecutionWorktree(input: {
|
||||
created: input.created,
|
||||
}),
|
||||
label: `Execution workspace provision command "${provisionCommand}"`,
|
||||
metadata: {
|
||||
repoRoot: input.repoRoot,
|
||||
worktreePath: input.worktreePath,
|
||||
branchName: input.branchName,
|
||||
created: input.created,
|
||||
},
|
||||
successMessage: `Provisioned workspace at ${input.worktreePath}\n`,
|
||||
});
|
||||
}
|
||||
|
||||
function buildExecutionWorkspaceCleanupEnv(input: {
|
||||
workspace: {
|
||||
cwd: string | null;
|
||||
providerRef: string | null;
|
||||
branchName: string | null;
|
||||
repoUrl: string | null;
|
||||
baseRef: string | null;
|
||||
projectId: string | null;
|
||||
projectWorkspaceId: string | null;
|
||||
sourceIssueId: string | null;
|
||||
};
|
||||
projectWorkspaceCwd?: string | null;
|
||||
}) {
|
||||
const env: NodeJS.ProcessEnv = sanitizeRuntimeServiceBaseEnv(process.env);
|
||||
env.PAPERCLIP_WORKSPACE_CWD = input.workspace.cwd ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_PATH = input.workspace.cwd ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_WORKTREE_PATH =
|
||||
input.workspace.providerRef ?? input.workspace.cwd ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_BRANCH = input.workspace.branchName ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_BASE_CWD = input.projectWorkspaceCwd ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_REPO_ROOT = input.projectWorkspaceCwd ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_REPO_URL = input.workspace.repoUrl ?? "";
|
||||
env.PAPERCLIP_WORKSPACE_REPO_REF = input.workspace.baseRef ?? "";
|
||||
env.PAPERCLIP_PROJECT_ID = input.workspace.projectId ?? "";
|
||||
env.PAPERCLIP_PROJECT_WORKSPACE_ID = input.workspace.projectWorkspaceId ?? "";
|
||||
env.PAPERCLIP_ISSUE_ID = input.workspace.sourceIssueId ?? "";
|
||||
return env;
|
||||
}
|
||||
|
||||
async function resolveGitRepoRootForWorkspaceCleanup(
|
||||
worktreePath: string,
|
||||
projectWorkspaceCwd: string | null,
|
||||
): Promise<string | null> {
|
||||
if (projectWorkspaceCwd) {
|
||||
const resolvedProjectWorkspaceCwd = path.resolve(projectWorkspaceCwd);
|
||||
const gitDir = await runGit(["rev-parse", "--git-common-dir"], resolvedProjectWorkspaceCwd)
|
||||
.catch(() => null);
|
||||
if (gitDir) {
|
||||
const resolvedGitDir = path.resolve(resolvedProjectWorkspaceCwd, gitDir);
|
||||
return path.dirname(resolvedGitDir);
|
||||
}
|
||||
}
|
||||
|
||||
const gitDir = await runGit(["rev-parse", "--git-common-dir"], worktreePath).catch(() => null);
|
||||
if (!gitDir) return null;
|
||||
const resolvedGitDir = path.resolve(worktreePath, gitDir);
|
||||
return path.dirname(resolvedGitDir);
|
||||
}
|
||||
|
||||
export async function realizeExecutionWorkspace(input: {
|
||||
base: ExecutionWorkspaceInput;
|
||||
config: Record<string, unknown>;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
recorder?: WorkspaceOperationRecorder | null;
|
||||
}): Promise<RealizedExecutionWorkspace> {
|
||||
const rawStrategy = parseObject(input.config.workspaceStrategy);
|
||||
const strategyType = asString(rawStrategy.type, "project_primary");
|
||||
@@ -372,6 +583,25 @@ export async function realizeExecutionWorkspace(input: {
|
||||
if (existingWorktree) {
|
||||
const existingGitDir = await runGit(["rev-parse", "--git-dir"], worktreePath).catch(() => null);
|
||||
if (existingGitDir) {
|
||||
if (input.recorder) {
|
||||
await input.recorder.recordOperation({
|
||||
phase: "worktree_prepare",
|
||||
cwd: repoRoot,
|
||||
metadata: {
|
||||
repoRoot,
|
||||
worktreePath,
|
||||
branchName,
|
||||
baseRef,
|
||||
created: false,
|
||||
reused: true,
|
||||
},
|
||||
run: async () => ({
|
||||
status: "succeeded",
|
||||
exitCode: 0,
|
||||
system: `Reused existing git worktree at ${worktreePath}\n`,
|
||||
}),
|
||||
});
|
||||
}
|
||||
await provisionExecutionWorktree({
|
||||
strategy: rawStrategy,
|
||||
base: input.base,
|
||||
@@ -381,6 +611,7 @@ export async function realizeExecutionWorkspace(input: {
|
||||
issue: input.issue,
|
||||
agent: input.agent,
|
||||
created: false,
|
||||
recorder: input.recorder ?? null,
|
||||
});
|
||||
return {
|
||||
...input.base,
|
||||
@@ -395,7 +626,41 @@ export async function realizeExecutionWorkspace(input: {
|
||||
throw new Error(`Configured worktree path "${worktreePath}" already exists and is not a git worktree.`);
|
||||
}
|
||||
|
||||
await runGit(["worktree", "add", "-B", branchName, worktreePath, baseRef], repoRoot);
|
||||
try {
|
||||
await recordGitOperation(input.recorder, {
|
||||
phase: "worktree_prepare",
|
||||
args: ["worktree", "add", "-b", branchName, worktreePath, baseRef],
|
||||
cwd: repoRoot,
|
||||
metadata: {
|
||||
repoRoot,
|
||||
worktreePath,
|
||||
branchName,
|
||||
baseRef,
|
||||
created: true,
|
||||
},
|
||||
successMessage: `Created git worktree at ${worktreePath}\n`,
|
||||
failureLabel: `git worktree add ${worktreePath}`,
|
||||
});
|
||||
} catch (error) {
|
||||
if (!gitErrorIncludes(error, "already exists")) {
|
||||
throw error;
|
||||
}
|
||||
await recordGitOperation(input.recorder, {
|
||||
phase: "worktree_prepare",
|
||||
args: ["worktree", "add", worktreePath, branchName],
|
||||
cwd: repoRoot,
|
||||
metadata: {
|
||||
repoRoot,
|
||||
worktreePath,
|
||||
branchName,
|
||||
baseRef,
|
||||
created: false,
|
||||
reusedExistingBranch: true,
|
||||
},
|
||||
successMessage: `Attached existing branch ${branchName} at ${worktreePath}\n`,
|
||||
failureLabel: `git worktree add ${worktreePath}`,
|
||||
});
|
||||
}
|
||||
await provisionExecutionWorktree({
|
||||
strategy: rawStrategy,
|
||||
base: input.base,
|
||||
@@ -405,6 +670,7 @@ export async function realizeExecutionWorkspace(input: {
|
||||
issue: input.issue,
|
||||
agent: input.agent,
|
||||
created: true,
|
||||
recorder: input.recorder ?? null,
|
||||
});
|
||||
|
||||
return {
|
||||
@@ -418,6 +684,158 @@ export async function realizeExecutionWorkspace(input: {
|
||||
};
|
||||
}
|
||||
|
||||
export async function cleanupExecutionWorkspaceArtifacts(input: {
|
||||
workspace: {
|
||||
id: string;
|
||||
cwd: string | null;
|
||||
providerType: string;
|
||||
providerRef: string | null;
|
||||
branchName: string | null;
|
||||
repoUrl: string | null;
|
||||
baseRef: string | null;
|
||||
projectId: string | null;
|
||||
projectWorkspaceId: string | null;
|
||||
sourceIssueId: string | null;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
};
|
||||
projectWorkspace?: {
|
||||
cwd: string | null;
|
||||
cleanupCommand: string | null;
|
||||
} | null;
|
||||
teardownCommand?: string | null;
|
||||
recorder?: WorkspaceOperationRecorder | null;
|
||||
}) {
|
||||
const warnings: string[] = [];
|
||||
const workspacePath = input.workspace.providerRef ?? input.workspace.cwd;
|
||||
const cleanupEnv = buildExecutionWorkspaceCleanupEnv({
|
||||
workspace: input.workspace,
|
||||
projectWorkspaceCwd: input.projectWorkspace?.cwd ?? null,
|
||||
});
|
||||
const createdByRuntime = input.workspace.metadata?.createdByRuntime === true;
|
||||
const cleanupCommands = [
|
||||
input.projectWorkspace?.cleanupCommand ?? null,
|
||||
input.teardownCommand ?? null,
|
||||
]
|
||||
.map((value) => asString(value, "").trim())
|
||||
.filter(Boolean);
|
||||
|
||||
for (const command of cleanupCommands) {
|
||||
try {
|
||||
await recordWorkspaceCommandOperation(input.recorder, {
|
||||
phase: "workspace_teardown",
|
||||
command,
|
||||
cwd: workspacePath ?? input.projectWorkspace?.cwd ?? process.cwd(),
|
||||
env: cleanupEnv,
|
||||
label: `Execution workspace cleanup command "${command}"`,
|
||||
metadata: {
|
||||
workspaceId: input.workspace.id,
|
||||
workspacePath,
|
||||
branchName: input.workspace.branchName,
|
||||
providerType: input.workspace.providerType,
|
||||
},
|
||||
successMessage: `Completed cleanup command "${command}"\n`,
|
||||
});
|
||||
} catch (err) {
|
||||
warnings.push(err instanceof Error ? err.message : String(err));
|
||||
}
|
||||
}
|
||||
|
||||
if (input.workspace.providerType === "git_worktree" && workspacePath) {
|
||||
const repoRoot = await resolveGitRepoRootForWorkspaceCleanup(
|
||||
workspacePath,
|
||||
input.projectWorkspace?.cwd ?? null,
|
||||
);
|
||||
const worktreeExists = await directoryExists(workspacePath);
|
||||
if (worktreeExists) {
|
||||
if (!repoRoot) {
|
||||
warnings.push(`Could not resolve git repo root for "${workspacePath}".`);
|
||||
} else {
|
||||
try {
|
||||
await recordGitOperation(input.recorder, {
|
||||
phase: "worktree_cleanup",
|
||||
args: ["worktree", "remove", "--force", workspacePath],
|
||||
cwd: repoRoot,
|
||||
metadata: {
|
||||
workspaceId: input.workspace.id,
|
||||
workspacePath,
|
||||
branchName: input.workspace.branchName,
|
||||
cleanupAction: "worktree_remove",
|
||||
},
|
||||
successMessage: `Removed git worktree ${workspacePath}\n`,
|
||||
failureLabel: `git worktree remove ${workspacePath}`,
|
||||
});
|
||||
} catch (err) {
|
||||
warnings.push(err instanceof Error ? err.message : String(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (createdByRuntime && input.workspace.branchName) {
|
||||
if (!repoRoot) {
|
||||
warnings.push(`Could not resolve git repo root to delete branch "${input.workspace.branchName}".`);
|
||||
} else {
|
||||
try {
|
||||
await recordGitOperation(input.recorder, {
|
||||
phase: "worktree_cleanup",
|
||||
args: ["branch", "-d", input.workspace.branchName],
|
||||
cwd: repoRoot,
|
||||
metadata: {
|
||||
workspaceId: input.workspace.id,
|
||||
workspacePath,
|
||||
branchName: input.workspace.branchName,
|
||||
cleanupAction: "branch_delete",
|
||||
},
|
||||
successMessage: `Deleted branch ${input.workspace.branchName}\n`,
|
||||
failureLabel: `git branch -d ${input.workspace.branchName}`,
|
||||
});
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
warnings.push(`Skipped deleting branch "${input.workspace.branchName}": ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (input.workspace.providerType === "local_fs" && createdByRuntime && workspacePath) {
|
||||
const projectWorkspaceCwd = input.projectWorkspace?.cwd ? path.resolve(input.projectWorkspace.cwd) : null;
|
||||
const resolvedWorkspacePath = path.resolve(workspacePath);
|
||||
const containsProjectWorkspace = projectWorkspaceCwd
|
||||
? (
|
||||
resolvedWorkspacePath === projectWorkspaceCwd ||
|
||||
projectWorkspaceCwd.startsWith(`${resolvedWorkspacePath}${path.sep}`)
|
||||
)
|
||||
: false;
|
||||
if (containsProjectWorkspace) {
|
||||
warnings.push(`Refusing to remove path "${workspacePath}" because it contains the project workspace.`);
|
||||
} else {
|
||||
await fs.rm(resolvedWorkspacePath, { recursive: true, force: true });
|
||||
if (input.recorder) {
|
||||
await input.recorder.recordOperation({
|
||||
phase: "workspace_teardown",
|
||||
cwd: projectWorkspaceCwd ?? process.cwd(),
|
||||
metadata: {
|
||||
workspaceId: input.workspace.id,
|
||||
workspacePath: resolvedWorkspacePath,
|
||||
cleanupAction: "remove_local_fs",
|
||||
},
|
||||
run: async () => ({
|
||||
status: "succeeded",
|
||||
exitCode: 0,
|
||||
system: `Removed local workspace directory ${resolvedWorkspacePath}\n`,
|
||||
}),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const cleaned =
|
||||
!workspacePath ||
|
||||
!(await directoryExists(workspacePath));
|
||||
|
||||
return {
|
||||
cleanedPath: workspacePath,
|
||||
cleaned,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
async function allocatePort(): Promise<number> {
|
||||
return await new Promise<number>((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
@@ -471,6 +889,7 @@ function buildTemplateData(input: {
|
||||
function resolveServiceScopeId(input: {
|
||||
service: Record<string, unknown>;
|
||||
workspace: RealizedExecutionWorkspace;
|
||||
executionWorkspaceId?: string | null;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
runId: string;
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
@@ -486,7 +905,9 @@ function resolveServiceScopeId(input: {
|
||||
? scopeTypeRaw
|
||||
: "run";
|
||||
if (scopeType === "project_workspace") return { scopeType, scopeId: input.workspace.workspaceId ?? input.workspace.projectId };
|
||||
if (scopeType === "execution_workspace") return { scopeType, scopeId: input.workspace.cwd };
|
||||
if (scopeType === "execution_workspace") {
|
||||
return { scopeType, scopeId: input.executionWorkspaceId ?? input.workspace.cwd };
|
||||
}
|
||||
if (scopeType === "agent") return { scopeType, scopeId: input.agent.id };
|
||||
return { scopeType: "run" as const, scopeId: input.runId };
|
||||
}
|
||||
@@ -521,6 +942,7 @@ function toPersistedWorkspaceRuntimeService(record: RuntimeServiceRecord): typeo
|
||||
companyId: record.companyId,
|
||||
projectId: record.projectId,
|
||||
projectWorkspaceId: record.projectWorkspaceId,
|
||||
executionWorkspaceId: record.executionWorkspaceId,
|
||||
issueId: record.issueId,
|
||||
scopeType: record.scopeType,
|
||||
scopeId: record.scopeId,
|
||||
@@ -556,6 +978,7 @@ async function persistRuntimeServiceRecord(db: Db | undefined, record: RuntimeSe
|
||||
set: {
|
||||
projectId: values.projectId,
|
||||
projectWorkspaceId: values.projectWorkspaceId,
|
||||
executionWorkspaceId: values.executionWorkspaceId,
|
||||
issueId: values.issueId,
|
||||
scopeType: values.scopeType,
|
||||
scopeId: values.scopeId,
|
||||
@@ -593,6 +1016,7 @@ export function normalizeAdapterManagedRuntimeServices(input: {
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
workspace: RealizedExecutionWorkspace;
|
||||
executionWorkspaceId?: string | null;
|
||||
reports: AdapterRuntimeServiceReport[];
|
||||
now?: Date;
|
||||
}): RuntimeServiceRef[] {
|
||||
@@ -604,7 +1028,7 @@ export function normalizeAdapterManagedRuntimeServices(input: {
|
||||
(scopeType === "project_workspace"
|
||||
? input.workspace.workspaceId
|
||||
: scopeType === "execution_workspace"
|
||||
? input.workspace.cwd
|
||||
? input.executionWorkspaceId ?? input.workspace.cwd
|
||||
: scopeType === "agent"
|
||||
? input.agent.id
|
||||
: input.runId) ??
|
||||
@@ -629,6 +1053,7 @@ export function normalizeAdapterManagedRuntimeServices(input: {
|
||||
companyId: input.agent.companyId,
|
||||
projectId: report.projectId ?? input.workspace.projectId,
|
||||
projectWorkspaceId: report.projectWorkspaceId ?? input.workspace.workspaceId,
|
||||
executionWorkspaceId: input.executionWorkspaceId ?? null,
|
||||
issueId: report.issueId ?? input.issue?.id ?? null,
|
||||
serviceName,
|
||||
status,
|
||||
@@ -660,6 +1085,7 @@ async function startLocalRuntimeService(input: {
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
workspace: RealizedExecutionWorkspace;
|
||||
executionWorkspaceId?: string | null;
|
||||
adapterEnv: Record<string, string>;
|
||||
service: Record<string, unknown>;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
@@ -683,7 +1109,10 @@ async function startLocalRuntimeService(input: {
|
||||
port,
|
||||
});
|
||||
const serviceCwd = resolveConfiguredPath(renderTemplate(serviceCwdTemplate, templateData), input.workspace.cwd);
|
||||
const env: Record<string, string> = { ...process.env, ...input.adapterEnv } as Record<string, string>;
|
||||
const env: Record<string, string> = {
|
||||
...sanitizeRuntimeServiceBaseEnv(process.env),
|
||||
...input.adapterEnv,
|
||||
} as Record<string, string>;
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") {
|
||||
env[key] = renderTemplate(value, templateData);
|
||||
@@ -697,7 +1126,7 @@ async function startLocalRuntimeService(input: {
|
||||
const child = spawn(shell, ["-lc", command], {
|
||||
cwd: serviceCwd,
|
||||
env,
|
||||
detached: false,
|
||||
detached: process.platform !== "win32",
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
let stderrExcerpt = "";
|
||||
@@ -723,7 +1152,7 @@ async function startLocalRuntimeService(input: {
|
||||
try {
|
||||
await waitForReadiness({ service: input.service, url });
|
||||
} catch (err) {
|
||||
child.kill("SIGTERM");
|
||||
terminateChildProcess(child);
|
||||
throw new Error(
|
||||
`Failed to start runtime service "${serviceName}": ${err instanceof Error ? err.message : String(err)}${stderrExcerpt ? ` | stderr: ${stderrExcerpt.trim()}` : ""}`,
|
||||
);
|
||||
@@ -735,6 +1164,7 @@ async function startLocalRuntimeService(input: {
|
||||
companyId: input.agent.companyId,
|
||||
projectId: input.workspace.projectId,
|
||||
projectWorkspaceId: input.workspace.workspaceId,
|
||||
executionWorkspaceId: input.executionWorkspaceId ?? null,
|
||||
issueId: input.issue?.id ?? null,
|
||||
serviceName,
|
||||
status: "running",
|
||||
@@ -781,8 +1211,8 @@ async function stopRuntimeService(serviceId: string) {
|
||||
record.status = "stopped";
|
||||
record.lastUsedAt = new Date().toISOString();
|
||||
record.stoppedAt = new Date().toISOString();
|
||||
if (record.child && !record.child.killed) {
|
||||
record.child.kill("SIGTERM");
|
||||
if (record.child && record.child.pid) {
|
||||
terminateChildProcess(record.child);
|
||||
}
|
||||
runtimeServicesById.delete(serviceId);
|
||||
if (record.reuseKey) {
|
||||
@@ -791,6 +1221,28 @@ async function stopRuntimeService(serviceId: string) {
|
||||
await persistRuntimeServiceRecord(record.db, record);
|
||||
}
|
||||
|
||||
async function markPersistedRuntimeServicesStoppedForExecutionWorkspace(input: {
|
||||
db: Db;
|
||||
executionWorkspaceId: string;
|
||||
}) {
|
||||
const now = new Date();
|
||||
await input.db
|
||||
.update(workspaceRuntimeServices)
|
||||
.set({
|
||||
status: "stopped",
|
||||
healthStatus: "unknown",
|
||||
stoppedAt: now,
|
||||
lastUsedAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(workspaceRuntimeServices.executionWorkspaceId, input.executionWorkspaceId),
|
||||
inArray(workspaceRuntimeServices.status, ["starting", "running"]),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
function registerRuntimeService(db: Db | undefined, record: RuntimeServiceRecord) {
|
||||
record.db = db;
|
||||
runtimeServicesById.set(record.id, record);
|
||||
@@ -820,6 +1272,7 @@ export async function ensureRuntimeServicesForRun(input: {
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
workspace: RealizedExecutionWorkspace;
|
||||
executionWorkspaceId?: string | null;
|
||||
config: Record<string, unknown>;
|
||||
adapterEnv: Record<string, string>;
|
||||
onLog?: (stream: "stdout" | "stderr", chunk: string) => Promise<void>;
|
||||
@@ -838,6 +1291,7 @@ export async function ensureRuntimeServicesForRun(input: {
|
||||
const { scopeType, scopeId } = resolveServiceScopeId({
|
||||
service,
|
||||
workspace: input.workspace,
|
||||
executionWorkspaceId: input.executionWorkspaceId,
|
||||
issue: input.issue,
|
||||
runId: input.runId,
|
||||
agent: input.agent,
|
||||
@@ -871,6 +1325,7 @@ export async function ensureRuntimeServicesForRun(input: {
|
||||
agent: input.agent,
|
||||
issue: input.issue,
|
||||
workspace: input.workspace,
|
||||
executionWorkspaceId: input.executionWorkspaceId,
|
||||
adapterEnv: input.adapterEnv,
|
||||
service,
|
||||
onLog: input.onLog,
|
||||
@@ -911,6 +1366,36 @@ export async function releaseRuntimeServicesForRun(runId: string) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function stopRuntimeServicesForExecutionWorkspace(input: {
|
||||
db?: Db;
|
||||
executionWorkspaceId: string;
|
||||
workspaceCwd?: string | null;
|
||||
}) {
|
||||
const normalizedWorkspaceCwd = input.workspaceCwd ? path.resolve(input.workspaceCwd) : null;
|
||||
const matchingServiceIds = Array.from(runtimeServicesById.values())
|
||||
.filter((record) => {
|
||||
if (record.executionWorkspaceId === input.executionWorkspaceId) return true;
|
||||
if (!normalizedWorkspaceCwd || !record.cwd) return false;
|
||||
const resolvedCwd = path.resolve(record.cwd);
|
||||
return (
|
||||
resolvedCwd === normalizedWorkspaceCwd ||
|
||||
resolvedCwd.startsWith(`${normalizedWorkspaceCwd}${path.sep}`)
|
||||
);
|
||||
})
|
||||
.map((record) => record.id);
|
||||
|
||||
for (const serviceId of matchingServiceIds) {
|
||||
await stopRuntimeService(serviceId);
|
||||
}
|
||||
|
||||
if (input.db) {
|
||||
await markPersistedRuntimeServicesStoppedForExecutionWorkspace({
|
||||
db: input.db,
|
||||
executionWorkspaceId: input.executionWorkspaceId,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function listWorkspaceRuntimeServicesForProjectWorkspaces(
|
||||
db: Db,
|
||||
companyId: string,
|
||||
@@ -978,6 +1463,7 @@ export async function persistAdapterManagedRuntimeServices(input: {
|
||||
agent: ExecutionWorkspaceAgentRef;
|
||||
issue: ExecutionWorkspaceIssueRef | null;
|
||||
workspace: RealizedExecutionWorkspace;
|
||||
executionWorkspaceId?: string | null;
|
||||
reports: AdapterRuntimeServiceReport[];
|
||||
}) {
|
||||
const refs = normalizeAdapterManagedRuntimeServices(input);
|
||||
@@ -1000,6 +1486,7 @@ export async function persistAdapterManagedRuntimeServices(input: {
|
||||
companyId: ref.companyId,
|
||||
projectId: ref.projectId,
|
||||
projectWorkspaceId: ref.projectWorkspaceId,
|
||||
executionWorkspaceId: ref.executionWorkspaceId,
|
||||
issueId: ref.issueId,
|
||||
scopeType: ref.scopeType,
|
||||
scopeId: ref.scopeId,
|
||||
@@ -1028,6 +1515,7 @@ export async function persistAdapterManagedRuntimeServices(input: {
|
||||
set: {
|
||||
projectId: ref.projectId,
|
||||
projectWorkspaceId: ref.projectWorkspaceId,
|
||||
executionWorkspaceId: ref.executionWorkspaceId,
|
||||
issueId: ref.issueId,
|
||||
scopeType: ref.scopeType,
|
||||
scopeId: ref.scopeId,
|
||||
|
||||
Reference in New Issue
Block a user