feat: workspace improvements - nullable cwd, repo-only workspaces, and resolution refactor

Make workspace cwd optional to support repo-only workspaces that don't require
a local directory. Refactor workspace resolution in heartbeat service to pass
all workspace hints to adapters, add fallback logic when project workspaces
have no valid local cwd, and improve workspace name derivation. Also add a limit
parameter to the heartbeat runs list endpoint.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-25 21:35:33 -06:00
parent 30522f3f11
commit 20a4ca08a5
10 changed files with 5822 additions and 67 deletions

View File

@@ -0,0 +1 @@
ALTER TABLE "project_workspaces" ALTER COLUMN "cwd" DROP NOT NULL;

File diff suppressed because it is too large. Load Diff

View File

@@ -141,6 +141,13 @@
"when": 1772029333401,
"tag": "0019_public_victor_mancha",
"breakpoints": true
},
{
"idx": 20,
"version": "7",
"when": 1772032176413,
"tag": "0020_white_anita_blake",
"breakpoints": true
}
]
}

View File

@@ -17,7 +17,7 @@ export const projectWorkspaces = pgTable(
companyId: uuid("company_id").notNull().references(() => companies.id),
projectId: uuid("project_id").notNull().references(() => projects.id, { onDelete: "cascade" }),
name: text("name").notNull(),
cwd: text("cwd").notNull(),
cwd: text("cwd"),
repoUrl: text("repo_url"),
repoRef: text("repo_ref"),
metadata: jsonb("metadata").$type<Record<string, unknown>>(),

View File

@@ -10,7 +10,7 @@ export interface ProjectWorkspace {
companyId: string;
projectId: string;
name: string;
cwd: string;
cwd: string | null;
repoUrl: string | null;
repoRef: string | null;
metadata: Record<string, unknown> | null;

View File

@@ -20,17 +20,34 @@ export const updateProjectSchema = createProjectSchema.partial();
export type UpdateProject = z.infer<typeof updateProjectSchema>;
export const createProjectWorkspaceSchema = z.object({
name: z.string().min(1),
cwd: z.string().min(1),
const projectWorkspaceFields = {
name: z.string().min(1).optional(),
cwd: z.string().min(1).optional().nullable(),
repoUrl: z.string().url().optional().nullable(),
repoRef: z.string().optional().nullable(),
metadata: z.record(z.unknown()).optional().nullable(),
};
export const createProjectWorkspaceSchema = z.object({
...projectWorkspaceFields,
isPrimary: z.boolean().optional().default(false),
}).superRefine((value, ctx) => {
const hasCwd = typeof value.cwd === "string" && value.cwd.trim().length > 0;
const hasRepo = typeof value.repoUrl === "string" && value.repoUrl.trim().length > 0;
if (!hasCwd && !hasRepo) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Workspace requires at least one of cwd or repoUrl.",
path: ["cwd"],
});
}
});
export type CreateProjectWorkspace = z.infer<typeof createProjectWorkspaceSchema>;
export const updateProjectWorkspaceSchema = createProjectWorkspaceSchema.partial();
export const updateProjectWorkspaceSchema = z.object({
...projectWorkspaceFields,
isPrimary: z.boolean().optional(),
}).partial();
export type UpdateProjectWorkspace = z.infer<typeof updateProjectWorkspaceSchema>;

View File

@@ -101,7 +101,7 @@ export function projectRoutes(db: Db) {
assertCompanyAccess(req, existing.companyId);
const workspace = await svc.createWorkspace(id, req.body);
if (!workspace) {
res.status(404).json({ error: "Project not found" });
res.status(422).json({ error: "Invalid project workspace payload" });
return;
}
@@ -137,9 +137,14 @@ export function projectRoutes(db: Db) {
return;
}
assertCompanyAccess(req, existing.companyId);
const workspaceExists = (await svc.listWorkspaces(id)).some((workspace) => workspace.id === workspaceId);
if (!workspaceExists) {
res.status(404).json({ error: "Project workspace not found" });
return;
}
const workspace = await svc.updateWorkspace(id, workspaceId, req.body);
if (!workspace) {
res.status(404).json({ error: "Project workspace not found" });
res.status(422).json({ error: "Invalid project workspace payload" });
return;
}

View File

@@ -28,6 +28,7 @@ const HEARTBEAT_MAX_CONCURRENT_RUNS_DEFAULT = 1;
const HEARTBEAT_MAX_CONCURRENT_RUNS_MAX = 10;
const DEFERRED_WAKE_CONTEXT_KEY = "_paperclipWakeContext";
const startLocksByAgent = new Map<string, Promise<void>>();
const REPO_ONLY_CWD_SENTINEL = "/__paperclip_repo_only__";
function appendExcerpt(prev: string, chunk: string) {
return appendWithCap(prev, chunk, MAX_EXCERPT_BYTES);
@@ -344,6 +345,73 @@ export function heartbeatService(db: Db) {
context: Record<string, unknown>,
previousSessionParams: Record<string, unknown> | null,
) {
const issueId = readNonEmptyString(context.issueId);
const contextProjectId = readNonEmptyString(context.projectId);
const issueProjectId = issueId
? await db
.select({ projectId: issues.projectId })
.from(issues)
.where(and(eq(issues.id, issueId), eq(issues.companyId, agent.companyId)))
.then((rows) => rows[0]?.projectId ?? null)
: null;
const resolvedProjectId = issueProjectId ?? contextProjectId;
const projectWorkspaceRows = resolvedProjectId
? await db
.select()
.from(projectWorkspaces)
.where(
and(
eq(projectWorkspaces.companyId, agent.companyId),
eq(projectWorkspaces.projectId, resolvedProjectId),
),
)
.orderBy(asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id))
: [];
const workspaceHints = projectWorkspaceRows.map((workspace) => ({
workspaceId: workspace.id,
cwd: readNonEmptyString(workspace.cwd),
repoUrl: readNonEmptyString(workspace.repoUrl),
repoRef: readNonEmptyString(workspace.repoRef),
}));
if (projectWorkspaceRows.length > 0) {
for (const workspace of projectWorkspaceRows) {
const projectCwd = readNonEmptyString(workspace.cwd);
if (!projectCwd || projectCwd === REPO_ONLY_CWD_SENTINEL) {
continue;
}
const projectCwdExists = await fs
.stat(projectCwd)
.then((stats) => stats.isDirectory())
.catch(() => false);
if (projectCwdExists) {
return {
cwd: projectCwd,
source: "project_primary" as const,
projectId: resolvedProjectId,
workspaceId: workspace.id,
repoUrl: workspace.repoUrl,
repoRef: workspace.repoRef,
workspaceHints,
};
}
}
const fallbackCwd = resolveDefaultAgentWorkspaceDir(agent.id);
await fs.mkdir(fallbackCwd, { recursive: true });
return {
cwd: fallbackCwd,
source: "project_primary" as const,
projectId: resolvedProjectId,
workspaceId: projectWorkspaceRows[0]?.id ?? null,
repoUrl: projectWorkspaceRows[0]?.repoUrl ?? null,
repoRef: projectWorkspaceRows[0]?.repoRef ?? null,
workspaceHints,
};
}
const sessionCwd = readNonEmptyString(previousSessionParams?.cwd);
if (sessionCwd) {
const sessionCwdExists = await fs
@@ -354,56 +422,25 @@ export function heartbeatService(db: Db) {
return {
cwd: sessionCwd,
source: "task_session" as const,
projectId: readNonEmptyString(context.projectId),
projectId: resolvedProjectId,
workspaceId: readNonEmptyString(previousSessionParams?.workspaceId),
repoUrl: readNonEmptyString(previousSessionParams?.repoUrl),
repoRef: readNonEmptyString(previousSessionParams?.repoRef),
workspaceHints,
};
}
}
const issueId = readNonEmptyString(context.issueId);
if (issueId) {
const issue = await db
.select({ id: issues.id, projectId: issues.projectId })
.from(issues)
.where(and(eq(issues.id, issueId), eq(issues.companyId, agent.companyId)))
.then((rows) => rows[0] ?? null);
if (issue?.projectId) {
const workspace = await db
.select()
.from(projectWorkspaces)
.where(
and(
eq(projectWorkspaces.companyId, agent.companyId),
eq(projectWorkspaces.projectId, issue.projectId),
),
)
.orderBy(desc(projectWorkspaces.isPrimary), asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id))
.limit(1)
.then((rows) => rows[0] ?? null);
if (workspace) {
return {
cwd: workspace.cwd,
source: "project_primary" as const,
projectId: issue.projectId,
workspaceId: workspace.id,
repoUrl: workspace.repoUrl,
repoRef: workspace.repoRef,
};
}
}
}
const cwd = resolveDefaultAgentWorkspaceDir(agent.id);
await fs.mkdir(cwd, { recursive: true });
return {
cwd,
source: "agent_home" as const,
projectId: readNonEmptyString(context.projectId),
projectId: resolvedProjectId,
workspaceId: null,
repoUrl: null,
repoRef: null,
workspaceHints,
};
}
@@ -745,6 +782,7 @@ export function heartbeatService(db: Db) {
const outputTokens = usage?.outputTokens ?? 0;
const cachedInputTokens = usage?.cachedInputTokens ?? 0;
const additionalCostCents = Math.max(0, Math.round((result.costUsd ?? 0) * 100));
const hasTokenUsage = inputTokens > 0 || outputTokens > 0 || cachedInputTokens > 0;
await db
.update(agentRuntimeState)
@@ -762,7 +800,7 @@ export function heartbeatService(db: Db) {
})
.where(eq(agentRuntimeState.agentId, agent.id));
if (additionalCostCents > 0) {
if (additionalCostCents > 0 || hasTokenUsage) {
await db.insert(costEvents).values({
companyId: agent.companyId,
agentId: agent.id,
@@ -773,7 +811,9 @@ export function heartbeatService(db: Db) {
costCents: additionalCostCents,
occurredAt: new Date(),
});
}
if (additionalCostCents > 0) {
await db
.update(agents)
.set({
@@ -866,6 +906,7 @@ export function heartbeatService(db: Db) {
repoUrl: resolvedWorkspace.repoUrl,
repoRef: resolvedWorkspace.repoRef,
};
context.paperclipWorkspaces = resolvedWorkspace.workspaceHints;
if (resolvedWorkspace.projectId && !readNonEmptyString(context.projectId)) {
context.projectId = resolvedWorkspace.projectId;
}
@@ -1053,6 +1094,7 @@ export function heartbeatService(db: Db) {
? ({
...(adapterResult.usage ?? {}),
...(adapterResult.costUsd != null ? { costUsd: adapterResult.costUsd } : {}),
...(adapterResult.billingType ? { billingType: adapterResult.billingType } : {}),
} as Record<string, unknown>)
: null;
@@ -1751,20 +1793,21 @@ export function heartbeatService(db: Db) {
}
return {
list: (companyId: string, agentId?: string) => {
if (!agentId) {
return db
.select()
.from(heartbeatRuns)
.where(eq(heartbeatRuns.companyId, companyId))
.orderBy(desc(heartbeatRuns.createdAt));
}
return db
list: (companyId: string, agentId?: string, limit?: number) => {
const query = db
.select()
.from(heartbeatRuns)
.where(and(eq(heartbeatRuns.companyId, companyId), eq(heartbeatRuns.agentId, agentId)))
.where(
agentId
? and(eq(heartbeatRuns.companyId, companyId), eq(heartbeatRuns.agentId, agentId))
: eq(heartbeatRuns.companyId, companyId),
)
.orderBy(desc(heartbeatRuns.createdAt));
if (limit) {
return query.limit(limit);
}
return query;
},
getRun,

View File

@@ -900,7 +900,7 @@ export function issueService(db: Db) {
companyId: string;
projectId: string;
name: string;
cwd: string;
cwd: string | null;
repoUrl: string | null;
repoRef: string | null;
metadata: Record<string, unknown> | null;
@@ -913,7 +913,7 @@ export function issueService(db: Db) {
companyId: string;
projectId: string;
name: string;
cwd: string;
cwd: string | null;
repoUrl: string | null;
repoRef: string | null;
metadata: Record<string, unknown> | null;

View File

@@ -5,6 +5,16 @@ import { PROJECT_COLORS, type ProjectGoalRef, type ProjectWorkspace } from "@pap
type ProjectRow = typeof projects.$inferSelect;
type ProjectWorkspaceRow = typeof projectWorkspaces.$inferSelect;
const REPO_ONLY_CWD_SENTINEL = "/__paperclip_repo_only__";
type CreateWorkspaceInput = {
name?: string | null;
cwd?: string | null;
repoUrl?: string | null;
repoRef?: string | null;
metadata?: Record<string, unknown> | null;
isPrimary?: boolean;
};
type UpdateWorkspaceInput = Partial<CreateWorkspaceInput>;
interface ProjectWithGoals extends ProjectRow {
goalIds: string[];
@@ -122,6 +132,53 @@ function resolveGoalIds(data: { goalIds?: string[]; goalId?: string | null }): s
return undefined;
}
/**
 * Returns the trimmed string when `value` is a string with visible content;
 * returns null for non-strings and for blank/whitespace-only strings.
 */
function readNonEmptyString(value: unknown): string | null {
  if (typeof value === "string") {
    const text = value.trim();
    if (text.length > 0) {
      return text;
    }
  }
  return null;
}
/**
 * Normalizes a stored workspace cwd: trims it, rejects blanks, and maps the
 * repo-only sentinel path back to null (meaning "no local directory").
 */
function normalizeWorkspaceCwd(value: unknown): string | null {
  const candidate = readNonEmptyString(value);
  if (candidate === null || candidate === REPO_ONLY_CWD_SENTINEL) {
    return null;
  }
  return candidate;
}
/**
 * Derives a display name from a filesystem path: the final path segment,
 * tolerating both separator styles and trailing slashes. Falls back to
 * "Local folder" when no segment remains (e.g. "/" or "").
 */
function deriveNameFromCwd(cwd: string): string {
  const parts = cwd
    .replace(/[\\/]+$/, "")
    .split(/[\\/]/)
    .filter((segment) => segment.length > 0);
  return parts.pop() ?? "Local folder";
}
/**
 * Derives a display name from a git remote URL: the last path segment with any
 * ".git" suffix removed (case-insensitive).
 *
 * Handles standard URLs (https://host/org/repo.git) via WHATWG URL parsing,
 * and additionally scp-like SSH remotes (git@host:org/repo.git), which are not
 * valid URLs and previously fell through to returning the raw input string.
 * Falls back to the raw input when no segment can be extracted.
 */
function deriveNameFromRepoUrl(repoUrl: string): string {
  // Last non-empty "/"-separated segment of a path, ignoring trailing slashes.
  const lastSegmentOf = (path: string): string =>
    path.replace(/\/+$/, "").split("/").filter(Boolean).pop() ?? "";
  try {
    const url = new URL(repoUrl);
    const name = lastSegmentOf(url.pathname).replace(/\.git$/i, "");
    return name || repoUrl;
  } catch {
    // scp-like SSH remote: user@host:path — extract the path portion.
    const scpMatch = /^[^@\s]+@[^:\s]+:(.+)$/.exec(repoUrl);
    if (scpMatch) {
      const name = lastSegmentOf(scpMatch[1]).replace(/\.git$/i, "");
      if (name) return name;
    }
    return repoUrl;
  }
}
/**
 * Picks a human-readable workspace name, in priority order: an explicit
 * non-blank name, then the local folder name derived from cwd, then the repo
 * name derived from repoUrl, then the generic fallback "Workspace".
 */
function deriveWorkspaceName(input: {
  name?: string | null;
  cwd?: string | null;
  repoUrl?: string | null;
}) {
  const explicitName = readNonEmptyString(input.name);
  if (explicitName !== null) {
    return explicitName;
  }
  const localPath = readNonEmptyString(input.cwd);
  if (localPath !== null) {
    return deriveNameFromCwd(localPath);
  }
  const remoteUrl = readNonEmptyString(input.repoUrl);
  return remoteUrl !== null ? deriveNameFromRepoUrl(remoteUrl) : "Workspace";
}
async function ensureSinglePrimaryWorkspace(
dbOrTx: any,
input: {
@@ -257,7 +314,7 @@ export function projectService(db: Db) {
createWorkspace: async (
projectId: string,
data: Omit<typeof projectWorkspaces.$inferInsert, "projectId" | "companyId">,
data: CreateWorkspaceInput,
): Promise<ProjectWorkspace | null> => {
const project = await db
.select()
@@ -266,6 +323,15 @@ export function projectService(db: Db) {
.then((rows) => rows[0] ?? null);
if (!project) return null;
const cwd = normalizeWorkspaceCwd(data.cwd);
const repoUrl = readNonEmptyString(data.repoUrl);
if (!cwd && !repoUrl) return null;
const name = deriveWorkspaceName({
name: data.name,
cwd,
repoUrl,
});
const existing = await db
.select()
.from(projectWorkspaces)
@@ -292,10 +358,10 @@ export function projectService(db: Db) {
.values({
companyId: project.companyId,
projectId,
name: data.name,
cwd: data.cwd,
repoUrl: data.repoUrl ?? null,
repoRef: data.repoRef ?? null,
name,
cwd: cwd ?? null,
repoUrl: repoUrl ?? null,
repoRef: readNonEmptyString(data.repoRef),
metadata: (data.metadata as Record<string, unknown> | null | undefined) ?? null,
isPrimary: shouldBePrimary,
})
@@ -310,7 +376,7 @@ export function projectService(db: Db) {
updateWorkspace: async (
projectId: string,
workspaceId: string,
data: Partial<typeof projectWorkspaces.$inferInsert>,
data: UpdateWorkspaceInput,
): Promise<ProjectWorkspace | null> => {
const existing = await db
.select()
@@ -324,13 +390,26 @@ export function projectService(db: Db) {
.then((rows) => rows[0] ?? null);
if (!existing) return null;
const nextCwd =
data.cwd !== undefined
? normalizeWorkspaceCwd(data.cwd)
: normalizeWorkspaceCwd(existing.cwd);
const nextRepoUrl =
data.repoUrl !== undefined
? readNonEmptyString(data.repoUrl)
: readNonEmptyString(existing.repoUrl);
if (!nextCwd && !nextRepoUrl) return null;
const patch: Partial<typeof projectWorkspaces.$inferInsert> = {
updatedAt: new Date(),
};
if (data.name !== undefined) patch.name = data.name;
if (data.cwd !== undefined) patch.cwd = data.cwd;
if (data.repoUrl !== undefined) patch.repoUrl = data.repoUrl;
if (data.repoRef !== undefined) patch.repoRef = data.repoRef;
if (data.name !== undefined) patch.name = deriveWorkspaceName({ name: data.name, cwd: nextCwd, repoUrl: nextRepoUrl });
if (data.name === undefined && (data.cwd !== undefined || data.repoUrl !== undefined)) {
patch.name = deriveWorkspaceName({ cwd: nextCwd, repoUrl: nextRepoUrl });
}
if (data.cwd !== undefined) patch.cwd = nextCwd ?? null;
if (data.repoUrl !== undefined) patch.repoUrl = nextRepoUrl ?? null;
if (data.repoRef !== undefined) patch.repoRef = readNonEmptyString(data.repoRef);
if (data.metadata !== undefined) patch.metadata = data.metadata;
const updated = await db.transaction(async (tx) => {