feat: add storage system with local disk and S3 providers

Introduces a provider-agnostic storage subsystem for file attachments.
Includes local disk and S3 backends, asset/attachment DB schemas, issue
attachment CRUD routes with multer upload, CLI configure/doctor/env
integration, and enriched issue ancestors with project/goal resolution.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-20 10:31:56 -06:00
parent 32119f5c2f
commit fdd2ea6157
36 changed files with 1683 additions and 32 deletions

View File

@@ -16,10 +16,12 @@
"@paperclip/adapter-utils": "workspace:*",
"@paperclip/db": "workspace:*",
"@paperclip/shared": "workspace:*",
"@aws-sdk/client-s3": "^3.888.0",
"detect-port": "^2.1.0",
"dotenv": "^17.0.1",
"drizzle-orm": "^0.38.4",
"express": "^5.1.0",
"multer": "^2.0.2",
"pino": "^9.6.0",
"pino-http": "^10.4.0",
"pino-pretty": "^13.1.3",
@@ -32,6 +34,7 @@
"devDependencies": {
"@types/express": "^5.0.0",
"@types/express-serve-static-core": "^5.0.0",
"@types/multer": "^2.0.0",
"@types/supertest": "^6.0.2",
"supertest": "^7.0.0",
"tsx": "^4.19.2",

View File

@@ -3,6 +3,7 @@ import path from "node:path";
import fs from "node:fs";
import { fileURLToPath } from "node:url";
import type { Db } from "@paperclip/db";
import type { StorageService } from "./storage/types.js";
import { httpLogger, errorHandler } from "./middleware/index.js";
import { actorMiddleware } from "./middleware/auth.js";
import { healthRoutes } from "./routes/health.js";
@@ -21,7 +22,7 @@ import { llmRoutes } from "./routes/llms.js";
type UiMode = "none" | "static" | "vite-dev";
export async function createApp(db: Db, opts: { uiMode: UiMode }) {
export async function createApp(db: Db, opts: { uiMode: UiMode; storageService: StorageService }) {
const app = express();
app.use(express.json());
@@ -35,7 +36,7 @@ export async function createApp(db: Db, opts: { uiMode: UiMode }) {
api.use("/companies", companyRoutes(db));
api.use(agentRoutes(db));
api.use(projectRoutes(db));
api.use(issueRoutes(db));
api.use(issueRoutes(db, opts.storageService));
api.use(goalRoutes(db));
api.use(approvalRoutes(db));
api.use(secretRoutes(db));

View File

@@ -2,10 +2,11 @@ import { readConfigFile } from "./config-file.js";
import { existsSync } from "node:fs";
import { config as loadDotenv } from "dotenv";
import { resolvePaperclipEnvPath } from "./paths.js";
import { SECRET_PROVIDERS, type SecretProvider } from "@paperclip/shared";
import { SECRET_PROVIDERS, STORAGE_PROVIDERS, type SecretProvider, type StorageProvider } from "@paperclip/shared";
import {
resolveDefaultEmbeddedPostgresDir,
resolveDefaultSecretsKeyFilePath,
resolveDefaultStorageDir,
resolveHomeAwarePath,
} from "./home-paths.js";
@@ -27,6 +28,13 @@ export interface Config {
secretsProvider: SecretProvider;
secretsStrictMode: boolean;
secretsMasterKeyFilePath: string;
storageProvider: StorageProvider;
storageLocalDiskBaseDir: string;
storageS3Bucket: string;
storageS3Region: string;
storageS3Endpoint: string | undefined;
storageS3Prefix: string;
storageS3ForcePathStyle: boolean;
heartbeatSchedulerEnabled: boolean;
heartbeatSchedulerIntervalMs: number;
}
@@ -41,6 +49,7 @@ export function loadConfig(): Config {
? fileConfig?.database.connectionString
: undefined;
const fileSecrets = fileConfig?.secrets;
const fileStorage = fileConfig?.storage;
const strictModeFromEnv = process.env.PAPERCLIP_SECRETS_STRICT_MODE;
const secretsStrictMode =
strictModeFromEnv !== undefined
@@ -55,6 +64,26 @@ export function loadConfig(): Config {
const providerFromFile = fileSecrets?.provider;
const secretsProvider: SecretProvider = providerFromEnv ?? providerFromFile ?? "local_encrypted";
const storageProviderFromEnvRaw = process.env.PAPERCLIP_STORAGE_PROVIDER;
const storageProviderFromEnv =
storageProviderFromEnvRaw && STORAGE_PROVIDERS.includes(storageProviderFromEnvRaw as StorageProvider)
? (storageProviderFromEnvRaw as StorageProvider)
: null;
const storageProvider: StorageProvider = storageProviderFromEnv ?? fileStorage?.provider ?? "local_disk";
const storageLocalDiskBaseDir = resolveHomeAwarePath(
process.env.PAPERCLIP_STORAGE_LOCAL_DIR ??
fileStorage?.localDisk?.baseDir ??
resolveDefaultStorageDir(),
);
const storageS3Bucket = process.env.PAPERCLIP_STORAGE_S3_BUCKET ?? fileStorage?.s3?.bucket ?? "paperclip";
const storageS3Region = process.env.PAPERCLIP_STORAGE_S3_REGION ?? fileStorage?.s3?.region ?? "us-east-1";
const storageS3Endpoint = process.env.PAPERCLIP_STORAGE_S3_ENDPOINT ?? fileStorage?.s3?.endpoint ?? undefined;
const storageS3Prefix = process.env.PAPERCLIP_STORAGE_S3_PREFIX ?? fileStorage?.s3?.prefix ?? "";
const storageS3ForcePathStyle =
process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE !== undefined
? process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE === "true"
: (fileStorage?.s3?.forcePathStyle ?? false);
return {
port: Number(process.env.PORT) || fileConfig?.server.port || 3100,
databaseMode: fileDatabaseMode,
@@ -76,6 +105,13 @@ export function loadConfig(): Config {
fileSecrets?.localEncrypted.keyFilePath ??
resolveDefaultSecretsKeyFilePath(),
),
storageProvider,
storageLocalDiskBaseDir,
storageS3Bucket,
storageS3Region,
storageS3Endpoint,
storageS3Prefix,
storageS3ForcePathStyle,
heartbeatSchedulerEnabled: process.env.HEARTBEAT_SCHEDULER_ENABLED !== "false",
heartbeatSchedulerIntervalMs: Math.max(10000, Number(process.env.HEARTBEAT_SCHEDULER_INTERVAL_MS) || 30000),
};

View File

@@ -44,6 +44,10 @@ export function resolveDefaultSecretsKeyFilePath(): string {
return path.resolve(resolvePaperclipInstanceRoot(), "secrets", "master.key");
}
/** Default on-disk storage root: `<instance-root>/data/storage`. */
export function resolveDefaultStorageDir(): string {
  const instanceRoot = resolvePaperclipInstanceRoot();
  return path.resolve(instanceRoot, "data", "storage");
}
/** Expands a leading home prefix (`~`) and resolves to an absolute path. */
export function resolveHomeAwarePath(value: string): string {
  const expanded = expandHomePrefix(value);
  return path.resolve(expanded);
}

View File

@@ -16,6 +16,7 @@ import { loadConfig } from "./config.js";
import { logger } from "./middleware/logger.js";
import { setupLiveEventsWebSocketServer } from "./realtime/live-events-ws.js";
import { heartbeatService } from "./services/index.js";
import { createStorageServiceFromConfig } from "./storage/index.js";
import { printStartupBanner } from "./startup-banner.js";
type EmbeddedPostgresInstance = {
@@ -217,7 +218,8 @@ if (config.databaseUrl) {
}
const uiMode = config.uiDevMiddleware ? "vite-dev" : config.serveUi ? "static" : "none";
const app = await createApp(db as any, { uiMode });
const storageService = createStorageServiceFromConfig(config);
const app = await createApp(db as any, { uiMode, storageService });
const server = createServer(app);
const listenPort = await detectPort(config.port);

View File

@@ -1,29 +1,65 @@
import { Router, type Request, type Response } from "express";
import multer from "multer";
import type { Db } from "@paperclip/db";
import {
addIssueCommentSchema,
createIssueAttachmentMetadataSchema,
checkoutIssueSchema,
createIssueSchema,
linkIssueApprovalSchema,
updateIssueSchema,
} from "@paperclip/shared";
import type { StorageService } from "../storage/types.js";
import { validate } from "../middleware/validate.js";
import {
agentService,
goalService,
heartbeatService,
issueApprovalService,
issueService,
logActivity,
projectService,
} from "../services/index.js";
import { logger } from "../middleware/logger.js";
import { assertCompanyAccess, getActorInfo } from "./authz.js";
export function issueRoutes(db: Db) {
const MAX_ATTACHMENT_BYTES = Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
const ALLOWED_ATTACHMENT_CONTENT_TYPES = new Set([
"image/png",
"image/jpeg",
"image/jpg",
"image/webp",
"image/gif",
]);
export function issueRoutes(db: Db, storage: StorageService) {
const router = Router();
const svc = issueService(db);
const heartbeat = heartbeatService(db);
const agentsSvc = agentService(db);
const projectsSvc = projectService(db);
const goalsSvc = goalService(db);
const issueApprovalsSvc = issueApprovalService(db);
const upload = multer({
storage: multer.memoryStorage(),
limits: { fileSize: MAX_ATTACHMENT_BYTES, files: 1 },
});
function withContentPath<T extends { id: string }>(attachment: T) {
return {
...attachment,
contentPath: `/api/attachments/${attachment.id}/content`,
};
}
async function runSingleFileUpload(req: Request, res: Response) {
await new Promise<void>((resolve, reject) => {
upload.single("file")(req, res, (err: unknown) => {
if (err) reject(err);
else resolve();
});
});
}
async function assertCanManageIssueApprovalLinks(req: Request, res: Response, companyId: string) {
assertCompanyAccess(req, companyId);
@@ -62,8 +98,12 @@ export function issueRoutes(db: Db) {
return;
}
assertCompanyAccess(req, issue.companyId);
const ancestors = await svc.getAncestors(issue.id);
res.json({ ...issue, ancestors });
const [ancestors, project, goal] = await Promise.all([
svc.getAncestors(issue.id),
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
issue.goalId ? goalsSvc.getById(issue.goalId) : null,
]);
res.json({ ...issue, ancestors, project: project ?? null, goal: goal ?? null });
});
router.get("/issues/:id/approvals", async (req, res) => {
@@ -254,20 +294,17 @@ export function issueRoutes(db: Db) {
const assigneeChanged =
req.body.assigneeAgentId !== undefined && req.body.assigneeAgentId !== existing.assigneeAgentId;
const reopened =
(existing.status === "done" || existing.status === "cancelled") &&
issue.status !== "done" && issue.status !== "cancelled";
if ((assigneeChanged || reopened) && issue.assigneeAgentId) {
if (assigneeChanged && issue.assigneeAgentId) {
void heartbeat
.wakeup(issue.assigneeAgentId, {
source: reopened ? "automation" : "assignment",
source: "assignment",
triggerDetail: "system",
reason: reopened ? "issue_reopened" : "issue_assigned",
reason: "issue_assigned",
payload: { issueId: issue.id, mutation: "update" },
requestedByActorType: actor.actorType,
requestedByActorId: actor.actorId,
contextSnapshot: { issueId: issue.id, source: reopened ? "issue.reopen" : "issue.update" },
contextSnapshot: { issueId: issue.id, source: "issue.update" },
})
.catch((err) => logger.warn({ err, issueId: issue.id }, "failed to wake assignee on issue update"));
}
@@ -518,5 +555,169 @@ export function issueRoutes(db: Db) {
res.status(201).json(comment);
});
router.get("/issues/:id/attachments", async (req, res) => {
const issueId = req.params.id as string;
const issue = await svc.getById(issueId);
if (!issue) {
res.status(404).json({ error: "Issue not found" });
return;
}
assertCompanyAccess(req, issue.companyId);
const attachments = await svc.listAttachments(issueId);
res.json(attachments.map(withContentPath));
});
router.post("/companies/:companyId/issues/:issueId/attachments", async (req, res) => {
const companyId = req.params.companyId as string;
const issueId = req.params.issueId as string;
assertCompanyAccess(req, companyId);
const issue = await svc.getById(issueId);
if (!issue) {
res.status(404).json({ error: "Issue not found" });
return;
}
if (issue.companyId !== companyId) {
res.status(422).json({ error: "Issue does not belong to company" });
return;
}
try {
await runSingleFileUpload(req, res);
} catch (err) {
if (err instanceof multer.MulterError) {
if (err.code === "LIMIT_FILE_SIZE") {
res.status(422).json({ error: `Attachment exceeds ${MAX_ATTACHMENT_BYTES} bytes` });
return;
}
res.status(400).json({ error: err.message });
return;
}
throw err;
}
const file = (req as Request & { file?: { mimetype: string; buffer: Buffer; originalname: string } }).file;
if (!file) {
res.status(400).json({ error: "Missing file field 'file'" });
return;
}
const contentType = (file.mimetype || "").toLowerCase();
if (!ALLOWED_ATTACHMENT_CONTENT_TYPES.has(contentType)) {
res.status(422).json({ error: `Unsupported attachment type: ${contentType || "unknown"}` });
return;
}
if (file.buffer.length <= 0) {
res.status(422).json({ error: "Attachment is empty" });
return;
}
const parsedMeta = createIssueAttachmentMetadataSchema.safeParse(req.body ?? {});
if (!parsedMeta.success) {
res.status(400).json({ error: "Invalid attachment metadata", details: parsedMeta.error.issues });
return;
}
const actor = getActorInfo(req);
const stored = await storage.putFile({
companyId,
namespace: `issues/${issueId}`,
originalFilename: file.originalname || null,
contentType,
body: file.buffer,
});
const attachment = await svc.createAttachment({
issueId,
issueCommentId: parsedMeta.data.issueCommentId ?? null,
provider: stored.provider,
objectKey: stored.objectKey,
contentType: stored.contentType,
byteSize: stored.byteSize,
sha256: stored.sha256,
originalFilename: stored.originalFilename,
createdByAgentId: actor.agentId,
createdByUserId: actor.actorType === "user" ? actor.actorId : null,
});
await logActivity(db, {
companyId,
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.attachment_added",
entityType: "issue",
entityId: issueId,
details: {
attachmentId: attachment.id,
originalFilename: attachment.originalFilename,
contentType: attachment.contentType,
byteSize: attachment.byteSize,
},
});
res.status(201).json(withContentPath(attachment));
});
router.get("/attachments/:attachmentId/content", async (req, res, next) => {
const attachmentId = req.params.attachmentId as string;
const attachment = await svc.getAttachmentById(attachmentId);
if (!attachment) {
res.status(404).json({ error: "Attachment not found" });
return;
}
assertCompanyAccess(req, attachment.companyId);
const object = await storage.getObject(attachment.companyId, attachment.objectKey);
res.setHeader("Content-Type", attachment.contentType || object.contentType || "application/octet-stream");
res.setHeader("Content-Length", String(attachment.byteSize || object.contentLength || 0));
res.setHeader("Cache-Control", "private, max-age=60");
const filename = attachment.originalFilename ?? "attachment";
res.setHeader("Content-Disposition", `inline; filename=\"${filename.replaceAll("\"", "")}\"`);
object.stream.on("error", (err) => {
next(err);
});
object.stream.pipe(res);
});
router.delete("/attachments/:attachmentId", async (req, res) => {
const attachmentId = req.params.attachmentId as string;
const attachment = await svc.getAttachmentById(attachmentId);
if (!attachment) {
res.status(404).json({ error: "Attachment not found" });
return;
}
assertCompanyAccess(req, attachment.companyId);
try {
await storage.deleteObject(attachment.companyId, attachment.objectKey);
} catch (err) {
logger.warn({ err, attachmentId }, "storage delete failed while removing attachment");
}
const removed = await svc.removeAttachment(attachmentId);
if (!removed) {
res.status(404).json({ error: "Attachment not found" });
return;
}
const actor = getActorInfo(req);
await logActivity(db, {
companyId: removed.companyId,
actorType: actor.actorType,
actorId: actor.actorId,
agentId: actor.agentId,
runId: actor.runId,
action: "issue.attachment_removed",
entityType: "issue",
entityId: removed.issueId,
details: {
attachmentId: removed.id,
},
});
res.json({ ok: true });
});
return router;
}

View File

@@ -13,3 +13,4 @@ export { dashboardService } from "./dashboard.js";
export { sidebarBadgeService } from "./sidebar-badges.js";
export { logActivity, type LogActivityInput } from "./activity-log.js";
export { publishLiveEvent, subscribeCompanyLiveEvents } from "./live-events.js";
export { createStorageServiceFromConfig, getStorageService } from "../storage/index.js";

View File

@@ -1,6 +1,15 @@
import { and, asc, desc, eq, inArray, isNull, or, sql } from "drizzle-orm";
import type { Db } from "@paperclip/db";
import { agents, companies, issues, issueComments } from "@paperclip/db";
import {
agents,
assets,
companies,
goals,
issueAttachments,
issueComments,
issues,
projects,
} from "@paperclip/db";
import { conflict, notFound, unprocessable } from "../errors.js";
const ALL_ISSUE_STATUSES = ["backlog", "todo", "in_progress", "in_review", "blocked", "done", "cancelled"];
@@ -162,11 +171,26 @@ export function issueService(db: Db) {
},
remove: (id: string) =>
db
.delete(issues)
.where(eq(issues.id, id))
.returning()
.then((rows) => rows[0] ?? null),
db.transaction(async (tx) => {
const attachmentAssetIds = await tx
.select({ assetId: issueAttachments.assetId })
.from(issueAttachments)
.where(eq(issueAttachments.issueId, id));
const removedIssue = await tx
.delete(issues)
.where(eq(issues.id, id))
.returning()
.then((rows) => rows[0] ?? null);
if (removedIssue && attachmentAssetIds.length > 0) {
await tx
.delete(assets)
.where(inArray(assets.id, attachmentAssetIds.map((row) => row.assetId)));
}
return removedIssue;
}),
checkout: async (id: string, agentId: string, expectedStatuses: string[]) => {
const issueCompany = await db
@@ -275,6 +299,162 @@ export function issueService(db: Db) {
.then((rows) => rows[0]);
},
// Persists attachment metadata for an already-uploaded object: creates the
// shared `assets` row and the `issue_attachments` link row in one transaction,
// then returns a flattened attachment+asset record.
// Throws notFound when the issue (or referenced comment) is missing, and
// unprocessable when the comment belongs to a different issue/company.
createAttachment: async (input: {
issueId: string;
issueCommentId?: string | null;
provider: string;
objectKey: string;
contentType: string;
byteSize: number;
sha256: string;
originalFilename?: string | null;
createdByAgentId?: string | null;
createdByUserId?: string | null;
}) => {
// Resolve the issue first; its companyId scopes the asset and link rows.
const issue = await db
.select({ id: issues.id, companyId: issues.companyId })
.from(issues)
.where(eq(issues.id, input.issueId))
.then((rows) => rows[0] ?? null);
if (!issue) throw notFound("Issue not found");
if (input.issueCommentId) {
// Optional comment anchor must exist and belong to the same issue/company.
const comment = await db
.select({ id: issueComments.id, companyId: issueComments.companyId, issueId: issueComments.issueId })
.from(issueComments)
.where(eq(issueComments.id, input.issueCommentId))
.then((rows) => rows[0] ?? null);
if (!comment) throw notFound("Issue comment not found");
if (comment.companyId !== issue.companyId || comment.issueId !== issue.id) {
throw unprocessable("Attachment comment must belong to same issue and company");
}
}
// Asset row and attachment link are created atomically so a failure cannot
// leave an orphaned asset record.
return db.transaction(async (tx) => {
const [asset] = await tx
.insert(assets)
.values({
companyId: issue.companyId,
provider: input.provider,
objectKey: input.objectKey,
contentType: input.contentType,
byteSize: input.byteSize,
sha256: input.sha256,
originalFilename: input.originalFilename ?? null,
createdByAgentId: input.createdByAgentId ?? null,
createdByUserId: input.createdByUserId ?? null,
})
.returning();
const [attachment] = await tx
.insert(issueAttachments)
.values({
companyId: issue.companyId,
issueId: issue.id,
assetId: asset.id,
issueCommentId: input.issueCommentId ?? null,
})
.returning();
// Flattened view: attachment link fields plus the asset's object metadata.
return {
id: attachment.id,
companyId: attachment.companyId,
issueId: attachment.issueId,
issueCommentId: attachment.issueCommentId,
assetId: attachment.assetId,
provider: asset.provider,
objectKey: asset.objectKey,
contentType: asset.contentType,
byteSize: asset.byteSize,
sha256: asset.sha256,
originalFilename: asset.originalFilename,
createdByAgentId: asset.createdByAgentId,
createdByUserId: asset.createdByUserId,
createdAt: attachment.createdAt,
updatedAt: attachment.updatedAt,
};
});
},
// Lists all attachments for an issue (newest first), joining each link row
// with its asset to expose object metadata in a single flattened record.
listAttachments: async (issueId: string) =>
db
.select({
id: issueAttachments.id,
companyId: issueAttachments.companyId,
issueId: issueAttachments.issueId,
issueCommentId: issueAttachments.issueCommentId,
assetId: issueAttachments.assetId,
provider: assets.provider,
objectKey: assets.objectKey,
contentType: assets.contentType,
byteSize: assets.byteSize,
sha256: assets.sha256,
originalFilename: assets.originalFilename,
createdByAgentId: assets.createdByAgentId,
createdByUserId: assets.createdByUserId,
createdAt: issueAttachments.createdAt,
updatedAt: issueAttachments.updatedAt,
})
.from(issueAttachments)
.innerJoin(assets, eq(issueAttachments.assetId, assets.id))
.where(eq(issueAttachments.issueId, issueId))
.orderBy(desc(issueAttachments.createdAt)),
// Fetches one attachment (same flattened attachment+asset shape) by its
// attachment id, or null when it does not exist.
getAttachmentById: async (id: string) =>
db
.select({
id: issueAttachments.id,
companyId: issueAttachments.companyId,
issueId: issueAttachments.issueId,
issueCommentId: issueAttachments.issueCommentId,
assetId: issueAttachments.assetId,
provider: assets.provider,
objectKey: assets.objectKey,
contentType: assets.contentType,
byteSize: assets.byteSize,
sha256: assets.sha256,
originalFilename: assets.originalFilename,
createdByAgentId: assets.createdByAgentId,
createdByUserId: assets.createdByUserId,
createdAt: issueAttachments.createdAt,
updatedAt: issueAttachments.updatedAt,
})
.from(issueAttachments)
.innerJoin(assets, eq(issueAttachments.assetId, assets.id))
.where(eq(issueAttachments.id, id))
.then((rows) => rows[0] ?? null),
// Deletes an attachment link and its backing asset row atomically, returning
// the pre-deletion flattened record (used by callers for activity logging),
// or null when the attachment does not exist. Storage-object deletion is the
// caller's responsibility — this only removes DB rows.
removeAttachment: async (id: string) =>
db.transaction(async (tx) => {
// Snapshot the joined record first so we can return it after deletion.
const existing = await tx
.select({
id: issueAttachments.id,
companyId: issueAttachments.companyId,
issueId: issueAttachments.issueId,
issueCommentId: issueAttachments.issueCommentId,
assetId: issueAttachments.assetId,
provider: assets.provider,
objectKey: assets.objectKey,
contentType: assets.contentType,
byteSize: assets.byteSize,
sha256: assets.sha256,
originalFilename: assets.originalFilename,
createdByAgentId: assets.createdByAgentId,
createdByUserId: assets.createdByUserId,
createdAt: issueAttachments.createdAt,
updatedAt: issueAttachments.updatedAt,
})
.from(issueAttachments)
.innerJoin(assets, eq(issueAttachments.assetId, assets.id))
.where(eq(issueAttachments.id, id))
.then((rows) => rows[0] ?? null);
if (!existing) return null;
// Delete the link before the asset (the link references the asset).
await tx.delete(issueAttachments).where(eq(issueAttachments.id, id));
await tx.delete(assets).where(eq(assets.id, existing.assetId));
return existing;
}),
findMentionedAgents: async (companyId: string, body: string) => {
const re = /\B@([^\s@,!?.]+)/g;
const tokens = new Set<string>();
@@ -287,7 +467,7 @@ export function issueService(db: Db) {
},
getAncestors: async (issueId: string) => {
const ancestors: Array<{
const raw: Array<{
id: string; title: string; description: string | null;
status: string; priority: string;
assigneeAgentId: string | null; projectId: string | null; goalId: string | null;
@@ -295,7 +475,7 @@ export function issueService(db: Db) {
const visited = new Set<string>([issueId]);
const start = await db.select().from(issues).where(eq(issues.id, issueId)).then(r => r[0] ?? null);
let currentId = start?.parentId ?? null;
while (currentId && !visited.has(currentId) && ancestors.length < 50) {
while (currentId && !visited.has(currentId) && raw.length < 50) {
visited.add(currentId);
const parent = await db.select({
id: issues.id, title: issues.title, description: issues.description,
@@ -304,7 +484,7 @@ export function issueService(db: Db) {
goalId: issues.goalId, parentId: issues.parentId,
}).from(issues).where(eq(issues.id, currentId)).then(r => r[0] ?? null);
if (!parent) break;
ancestors.push({
raw.push({
id: parent.id, title: parent.title, description: parent.description ?? null,
status: parent.status, priority: parent.priority,
assigneeAgentId: parent.assigneeAgentId ?? null,
@@ -312,7 +492,39 @@ export function issueService(db: Db) {
});
currentId = parent.parentId ?? null;
}
return ancestors;
// Batch-fetch referenced projects and goals
const projectIds = [...new Set(raw.map(a => a.projectId).filter((id): id is string => id != null))];
const goalIds = [...new Set(raw.map(a => a.goalId).filter((id): id is string => id != null))];
const projectMap = new Map<string, { id: string; name: string; description: string | null; status: string; goalId: string | null }>();
const goalMap = new Map<string, { id: string; title: string; description: string | null; level: string; status: string }>();
if (projectIds.length > 0) {
const rows = await db.select({
id: projects.id, name: projects.name, description: projects.description,
status: projects.status, goalId: projects.goalId,
}).from(projects).where(inArray(projects.id, projectIds));
for (const r of rows) {
projectMap.set(r.id, r);
// Also collect goalIds from projects
if (r.goalId && !goalIds.includes(r.goalId)) goalIds.push(r.goalId);
}
}
if (goalIds.length > 0) {
const rows = await db.select({
id: goals.id, title: goals.title, description: goals.description,
level: goals.level, status: goals.status,
}).from(goals).where(inArray(goals.id, goalIds));
for (const r of rows) goalMap.set(r.id, r);
}
return raw.map(a => ({
...a,
project: a.projectId ? projectMap.get(a.projectId) ?? null : null,
goal: a.goalId ? goalMap.get(a.goalId) ?? null : null,
}));
},
staleCount: async (companyId: string, minutes = 60) => {

View File

@@ -0,0 +1,35 @@
import { loadConfig, type Config } from "../config.js";
import { createStorageProviderFromConfig } from "./provider-registry.js";
import { createStorageService } from "./service.js";
import type { StorageService } from "./types.js";
let cachedStorageService: StorageService | null = null;
let cachedSignature: string | null = null;
/**
 * Serializes the storage-relevant slice of the config so the cached
 * service can be invalidated when storage settings change.
 * Key order is fixed so equal configs always produce equal signatures.
 */
function signatureForConfig(config: Config): string {
  const relevant = {
    provider: config.storageProvider,
    localDisk: config.storageLocalDiskBaseDir,
    s3Bucket: config.storageS3Bucket,
    s3Region: config.storageS3Region,
    s3Endpoint: config.storageS3Endpoint,
    s3Prefix: config.storageS3Prefix,
    s3ForcePathStyle: config.storageS3ForcePathStyle,
  };
  return JSON.stringify(relevant);
}
/** Builds a storage service wired to the provider selected by `config`. */
export function createStorageServiceFromConfig(config: Config): StorageService {
  const provider = createStorageProviderFromConfig(config);
  return createStorageService(provider);
}
/**
 * Returns a process-wide storage service, rebuilding it only when the
 * storage-related configuration has changed since the last call.
 */
export function getStorageService(): StorageService {
  const config = loadConfig();
  const signature = signatureForConfig(config);
  // Fast path: cached service built from an identical storage config.
  if (cachedStorageService && cachedSignature === signature) {
    return cachedStorageService;
  }
  const service = createStorageServiceFromConfig(config);
  cachedStorageService = service;
  cachedSignature = signature;
  return service;
}
export type { StorageService, PutFileResult } from "./types.js";

View File

@@ -0,0 +1,89 @@
import { createReadStream, promises as fs } from "node:fs";
import path from "node:path";
import type { StorageProvider, GetObjectResult, HeadObjectResult } from "./types.js";
import { notFound, badRequest } from "../errors.js";
/**
 * Validates and canonicalizes an object key: backslashes become forward
 * slashes, empty segments are dropped, and absolute keys or traversal
 * segments ("." / "..") are rejected with badRequest.
 */
function normalizeObjectKey(objectKey: string): string {
  const slashified = objectKey.replace(/\\/g, "/").trim();
  if (!slashified || slashified.startsWith("/")) {
    throw badRequest("Invalid object key");
  }
  const segments = slashified.split("/").filter((segment) => segment.length > 0);
  const hasTraversal = segments.some((segment) => segment === "." || segment === "..");
  if (segments.length === 0 || hasTraversal) {
    throw badRequest("Invalid object key");
  }
  return segments.join("/");
}
function resolveWithin(baseDir: string, objectKey: string): string {
const normalizedKey = normalizeObjectKey(objectKey);
const resolved = path.resolve(baseDir, normalizedKey);
const base = path.resolve(baseDir);
if (resolved !== base && !resolved.startsWith(base + path.sep)) {
throw badRequest("Invalid object key path");
}
return resolved;
}
/** `fs.stat` that yields `null` instead of throwing (e.g. for missing paths). */
async function statOrNull(filePath: string) {
  try {
    const stat = await fs.stat(filePath);
    return stat;
  } catch {
    return null;
  }
}
/**
 * Storage provider backed by the local filesystem.
 *
 * Object keys are validated and resolved strictly underneath `baseDir`;
 * writes go through a unique temp file followed by a rename so readers
 * never observe a partially written object under its final key.
 */
export function createLocalDiskStorageProvider(baseDir: string): StorageProvider {
  const root = path.resolve(baseDir);
  return {
    id: "local_disk",

    async putObject(input) {
      const targetPath = resolveWithin(root, input.objectKey);
      await fs.mkdir(path.dirname(targetPath), { recursive: true });
      const tempPath = `${targetPath}.tmp-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
      try {
        await fs.writeFile(tempPath, input.body);
        await fs.rename(tempPath, targetPath);
      } catch (err) {
        // Fix: previously a failed write/rename leaked the temp file on disk.
        // Best-effort cleanup; the original error is what callers should see.
        await fs.unlink(tempPath).catch(() => {});
        throw err;
      }
    },

    async getObject(input): Promise<GetObjectResult> {
      const filePath = resolveWithin(root, input.objectKey);
      const stat = await statOrNull(filePath);
      // Only regular files count; directories or missing paths are 404s.
      if (!stat || !stat.isFile()) {
        throw notFound("Object not found");
      }
      return {
        stream: createReadStream(filePath),
        contentLength: stat.size,
        lastModified: stat.mtime,
      };
    },

    async headObject(input): Promise<HeadObjectResult> {
      const filePath = resolveWithin(root, input.objectKey);
      const stat = await statOrNull(filePath);
      if (!stat || !stat.isFile()) {
        return { exists: false };
      }
      return {
        exists: true,
        contentLength: stat.size,
        lastModified: stat.mtime,
      };
    },

    async deleteObject(input): Promise<void> {
      const filePath = resolveWithin(root, input.objectKey);
      try {
        await fs.unlink(filePath);
      } catch {
        // Idempotent delete: removing a missing object is not an error.
      }
    },
  };
}

View File

@@ -0,0 +1,18 @@
import type { Config } from "../config.js";
import type { StorageProvider } from "./types.js";
import { createLocalDiskStorageProvider } from "./local-disk-provider.js";
import { createS3StorageProvider } from "./s3-provider.js";
/** Chooses the configured storage backend: local disk, otherwise S3. */
export function createStorageProviderFromConfig(config: Config): StorageProvider {
  switch (config.storageProvider) {
    case "local_disk":
      return createLocalDiskStorageProvider(config.storageLocalDiskBaseDir);
    default:
      return createS3StorageProvider({
        bucket: config.storageS3Bucket,
        region: config.storageS3Region,
        endpoint: config.storageS3Endpoint,
        prefix: config.storageS3Prefix,
        forcePathStyle: config.storageS3ForcePathStyle,
      });
  }
}

View File

@@ -0,0 +1,145 @@
import {
S3Client,
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
} from "@aws-sdk/client-s3";
import { Readable } from "node:stream";
import type { StorageProvider, GetObjectResult, HeadObjectResult } from "./types.js";
import { notFound, unprocessable } from "../errors.js";
interface S3ProviderConfig {
bucket: string;
region: string;
endpoint?: string;
prefix?: string;
forcePathStyle?: boolean;
}
/** Trims the prefix and strips leading/trailing slashes; empty input → "". */
function normalizePrefix(prefix: string | undefined): string {
  if (!prefix) return "";
  const trimmed = prefix.trim();
  return trimmed.replace(/^\/+/, "").replace(/\/+$/, "");
}
/** Joins the bucket prefix and object key with a single slash separator. */
function buildKey(prefix: string, objectKey: string): string {
  return prefix ? `${prefix}/${objectKey}` : objectKey;
}
/**
 * Normalizes the body shapes the AWS SDK can return (a Node Readable, a
 * web-stream wrapper, or a blob-like with arrayBuffer) into a Node Readable.
 * Missing bodies map to notFound; anything unrecognized to unprocessable.
 */
async function toReadableStream(body: unknown): Promise<Readable> {
  if (!body) throw notFound("Object not found");
  if (body instanceof Readable) return body;
  const candidate = body as {
    transformToWebStream?: () => ReadableStream<Uint8Array>;
    arrayBuffer?: () => Promise<ArrayBuffer>;
  };
  if (typeof candidate.transformToWebStream === "function") {
    const webStream = candidate.transformToWebStream();
    return Readable.fromWeb(webStream as globalThis.ReadableStream<any>);
  }
  if (typeof candidate.arrayBuffer === "function") {
    const bytes = Buffer.from(await candidate.arrayBuffer());
    return Readable.from(bytes);
  }
  throw unprocessable("Unsupported S3 body stream type");
}
/** Passes through genuine Date instances; anything else becomes undefined. */
function toDate(value: Date | undefined): Date | undefined {
  if (value instanceof Date) return value;
  return undefined;
}
// S3-backed storage provider. Object keys are namespaced under an optional
// bucket prefix; bucket and region are required up front so misconfiguration
// fails at construction time rather than on first request.
export function createS3StorageProvider(config: S3ProviderConfig): StorageProvider {
const bucket = config.bucket.trim();
const region = config.region.trim();
if (!bucket) throw unprocessable("S3 storage bucket is required");
if (!region) throw unprocessable("S3 storage region is required");
const prefix = normalizePrefix(config.prefix);
// endpoint/forcePathStyle support S3-compatible services (e.g. MinIO);
// credentials come from the SDK's default provider chain.
const client = new S3Client({
region,
endpoint: config.endpoint,
forcePathStyle: Boolean(config.forcePathStyle),
});
return {
id: "s3",
async putObject(input) {
const key = buildKey(prefix, input.objectKey);
await client.send(
new PutObjectCommand({
Bucket: bucket,
Key: key,
Body: input.body,
ContentType: input.contentType,
ContentLength: input.contentLength,
}),
);
},
async getObject(input): Promise<GetObjectResult> {
const key = buildKey(prefix, input.objectKey);
try {
const output = await client.send(
new GetObjectCommand({
Bucket: bucket,
Key: key,
}),
);
return {
stream: await toReadableStream(output.Body),
contentType: output.ContentType,
contentLength: output.ContentLength,
etag: output.ETag,
lastModified: toDate(output.LastModified),
};
} catch (err) {
// Map the SDK's missing-key errors to our notFound; NOTE(review): this
// matches on error *names* ("NoSuchKey"/"NotFound") — confirm these cover
// all SDK versions in use, everything else is rethrown as-is.
const code = (err as { name?: string }).name;
if (code === "NoSuchKey" || code === "NotFound") throw notFound("Object not found");
throw err;
}
},
async headObject(input): Promise<HeadObjectResult> {
const key = buildKey(prefix, input.objectKey);
try {
const output = await client.send(
new HeadObjectCommand({
Bucket: bucket,
Key: key,
}),
);
return {
exists: true,
contentType: output.ContentType,
contentLength: output.ContentLength,
etag: output.ETag,
lastModified: toDate(output.LastModified),
};
} catch (err) {
// Missing objects report exists:false rather than throwing.
const code = (err as { name?: string }).name;
if (code === "NoSuchKey" || code === "NotFound") return { exists: false };
throw err;
}
},
async deleteObject(input): Promise<void> {
// No missing-key handling here: any SDK error propagates to the caller.
const key = buildKey(prefix, input.objectKey);
await client.send(
new DeleteObjectCommand({
Bucket: bucket,
Key: key,
}),
);
},
};
}

View File

@@ -0,0 +1,131 @@
import { createHash, randomUUID } from "node:crypto";
import path from "node:path";
import type { StorageService, StorageProvider, PutFileInput, PutFileResult } from "./types.js";
import { badRequest, forbidden, unprocessable } from "../errors.js";
// Longest allowed length for a single sanitized path segment.
const MAX_SEGMENT_LENGTH = 120;

/**
 * Reduces an arbitrary string to a storage-safe path segment: only
 * [a-zA-Z0-9._-] survive, runs of anything else collapse to a single
 * underscore, edge underscores are stripped, and the result is capped at
 * MAX_SEGMENT_LENGTH. Falls back to "file" when nothing usable remains.
 */
function sanitizeSegment(value: string): string {
  let cleaned = value.trim().replace(/[^a-zA-Z0-9._-]+/g, "_");
  cleaned = cleaned.replace(/_{2,}/g, "_").replace(/^_+|_+$/g, "");
  return cleaned ? cleaned.slice(0, MAX_SEGMENT_LENGTH) : "file";
}

/**
 * Normalizes a slash-separated namespace ("a//b ", " issues ") into
 * sanitized, slash-joined segments; blank input maps to "misc".
 */
function normalizeNamespace(namespace: string): string {
  const segments: string[] = [];
  for (const raw of namespace.split("/")) {
    const trimmed = raw.trim();
    if (trimmed.length > 0) segments.push(sanitizeSegment(trimmed));
  }
  return segments.length > 0 ? segments.join("/") : "misc";
}

/**
 * Splits a filename into a sanitized stem and a lowercase extension
 * (leading dot included, at most 16 chars). Null or blank input yields
 * { stem: "file", ext: "" }.
 */
function splitFilename(filename: string | null): { stem: string; ext: string } {
  if (!filename) return { stem: "file", ext: "" };
  const base = path.basename(filename).trim();
  if (!base) return { stem: "file", ext: "" };
  const rawExt = path.extname(base);
  const stem = sanitizeSegment(rawExt ? base.slice(0, -rawExt.length) : base);
  const ext = rawExt
    .toLowerCase()
    .replace(/[^a-z0-9.]/g, "")
    .slice(0, 16);
  return { stem, ext };
}
/**
 * Tenant-isolation guard: an object key must live under "<companyId>/" and
 * must not contain a ".." traversal sequence. Throws 403 when the key
 * belongs to a different company and 400 when it looks malformed.
 */
function ensureCompanyPrefix(companyId: string, objectKey: string): void {
  const ownsKey = objectKey.startsWith(companyId + "/");
  if (!ownsKey) throw forbidden("Object does not belong to company");
  const hasTraversal = objectKey.indexOf("..") !== -1;
  if (hasTraversal) throw badRequest("Invalid object key");
}
/** Hex-encoded SHA-256 digest of the given buffer. */
function hashBuffer(input: Buffer): string {
  const hasher = createHash("sha256");
  hasher.update(input);
  return hasher.digest("hex");
}
/**
 * Builds a tenant-scoped object key of the form
 *   <companyId>/<namespace>/<yyyy>/<mm>/<dd>/<uuid>-<stem><ext>
 * Date segments use UTC so keys are stable across server timezones; the
 * random UUID prefix keeps identically named uploads from colliding.
 *
 * Bug fix: the return statement interpolated the literal text "$(unknown)"
 * instead of the generated filename (which was built but never used), so
 * every upload for a given day mapped to the same ".../$(unknown)" key.
 */
function buildObjectKey(companyId: string, namespace: string, originalFilename: string | null): string {
  const ns = normalizeNamespace(namespace);
  const now = new Date();
  const year = String(now.getUTCFullYear());
  const month = String(now.getUTCMonth() + 1).padStart(2, "0");
  const day = String(now.getUTCDate()).padStart(2, "0");
  const { stem, ext } = splitFilename(originalFilename);
  const suffix = randomUUID();
  const filename = `${suffix}-${stem}${ext}`;
  return `${companyId}/${ns}/${year}/${month}/${day}/${filename}`;
}
/**
 * Validates a putFile request before any provider work happens.
 * Rejects (422) a blank companyId/namespace/contentType, a non-Buffer body,
 * and zero-length uploads.
 */
function assertPutFileInput(input: PutFileInput): void {
  // Shared check for the required text fields; error text is "<field> is required".
  const requireNonBlank = (value: string, field: string): void => {
    if (!value || value.trim().length === 0) {
      throw unprocessable(`${field} is required`);
    }
  };
  requireNonBlank(input.companyId, "companyId");
  requireNonBlank(input.namespace, "namespace");
  requireNonBlank(input.contentType, "contentType");
  if (!(input.body instanceof Buffer)) {
    throw unprocessable("body must be a Buffer");
  }
  if (input.body.length <= 0) {
    throw unprocessable("File is empty");
  }
}
/**
 * Wraps a raw {@link StorageProvider} with tenant-aware policy:
 * - putFile validates input, generates a dated, collision-free object key,
 *   normalizes the content type, and records a SHA-256 digest of the bytes;
 * - getObject/headObject/deleteObject verify the key belongs to the company
 *   before delegating to the provider.
 */
export function createStorageService(provider: StorageProvider): StorageService {
  const putFile = async (input: PutFileInput): Promise<PutFileResult> => {
    assertPutFileInput(input);
    const contentType = input.contentType.trim().toLowerCase();
    const byteSize = input.body.length;
    const objectKey = buildObjectKey(input.companyId, input.namespace, input.originalFilename);
    await provider.putObject({
      objectKey,
      body: input.body,
      contentType,
      contentLength: byteSize,
    });
    return {
      provider: provider.id,
      objectKey,
      contentType,
      byteSize,
      sha256: hashBuffer(input.body),
      originalFilename: input.originalFilename,
    };
  };

  return {
    provider: provider.id,
    putFile,
    // Methods stay async so an ownership failure rejects rather than throws.
    async getObject(companyId, objectKey) {
      ensureCompanyPrefix(companyId, objectKey);
      return provider.getObject({ objectKey });
    },
    async headObject(companyId, objectKey) {
      ensureCompanyPrefix(companyId, objectKey);
      return provider.headObject({ objectKey });
    },
    async deleteObject(companyId, objectKey) {
      ensureCompanyPrefix(companyId, objectKey);
      await provider.deleteObject({ objectKey });
    },
  };
}

View File

@@ -0,0 +1,62 @@
import type { StorageProvider as StorageProviderId } from "@paperclip/shared";
import type { Readable } from "node:stream";
/** Request to write a fully-buffered object at an exact, caller-chosen key. */
export interface PutObjectInput {
  objectKey: string;
  body: Buffer;
  contentType: string;
  /** Size of `body` in bytes. */
  contentLength: number;
}

/** Identifies an existing object for get/head/delete operations. */
export interface GetObjectInput {
  objectKey: string;
}

/** A readable object stream plus whatever metadata the backend reported. */
export interface GetObjectResult {
  stream: Readable;
  contentType?: string;
  contentLength?: number;
  etag?: string;
  lastModified?: Date;
}

/** HEAD result; metadata fields are only meaningful when `exists` is true. */
export interface HeadObjectResult {
  exists: boolean;
  contentType?: string;
  contentLength?: number;
  etag?: string;
  lastModified?: Date;
}

/**
 * Low-level storage backend contract. Implementations operate on raw object
 * keys; tenant scoping and key generation happen in the service layer above.
 */
export interface StorageProvider {
  /** Backend identifier as declared in @paperclip/shared (e.g. "s3"). */
  id: StorageProviderId;
  putObject(input: PutObjectInput): Promise<void>;
  /** The S3 implementation rejects with a 404-style error when the key is missing. */
  getObject(input: GetObjectInput): Promise<GetObjectResult>;
  headObject(input: GetObjectInput): Promise<HeadObjectResult>;
  deleteObject(input: GetObjectInput): Promise<void>;
}

/** High-level upload request handled by the StorageService. */
export interface PutFileInput {
  /** Tenant that owns the file; becomes the leading object-key segment. */
  companyId: string;
  /** Logical grouping of the file; sanitized into the object-key path. */
  namespace: string;
  originalFilename: string | null;
  contentType: string;
  body: Buffer;
}

/** Result of a successful putFile: where and what was stored. */
export interface PutFileResult {
  provider: StorageProviderId;
  /** Full tenant-scoped key: "<companyId>/<ns>/<yyyy>/<mm>/<dd>/<uuid>-<name>". */
  objectKey: string;
  /** Normalized (trimmed, lowercased) content type. */
  contentType: string;
  byteSize: number;
  /** Hex-encoded SHA-256 digest of the stored bytes. */
  sha256: string;
  originalFilename: string | null;
}

/**
 * Tenant-aware facade over a StorageProvider: generates object keys on
 * upload and verifies company ownership of keys on read/head/delete.
 */
export interface StorageService {
  provider: StorageProviderId;
  putFile(input: PutFileInput): Promise<PutFileResult>;
  getObject(companyId: string, objectKey: string): Promise<GetObjectResult>;
  headObject(companyId: string, objectKey: string): Promise<HeadObjectResult>;
  deleteObject(companyId: string, objectKey: string): Promise<void>;
}