Merge remote-tracking branch 'public-gh/master' into paperclip-subissues

* public-gh/master:
  Drop lockfile from watcher change
  Tighten plugin dev file watching
  Fix plugin smoke example typecheck
  Fix plugin dev watcher and migration snapshot
  Clarify plugin authoring and external dev workflow
  Expand kitchen sink plugin demos
  fix: set AGENT_HOME env var for agent processes
  Add kitchen sink plugin example
  Simplify plugin runtime and cleanup lifecycle
  Add plugin framework and settings UI

# Conflicts:
#	packages/db/src/migrations/meta/0029_snapshot.json
#	packages/db/src/migrations/meta/_journal.json
This commit is contained in:
Dotta
2026-03-14 13:56:09 -05:00
141 changed files with 47521 additions and 961 deletions

View File

@@ -0,0 +1,68 @@
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { resolvePluginWatchTargets } from "../services/plugin-dev-watcher.js";
// Directories created during a test; torn down after each test case.
const tempDirs: string[] = [];

afterEach(() => {
  // Drain the list back-to-front (reverse creation order); `force` makes
  // removal of an already-deleted directory a no-op.
  for (let dir = tempDirs.pop(); dir !== undefined; dir = tempDirs.pop()) {
    rmSync(dir, { recursive: true, force: true });
  }
});
/** Create a unique throwaway plugin directory and register it for cleanup. */
function makeTempPluginDir(): string {
  const prefix = path.join(os.tmpdir(), "paperclip-plugin-watch-");
  const created = mkdtempSync(prefix);
  tempDirs.push(created);
  return created;
}
describe("resolvePluginWatchTargets", () => {
  it("watches package metadata plus concrete declared runtime files", () => {
    const root = makeTempPluginDir();
    const dist = path.join(root, "dist");
    mkdirSync(path.join(dist, "ui"), { recursive: true });

    // package.json declares explicit plugin entrypoints.
    const pkg = {
      name: "@acme/example",
      paperclipPlugin: {
        manifest: "./dist/manifest.js",
        worker: "./dist/worker.js",
        ui: "./dist/ui",
      },
    };
    writeFileSync(path.join(root, "package.json"), JSON.stringify(pkg));
    writeFileSync(path.join(dist, "manifest.js"), "export default {};\n");
    writeFileSync(path.join(dist, "worker.js"), "export default {};\n");
    writeFileSync(path.join(dist, "ui", "index.js"), "export default {};\n");
    writeFileSync(path.join(dist, "ui", "index.css"), "body {}\n");

    expect(resolvePluginWatchTargets(root)).toEqual([
      { path: path.join(root, "dist", "manifest.js"), recursive: false, kind: "file" },
      { path: path.join(root, "dist", "ui", "index.css"), recursive: false, kind: "file" },
      { path: path.join(root, "dist", "ui", "index.js"), recursive: false, kind: "file" },
      { path: path.join(root, "dist", "worker.js"), recursive: false, kind: "file" },
      { path: path.join(root, "package.json"), recursive: false, kind: "file" },
    ]);
  });

  it("falls back to dist when package metadata does not declare entrypoints", () => {
    const root = makeTempPluginDir();
    mkdirSync(path.join(root, "dist", "nested"), { recursive: true });

    // No paperclipPlugin section: the watcher should pick up dist/ recursively.
    writeFileSync(path.join(root, "package.json"), JSON.stringify({ name: "@acme/example" }));
    writeFileSync(path.join(root, "dist", "manifest.js"), "export default {};\n");
    writeFileSync(path.join(root, "dist", "nested", "chunk.js"), "export default {};\n");

    expect(resolvePluginWatchTargets(root)).toEqual([
      { path: path.join(root, "package.json"), recursive: false, kind: "file" },
      { path: path.join(root, "dist", "manifest.js"), recursive: false, kind: "file" },
      { path: path.join(root, "dist", "nested", "chunk.js"), recursive: false, kind: "file" },
    ]);
  });
});

View File

@@ -0,0 +1,43 @@
import { describe, expect, it } from "vitest";
import { appendStderrExcerpt, formatWorkerFailureMessage } from "../services/plugin-worker-manager.js";
describe("plugin-worker-manager stderr failure context", () => {
  it("appends worker stderr context to failure messages", () => {
    const result = formatWorkerFailureMessage(
      "Worker process exited (code=1, signal=null)",
      "TypeError: Unknown file extension \".ts\"",
    );
    expect(result).toBe(
      "Worker process exited (code=1, signal=null)\n\nWorker stderr:\nTypeError: Unknown file extension \".ts\"",
    );
  });

  it("does not duplicate stderr that is already present", () => {
    // A message that already carries the stderr section must pass through unchanged.
    const message =
      "Worker process exited (code=1, signal=null)" +
      "\n" +
      "\n" +
      "Worker stderr:" +
      "\n" +
      "TypeError: Unknown file extension \".ts\"";
    const result = formatWorkerFailureMessage(message, "TypeError: Unknown file extension \".ts\"");
    expect(result).toBe(message);
  });

  it("keeps only the latest stderr excerpt", () => {
    let buffer = appendStderrExcerpt("", "first line");
    buffer = appendStderrExcerpt(buffer, "second line");
    expect(buffer).toContain("first line");
    expect(buffer).toContain("second line");

    // A huge append should evict the older content and respect the size cap.
    buffer = appendStderrExcerpt(buffer, "x".repeat(9_000));
    expect(buffer).not.toContain("first line");
    expect(buffer).not.toContain("second line");
    expect(buffer.length).toBeLessThanOrEqual(8_000);
  });
});

View File

@@ -25,7 +25,23 @@ import { sidebarBadgeRoutes } from "./routes/sidebar-badges.js";
import { llmRoutes } from "./routes/llms.js";
import { assetRoutes } from "./routes/assets.js";
import { accessRoutes } from "./routes/access.js";
import { pluginRoutes } from "./routes/plugins.js";
import { pluginUiStaticRoutes } from "./routes/plugin-ui-static.js";
import { applyUiBranding } from "./ui-branding.js";
import { logger } from "./middleware/logger.js";
import { DEFAULT_LOCAL_PLUGIN_DIR, pluginLoader } from "./services/plugin-loader.js";
import { createPluginWorkerManager } from "./services/plugin-worker-manager.js";
import { createPluginJobScheduler } from "./services/plugin-job-scheduler.js";
import { pluginJobStore } from "./services/plugin-job-store.js";
import { createPluginToolDispatcher } from "./services/plugin-tool-dispatcher.js";
import { pluginLifecycleManager } from "./services/plugin-lifecycle.js";
import { createPluginJobCoordinator } from "./services/plugin-job-coordinator.js";
import { buildHostServices, flushPluginLogBuffer } from "./services/plugin-host-services.js";
import { createPluginEventBus } from "./services/plugin-event-bus.js";
import { createPluginDevWatcher } from "./services/plugin-dev-watcher.js";
import { createPluginHostServiceCleanup } from "./services/plugin-host-service-cleanup.js";
import { pluginRegistryService } from "./services/plugin-registry.js";
import { createHostClientHandlers } from "@paperclipai/plugin-sdk";
import type { BetterAuthSessionResult } from "./auth/better-auth.js";
type UiMode = "none" | "static" | "vite-dev";
@@ -49,13 +65,20 @@ export async function createApp(
bindHost: string;
authReady: boolean;
companyDeletionEnabled: boolean;
instanceId?: string;
hostVersion?: string;
localPluginDir?: string;
betterAuthHandler?: express.RequestHandler;
resolveSession?: (req: ExpressRequest) => Promise<BetterAuthSessionResult | null>;
},
) {
const app = express();
app.use(express.json());
app.use(express.json({
verify: (req, _res, buf) => {
(req as unknown as { rawBody: Buffer }).rawBody = buf;
},
}));
app.use(httpLogger);
const privateHostnameGateEnabled =
opts.deploymentMode === "authenticated" && opts.deploymentExposure === "private";
@@ -123,6 +146,68 @@ export async function createApp(
api.use(activityRoutes(db));
api.use(dashboardRoutes(db));
api.use(sidebarBadgeRoutes(db));
const hostServicesDisposers = new Map<string, () => void>();
const workerManager = createPluginWorkerManager();
const pluginRegistry = pluginRegistryService(db);
const eventBus = createPluginEventBus();
const jobStore = pluginJobStore(db);
const lifecycle = pluginLifecycleManager(db, { workerManager });
const scheduler = createPluginJobScheduler({
db,
jobStore,
workerManager,
});
const toolDispatcher = createPluginToolDispatcher({
workerManager,
lifecycleManager: lifecycle,
db,
});
const jobCoordinator = createPluginJobCoordinator({
db,
lifecycle,
scheduler,
jobStore,
});
const hostServiceCleanup = createPluginHostServiceCleanup(lifecycle, hostServicesDisposers);
const loader = pluginLoader(
db,
{ localPluginDir: opts.localPluginDir ?? DEFAULT_LOCAL_PLUGIN_DIR },
{
workerManager,
eventBus,
jobScheduler: scheduler,
jobStore,
toolDispatcher,
lifecycleManager: lifecycle,
instanceInfo: {
instanceId: opts.instanceId ?? "default",
hostVersion: opts.hostVersion ?? "0.0.0",
},
buildHostHandlers: (pluginId, manifest) => {
const notifyWorker = (method: string, params: unknown) => {
const handle = workerManager.getWorker(pluginId);
if (handle) handle.notify(method, params);
};
const services = buildHostServices(db, pluginId, manifest.id, eventBus, notifyWorker);
hostServicesDisposers.set(pluginId, () => services.dispose());
return createHostClientHandlers({
pluginId,
capabilities: manifest.capabilities,
services,
});
},
},
);
api.use(
pluginRoutes(
db,
loader,
{ scheduler, jobStore },
{ workerManager },
{ toolDispatcher },
{ workerManager },
),
);
api.use(
accessRoutes(db, {
deploymentMode: opts.deploymentMode,
@@ -135,6 +220,9 @@ export async function createApp(
app.use("/api", (_req, res) => {
res.status(404).json({ error: "API route not found" });
});
app.use(pluginUiStaticRoutes(db, {
localPluginDir: opts.localPluginDir ?? DEFAULT_LOCAL_PLUGIN_DIR,
}));
const __dirname = path.dirname(fileURLToPath(import.meta.url));
if (opts.uiMode === "static") {
@@ -188,5 +276,35 @@ export async function createApp(
app.use(errorHandler);
jobCoordinator.start();
scheduler.start();
void toolDispatcher.initialize().catch((err) => {
logger.error({ err }, "Failed to initialize plugin tool dispatcher");
});
const devWatcher = opts.uiMode === "vite-dev"
? createPluginDevWatcher(
lifecycle,
async (pluginId) => (await pluginRegistry.getById(pluginId))?.packagePath ?? null,
)
: null;
void loader.loadAll().then((result) => {
if (!result) return;
for (const loaded of result.results) {
if (devWatcher && loaded.success && loaded.plugin.packagePath) {
devWatcher.watch(loaded.plugin.id, loaded.plugin.packagePath);
}
}
}).catch((err) => {
logger.error({ err }, "Failed to load ready plugins on startup");
});
process.once("exit", () => {
devWatcher?.close();
hostServiceCleanup.disposeAll();
hostServiceCleanup.teardown();
});
process.once("beforeExit", () => {
void flushPluginLogBuffer();
});
return app;
}

View File

@@ -0,0 +1,496 @@
/**
* @fileoverview Plugin UI static file serving route
*
* Serves plugin UI bundles from the plugin's dist/ui/ directory under the
* `/_plugins/:pluginId/ui/*` namespace. This is specified in PLUGIN_SPEC.md
* §19.0.3 (Bundle Serving).
*
* Plugin UI bundles are pre-built ESM that the host serves as static assets.
* The host dynamically imports the plugin's UI entry module from this path,
* resolves the named export declared in `ui.slots[].exportName`, and mounts
* it into the extension slot.
*
* Security:
* - Path traversal is prevented by resolving the requested path and verifying
* it stays within the plugin's UI directory.
* - Only plugins in 'ready' status have their UI served.
* - Only plugins that declare `entrypoints.ui` serve UI bundles.
*
* Cache Headers:
* - Files with content-hash patterns in their name (e.g., `index-a1b2c3d4.js`)
* receive `Cache-Control: public, max-age=31536000, immutable`.
* - Other files receive `Cache-Control: public, max-age=0, must-revalidate`
* with ETag-based conditional request support.
*
* @module server/routes/plugin-ui-static
* @see doc/plugins/PLUGIN_SPEC.md §19.0.3 — Bundle Serving
* @see doc/plugins/PLUGIN_SPEC.md §25.4.5 — Frontend Cache Invalidation
*/
import { Router } from "express";
import path from "node:path";
import fs from "node:fs";
import crypto from "node:crypto";
import type { Db } from "@paperclipai/db";
import { pluginRegistryService } from "../services/plugin-registry.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------

/**
 * Regex to detect content-hashed filenames.
 *
 * Matches patterns like:
 * - `index-a1b2c3d4.js`
 * - `styles.abc123def.css`
 * - `chunk-ABCDEF01.mjs`
 *
 * The hash portion must be at least 8 hex characters to avoid false positives.
 */
const CONTENT_HASH_PATTERN = /[.-][a-fA-F0-9]{8,}\.\w+$/;

/** 1 year in seconds — standard for content-hashed immutable resources. */
const ONE_YEAR_SECONDS = 365 * 24 * 60 * 60; // 31_536_000

/**
 * Cache-Control header for content-hashed files.
 * These files are immutable by definition (the hash changes when content changes).
 */
const CACHE_CONTROL_IMMUTABLE = `public, max-age=${ONE_YEAR_SECONDS}, immutable`;

/**
 * Cache-Control header for non-hashed files.
 * These files must be revalidated on each request (ETag-based).
 */
const CACHE_CONTROL_REVALIDATE = "public, max-age=0, must-revalidate";

/**
 * MIME types for common plugin UI bundle file extensions.
 * Extensions not listed here are served without a Content-Type header.
 */
const MIME_TYPES: Record<string, string> = {
  ".js": "application/javascript; charset=utf-8",
  ".mjs": "application/javascript; charset=utf-8",
  ".css": "text/css; charset=utf-8",
  ".json": "application/json; charset=utf-8",
  ".map": "application/json; charset=utf-8",
  ".html": "text/html; charset=utf-8",
  ".svg": "image/svg+xml",
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".webp": "image/webp",
  ".woff": "font/woff",
  ".woff2": "font/woff2",
  ".ttf": "font/ttf",
  ".eot": "application/vnd.ms-fontobject",
  ".ico": "image/x-icon",
  ".txt": "text/plain; charset=utf-8",
};
// ---------------------------------------------------------------------------
// Helper
// ---------------------------------------------------------------------------
// Containment check: true when `child` is `parent` itself or a descendant of
// it. Uses path.relative instead of String.startsWith, which would wrongly
// accept sibling directories sharing a prefix (e.g. "/p/foo-evil" vs "/p/foo").
function isPathInside(parent: string, child: string): boolean {
  const rel = path.relative(parent, child);
  return rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
}

/**
 * Resolve a plugin's UI directory from its package location.
 *
 * The plugin's `packageName` is stored in the DB. We resolve the package path
 * from the local plugin directory (DEFAULT_LOCAL_PLUGIN_DIR) by looking in
 * `node_modules`. If the plugin was installed from a local path, the manifest
 * `entrypoints.ui` path is resolved relative to the package directory.
 *
 * The resolved UI directory is always verified to stay inside the package
 * root, so a hostile `entrypoints.ui` (e.g. "../../etc") cannot escape it.
 *
 * @param localPluginDir - The plugin installation directory
 * @param packageName - The npm package name
 * @param entrypointsUi - The UI entrypoint path from the manifest (e.g., "./dist/ui/")
 * @param packagePath - Persisted absolute package path for local installs, if any
 * @returns Absolute path to the UI directory, or null if not found
 */
export function resolvePluginUiDir(
  localPluginDir: string,
  packageName: string,
  entrypointsUi: string,
  packagePath?: string | null,
): string | null {
  // For local-path installs, prefer the persisted package path.
  if (packagePath) {
    const resolvedPackagePath = path.resolve(packagePath);
    if (fs.existsSync(resolvedPackagePath)) {
      const uiDirFromPackagePath = path.resolve(resolvedPackagePath, entrypointsUi);
      // Fixed: the previous startsWith() prefix test allowed sibling paths
      // that merely share a textual prefix with the package directory.
      if (
        isPathInside(resolvedPackagePath, uiDirFromPackagePath)
        && fs.existsSync(uiDirFromPackagePath)
      ) {
        return uiDirFromPackagePath;
      }
    }
  }

  // Resolve the package root within the local plugin directory's node_modules.
  // npm installs go to <localPluginDir>/node_modules/<packageName>/
  let packageRoot: string;
  if (packageName.startsWith("@")) {
    // Scoped package: @scope/name -> node_modules/@scope/name
    packageRoot = path.join(localPluginDir, "node_modules", ...packageName.split("/"));
  } else {
    packageRoot = path.join(localPluginDir, "node_modules", packageName);
  }

  // If the standard location doesn't exist, the plugin may have been installed
  // from a local path. For local-path installs, the packageName may be a
  // directory that doesn't live inside node_modules — check directly under
  // localPluginDir.
  if (!fs.existsSync(packageRoot)) {
    const directPath = path.join(localPluginDir, packageName);
    if (fs.existsSync(directPath)) {
      packageRoot = directPath;
    } else {
      return null;
    }
  }

  // Resolve the UI directory relative to the package root, then verify it
  // both exists AND stays inside the package. (The original code documented
  // this containment check but never performed it, so a crafted
  // entrypoints.ui could point anywhere on disk.)
  const uiDir = path.resolve(packageRoot, entrypointsUi);
  if (!isPathInside(path.resolve(packageRoot), uiDir) || !fs.existsSync(uiDir)) {
    return null;
  }
  return uiDir;
}
/**
 * Compute an ETag from file stat (size + mtime).
 * This is a lightweight approach that avoids reading the file content.
 * The payload is versioned ("v2") so the scheme can be rotated without
 * colliding with tags minted by an older scheme.
 */
function computeETag(size: number, mtimeMs: number): string {
  const payload = `v2:${size}-${mtimeMs}`;
  const digest = crypto.createHash("md5").update(payload).digest("hex");
  return `"${digest.slice(0, 16)}"`;
}
// ---------------------------------------------------------------------------
// Route factory
// ---------------------------------------------------------------------------
/**
 * Options for the plugin UI static route.
 */
export interface PluginUiStaticRouteOptions {
  /**
   * The local plugin installation directory.
   * This is where plugins are installed via `npm install --prefix`.
   * Defaults to the standard `~/.paperclip/plugins/` location.
   *
   * Note: the field itself is required — the default is supplied by the
   * caller (createApp passes DEFAULT_LOCAL_PLUGIN_DIR when unset).
   */
  localPluginDir: string;
}
/**
 * Create an Express router that serves plugin UI static files.
 *
 * This route handles `GET /_plugins/:pluginId/ui/*` requests by:
 * 1. Looking up the plugin in the registry by ID or key
 * 2. Verifying the plugin is in 'ready' status with UI declared
 * 3. Resolving the file path within the plugin's dist/ui/ directory
 * 4. Serving the file with appropriate cache headers
 *
 * @param db - Database connection for plugin registry lookups
 * @param options - Configuration options
 * @returns Express router
 */
export function pluginUiStaticRoutes(db: Db, options: PluginUiStaticRouteOptions) {
  const router = Router();
  const registry = pluginRegistryService(db);
  const log = logger.child({ service: "plugin-ui-static" });

  /**
   * GET /_plugins/:pluginId/ui/*
   *
   * Serve a static file from a plugin's UI bundle directory.
   *
   * The :pluginId parameter accepts either:
   * - Database UUID
   * - Plugin key (e.g., "acme.linear")
   *
   * The wildcard captures the relative file path within the UI directory.
   *
   * Cache strategy:
   * - Content-hashed filenames → immutable, 1-year max-age
   * - Other files → must-revalidate with ETag
   */
  router.get("/_plugins/:pluginId/ui/*filePath", async (req, res) => {
    const { pluginId } = req.params;
    // Extract the relative file path from the named wildcard.
    // In Express 5 with path-to-regexp v8, named wildcards may return
    // an array of path segments or a single string.
    const rawParam = req.params.filePath;
    const rawFilePath = Array.isArray(rawParam)
      ? rawParam.join("/")
      : rawParam as string | undefined;
    if (!rawFilePath || rawFilePath.length === 0) {
      res.status(400).json({ error: "File path is required" });
      return;
    }

    // Step 1: Look up the plugin — by database id first, then by plugin key.
    let plugin = null;
    try {
      plugin = await registry.getById(pluginId);
    } catch (error) {
      // Swallow only error code "22P02" (presumably Postgres
      // invalid_text_representation, i.e. the :pluginId segment is not a
      // UUID — TODO confirm against the registry implementation); in that
      // case fall through to the key lookup. Anything else is re-thrown.
      const maybeCode =
        typeof error === "object" && error !== null && "code" in error
          ? (error as { code?: unknown }).code
          : undefined;
      if (maybeCode !== "22P02") {
        throw error;
      }
    }
    if (!plugin) {
      plugin = await registry.getByKey(pluginId);
    }
    if (!plugin) {
      res.status(404).json({ error: "Plugin not found" });
      return;
    }

    // Step 2: Verify the plugin is ready and has UI declared
    if (plugin.status !== "ready") {
      res.status(403).json({
        error: `Plugin UI is not available (status: ${plugin.status})`,
      });
      return;
    }
    const manifest = plugin.manifestJson;
    if (!manifest?.entrypoints?.ui) {
      res.status(404).json({ error: "Plugin does not declare a UI bundle" });
      return;
    }

    // Step 2b: Check for devUiUrl in plugin config — proxy to local dev server
    // when a plugin author has configured a dev server URL for hot-reload.
    // See PLUGIN_SPEC.md §27.2 — Local Development Workflow
    try {
      const configRow = await registry.getConfig(plugin.id);
      const devUiUrl =
        configRow &&
        typeof configRow === "object" &&
        "configJson" in configRow &&
        (configRow as { configJson: Record<string, unknown> }).configJson?.devUiUrl;
      if (typeof devUiUrl === "string" && devUiUrl.length > 0) {
        // Dev proxy is only available in development mode
        if (process.env.NODE_ENV === "production") {
          log.warn(
            { pluginId: plugin.id },
            "plugin-ui-static: devUiUrl ignored in production",
          );
          // Fall through to static file serving below
        } else {
          // Guard against rawFilePath overriding the base URL via protocol
          // scheme (e.g. "https://evil.com/x") or protocol-relative paths
          // (e.g. "//evil.com/x") which cause `new URL(path, base)` to
          // ignore the base entirely.
          // Normalize percent-encoding so encoded slashes (%2F) can't bypass
          // the protocol/path checks below.
          let decodedPath: string;
          try {
            decodedPath = decodeURIComponent(rawFilePath);
          } catch {
            res.status(400).json({ error: "Invalid file path" });
            return;
          }
          if (
            decodedPath.includes("://") ||
            decodedPath.startsWith("//") ||
            decodedPath.startsWith("\\\\")
          ) {
            res.status(400).json({ error: "Invalid file path" });
            return;
          }
          // Proxy the request to the dev server.
          // NOTE(review): the URL is built from rawFilePath (still
          // percent-encoded) while the guards above ran on decodedPath —
          // confirm no encoded form can alter the parsed URL's host. The
          // loopback check on targetUrl below is the backstop either way.
          const targetUrl = new URL(rawFilePath, devUiUrl.endsWith("/") ? devUiUrl : devUiUrl + "/");
          // SSRF protection: only allow http/https and localhost targets for dev proxy
          if (targetUrl.protocol !== "http:" && targetUrl.protocol !== "https:") {
            res.status(400).json({ error: "devUiUrl must use http or https protocol" });
            return;
          }
          // Dev proxy is restricted to loopback addresses only.
          // Validate the *constructed* targetUrl hostname (not the base) to
          // catch any path-based override that slipped past the checks above.
          const devHost = targetUrl.hostname;
          const isLoopback =
            devHost === "localhost" ||
            devHost === "127.0.0.1" ||
            devHost === "::1" ||
            devHost === "[::1]";
          if (!isLoopback) {
            log.warn(
              { pluginId: plugin.id, devUiUrl, host: devHost },
              "plugin-ui-static: devUiUrl must target localhost, rejecting proxy",
            );
            res.status(400).json({ error: "devUiUrl must target localhost" });
            return;
          }
          log.debug(
            { pluginId: plugin.id, devUiUrl, targetUrl: targetUrl.href },
            "plugin-ui-static: proxying to devUiUrl",
          );
          try {
            // 10s abort cap so a wedged dev server can't hold the request open.
            const controller = new AbortController();
            const timeout = setTimeout(() => controller.abort(), 10_000);
            try {
              const upstream = await fetch(targetUrl.href, { signal: controller.signal });
              if (!upstream.ok) {
                res.status(upstream.status).json({
                  error: `Dev server returned ${upstream.status}`,
                });
                return;
              }
              const contentType = upstream.headers.get("content-type");
              if (contentType) res.set("Content-Type", contentType);
              // Dev assets change on every rebuild — never cache them.
              res.set("Cache-Control", "no-cache, no-store, must-revalidate");
              // Buffer the whole body rather than streaming; dev bundles are
              // assumed small enough for this to be acceptable.
              const body = await upstream.arrayBuffer();
              res.send(Buffer.from(body));
              return;
            } finally {
              clearTimeout(timeout);
            }
          } catch (proxyErr) {
            log.warn(
              {
                pluginId: plugin.id,
                devUiUrl,
                err: proxyErr instanceof Error ? proxyErr.message : String(proxyErr),
              },
              "plugin-ui-static: failed to proxy to devUiUrl, falling back to static",
            );
            // Fall through to static serving below
          }
        }
      }
    } catch {
      // Config lookup failure is non-fatal — fall through to static serving
    }

    // Step 3: Resolve the plugin's UI directory
    const uiDir = resolvePluginUiDir(
      options.localPluginDir,
      plugin.packageName,
      manifest.entrypoints.ui,
      plugin.packagePath,
    );
    if (!uiDir) {
      log.warn(
        { pluginId: plugin.id, pluginKey: plugin.pluginKey, packageName: plugin.packageName },
        "plugin-ui-static: UI directory not found on disk",
      );
      res.status(404).json({ error: "Plugin UI directory not found" });
      return;
    }

    // Step 4: Resolve the requested file path and prevent traversal (including symlinks)
    const resolvedFilePath = path.resolve(uiDir, rawFilePath);

    // Step 5: Check that the file exists and is a regular file
    let fileStat: fs.Stats;
    try {
      fileStat = fs.statSync(resolvedFilePath);
    } catch {
      res.status(404).json({ error: "File not found" });
      return;
    }

    // Security: resolve symlinks via realpathSync and verify containment.
    // This prevents symlink-based traversal that string-based startsWith misses.
    let realFilePath: string;
    let realUiDir: string;
    try {
      realFilePath = fs.realpathSync(resolvedFilePath);
      realUiDir = fs.realpathSync(uiDir);
    } catch {
      res.status(404).json({ error: "File not found" });
      return;
    }
    const relative = path.relative(realUiDir, realFilePath);
    if (relative.startsWith("..") || path.isAbsolute(relative)) {
      res.status(403).json({ error: "Access denied" });
      return;
    }
    if (!fileStat.isFile()) {
      res.status(404).json({ error: "File not found" });
      return;
    }

    // Step 6: Determine cache strategy based on filename
    const basename = path.basename(resolvedFilePath);
    const isContentHashed = CONTENT_HASH_PATTERN.test(basename);

    // Step 7: Set cache headers
    if (isContentHashed) {
      res.set("Cache-Control", CACHE_CONTROL_IMMUTABLE);
    } else {
      res.set("Cache-Control", CACHE_CONTROL_REVALIDATE);
      // Compute and set ETag for conditional request support
      const etag = computeETag(fileStat.size, fileStat.mtimeMs);
      res.set("ETag", etag);
      // Check If-None-Match for 304 Not Modified.
      // NOTE(review): this is an exact string compare — it does not handle
      // weak validators ("W/...") or comma-separated ETag lists. Worst case
      // is a full 200 instead of a 304; confirm that's acceptable.
      const ifNoneMatch = req.headers["if-none-match"];
      if (ifNoneMatch === etag) {
        res.status(304).end();
        return;
      }
    }

    // Step 8: Set Content-Type (unknown extensions get no header)
    const ext = path.extname(resolvedFilePath).toLowerCase();
    const contentType = MIME_TYPES[ext];
    if (contentType) {
      res.set("Content-Type", contentType);
    }

    // Step 9: Set CORS headers (plugin UI may be loaded from different origin in dev)
    res.set("Access-Control-Allow-Origin", "*");

    // Step 10: Send the file
    // The plugin source can live in Git worktrees (e.g. ".worktrees/...").
    // `send` defaults to dotfiles:"ignore", which treats dot-directories as
    // not found. We already enforce traversal safety above, so allow dot paths.
    res.sendFile(resolvedFilePath, { dotfiles: "allow" }, (err) => {
      if (err) {
        log.error(
          { err, pluginId: plugin.id, filePath: resolvedFilePath },
          "plugin-ui-static: error sending file",
        );
        // Only send error if headers haven't been sent yet
        if (!res.headersSent) {
          res.status(500).json({ error: "Failed to serve file" });
        }
      }
    });
  });

  return router;
}

2219
server/src/routes/plugins.ts Normal file

File diff suppressed because it is too large Load Diff

373
server/src/services/cron.ts Normal file
View File

@@ -0,0 +1,373 @@
/**
* Lightweight cron expression parser and next-run calculator.
*
* Supports standard 5-field cron expressions:
*
 * ┌────────────── minute (0-59)
 * │ ┌──────────── hour (0-23)
 * │ │ ┌────────── day of month (1-31)
 * │ │ │ ┌──────── month (1-12)
 * │ │ │ │ ┌────── day of week (0-6, Sun=0)
* │ │ │ │ │
* * * * * *
*
* Supported syntax per field:
* - `*` — any value
* - `N` — exact value
* - `N-M` — range (inclusive)
* - `N/S` — start at N, step S (within field bounds)
* - `* /S` — every S (from field min) [no space — shown to avoid comment termination]
* - `N-M/S` — range with step
* - `N,M,...` — list of values, ranges, or steps
*
* @module
*/
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
 * A parsed cron schedule. Each field is a sorted array of valid integer values
 * for that field (bounds per FIELD_SPECS).
 */
export interface ParsedCron {
  /** Matching minutes (0-59). */
  minutes: number[];
  /** Matching hours (0-23). */
  hours: number[];
  /** Matching days of the month (1-31). */
  daysOfMonth: number[];
  /** Matching months (1-12). */
  months: number[];
  /** Matching days of the week (0-6, Sunday = 0). */
  daysOfWeek: number[];
}
// ---------------------------------------------------------------------------
// Field bounds
// ---------------------------------------------------------------------------
/** Inclusive bounds and display name for one cron field position. */
interface FieldSpec {
  min: number;
  max: number;
  name: string; // human-readable field name, used in error messages
}

/** Specs in cron-expression order: minute, hour, day-of-month, month, day-of-week. */
const FIELD_SPECS: FieldSpec[] = [
  { min: 0, max: 59, name: "minute" },
  { min: 0, max: 23, name: "hour" },
  { min: 1, max: 31, name: "day of month" },
  { min: 1, max: 12, name: "month" },
  { min: 0, max: 6, name: "day of week" },
];
// ---------------------------------------------------------------------------
// Parsing
// ---------------------------------------------------------------------------
/**
 * Parse a single cron field token (e.g. `"5"`, `"1-3"`, `"* /10"`, `"1,3,5"`).
 *
 * @returns Sorted deduplicated array of matching integer values within bounds.
 * @throws {Error} on invalid syntax or out-of-range values.
 */
function parseField(token: string, spec: FieldSpec): number[] {
  // Strict decimal parser. Bug fix: parseInt() silently ignores trailing
  // garbage ("5x" -> 5), which made validateCron() accept malformed
  // expressions; reject anything that is not purely digits.
  const toInt = (text: string): number =>
    /^\d+$/.test(text) ? parseInt(text, 10) : NaN;

  const values = new Set<number>();

  // Split on commas first — each part can be a value, range, or step
  const parts = token.split(",");
  for (const part of parts) {
    const trimmed = part.trim();
    if (trimmed === "") {
      throw new Error(`Empty element in cron ${spec.name} field`);
    }

    // Check for step syntax: "X/S" where X is "*" or a range or a number
    const slashIdx = trimmed.indexOf("/");
    if (slashIdx !== -1) {
      const base = trimmed.slice(0, slashIdx);
      const stepStr = trimmed.slice(slashIdx + 1);
      const step = toInt(stepStr);
      if (isNaN(step) || step <= 0) {
        throw new Error(
          `Invalid step "${stepStr}" in cron ${spec.name} field`,
        );
      }
      let rangeStart = spec.min;
      let rangeEnd = spec.max;
      if (base === "*") {
        // */S — every S from field min
      } else if (base.includes("-")) {
        // N-M/S — range with step. Bug fix: require exactly two segments so
        // "1-2-3/4" is rejected instead of silently dropping the tail.
        const bounds = base.split("-");
        const [a, b] = bounds.map((s) => toInt(s));
        if (bounds.length !== 2 || isNaN(a!) || isNaN(b!)) {
          throw new Error(
            `Invalid range "${base}" in cron ${spec.name} field`,
          );
        }
        rangeStart = a!;
        rangeEnd = b!;
      } else {
        // N/S — start at N, step S (runs to the field max)
        const start = toInt(base);
        if (isNaN(start)) {
          throw new Error(
            `Invalid start "${base}" in cron ${spec.name} field`,
          );
        }
        rangeStart = start;
      }
      validateBounds(rangeStart, spec);
      validateBounds(rangeEnd, spec);
      for (let i = rangeStart; i <= rangeEnd; i += step) {
        values.add(i);
      }
      continue;
    }

    // Check for range syntax: "N-M"
    if (trimmed.includes("-")) {
      // Bug fix: require exactly two segments (see note above).
      const segments = trimmed.split("-");
      const [aStr, bStr] = segments;
      const a = toInt(aStr!);
      const b = toInt(bStr!);
      if (segments.length !== 2 || isNaN(a) || isNaN(b)) {
        throw new Error(
          `Invalid range "${trimmed}" in cron ${spec.name} field`,
        );
      }
      validateBounds(a, spec);
      validateBounds(b, spec);
      if (a > b) {
        throw new Error(
          `Invalid range ${a}-${b} in cron ${spec.name} field (start > end)`,
        );
      }
      for (let i = a; i <= b; i++) {
        values.add(i);
      }
      continue;
    }

    // Wildcard — every value in the field's bounds
    if (trimmed === "*") {
      for (let i = spec.min; i <= spec.max; i++) {
        values.add(i);
      }
      continue;
    }

    // Single value
    const val = toInt(trimmed);
    if (isNaN(val)) {
      throw new Error(
        `Invalid value "${trimmed}" in cron ${spec.name} field`,
      );
    }
    validateBounds(val, spec);
    values.add(val);
  }

  if (values.size === 0) {
    throw new Error(`Empty result for cron ${spec.name} field`);
  }
  return [...values].sort((a, b) => a - b);
}
/**
 * Assert that a parsed value lies within the field's inclusive bounds.
 *
 * @throws {Error} when `value` is outside `[spec.min, spec.max]`.
 */
function validateBounds(value: number, spec: FieldSpec): void {
  if (value < spec.min || value > spec.max) {
    // Bug fix: the bounds in the message were rendered with no separator
    // between min and max (e.g. "[059]" instead of "[0-59]").
    throw new Error(
      `Value ${value} out of range [${spec.min}-${spec.max}] for cron ${spec.name} field`,
    );
  }
}
// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------
/**
 * Parse a cron expression string into a structured {@link ParsedCron}.
 *
 * @param expression — A standard 5-field cron expression.
 * @returns Parsed cron with sorted valid values for each field.
 * @throws {Error} on invalid syntax.
 *
 * @example
 * ```ts
 * const parsed = parseCron("0 * * * *"); // every hour at minute 0
 * // parsed.minutes === [0]
 * // parsed.hours === [0,1,2,...,23]
 * ```
 */
export function parseCron(expression: string): ParsedCron {
  const text = expression.trim();
  if (text === "") {
    throw new Error("Cron expression must not be empty");
  }
  const fields = text.split(/\s+/);
  if (fields.length !== 5) {
    throw new Error(
      `Cron expression must have exactly 5 fields, got ${fields.length}: "${text}"`,
    );
  }
  // Parse each field against its positional spec (minute..day-of-week).
  const [minutes, hours, daysOfMonth, months, daysOfWeek] = fields.map(
    (field, index) => parseField(field, FIELD_SPECS[index]!),
  );
  return {
    minutes: minutes!,
    hours: hours!,
    daysOfMonth: daysOfMonth!,
    months: months!,
    daysOfWeek: daysOfWeek!,
  };
}
/**
 * Validate a cron expression string. Returns `null` if valid, or an error
 * message string if invalid.
 *
 * @param expression — A cron expression string to validate.
 * @returns `null` on success, error message on failure.
 */
export function validateCron(expression: string): string | null {
  try {
    parseCron(expression);
  } catch (err) {
    return err instanceof Error ? err.message : String(err);
  }
  return null;
}
/**
 * Calculate the next run time after `after` for the given parsed cron schedule.
 *
 * All calendar arithmetic is done in UTC (only getUTC*/setUTC* accessors
 * are used below).
 *
 * Starts from the minute immediately following `after` and walks forward
 * until a matching minute is found (up to a safety limit of ~4 years to
 * prevent infinite loops on impossible schedules).
 *
 * @param cron — Parsed cron schedule.
 * @param after — The reference date. The returned date will be strictly after this.
 * @returns The next matching `Date`, or `null` if no match found within the search window.
 */
export function nextCronTick(cron: ParsedCron, after: Date): Date | null {
  // Work in UTC minutes — start from the minute after `after`
  const d = new Date(after.getTime());
  // Advance to the next whole minute (gives the "strictly after" guarantee)
  d.setUTCSeconds(0, 0);
  d.setUTCMinutes(d.getUTCMinutes() + 1);

  // Safety: search up to 4 years worth of minutes (~2.1M iterations max).
  // Uses 366 to account for leap years.
  const MAX_CRON_SEARCH_YEARS = 4;
  const maxIterations = MAX_CRON_SEARCH_YEARS * 366 * 24 * 60;

  for (let i = 0; i < maxIterations; i++) {
    const month = d.getUTCMonth() + 1; // 1-12
    const dayOfMonth = d.getUTCDate(); // 1-31
    const dayOfWeek = d.getUTCDay(); // 0-6
    const hour = d.getUTCHours(); // 0-23
    const minute = d.getUTCMinutes(); // 0-59

    // Check month
    if (!cron.months.includes(month)) {
      // Skip to the first day of the next matching month.
      // advanceToNextMonth is defined later in this module (not visible in
      // this excerpt) — presumably mutates `d` in place; TODO confirm.
      advanceToNextMonth(d, cron.months);
      continue;
    }

    // Check day of month AND day of week (both must match).
    // NOTE(review): classic Vixie cron ORs these two fields when both are
    // restricted — confirm the stricter AND semantics here is intentional.
    if (!cron.daysOfMonth.includes(dayOfMonth) || !cron.daysOfWeek.includes(dayOfWeek)) {
      // Advance one day and reset to midnight
      d.setUTCDate(d.getUTCDate() + 1);
      d.setUTCHours(0, 0, 0, 0);
      continue;
    }

    // Check hour
    if (!cron.hours.includes(hour)) {
      // Advance to next matching hour within the day.
      // findNext is defined later in this module (not visible here) —
      // presumably returns the smallest listed value greater than the
      // current one, or null when none remains; TODO confirm.
      const nextHour = findNext(cron.hours, hour);
      if (nextHour !== null) {
        d.setUTCHours(nextHour, 0, 0, 0);
      } else {
        // No matching hour left today — advance to next day
        d.setUTCDate(d.getUTCDate() + 1);
        d.setUTCHours(0, 0, 0, 0);
      }
      continue;
    }

    // Check minute
    if (!cron.minutes.includes(minute)) {
      const nextMin = findNext(cron.minutes, minute);
      if (nextMin !== null) {
        d.setUTCMinutes(nextMin, 0, 0);
      } else {
        // No matching minute left this hour — advance to next hour
        d.setUTCHours(d.getUTCHours() + 1, 0, 0, 0);
      }
      continue;
    }

    // All fields match!
    return new Date(d.getTime());
  }

  // No match found within the search window
  return null;
}
/**
 * Convenience wrapper: parse a cron expression and compute its next run time.
 *
 * @param expression — 5-field cron expression string.
 * @param after — Reference date (defaults to `new Date()`).
 * @returns The next matching Date, or `null` if no match within 4 years.
 * @throws {Error} if the cron expression is invalid.
 */
export function nextCronTickFromExpression(
  expression: string,
  after: Date = new Date(),
): Date | null {
  return nextCronTick(parseCron(expression), after);
}
// ---------------------------------------------------------------------------
// Internal helpers
// ---------------------------------------------------------------------------
/**
 * Return the smallest value in ascending `sortedValues` that is strictly
 * greater than `current`, or `null` when every value is <= `current`.
 */
function findNext(sortedValues: number[], current: number): number | null {
  const next = sortedValues.find((value) => value > current);
  return next === undefined ? null : next;
}
/**
 * Mutate `d` in place to 00:00:00.000 UTC on the 1st of the next calendar
 * month whose 1-based number appears in `months`. Always moves at least one
 * month forward (the current month is never a candidate). Bounded at 48
 * steps (4 years) as a safety net; if nothing matches, `d` is left unchanged.
 */
function advanceToNextMonth(d: Date, months: number[]): void {
  let candidateYear = d.getUTCFullYear();
  let candidateMonth = d.getUTCMonth() + 1; // 1-based month number
  let remainingSteps = 48; // max 4 years of months
  while (remainingSteps-- > 0) {
    candidateMonth += 1;
    if (candidateMonth > 12) {
      candidateMonth = 1;
      candidateYear += 1;
    }
    if (!months.includes(candidateMonth)) continue;
    // setUTCFullYear(y, m, 1) sets all three at once, so no day-rollover issues.
    d.setUTCFullYear(candidateYear, candidateMonth - 1, 1);
    d.setUTCHours(0, 0, 0, 0);
    return;
  }
}

View File

@@ -1636,6 +1636,7 @@ export function heartbeatService(db: Db) {
repoRef: executionWorkspace.repoRef,
branchName: executionWorkspace.branchName,
worktreePath: executionWorkspace.worktreePath,
agentHome: resolveDefaultAgentWorkspaceDir(agent.id),
};
context.paperclipWorkspaces = resolvedWorkspace.workspaceHints;
const runtimeServiceIntents = (() => {

View File

@@ -34,7 +34,21 @@ export function publishLiveEvent(input: {
return event;
}
/**
 * Publish a live event on the reserved global ("*") channel so it reaches
 * global subscribers regardless of company scoping.
 *
 * @returns The constructed event envelope (useful for logging/echoing).
 */
export function publishGlobalLiveEvent(input: {
  type: LiveEventType;
  payload?: LiveEventPayload;
}) {
  // "*" is used as both the companyId sentinel and the emitter channel name.
  const event = toLiveEvent({ companyId: "*", type: input.type, payload: input.payload });
  emitter.emit("*", event);
  return event;
}
/**
 * Subscribe to live events for a single company.
 *
 * @returns An unsubscribe function; call it to detach the listener.
 */
export function subscribeCompanyLiveEvents(companyId: string, listener: LiveEventListener) {
  emitter.on(companyId, listener);
  return () => emitter.off(companyId, listener);
}
/**
 * Subscribe to global ("*" channel) live events.
 *
 * NOTE(review): this only receives events published via publishGlobalLiveEvent,
 * not company-scoped events — confirm that is the intended fan-out.
 *
 * @returns An unsubscribe function; call it to detach the listener.
 */
export function subscribeGlobalLiveEvents(listener: LiveEventListener) {
  emitter.on("*", listener);
  return () => emitter.off("*", listener);
}

View File

@@ -0,0 +1,447 @@
/**
* PluginCapabilityValidator — enforces the capability model at both
* install-time and runtime.
*
* Every plugin declares the capabilities it requires in its manifest
* (`manifest.capabilities`). This service checks those declarations
* against a mapping of operations → required capabilities so that:
*
* 1. **Install-time validation** — `validateManifestCapabilities()`
* ensures that declared features (tools, jobs, webhooks, UI slots)
* have matching capability entries, giving operators clear feedback
* before a plugin is activated.
*
* 2. **Runtime gating** — `checkOperation()` / `assertOperation()` are
* called on every worker→host bridge call to enforce least-privilege
* access. If a plugin attempts an operation it did not declare, the
* call is rejected with a 403 error.
*
* @see PLUGIN_SPEC.md §15 — Capability Model
* @see host-client-factory.ts — SDK-side capability gating
*/
import type {
PluginCapability,
PaperclipPluginManifestV1,
PluginUiSlotType,
PluginLauncherPlacementZone,
} from "@paperclipai/shared";
import { forbidden } from "../errors.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Capability requirement mappings
// ---------------------------------------------------------------------------
/**
 * Maps high-level operations to the capabilities they require.
 *
 * When the bridge receives a call from a plugin worker, the host looks up
 * the operation in this map and checks the plugin's declared capabilities.
 * If any required capability is missing, the call is rejected.
 *
 * Operations absent from this map are rejected by default (deny-by-default)
 * — see checkOperation in pluginCapabilityValidator.
 *
 * @see PLUGIN_SPEC.md §15 — Capability Model
 */
const OPERATION_CAPABILITIES: Record<string, readonly PluginCapability[]> = {
  // Data read operations
  "companies.list": ["companies.read"],
  "companies.get": ["companies.read"],
  "projects.list": ["projects.read"],
  "projects.get": ["projects.read"],
  "project.workspaces.list": ["project.workspaces.read"],
  "project.workspaces.get": ["project.workspaces.read"],
  "issues.list": ["issues.read"],
  "issues.get": ["issues.read"],
  "issue.comments.list": ["issue.comments.read"],
  "issue.comments.get": ["issue.comments.read"],
  "agents.list": ["agents.read"],
  "agents.get": ["agents.read"],
  "goals.list": ["goals.read"],
  "goals.get": ["goals.read"],
  "activity.list": ["activity.read"],
  "activity.get": ["activity.read"],
  "costs.list": ["costs.read"],
  "costs.get": ["costs.read"],
  // Data write operations
  "issues.create": ["issues.create"],
  "issues.update": ["issues.update"],
  "issue.comments.create": ["issue.comments.create"],
  "activity.log": ["activity.log.write"],
  "metrics.write": ["metrics.write"],
  // Plugin state operations
  "plugin.state.get": ["plugin.state.read"],
  "plugin.state.list": ["plugin.state.read"],
  "plugin.state.set": ["plugin.state.write"],
  "plugin.state.delete": ["plugin.state.write"],
  // Runtime / Integration operations
  "events.subscribe": ["events.subscribe"],
  "events.emit": ["events.emit"],
  "jobs.schedule": ["jobs.schedule"],
  // Cancelling a job is covered by the same capability as scheduling one.
  "jobs.cancel": ["jobs.schedule"],
  "webhooks.receive": ["webhooks.receive"],
  "http.request": ["http.outbound"],
  "secrets.resolve": ["secrets.read-ref"],
  // Agent tools
  "agent.tools.register": ["agent.tools.register"],
  // Executing a registered tool reuses the registration capability.
  "agent.tools.execute": ["agent.tools.register"],
};
/**
 * Maps UI slot types to the capability required to register them.
 *
 * @see PLUGIN_SPEC.md §19 — UI Extension Model
 */
const UI_SLOT_CAPABILITIES: Record<PluginUiSlotType, PluginCapability> = {
  sidebar: "ui.sidebar.register",
  sidebarPanel: "ui.sidebar.register",
  projectSidebarItem: "ui.sidebar.register",
  page: "ui.page.register",
  detailTab: "ui.detailTab.register",
  taskDetailView: "ui.detailTab.register",
  dashboardWidget: "ui.dashboardWidget.register",
  toolbarButton: "ui.action.register",
  contextMenuItem: "ui.action.register",
  commentAnnotation: "ui.commentAnnotation.register",
  commentContextMenuItem: "ui.action.register",
  settingsPage: "instance.settings.register",
};
/**
 * Launcher placement zones align with host UI surfaces and therefore inherit
 * the same capability requirements as the equivalent slot type.
 *
 * Keep this map in sync with UI_SLOT_CAPABILITIES above — the entries are
 * intentionally identical, keyed by the zone type instead of the slot type.
 */
const LAUNCHER_PLACEMENT_CAPABILITIES: Record<
  PluginLauncherPlacementZone,
  PluginCapability
> = {
  page: "ui.page.register",
  detailTab: "ui.detailTab.register",
  taskDetailView: "ui.detailTab.register",
  dashboardWidget: "ui.dashboardWidget.register",
  sidebar: "ui.sidebar.register",
  sidebarPanel: "ui.sidebar.register",
  projectSidebarItem: "ui.sidebar.register",
  toolbarButton: "ui.action.register",
  contextMenuItem: "ui.action.register",
  commentAnnotation: "ui.commentAnnotation.register",
  commentContextMenuItem: "ui.action.register",
  settingsPage: "instance.settings.register",
};
/**
 * Maps feature declarations in the manifest to their required capabilities.
 * A manifest declaring a non-empty array for one of these keys must also
 * declare the mapped capability (enforced by validateManifestCapabilities).
 */
const FEATURE_CAPABILITIES: Record<string, PluginCapability> = {
  tools: "agent.tools.register",
  jobs: "jobs.schedule",
  webhooks: "webhooks.receive",
};
// ---------------------------------------------------------------------------
// Result types
// ---------------------------------------------------------------------------
/**
 * Result of a capability check. When `allowed` is false, `missing` contains
 * the capabilities that the plugin does not declare but the operation requires.
 */
export interface CapabilityCheckResult {
  /** True when every required capability is declared by the plugin. */
  allowed: boolean;
  /** Required-but-undeclared capabilities; empty for unknown operations/slots. */
  missing: PluginCapability[];
  /** The operation (or synthesized `ui.<slot>.register` name) that was checked. */
  operation?: string;
  /** The id of the plugin whose manifest was checked. */
  pluginId?: string;
}
// ---------------------------------------------------------------------------
// PluginCapabilityValidator interface
// ---------------------------------------------------------------------------
export interface PluginCapabilityValidator {
  /**
   * Check whether a plugin has a specific capability.
   */
  hasCapability(
    manifest: PaperclipPluginManifestV1,
    capability: PluginCapability,
  ): boolean;
  /**
   * Check whether a plugin has all of the specified capabilities.
   * The result's `missing` lists every undeclared capability.
   */
  hasAllCapabilities(
    manifest: PaperclipPluginManifestV1,
    capabilities: PluginCapability[],
  ): CapabilityCheckResult;
  /**
   * Check whether a plugin has at least one of the specified capabilities.
   */
  hasAnyCapability(
    manifest: PaperclipPluginManifestV1,
    capabilities: PluginCapability[],
  ): boolean;
  /**
   * Check whether a plugin is allowed to perform the named operation.
   *
   * Operations are mapped to required capabilities via OPERATION_CAPABILITIES.
   * Unknown operations are rejected by default (with an empty `missing` list).
   */
  checkOperation(
    manifest: PaperclipPluginManifestV1,
    operation: string,
  ): CapabilityCheckResult;
  /**
   * Assert that a plugin is allowed to perform an operation.
   * Throws a 403 HttpError if the capability check fails.
   */
  assertOperation(
    manifest: PaperclipPluginManifestV1,
    operation: string,
  ): void;
  /**
   * Assert that a plugin has a specific capability.
   * Throws a 403 HttpError if the capability is missing.
   */
  assertCapability(
    manifest: PaperclipPluginManifestV1,
    capability: PluginCapability,
  ): void;
  /**
   * Check whether a plugin can register the given UI slot type.
   * Unknown slot types are rejected with an empty `missing` list.
   */
  checkUiSlot(
    manifest: PaperclipPluginManifestV1,
    slotType: PluginUiSlotType,
  ): CapabilityCheckResult;
  /**
   * Validate that a manifest's declared capabilities are consistent with its
   * declared features (tools, jobs, webhooks, UI slots, launchers).
   *
   * Returns all missing capabilities rather than failing on the first one.
   * This is useful for install-time validation to give comprehensive feedback.
   */
  validateManifestCapabilities(
    manifest: PaperclipPluginManifestV1,
  ): CapabilityCheckResult;
  /**
   * Get the capabilities required for a named operation.
   * Returns an empty array if the operation is unknown.
   */
  getRequiredCapabilities(operation: string): readonly PluginCapability[];
  /**
   * Get the capability required for a UI slot type.
   */
  getUiSlotCapability(slotType: PluginUiSlotType): PluginCapability;
}
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
/**
* Create a PluginCapabilityValidator.
*
* This service enforces capability gates for plugin operations. The host
* uses it to verify that a plugin's declared capabilities permit the
* operation it is attempting, both at install time (manifest validation)
* and at runtime (bridge call gating).
*
* Usage:
* ```ts
* const validator = pluginCapabilityValidator();
*
* // Runtime: gate a bridge call
* validator.assertOperation(plugin.manifestJson, "issues.create");
*
* // Install time: validate manifest consistency
* const result = validator.validateManifestCapabilities(manifest);
* if (!result.allowed) {
* throw badRequest("Missing capabilities", result.missing);
* }
* ```
*/
export function pluginCapabilityValidator(): PluginCapabilityValidator {
const log = logger.child({ service: "plugin-capability-validator" });
// -----------------------------------------------------------------------
// Internal helpers
// -----------------------------------------------------------------------
function capabilitySet(manifest: PaperclipPluginManifestV1): Set<PluginCapability> {
return new Set(manifest.capabilities);
}
function buildForbiddenMessage(
manifest: PaperclipPluginManifestV1,
operation: string,
missing: PluginCapability[],
): string {
return (
`Plugin '${manifest.id}' is not allowed to perform '${operation}'. ` +
`Missing required capabilities: ${missing.join(", ")}`
);
}
// -----------------------------------------------------------------------
// Public API
// -----------------------------------------------------------------------
return {
hasCapability(manifest, capability) {
return manifest.capabilities.includes(capability);
},
hasAllCapabilities(manifest, capabilities) {
const declared = capabilitySet(manifest);
const missing = capabilities.filter((cap) => !declared.has(cap));
return {
allowed: missing.length === 0,
missing,
pluginId: manifest.id,
};
},
hasAnyCapability(manifest, capabilities) {
const declared = capabilitySet(manifest);
return capabilities.some((cap) => declared.has(cap));
},
checkOperation(manifest, operation) {
const required = OPERATION_CAPABILITIES[operation];
if (!required) {
log.warn(
{ pluginId: manifest.id, operation },
"capability check for unknown operation rejecting by default",
);
return {
allowed: false,
missing: [],
operation,
pluginId: manifest.id,
};
}
const declared = capabilitySet(manifest);
const missing = required.filter((cap) => !declared.has(cap));
if (missing.length > 0) {
log.debug(
{ pluginId: manifest.id, operation, missing },
"capability check failed",
);
}
return {
allowed: missing.length === 0,
missing,
operation,
pluginId: manifest.id,
};
},
assertOperation(manifest, operation) {
const result = this.checkOperation(manifest, operation);
if (!result.allowed) {
const msg = result.missing.length > 0
? buildForbiddenMessage(manifest, operation, result.missing)
: `Plugin '${manifest.id}' attempted unknown operation '${operation}'`;
throw forbidden(msg);
}
},
assertCapability(manifest, capability) {
if (!this.hasCapability(manifest, capability)) {
throw forbidden(
`Plugin '${manifest.id}' lacks required capability '${capability}'`,
);
}
},
checkUiSlot(manifest, slotType) {
const required = UI_SLOT_CAPABILITIES[slotType];
if (!required) {
return {
allowed: false,
missing: [],
operation: `ui.${slotType}.register`,
pluginId: manifest.id,
};
}
const has = manifest.capabilities.includes(required);
return {
allowed: has,
missing: has ? [] : [required],
operation: `ui.${slotType}.register`,
pluginId: manifest.id,
};
},
validateManifestCapabilities(manifest) {
const declared = capabilitySet(manifest);
const allMissing: PluginCapability[] = [];
// Check feature declarations → required capabilities
for (const [feature, requiredCap] of Object.entries(FEATURE_CAPABILITIES)) {
const featureValue = manifest[feature as keyof PaperclipPluginManifestV1];
if (Array.isArray(featureValue) && featureValue.length > 0) {
if (!declared.has(requiredCap)) {
allMissing.push(requiredCap);
}
}
}
// Check UI slots → required capabilities
const uiSlots = manifest.ui?.slots ?? [];
if (uiSlots.length > 0) {
for (const slot of uiSlots) {
const requiredCap = UI_SLOT_CAPABILITIES[slot.type];
if (requiredCap && !declared.has(requiredCap)) {
if (!allMissing.includes(requiredCap)) {
allMissing.push(requiredCap);
}
}
}
}
// Check launcher declarations → required capabilities
const launchers = [
...(manifest.launchers ?? []),
...(manifest.ui?.launchers ?? []),
];
if (launchers.length > 0) {
for (const launcher of launchers) {
const requiredCap = LAUNCHER_PLACEMENT_CAPABILITIES[launcher.placementZone];
if (requiredCap && !declared.has(requiredCap) && !allMissing.includes(requiredCap)) {
allMissing.push(requiredCap);
}
}
}
return {
allowed: allMissing.length === 0,
missing: allMissing,
pluginId: manifest.id,
};
},
getRequiredCapabilities(operation) {
return OPERATION_CAPABILITIES[operation] ?? [];
},
getUiSlotCapability(slotType) {
return UI_SLOT_CAPABILITIES[slotType];
},
};
}

View File

@@ -0,0 +1,50 @@
/**
* @fileoverview Validates plugin instance configuration against its JSON Schema.
*
* Uses Ajv to validate `configJson` values against the `instanceConfigSchema`
* declared in a plugin's manifest. This ensures that invalid configuration is
* rejected at the API boundary, not discovered later at worker startup.
*
* @module server/services/plugin-config-validator
*/
import Ajv, { type ErrorObject } from "ajv";
import addFormats from "ajv-formats";
import type { JsonSchema } from "@paperclipai/shared";
export interface ConfigValidationResult {
  valid: boolean;
  /** Present only when `valid` is false; one entry per failed schema rule. */
  errors?: { field: string; message: string }[];
}
/**
 * Cache of compiled Ajv validators, keyed by schema object identity.
 * Ajv's documentation recommends compiling a schema once and reusing the
 * validator — compiling on every call is the major Ajv performance pitfall.
 * WeakMap keeps entries collectable when a plugin's manifest is unloaded.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const compiledValidatorCache = new WeakMap<object, any>();
/**
 * Compile (or fetch from cache) the Ajv validator for a schema.
 * Boolean JSON Schemas (`true`/`false`) are valid but cannot be WeakMap
 * keys, so they are compiled fresh each time.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function compileSchema(schema: JsonSchema): any {
  const cacheable = typeof schema === "object" && schema !== null;
  if (cacheable) {
    const cached = compiledValidatorCache.get(schema as object);
    if (cached) return cached;
  }
  // Ajv v8 ships dual CJS/ESM; under ESM interop the constructor may sit on `.default`.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const AjvCtor = (Ajv as any).default ?? Ajv;
  const ajv = new AjvCtor({ allErrors: true });
  // ajv-formats v3 default export is a FormatsPlugin object; call it as a plugin.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const applyFormats = (addFormats as any).default ?? addFormats;
  applyFormats(ajv);
  const validate = ajv.compile(schema);
  if (cacheable) compiledValidatorCache.set(schema as object, validate);
  return validate;
}
/**
 * Validate a config object against a JSON Schema.
 *
 * @param configJson - The configuration values to validate.
 * @param schema - The JSON Schema from the plugin manifest's `instanceConfigSchema`.
 * @returns Validation result with structured field errors on failure.
 */
export function validateInstanceConfig(
  configJson: Record<string, unknown>,
  schema: JsonSchema,
): ConfigValidationResult {
  const validate = compileSchema(schema);
  const valid = validate(configJson);
  if (valid) {
    return { valid: true };
  }
  const errors = ((validate.errors ?? []) as ErrorObject[]).map((err) => ({
    // instancePath is "" for root-level errors; normalize to "/".
    field: err.instancePath || "/",
    message: err.message ?? "validation failed",
  }));
  return { valid: false, errors };
}

View File

@@ -0,0 +1,339 @@
/**
* PluginDevWatcher — watches local-path plugin directories for file changes
* and triggers worker restarts so plugin authors get a fast rebuild-and-reload
* cycle without manually restarting the server.
*
* Only plugins installed from a local path (i.e. those with a non-null
* `packagePath` in the DB) are watched. File changes in the plugin's package
* directory trigger a debounced worker restart via the lifecycle manager.
*
* Uses chokidar rather than raw fs.watch so we get a production-grade watcher
* backend across platforms and avoid exhausting file descriptors as quickly in
* large dev workspaces.
*
* @see PLUGIN_SPEC.md §27.2 — Local Development Workflow
*/
import chokidar, { type FSWatcher } from "chokidar";
import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
import path from "node:path";
import { logger } from "../middleware/logger.js";
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
const log = logger.child({ service: "plugin-dev-watcher" });
/** Debounce interval for file changes (ms). */
const DEBOUNCE_MS = 500;
export interface PluginDevWatcher {
  /** Start watching a local-path plugin directory. */
  watch(pluginId: string, packagePath: string): void;
  /** Stop watching a specific plugin. */
  unwatch(pluginId: string): void;
  /** Stop all watchers and clean up. */
  close(): void;
}
/**
 * Resolves a plugin id to its local package path (or null/undefined when the
 * plugin was not installed from a local path and should not be watched).
 */
export type ResolvePluginPackagePath = (
  pluginId: string,
) => Promise<string | null | undefined>;
/** Injectable fs functions so tests can exercise the watcher without real I/O. */
export interface PluginDevWatcherFsDeps {
  existsSync?: typeof existsSync;
  readFileSync?: typeof readFileSync;
  readdirSync?: typeof readdirSync;
  statSync?: typeof statSync;
}
/** A single path handed to chokidar, with its resolved kind. */
type PluginWatchTarget = {
  path: string;
  recursive: boolean;
  kind: "file" | "dir";
};
/** Subset of a plugin's package.json read to discover runtime entrypoints. */
type PluginPackageJson = {
  paperclipPlugin?: {
    manifest?: string;
    worker?: string;
    ui?: string;
  };
};
/**
 * True when any path segment indicates generated/vendored content that must
 * not trigger plugin reloads: `node_modules` or any dot-prefixed entry
 * (which subsumes `.git`, `.vite`, `.paperclip-sdk`, etc. — the previous
 * explicit checks for those names were redundant). Handles both `/` and `\`
 * separators so Windows-style relative paths are matched too.
 *
 * An empty/absent filename is NOT ignored: `path.relative` yields "" for the
 * watch root itself, which must remain watchable.
 */
function shouldIgnorePath(filename: string | null | undefined): boolean {
  if (!filename) return false;
  return filename
    .replace(/\\/g, "/")
    .split("/")
    .filter(Boolean)
    .some((segment) => segment === "node_modules" || segment.startsWith("."));
}
/**
 * Compute the concrete set of paths to watch for a local plugin package.
 *
 * Always includes the package's `package.json` (when present). Runtime files
 * come from the `paperclipPlugin` entrypoints declared there (manifest,
 * worker, ui); a directory entrypoint is expanded to its `.js`/`.css` files.
 * When no entrypoints are declared, `dist/` is scanned as a fallback.
 *
 * @param packagePath — Root directory of the plugin package.
 * @param fsDeps — Optional fs overrides for testing.
 * @returns De-duplicated watch targets; sorted by path when entrypoints were
 *   declared, otherwise in discovery order.
 */
export function resolvePluginWatchTargets(
  packagePath: string,
  fsDeps?: Pick<PluginDevWatcherFsDeps, "existsSync" | "readFileSync" | "readdirSync" | "statSync">,
): PluginWatchTarget[] {
  const io = {
    exists: fsDeps?.existsSync ?? existsSync,
    read: fsDeps?.readFileSync ?? readFileSync,
    list: fsDeps?.readdirSync ?? readdirSync,
    stat: fsDeps?.statSync ?? statSync,
  };
  const rootDir = path.resolve(packagePath);
  const collected = new Map<string, PluginWatchTarget>();

  const record = (candidate: string, recursive: boolean, kind?: "file" | "dir"): void => {
    const resolved = path.resolve(candidate);
    if (!io.exists(resolved)) return;
    const resolvedKind = kind ?? (io.stat(resolved).isDirectory() ? "dir" : "file");
    const prior = collected.get(resolved);
    if (prior) {
      // Merge: a target stays recursive if any registration asked for it.
      prior.recursive = prior.recursive || recursive;
      return;
    }
    collected.set(resolved, { path: resolved, recursive, kind: resolvedKind });
  };

  const collectRuntimeFiles = (dirPath: string): void => {
    if (!io.exists(dirPath)) return;
    for (const entry of io.list(dirPath, { withFileTypes: true })) {
      const entryPath = path.join(dirPath, entry.name);
      if (entry.isDirectory()) {
        collectRuntimeFiles(entryPath);
      } else if (entry.isFile() && /\.(js|css)$/.test(entry.name)) {
        record(entryPath, false, "file");
      }
    }
  };

  const packageJsonPath = path.join(rootDir, "package.json");
  record(packageJsonPath, false, "file");
  if (!io.exists(packageJsonPath)) {
    return [...collected.values()];
  }

  let parsed: PluginPackageJson | null;
  try {
    parsed = JSON.parse(io.read(packageJsonPath, "utf8")) as PluginPackageJson;
  } catch {
    parsed = null;
  }

  const declaredEntrypoints = [
    parsed?.paperclipPlugin?.manifest,
    parsed?.paperclipPlugin?.worker,
    parsed?.paperclipPlugin?.ui,
  ].filter((value): value is string => typeof value === "string" && value.length > 0);

  if (declaredEntrypoints.length === 0) {
    // No declared entrypoints — fall back to watching built output.
    collectRuntimeFiles(path.join(rootDir, "dist"));
    return [...collected.values()];
  }

  for (const entrypoint of declaredEntrypoints) {
    const entrypointPath = path.resolve(rootDir, entrypoint);
    if (!io.exists(entrypointPath)) continue;
    if (io.stat(entrypointPath).isDirectory()) {
      collectRuntimeFiles(entrypointPath);
    } else {
      record(entrypointPath, false, "file");
    }
  }
  return [...collected.values()].sort((a, b) => a.path.localeCompare(b.path));
}
/**
 * Create a PluginDevWatcher that monitors local plugin directories and
 * restarts workers on file changes.
 *
 * Watches are started/stopped automatically from lifecycle events
 * (loaded/enabled → watch, disabled/unloaded → unwatch); `watch`/`unwatch`
 * remain available for explicit control. Call `close()` to tear everything
 * down and deregister the lifecycle listeners.
 */
export function createPluginDevWatcher(
  lifecycle: PluginLifecycleManager,
  resolvePluginPackagePath?: ResolvePluginPackagePath,
  fsDeps?: PluginDevWatcherFsDeps,
): PluginDevWatcher {
  // One chokidar watcher per plugin id; a plugin is "watched" iff it has an entry here.
  const watchers = new Map<string, FSWatcher>();
  // Pending debounced restart timers, keyed by plugin id (at most one per plugin).
  const debounceTimers = new Map<string, ReturnType<typeof setTimeout>>();
  const fileExists = fsDeps?.existsSync ?? existsSync;
  function watchPlugin(pluginId: string, packagePath: string): void {
    // Don't double-watch
    if (watchers.has(pluginId)) return;
    const absPath = path.resolve(packagePath);
    if (!fileExists(absPath)) {
      log.warn(
        { pluginId, packagePath: absPath },
        "plugin-dev-watcher: package path does not exist, skipping watch",
      );
      return;
    }
    try {
      const watcherTargets = resolvePluginWatchTargets(absPath, fsDeps);
      if (watcherTargets.length === 0) {
        log.warn(
          { pluginId, packagePath: absPath },
          "plugin-dev-watcher: no valid watch targets found, skipping watch",
        );
        return;
      }
      const watcher = chokidar.watch(
        watcherTargets.map((target) => target.path),
        {
          ignoreInitial: true,
          // Wait for writes to settle so a compiler emitting output doesn't
          // trigger a restart on a half-written artifact.
          awaitWriteFinish: {
            stabilityThreshold: 200,
            pollInterval: 100,
          },
          // Second-level filter on top of target selection, so files like
          // node_modules/dot-dirs inside a watched directory never fire.
          ignored: (watchedPath) => {
            const relativePath = path.relative(absPath, watchedPath);
            return shouldIgnorePath(relativePath);
          },
        },
      );
      watcher.on("all", (_eventName, changedPath) => {
        const relativePath = path.relative(absPath, changedPath);
        if (shouldIgnorePath(relativePath)) return;
        // Debounce: collapse a burst of change events into one restart.
        const existing = debounceTimers.get(pluginId);
        if (existing) clearTimeout(existing);
        debounceTimers.set(
          pluginId,
          setTimeout(() => {
            debounceTimers.delete(pluginId);
            log.info(
              { pluginId, changedFile: relativePath || path.basename(changedPath) },
              "plugin-dev-watcher: file change detected, restarting worker",
            );
            lifecycle.restartWorker(pluginId).catch((err) => {
              log.warn(
                {
                  pluginId,
                  err: err instanceof Error ? err.message : String(err),
                },
                "plugin-dev-watcher: failed to restart worker after file change",
              );
            });
          }, DEBOUNCE_MS),
        );
      });
      watcher.on("error", (err) => {
        log.warn(
          {
            pluginId,
            packagePath: absPath,
            err: err instanceof Error ? err.message : String(err),
          },
          "plugin-dev-watcher: watcher error, stopping watch for this plugin",
        );
        unwatchPlugin(pluginId);
      });
      watchers.set(pluginId, watcher);
      log.info(
        {
          pluginId,
          packagePath: absPath,
          watchTargets: watcherTargets.map((target) => ({
            path: target.path,
            kind: target.kind,
          })),
        },
        "plugin-dev-watcher: watching local plugin for changes",
      );
    } catch (err) {
      log.warn(
        {
          pluginId,
          packagePath: absPath,
          err: err instanceof Error ? err.message : String(err),
        },
        "plugin-dev-watcher: failed to start file watcher",
      );
    }
  }
  function unwatchPlugin(pluginId: string): void {
    const pluginWatcher = watchers.get(pluginId);
    if (pluginWatcher) {
      // chokidar close() is async; fire-and-forget is acceptable on teardown.
      void pluginWatcher.close();
      watchers.delete(pluginId);
    }
    // Also cancel any pending restart for this plugin.
    const timer = debounceTimers.get(pluginId);
    if (timer) {
      clearTimeout(timer);
      debounceTimers.delete(pluginId);
    }
  }
  function close(): void {
    // Deregister lifecycle listeners first so no new watches start mid-teardown.
    lifecycle.off("plugin.loaded", handlePluginLoaded);
    lifecycle.off("plugin.enabled", handlePluginEnabled);
    lifecycle.off("plugin.disabled", handlePluginDisabled);
    lifecycle.off("plugin.unloaded", handlePluginUnloaded);
    for (const [pluginId] of watchers) {
      unwatchPlugin(pluginId);
    }
  }
  async function watchLocalPluginById(pluginId: string): Promise<void> {
    // Only plugins with a resolvable local package path are watched.
    if (!resolvePluginPackagePath) return;
    try {
      const packagePath = await resolvePluginPackagePath(pluginId);
      if (!packagePath) return;
      watchPlugin(pluginId, packagePath);
    } catch (err) {
      log.warn(
        {
          pluginId,
          err: err instanceof Error ? err.message : String(err),
        },
        "plugin-dev-watcher: failed to resolve plugin package path",
      );
    }
  }
  function handlePluginLoaded(payload: { pluginId: string }): void {
    void watchLocalPluginById(payload.pluginId);
  }
  function handlePluginEnabled(payload: { pluginId: string }): void {
    void watchLocalPluginById(payload.pluginId);
  }
  function handlePluginDisabled(payload: { pluginId: string }): void {
    unwatchPlugin(payload.pluginId);
  }
  function handlePluginUnloaded(payload: { pluginId: string }): void {
    unwatchPlugin(payload.pluginId);
  }
  lifecycle.on("plugin.loaded", handlePluginLoaded);
  lifecycle.on("plugin.enabled", handlePluginEnabled);
  lifecycle.on("plugin.disabled", handlePluginDisabled);
  lifecycle.on("plugin.unloaded", handlePluginUnloaded);
  return {
    watch: watchPlugin,
    unwatch: unwatchPlugin,
    close,
  };
}

View File

@@ -0,0 +1,412 @@
/**
* PluginEventBus — typed in-process event bus for the Paperclip plugin system.
*
* Responsibilities:
* - Deliver core domain events to subscribing plugin workers (server-side).
* - Apply `EventFilter` server-side so filtered-out events never reach the handler.
* - Namespace plugin-emitted events as `plugin.<pluginId>.<eventName>`.
* - Guard the core namespace: plugins may not emit events with the `plugin.` prefix.
* - Isolate subscriptions per plugin — a plugin cannot enumerate or interfere with
* another plugin's subscriptions.
* - Support wildcard subscriptions via prefix matching (e.g. `plugin.acme.linear.*`).
*
* The bus operates in-process. In the full out-of-process architecture the host
* calls `bus.emit()` after receiving events from the DB/queue layer, and the bus
* forwards to handlers that proxy the call to the relevant worker process via IPC.
* That IPC layer is separate; this module only handles routing and filtering.
*
* @see PLUGIN_SPEC.md §16 — Event System
* @see PLUGIN_SPEC.md §16.1 — Event Filtering
* @see PLUGIN_SPEC.md §16.2 — Plugin-to-Plugin Events
*/
import type { PluginEventType } from "@paperclipai/shared";
import type { PluginEvent, EventFilter } from "@paperclipai/plugin-sdk";
// ---------------------------------------------------------------------------
// Internal types
// ---------------------------------------------------------------------------
/**
 * A registered subscription record stored per plugin.
 * Records are held in a per-plugin list so one plugin's subscriptions are
 * never visible to (or removable by) another plugin.
 */
interface Subscription {
  /** The event name or prefix pattern this subscription matches. */
  eventPattern: string;
  /** Optional server-side filter applied before delivery; `null` = deliver all. */
  filter: EventFilter | null;
  /** Async handler to invoke when a matching event passes the filter. */
  handler: (event: PluginEvent) => Promise<void>;
}
// ---------------------------------------------------------------------------
// Pattern matching helpers
// ---------------------------------------------------------------------------
/**
 * Decide whether an event type matches a subscription pattern.
 *
 * Matching rules:
 * - Exact match: `"issue.created"` matches `"issue.created"`.
 * - Trailing wildcard: `"plugin.acme.*"` matches any event type starting
 *   with `"plugin.acme."`.
 *
 * No other glob syntax is supported — only a trailing `*` after a `.`
 * separator (a bare `"*"` pattern does NOT match everything).
 */
function matchesPattern(eventType: string, pattern: string): boolean {
  if (eventType === pattern) return true;
  if (!pattern.endsWith(".*")) return false;
  // "plugin.foo.*" → keep "plugin.foo." and prefix-match against it.
  const prefix = pattern.slice(0, pattern.length - 1);
  return eventType.startsWith(prefix);
}
/**
 * Returns true if the event passes every field of the filter.
 * A `null` filter (or one with no fields set) passes all events.
 *
 * **Resolution strategy per field:**
 *
 * - `projectId` — compared against `event.entityId` when
 *   `entityType === "project"`, otherwise against a string `payload.projectId`.
 *   Covers both direct project events and secondary events that embed a
 *   project reference in their payload.
 *
 * - `companyId` — compared against `event.companyId` on the envelope itself.
 *
 * - `agentId` — compared against `event.entityId` when
 *   `entityType === "agent"`, otherwise against a string `payload.agentId`.
 *
 * All specified fields are ANDed — every one must match for delivery.
 */
function passesFilter(event: PluginEvent, filter: EventFilter | null): boolean {
  if (!filter) return true;
  const payload = event.payload as Record<string, unknown> | null;

  // Read a payload property only when it is actually a string.
  const payloadString = (key: string): string | undefined => {
    const value = payload?.[key];
    return typeof value === "string" ? value : undefined;
  };

  if (filter.projectId !== undefined) {
    const projectId =
      event.entityType === "project" ? event.entityId : payloadString("projectId");
    if (projectId !== filter.projectId) return false;
  }
  if (filter.companyId !== undefined && event.companyId !== filter.companyId) {
    return false;
  }
  if (filter.agentId !== undefined) {
    const agentId =
      event.entityType === "agent" ? event.entityId : payloadString("agentId");
    if (agentId !== filter.agentId) return false;
  }
  return true;
}
// ---------------------------------------------------------------------------
// Event bus factory
// ---------------------------------------------------------------------------
/**
 * Build a fresh `PluginEventBus`.
 *
 * One bus instance is meant to be shared by the whole server process; each
 * plugin talks to it through the scoped handle returned by
 * {@link PluginEventBus.forPlugin}.
 *
 * @example
 * ```ts
 * const bus = createPluginEventBus();
 *
 * // Give the Linear plugin a scoped handle
 * const linearBus = bus.forPlugin("acme.linear");
 *
 * // Subscribe from the plugin's perspective
 * linearBus.subscribe("issue.created", async (event) => {
 *   // handle event
 * });
 *
 * // Emit a core domain event (called by the host, not the plugin)
 * await bus.emit({
 *   eventId: "evt-1",
 *   eventType: "issue.created",
 *   occurredAt: new Date().toISOString(),
 *   entityId: "iss-1",
 *   entityType: "issue",
 *   payload: { title: "Fix login bug", projectId: "proj-1" },
 * });
 * ```
 */
export function createPluginEventBus(): PluginEventBus {
  /** pluginId → subscriptions registered by that plugin. */
  const subscriptionsByPlugin = new Map<string, Subscription[]>();

  /** Fetch a plugin's subscription list, creating it on first use. */
  function subscriptionsFor(pluginId: string): Subscription[] {
    const existing = subscriptionsByPlugin.get(pluginId);
    if (existing) return existing;
    const fresh: Subscription[] = [];
    subscriptionsByPlugin.set(pluginId, fresh);
    return fresh;
  }

  /**
   * Fan an event envelope out to every matching subscription of every plugin.
   *
   * Handlers run concurrently. Each handler failure (synchronous throw or
   * rejection) is recorded in the returned `errors` array instead of
   * propagating, so one broken plugin cannot block delivery to the others.
   */
  async function emit(event: PluginEvent): Promise<PluginEventBusEmitResult> {
    const errors: Array<{ pluginId: string; error: unknown }> = [];
    const inFlight: Promise<void>[] = [];
    for (const [pluginId, subs] of subscriptionsByPlugin) {
      for (const sub of subs) {
        if (!matchesPattern(event.eventType, sub.eventPattern)) continue;
        if (!passesFilter(event, sub.filter)) continue;
        // Defer the handler call via Promise.resolve().then(...) so that a
        // synchronous throw is converted into a rejection we can catch here —
        // Promise.resolve(handler(event)) would let a sync throw escape before
        // the promise could wrap it. The .catch() records the failure and
        // resolves, so every pushed promise settles successfully and the
        // Promise.all below can never reject.
        inFlight.push(
          Promise.resolve()
            .then(() => sub.handler(event))
            .catch((error: unknown) => {
              errors.push({ pluginId, error });
            }),
        );
      }
    }
    await Promise.all(inFlight);
    return { errors };
  }

  /** Drop every subscription a plugin has registered (shutdown/uninstall). */
  function clearPlugin(pluginId: string): void {
    subscriptionsByPlugin.delete(pluginId);
  }

  /**
   * Build the scoped handle for one plugin: access to its own subscription
   * list plus a namespace-enforcing `emit`.
   */
  function forPlugin(pluginId: string): ScopedPluginEventBus {
    return {
      /**
       * Register a handler for a core or plugin-namespaced event, with an
       * optional server-side `EventFilter` as the middle argument. Wildcard
       * subscriptions use a trailing `.*`, e.g. `"plugin.acme.linear.*"`.
       * (The `events.subscribe` capability check happens in the host layer
       * before this method is reached.)
       */
      subscribe(
        eventPattern: PluginEventType | `plugin.${string}`,
        fnOrFilter: EventFilter | ((event: PluginEvent) => Promise<void>),
        maybeFn?: (event: PluginEvent) => Promise<void>,
      ): void {
        if (typeof fnOrFilter === "function") {
          subscriptionsFor(pluginId).push({ eventPattern, filter: null, handler: fnOrFilter });
          return;
        }
        if (!maybeFn) throw new Error("Handler function is required when a filter is provided");
        subscriptionsFor(pluginId).push({ eventPattern, filter: fnOrFilter, handler: maybeFn });
      },

      /**
       * Emit an event under this plugin's namespace: `name` becomes
       * `plugin.<pluginId>.<name>` (e.g. `"sync-done"` →
       * `"plugin.acme.linear.sync-done"`). Rejects empty names, empty
       * company ids, and names that try to smuggle in their own `plugin.`
       * prefix (cross-namespace spoofing guard). Requires the `events.emit`
       * capability, enforced by the host layer.
       */
      async emit(name: string, companyId: string, payload: unknown): Promise<PluginEventBusEmitResult> {
        if (!name || name.trim() === "") {
          throw new Error(`Plugin "${pluginId}" must provide a non-empty event name.`);
        }
        if (!companyId || companyId.trim() === "") {
          throw new Error(`Plugin "${pluginId}" must provide a companyId when emitting events.`);
        }
        if (name.startsWith("plugin.")) {
          throw new Error(
            `Plugin "${pluginId}" must not include the "plugin." prefix when emitting events. ` +
              `Emit the bare event name (e.g. "sync-done") and the bus will namespace it automatically.`,
          );
        }
        const event: PluginEvent = {
          eventId: crypto.randomUUID(),
          eventType: `plugin.${pluginId}.${name}` as const,
          companyId,
          occurredAt: new Date().toISOString(),
          actorType: "plugin",
          actorId: pluginId,
          payload,
        };
        return emit(event);
      },

      /** Remove every subscription this plugin has registered. */
      clear(): void {
        clearPlugin(pluginId);
      },
    };
  }

  return {
    emit,
    forPlugin,
    clearPlugin,
    /** Subscription count for one plugin, or across all plugins when omitted. */
    subscriptionCount(pluginId?: string): number {
      if (pluginId !== undefined) {
        return subscriptionsByPlugin.get(pluginId)?.length ?? 0;
      }
      let total = 0;
      for (const subs of subscriptionsByPlugin.values()) total += subs.length;
      return total;
    },
  };
}
// ---------------------------------------------------------------------------
// Public types
// ---------------------------------------------------------------------------
/**
 * Result returned from `emit()`. Handler errors are collected and returned
 * rather than thrown so a single misbehaving plugin cannot block delivery to
 * other plugins.
 *
 * An empty `errors` array means every matching handler completed without
 * throwing or rejecting.
 */
export interface PluginEventBusEmitResult {
  /**
   * Errors thrown by individual handlers, keyed by the plugin that failed.
   * Both synchronous throws and promise rejections from handlers are
   * captured here.
   */
  errors: Array<{ pluginId: string; error: unknown }>;
}
/**
 * The full event bus — held by the host process.
 *
 * Call `forPlugin(id)` to obtain a `ScopedPluginEventBus` for each plugin worker.
 */
export interface PluginEventBus {
  /**
   * Emit a typed domain event to all matching subscribers.
   *
   * Called by the host when a domain event occurs (e.g. from the DB layer or
   * message queue). All registered subscriptions across all plugins are checked.
   *
   * Handler failures never reject the returned promise — they are collected
   * in the result's `errors` array instead.
   */
  emit(event: PluginEvent): Promise<PluginEventBusEmitResult>;
  /**
   * Get a scoped handle for a specific plugin worker.
   *
   * The scoped handle isolates the plugin's subscriptions and enforces the
   * plugin namespace on outbound events.
   */
  forPlugin(pluginId: string): ScopedPluginEventBus;
  /**
   * Remove all subscriptions for a plugin (called on worker shutdown/uninstall).
   * Calling it for a plugin with no subscriptions is a harmless no-op.
   */
  clearPlugin(pluginId: string): void;
  /**
   * Return the total number of active subscriptions, or the count for a
   * specific plugin if `pluginId` is provided. Useful for tests and diagnostics.
   */
  subscriptionCount(pluginId?: string): number;
}
/**
 * A plugin-scoped view of the event bus. Handed to the plugin worker (or its
 * host-side proxy) during initialisation.
 *
 * Plugins use this to:
 * 1. Subscribe to domain events (with optional server-side filter).
 * 2. Emit plugin-namespaced events for other plugins to consume.
 *
 * Note: `subscribe` overloads mirror the `PluginEventsClient.on()` interface
 * from the SDK. `emit` intentionally returns `PluginEventBusEmitResult` rather
 * than `void` so the host layer can inspect handler errors; the SDK-facing
 * `PluginEventsClient.emit()` wraps this and returns `void`.
 */
export interface ScopedPluginEventBus {
  /**
   * Subscribe to a core domain event or a plugin-namespaced event.
   *
   * **Pattern syntax:**
   * - Exact match: `"issue.created"` — receives only that event type.
   * - Wildcard suffix: `"plugin.acme.linear.*"` — receives all events emitted by
   *   the `acme.linear` plugin. The `*` is supported only as a trailing token after
   *   a `.` separator; no other glob syntax is supported.
   * - Top-level plugin wildcard: `"plugin.*"` — receives all plugin-emitted events
   *   regardless of which plugin emitted them.
   *
   * Wildcards apply only to the `plugin.*` namespace. Core domain events must be
   * subscribed to by exact name (e.g. `"issue.created"`, not `"issue.*"`).
   *
   * An optional `EventFilter` can be passed as the second argument to perform
   * server-side pre-filtering; filtered-out events are never delivered to the handler.
   *
   * @throws {Error} if a filter is provided without a handler function.
   */
  subscribe(
    eventPattern: PluginEventType | `plugin.${string}`,
    fn: (event: PluginEvent) => Promise<void>,
  ): void;
  subscribe(
    eventPattern: PluginEventType | `plugin.${string}`,
    filter: EventFilter,
    fn: (event: PluginEvent) => Promise<void>,
  ): void;
  /**
   * Emit a plugin-namespaced event. The bus automatically prepends
   * `plugin.<pluginId>.` to the `name`, so passing `"sync-done"` from plugin
   * `"acme.linear"` produces the event type `"plugin.acme.linear.sync-done"`.
   *
   * @param name Bare event name (e.g. `"sync-done"`). Must be non-empty and
   * must not include the `plugin.` prefix — the bus adds that automatically.
   * @param companyId UUID of the company this event belongs to. Must be non-empty.
   * @param payload Arbitrary JSON-serializable data to attach to the event.
   *
   * @throws {Error} if `name` is empty or whitespace-only.
   * @throws {Error} if `companyId` is empty or whitespace-only.
   * @throws {Error} if `name` starts with `"plugin."` (namespace spoofing guard).
   */
  emit(name: string, companyId: string, payload: unknown): Promise<PluginEventBusEmitResult>;
  /**
   * Remove all subscriptions registered by this plugin. Safe to call when the
   * plugin has no subscriptions (no-op).
   */
  clear(): void;
}

View File

@@ -0,0 +1,59 @@
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
/** The subset of the lifecycle manager this module needs: event (un)subscription. */
type LifecycleLike = Pick<PluginLifecycleManager, "on" | "off">;
/** Worker-runtime notification forwarded by the host's worker supervisor. */
export interface PluginWorkerRuntimeEvent {
  /** What happened to the worker process. */
  type: "plugin.worker.crashed" | "plugin.worker.restarted";
  /** Identifier of the plugin whose worker the event concerns. */
  pluginId: string;
}
/** Controller that runs per-plugin host-service disposers on lifecycle/worker events. */
export interface PluginHostServiceCleanupController {
  /** React to a worker runtime event (disposers are run on crash; restarts are ignored). */
  handleWorkerEvent(event: PluginWorkerRuntimeEvent): void;
  /** Run every registered disposer and clear the registry (e.g. on host shutdown). */
  disposeAll(): void;
  /** Detach the lifecycle listeners registered at construction time. */
  teardown(): void;
}
/**
 * Wire host-side service disposers to plugin lifecycle and worker events.
 *
 * `disposers` maps a plugin id to a callback that tears down host services
 * created for that plugin. The returned controller invokes a plugin's
 * disposer when its worker stops or crashes (keeping the map entry so a
 * restarted worker can be cleaned up again later), and removes the entry
 * entirely when the plugin is unloaded.
 */
export function createPluginHostServiceCleanup(
  lifecycle: LifecycleLike,
  disposers: Map<string, () => void>,
): PluginHostServiceCleanupController {
  // Invoke a plugin's disposer if one is registered; optionally forget it.
  function disposeFor(pluginId: string, forget = false): void {
    const dispose = disposers.get(pluginId);
    if (!dispose) return;
    dispose();
    if (forget) disposers.delete(pluginId);
  }

  // Stable references — the lifecycle manager matches listeners by identity.
  const onWorkerStopped = ({ pluginId }: { pluginId: string }): void => {
    disposeFor(pluginId);
  };
  const onUnloaded = ({ pluginId }: { pluginId: string }): void => {
    disposeFor(pluginId, true);
  };

  lifecycle.on("plugin.worker_stopped", onWorkerStopped);
  lifecycle.on("plugin.unloaded", onUnloaded);

  return {
    handleWorkerEvent(event) {
      // A crash disposes services but keeps the disposer registered;
      // restart events need no action here.
      if (event.type === "plugin.worker.crashed") {
        disposeFor(event.pluginId);
      }
    },
    disposeAll() {
      for (const dispose of disposers.values()) {
        dispose();
      }
      disposers.clear();
    },
    teardown() {
      lifecycle.off("plugin.worker_stopped", onWorkerStopped);
      lifecycle.off("plugin.unloaded", onUnloaded);
    },
  };
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,260 @@
/**
* PluginJobCoordinator — bridges the plugin lifecycle manager with the
* job scheduler and job store.
*
* This service listens to lifecycle events and performs the corresponding
* scheduler and job store operations:
*
* - **plugin.loaded** → sync job declarations from manifest, then register
* the plugin with the scheduler (computes `nextRunAt` for active jobs).
*
* - **plugin.disabled / plugin.unloaded** → unregister the plugin from the
* scheduler (cancels in-flight runs, clears tracking state).
*
* ## Why a separate coordinator?
*
* The lifecycle manager, scheduler, and job store are independent services
* with clean single-responsibility boundaries. The coordinator provides
* the "glue" between them without adding coupling. This pattern is used
* throughout Paperclip (e.g. heartbeat service coordinates timers + runs).
*
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
* @see ./plugin-job-scheduler.ts — Scheduler service
* @see ./plugin-job-store.ts — Persistence layer
* @see ./plugin-lifecycle.ts — Plugin state machine
*/
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
import type { PluginJobScheduler } from "./plugin-job-scheduler.js";
import type { PluginJobStore } from "./plugin-job-store.js";
import { pluginRegistryService } from "./plugin-registry.js";
import type { Db } from "@paperclipai/db";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
 * Options for creating a PluginJobCoordinator.
 *
 * All fields are required — the coordinator does not construct any of these
 * services itself; it only glues existing instances together.
 */
export interface PluginJobCoordinatorOptions {
  /** Drizzle database instance (used to build the plugin registry accessor). */
  db: Db;
  /** The plugin lifecycle manager to listen to. */
  lifecycle: PluginLifecycleManager;
  /** The job scheduler to register/unregister plugins with. */
  scheduler: PluginJobScheduler;
  /** The job store for syncing declarations and purging job data. */
  jobStore: PluginJobStore;
}
/**
 * The public interface of the job coordinator.
 */
export interface PluginJobCoordinator {
  /**
   * Start listening to lifecycle events.
   *
   * This wires up the `plugin.loaded`, `plugin.disabled`, and
   * `plugin.unloaded` event handlers. Idempotent — calling it while already
   * started is a no-op.
   */
  start(): void;
  /**
   * Stop listening to lifecycle events.
   *
   * Removes all event subscriptions added by `start()`. Idempotent — calling
   * it while already stopped is a no-op.
   */
  stop(): void;
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
/**
 * Create a PluginJobCoordinator.
 *
 * The returned coordinator is inert until `start()` is called; `start()` and
 * `stop()` are idempotent so they can be invoked defensively during server
 * startup/shutdown.
 *
 * @example
 * ```ts
 * const coordinator = createPluginJobCoordinator({
 *   db,
 *   lifecycle,
 *   scheduler,
 *   jobStore,
 * });
 *
 * // Start listening to lifecycle events
 * coordinator.start();
 *
 * // On server shutdown
 * coordinator.stop();
 * ```
 */
export function createPluginJobCoordinator(
  options: PluginJobCoordinatorOptions,
): PluginJobCoordinator {
  const { db, lifecycle, scheduler, jobStore } = options;
  const log = logger.child({ service: "plugin-job-coordinator" });
  const registry = pluginRegistryService(db);

  /** Normalise an unknown thrown value into a loggable message string. */
  const describeError = (err: unknown): string =>
    err instanceof Error ? err.message : String(err);

  // -----------------------------------------------------------------------
  // Lifecycle reactions (async). Each catches its own failures so the
  // fire-and-forget wrappers below can never surface an unhandled rejection.
  // -----------------------------------------------------------------------

  /**
   * `plugin.loaded` → read the manifest from the registry, mirror its job
   * declarations into the DB, then register the plugin with the scheduler so
   * `nextRunAt` gets computed for its active jobs.
   */
  async function handleLoaded(payload: { pluginId: string; pluginKey: string }): Promise<void> {
    const { pluginId, pluginKey } = payload;
    log.info({ pluginId, pluginKey }, "plugin loaded — syncing jobs and registering with scheduler");
    try {
      const plugin = await registry.getById(pluginId);
      if (!plugin?.manifestJson) {
        log.warn({ pluginId, pluginKey }, "plugin loaded but no manifest found — skipping job sync");
        return;
      }
      const declarations = plugin.manifestJson.jobs ?? [];
      if (declarations.length > 0) {
        log.info(
          { pluginId, pluginKey, jobCount: declarations.length },
          "syncing job declarations from manifest",
        );
        await jobStore.syncJobDeclarations(pluginId, declarations);
      }
      await scheduler.registerPlugin(pluginId);
    } catch (err) {
      log.error(
        { pluginId, pluginKey, err: describeError(err) },
        "failed to sync jobs or register plugin with scheduler",
      );
    }
  }

  /** `plugin.disabled` → stop scheduling the plugin's jobs. */
  async function handleDisabled(payload: {
    pluginId: string;
    pluginKey: string;
    reason?: string;
  }): Promise<void> {
    const { pluginId, pluginKey, reason } = payload;
    log.info(
      { pluginId, pluginKey, reason },
      "plugin disabled — unregistering from scheduler",
    );
    try {
      await scheduler.unregisterPlugin(pluginId);
    } catch (err) {
      log.error(
        { pluginId, pluginKey, err: describeError(err) },
        "failed to unregister plugin from scheduler",
      );
    }
  }

  /**
   * `plugin.unloaded` → stop scheduling; when the uninstall also purges the
   * plugin's data, delete its job definitions and run history too.
   */
  async function handleUnloaded(payload: {
    pluginId: string;
    pluginKey: string;
    removeData: boolean;
  }): Promise<void> {
    const { pluginId, pluginKey, removeData } = payload;
    log.info(
      { pluginId, pluginKey, removeData },
      "plugin unloaded — unregistering from scheduler",
    );
    try {
      await scheduler.unregisterPlugin(pluginId);
      if (removeData) {
        log.info({ pluginId, pluginKey }, "purging job data for uninstalled plugin");
        await jobStore.deleteAllJobs(pluginId);
      }
    } catch (err) {
      log.error(
        { pluginId, pluginKey, err: describeError(err) },
        "failed to unregister plugin from scheduler during unload",
      );
    }
  }

  // -----------------------------------------------------------------------
  // Listener wiring
  // -----------------------------------------------------------------------

  // The lifecycle manager matches listeners by reference, so keep one stable
  // sync wrapper per event. `void` marks each promise as intentionally
  // fire-and-forget; every handler above already catches its own errors.
  const onLoaded = (payload: { pluginId: string; pluginKey: string }): void => {
    void handleLoaded(payload);
  };
  const onDisabled = (payload: { pluginId: string; pluginKey: string; reason?: string }): void => {
    void handleDisabled(payload);
  };
  const onUnloaded = (payload: { pluginId: string; pluginKey: string; removeData: boolean }): void => {
    void handleUnloaded(payload);
  };

  /** Whether the lifecycle listeners are currently attached. */
  let attached = false;

  return {
    start(): void {
      if (attached) return;
      attached = true;
      lifecycle.on("plugin.loaded", onLoaded);
      lifecycle.on("plugin.disabled", onDisabled);
      lifecycle.on("plugin.unloaded", onUnloaded);
      log.info("plugin job coordinator started — listening to lifecycle events");
    },
    stop(): void {
      if (!attached) return;
      attached = false;
      lifecycle.off("plugin.loaded", onLoaded);
      lifecycle.off("plugin.disabled", onDisabled);
      lifecycle.off("plugin.unloaded", onUnloaded);
      log.info("plugin job coordinator stopped");
    },
  };
}

View File

@@ -0,0 +1,752 @@
/**
* PluginJobScheduler — tick-based scheduler for plugin scheduled jobs.
*
* The scheduler is the central coordinator for all plugin cron jobs. It
* periodically ticks (default every 30 seconds), queries the `plugin_jobs`
* table for jobs whose `nextRunAt` has passed, dispatches `runJob` RPC calls
* to the appropriate worker processes, records each execution in the
* `plugin_job_runs` table, and advances the scheduling pointer.
*
* ## Responsibilities
*
* 1. **Tick loop** — A `setInterval`-based loop fires every `tickIntervalMs`
* (default 30s). Each tick scans for due jobs and dispatches them.
*
* 2. **Cron parsing & next-run calculation** — Uses the lightweight built-in
* cron parser ({@link parseCron}, {@link nextCronTick}) to compute the
* `nextRunAt` timestamp after each run or when a new job is registered.
*
* 3. **Overlap prevention** — Before dispatching a job, the scheduler checks
* for an existing `running` run for the same job. If one exists, the job
* is skipped for that tick.
*
* 4. **Job run recording** — Every execution creates a `plugin_job_runs` row:
* `queued` → `running` → `succeeded` | `failed`. Duration and error are
* captured.
*
* 5. **Lifecycle integration** — The scheduler exposes `registerPlugin()` and
* `unregisterPlugin()` so the host lifecycle manager can wire up job
* scheduling when plugins start/stop. On registration, the scheduler
* computes `nextRunAt` for all active jobs that don't already have one.
*
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
* @see ./plugin-job-store.ts — Persistence layer
* @see ./cron.ts — Cron parsing utilities
*/
import { and, eq, lte, or } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { pluginJobs, pluginJobRuns } from "@paperclipai/db";
import type { PluginJobStore } from "./plugin-job-store.js";
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
import { parseCron, nextCronTick, validateCron } from "./cron.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** Default interval between scheduler ticks (30 seconds). Overridable via `PluginJobSchedulerOptions.tickIntervalMs`. */
const DEFAULT_TICK_INTERVAL_MS = 30_000;
/** Default timeout for a runJob RPC call (5 minutes). Overridable via `PluginJobSchedulerOptions.jobTimeoutMs`. */
const DEFAULT_JOB_TIMEOUT_MS = 5 * 60 * 1_000;
/** Maximum number of concurrent job executions across all plugins. Overridable via `PluginJobSchedulerOptions.maxConcurrentJobs`. */
const DEFAULT_MAX_CONCURRENT_JOBS = 10;
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
 * Options for creating a PluginJobScheduler.
 *
 * The optional tuning knobs default to the module-level `DEFAULT_*` constants.
 */
export interface PluginJobSchedulerOptions {
  /** Drizzle database instance. */
  db: Db;
  /** Persistence layer for jobs and runs. */
  jobStore: PluginJobStore;
  /** Worker process manager for RPC calls. */
  workerManager: PluginWorkerManager;
  /** Interval between scheduler ticks in ms (default: 30s). */
  tickIntervalMs?: number;
  /** Timeout for individual job RPC calls in ms (default: 5min). */
  jobTimeoutMs?: number;
  /** Maximum number of concurrent job executions across all plugins (default: 10). */
  maxConcurrentJobs?: number;
}
/**
 * Result of a manual job trigger.
 *
 * Returned as soon as the run row is created; the actual execution happens in
 * the background, so poll the run by `runId` to observe its final status.
 */
export interface TriggerJobResult {
  /** The created run ID. */
  runId: string;
  /** The job ID that was triggered. */
  jobId: string;
}
/**
 * Diagnostic information about the scheduler.
 */
export interface SchedulerDiagnostics {
  /** Whether the tick loop is running. */
  running: boolean;
  /** Number of jobs currently executing. */
  activeJobCount: number;
  /** Set of job IDs currently in-flight. */
  activeJobIds: string[];
  /**
   * Total number of ticks executed since start. Ticks skipped by the
   * overlap guard (a previous tick still in progress) are not counted.
   */
  tickCount: number;
  /** Timestamp of the last tick (ISO 8601), or null before the first tick. */
  lastTickAt: string | null;
}
// ---------------------------------------------------------------------------
// Scheduler
// ---------------------------------------------------------------------------
/**
 * The public interface of the job scheduler.
 */
export interface PluginJobScheduler {
  /**
   * Start the scheduler tick loop.
   *
   * Safe to call multiple times — subsequent calls are no-ops.
   */
  start(): void;
  /**
   * Stop the scheduler tick loop.
   *
   * In-flight job runs are NOT cancelled — they are allowed to finish
   * naturally. The tick loop simply stops firing.
   */
  stop(): void;
  /**
   * Register a plugin with the scheduler.
   *
   * Computes `nextRunAt` for all active jobs that are missing it. This is
   * typically called after a plugin's worker process starts and
   * `syncJobDeclarations()` has been called.
   *
   * @param pluginId - UUID of the plugin
   */
  registerPlugin(pluginId: string): Promise<void>;
  /**
   * Unregister a plugin from the scheduler.
   *
   * Cancels any in-flight runs for the plugin and removes tracking state.
   *
   * @param pluginId - UUID of the plugin
   */
  unregisterPlugin(pluginId: string): Promise<void>;
  /**
   * Manually trigger a specific job (outside of the cron schedule).
   *
   * Creates a run with `trigger: "manual"` and dispatches immediately,
   * respecting the overlap prevention check. The returned promise resolves
   * once the run row exists — the execution itself continues in the
   * background.
   *
   * @param jobId - UUID of the job to trigger
   * @param trigger - What triggered this run (default: "manual")
   * @returns The created run info
   * @throws {Error} if the job is not found, not active, or already running
   */
  triggerJob(jobId: string, trigger?: "manual" | "retry"): Promise<TriggerJobResult>;
  /**
   * Run a single scheduler tick immediately (for testing).
   *
   * @internal
   */
  tick(): Promise<void>;
  /**
   * Get diagnostic information about the scheduler state.
   */
  diagnostics(): SchedulerDiagnostics;
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
/**
* Create a new PluginJobScheduler.
*
* @example
* ```ts
* const scheduler = createPluginJobScheduler({
* db,
* jobStore,
* workerManager,
* });
*
* // Start the tick loop
* scheduler.start();
*
* // When a plugin comes online, register it
* await scheduler.registerPlugin(pluginId);
*
* // Manually trigger a job
* const { runId } = await scheduler.triggerJob(jobId);
*
* // On server shutdown
* scheduler.stop();
* ```
*/
export function createPluginJobScheduler(
options: PluginJobSchedulerOptions,
): PluginJobScheduler {
const {
db,
jobStore,
workerManager,
tickIntervalMs = DEFAULT_TICK_INTERVAL_MS,
jobTimeoutMs = DEFAULT_JOB_TIMEOUT_MS,
maxConcurrentJobs = DEFAULT_MAX_CONCURRENT_JOBS,
} = options;
const log = logger.child({ service: "plugin-job-scheduler" });
// -----------------------------------------------------------------------
// State
// -----------------------------------------------------------------------
/** Timer handle for the tick loop. */
let tickTimer: ReturnType<typeof setInterval> | null = null;
/** Whether the scheduler is running. */
let running = false;
/** Set of job IDs currently being executed (for overlap prevention). */
const activeJobs = new Set<string>();
/** Total number of ticks since start. */
let tickCount = 0;
/** Timestamp of the last tick. */
let lastTickAt: Date | null = null;
/** Guard against concurrent tick execution. */
let tickInProgress = false;
// -----------------------------------------------------------------------
// Core: tick
// -----------------------------------------------------------------------
/**
 * A single scheduler tick. Queries for due jobs and dispatches them.
 *
 * Guarded against overlap: if a previous tick is still in progress (slow DB
 * or many dispatches), this invocation returns immediately without counting
 * toward `tickCount`.
 */
async function tick(): Promise<void> {
  // Prevent overlapping ticks (in case a tick takes longer than the interval)
  if (tickInProgress) {
    log.debug("skipping tick — previous tick still in progress");
    return;
  }
  tickInProgress = true;
  tickCount++;
  lastTickAt = new Date();
  try {
    const now = new Date();
    // Query for active jobs whose nextRunAt has passed. Note: jobs with a
    // NULL nextRunAt are NOT matched here — `lte` against NULL is never
    // true in SQL. registerPlugin() is responsible for computing the first
    // nextRunAt when a job is registered, so such jobs only become due
    // after registration fills the pointer in.
    const dueJobs = await db
      .select()
      .from(pluginJobs)
      .where(
        and(
          eq(pluginJobs.status, "active"),
          lte(pluginJobs.nextRunAt, now),
        ),
      );
    if (dueJobs.length === 0) {
      return;
    }
    log.debug({ count: dueJobs.length }, "found due jobs");
    // Dispatch each due job (respecting concurrency limits)
    const dispatches: Promise<void>[] = [];
    for (const job of dueJobs) {
      // Global concurrency limit: once reached, remaining due jobs wait
      // until a later tick (their nextRunAt is still in the past).
      if (activeJobs.size >= maxConcurrentJobs) {
        log.warn(
          { maxConcurrentJobs, activeJobCount: activeJobs.size },
          "max concurrent jobs reached, deferring remaining jobs",
        );
        break;
      }
      // Overlap prevention: skip if this job is already running
      if (activeJobs.has(job.id)) {
        log.debug(
          { jobId: job.id, jobKey: job.jobKey, pluginId: job.pluginId },
          "skipping job — already running (overlap prevention)",
        );
        continue;
      }
      // Check if the worker is available
      if (!workerManager.isRunning(job.pluginId)) {
        log.debug(
          { jobId: job.id, pluginId: job.pluginId },
          "skipping job — worker not running",
        );
        continue;
      }
      // Skip jobs with no schedule string — there is no cron expression to
      // compute a next run from. (Actual cron validation happens later, in
      // advanceSchedulePointer.)
      if (!job.schedule) {
        log.warn(
          { jobId: job.id, jobKey: job.jobKey },
          "skipping job — no schedule defined",
        );
        continue;
      }
      dispatches.push(dispatchJob(job));
    }
    // allSettled: one failed dispatch must not abort the others.
    if (dispatches.length > 0) {
      await Promise.allSettled(dispatches);
    }
  } catch (err) {
    log.error(
      { err: err instanceof Error ? err.message : String(err) },
      "scheduler tick error",
    );
  } finally {
    tickInProgress = false;
  }
}
// -----------------------------------------------------------------------
// Core: dispatch a single job
// -----------------------------------------------------------------------
/**
 * Dispatch a single job run — create the run record, call the worker,
 * record the result, and advance the schedule pointer.
 *
 * Run rows move queued → running → succeeded | failed, with duration and
 * (on failure) the error message captured. The schedule pointer is advanced
 * in `finally` so a failing job still receives a future `nextRunAt` instead
 * of being re-dispatched on every tick.
 *
 * NOTE(review): a worker RPC timeout (`jobTimeoutMs`) is assumed to surface
 * as a rejection from `workerManager.call` and is therefore recorded as a
 * failed run — confirm against the worker manager implementation.
 */
async function dispatchJob(
  job: typeof pluginJobs.$inferSelect,
): Promise<void> {
  const { id: jobId, pluginId, jobKey, schedule } = job;
  const jobLog = log.child({ jobId, pluginId, jobKey });
  // Mark as active (overlap prevention)
  activeJobs.add(jobId);
  let runId: string | undefined;
  const startedAt = Date.now();
  try {
    // 1. Create run record
    const run = await jobStore.createRun({
      jobId,
      pluginId,
      trigger: "schedule",
    });
    runId = run.id;
    jobLog.info({ runId }, "dispatching scheduled job");
    // 2. Mark run as running
    await jobStore.markRunning(runId);
    // 3. Call worker via RPC
    await workerManager.call(
      pluginId,
      "runJob",
      {
        job: {
          jobKey,
          runId,
          trigger: "schedule" as const,
          // Report the originally scheduled time; fall back to "now" when
          // the row somehow has no nextRunAt.
          scheduledAt: (job.nextRunAt ?? new Date()).toISOString(),
        },
      },
      jobTimeoutMs,
    );
    // 4. Mark run as succeeded
    const durationMs = Date.now() - startedAt;
    await jobStore.completeRun(runId, {
      status: "succeeded",
      durationMs,
    });
    jobLog.info({ runId, durationMs }, "job completed successfully");
  } catch (err) {
    const durationMs = Date.now() - startedAt;
    const errorMessage = err instanceof Error ? err.message : String(err);
    jobLog.error(
      { runId, durationMs, err: errorMessage },
      "job execution failed",
    );
    // Record the failure (runId is undefined when createRun itself failed —
    // then there is no row to complete)
    if (runId) {
      try {
        await jobStore.completeRun(runId, {
          status: "failed",
          error: errorMessage,
          durationMs,
        });
      } catch (completeErr) {
        jobLog.error(
          {
            runId,
            err: completeErr instanceof Error ? completeErr.message : String(completeErr),
          },
          "failed to record job failure",
        );
      }
    }
  } finally {
    // Remove from active set
    activeJobs.delete(jobId);
    // 5. Always advance the schedule pointer (even on failure), otherwise a
    // stale nextRunAt would make the job due again on the very next tick.
    try {
      await advanceSchedulePointer(job);
    } catch (err) {
      jobLog.error(
        { err: err instanceof Error ? err.message : String(err) },
        "failed to advance schedule pointer",
      );
    }
  }
}
// -----------------------------------------------------------------------
// Core: manual trigger
// -----------------------------------------------------------------------
/**
 * Manually trigger a job outside its cron schedule.
 *
 * Validates that the job exists, is active, is not already running (checked
 * both in-memory and — defensively — against `running` run rows in the DB),
 * and that the owning plugin's worker is up. The run row is created before
 * returning; the actual RPC dispatch happens in the background via
 * `dispatchManualRun`, so callers receive the `runId` immediately.
 *
 * @param jobId - UUID of the job to trigger
 * @param trigger - What triggered this run (default: "manual")
 * @returns The created run info
 * @throws {Error} when the job is missing, inactive, already running, or
 *   its worker is not running
 */
async function triggerJob(
  jobId: string,
  trigger: "manual" | "retry" = "manual",
): Promise<TriggerJobResult> {
  const job = await jobStore.getJobById(jobId);
  if (!job) {
    throw new Error(`Job not found: ${jobId}`);
  }
  if (job.status !== "active") {
    throw new Error(
      `Job "${job.jobKey}" is not active (status: ${job.status})`,
    );
  }
  // Overlap prevention (in-process)
  if (activeJobs.has(jobId)) {
    throw new Error(
      `Job "${job.jobKey}" is already running — cannot trigger while in progress`,
    );
  }
  // Also check DB for running runs (defensive — covers multi-instance)
  const existingRuns = await db
    .select()
    .from(pluginJobRuns)
    .where(
      and(
        eq(pluginJobRuns.jobId, jobId),
        eq(pluginJobRuns.status, "running"),
      ),
    );
  if (existingRuns.length > 0) {
    throw new Error(
      `Job "${job.jobKey}" already has a running execution — cannot trigger while in progress`,
    );
  }
  // Check worker availability
  if (!workerManager.isRunning(job.pluginId)) {
    throw new Error(
      `Worker for plugin "${job.pluginId}" is not running — cannot trigger job`,
    );
  }
  // Create the run row first so the caller gets a trackable runId
  const run = await jobStore.createRun({
    jobId,
    pluginId: job.pluginId,
    trigger,
  });
  // Dispatch in background — don't block the caller on the job's execution
  void dispatchManualRun(job, run.id, trigger);
  return { runId: run.id, jobId };
}
/**
* Dispatch a manually triggered job run.
*/
async function dispatchManualRun(
  job: typeof pluginJobs.$inferSelect,
  runId: string,
  trigger: "manual" | "retry",
): Promise<void> {
  const { id: jobId, pluginId, jobKey } = job;
  const jobLog = log.child({ jobId, pluginId, jobKey, runId, trigger });
  const startedAt = Date.now();
  activeJobs.add(jobId);
  try {
    // Mark the run active, then hand it to the worker via RPC.
    await jobStore.markRunning(runId);
    const payload = {
      job: {
        jobKey,
        runId,
        trigger,
        scheduledAt: new Date().toISOString(),
      },
    };
    await workerManager.call(pluginId, "runJob", payload, jobTimeoutMs);
    const durationMs = Date.now() - startedAt;
    await jobStore.completeRun(runId, { status: "succeeded", durationMs });
    jobLog.info({ durationMs }, "manual job completed successfully");
  } catch (err) {
    const durationMs = Date.now() - startedAt;
    const errorMessage = err instanceof Error ? err.message : String(err);
    jobLog.error({ durationMs, err: errorMessage }, "manual job failed");
    // Best-effort bookkeeping — a failure to record must not escape here.
    try {
      await jobStore.completeRun(runId, {
        status: "failed",
        error: errorMessage,
        durationMs,
      });
    } catch (completeErr) {
      const detail =
        completeErr instanceof Error ? completeErr.message : String(completeErr);
      jobLog.error({ err: detail }, "failed to record manual job failure");
    }
  } finally {
    // Always release the overlap guard, whatever the outcome.
    activeJobs.delete(jobId);
  }
}
// -----------------------------------------------------------------------
// Schedule pointer management
// -----------------------------------------------------------------------
/**
* Advance the `lastRunAt` and `nextRunAt` timestamps on a job after a run.
*/
async function advanceSchedulePointer(
  job: typeof pluginJobs.$inferSelect,
): Promise<void> {
  const now = new Date();
  // Default: no future run (unscheduled jobs, or an invalid cron expression).
  let nextRunAt: Date | null = null;
  if (job.schedule) {
    const validationError = validateCron(job.schedule);
    if (!validationError) {
      nextRunAt = nextCronTick(parseCron(job.schedule), now);
    } else {
      log.warn(
        { jobId: job.id, schedule: job.schedule, error: validationError },
        "invalid cron schedule — cannot compute next run",
      );
    }
  }
  await jobStore.updateRunTimestamps(job.id, now, nextRunAt);
}
/**
* Ensure all active jobs for a plugin have a `nextRunAt` value.
* Called when a plugin is registered with the scheduler.
*/
async function ensureNextRunTimestamps(pluginId: string): Promise<void> {
  const jobs = await jobStore.listJobs(pluginId, "active");
  for (const job of jobs) {
    // Already pointing at a future tick — leave the pointer alone.
    if (job.nextRunAt && job.nextRunAt.getTime() > Date.now()) continue;
    // Jobs without a cron expression are trigger-only.
    if (!job.schedule) continue;
    const validationError = validateCron(job.schedule);
    if (validationError) {
      log.warn(
        { jobId: job.id, jobKey: job.jobKey, schedule: job.schedule, error: validationError },
        "skipping job with invalid cron schedule",
      );
      continue;
    }
    const nextRunAt = nextCronTick(parseCron(job.schedule), new Date());
    if (!nextRunAt) continue;
    // Preserve the recorded lastRunAt; fall back to the epoch for jobs
    // that have never run.
    await jobStore.updateRunTimestamps(
      job.id,
      job.lastRunAt ?? new Date(0),
      nextRunAt,
    );
    log.debug(
      { jobId: job.id, jobKey: job.jobKey, nextRunAt: nextRunAt.toISOString() },
      "computed nextRunAt for job",
    );
  }
}
// -----------------------------------------------------------------------
// Plugin registration
// -----------------------------------------------------------------------
async function registerPlugin(pluginId: string): Promise<void> {
  log.info({ pluginId }, "registering plugin with job scheduler");
  // Backfill nextRunAt for active jobs that are missing or stale.
  await ensureNextRunTimestamps(pluginId);
}
async function unregisterPlugin(pluginId: string): Promise<void> {
  log.info({ pluginId }, "unregistering plugin from job scheduler");
  // Mark any queued/running run records for this plugin as cancelled.
  // Executions already in memory are left to drain on their own.
  try {
    const inFlight = await db
      .select()
      .from(pluginJobRuns)
      .where(
        and(
          eq(pluginJobRuns.pluginId, pluginId),
          or(
            eq(pluginJobRuns.status, "running"),
            eq(pluginJobRuns.status, "queued"),
          ),
        ),
      );
    for (const run of inFlight) {
      const durationMs = run.startedAt
        ? Date.now() - run.startedAt.getTime()
        : null;
      await jobStore.completeRun(run.id, {
        status: "cancelled",
        error: "Plugin unregistered",
        durationMs,
      });
    }
  } catch (err) {
    log.error(
      {
        pluginId,
        err: err instanceof Error ? err.message : String(err),
      },
      "error cancelling in-flight runs during unregister",
    );
  }
  // Drop in-memory overlap tracking for every job owned by this plugin.
  for (const job of await jobStore.listJobs(pluginId)) {
    activeJobs.delete(job.id);
  }
}
// -----------------------------------------------------------------------
// Lifecycle: start / stop
// -----------------------------------------------------------------------
function start(): void {
  // Idempotent: a second call while running is a no-op.
  if (running) {
    log.debug("scheduler already running");
    return;
  }
  running = true;
  // Fire ticks on a fixed interval; each tick runs detached.
  tickTimer = setInterval(() => void tick(), tickIntervalMs);
  log.info(
    { tickIntervalMs, maxConcurrentJobs },
    "plugin job scheduler started",
  );
}
function stop(): void {
  // Clear the interval unconditionally — even when `running` is already
  // false — so a half-initialised scheduler can never leak a timer.
  if (tickTimer !== null) {
    clearInterval(tickTimer);
    tickTimer = null;
  }
  if (!running) return;
  running = false;
  log.info(
    { activeJobCount: activeJobs.size },
    "plugin job scheduler stopped",
  );
}
// -----------------------------------------------------------------------
// Diagnostics
// -----------------------------------------------------------------------
function diagnostics(): SchedulerDiagnostics {
  // Point-in-time snapshot for the admin/observability surface.
  const snapshot: SchedulerDiagnostics = {
    running,
    activeJobCount: activeJobs.size,
    activeJobIds: Array.from(activeJobs),
    tickCount,
    lastTickAt: lastTickAt ? lastTickAt.toISOString() : null,
  };
  return snapshot;
}
// -----------------------------------------------------------------------
// Public API
// -----------------------------------------------------------------------
// Scheduler handle consumed by the plugin host: lifecycle controls
// (start/stop), per-plugin registration hooks, manual triggering, the raw
// tick (exposed for tests/manual pumping), and diagnostics introspection.
return {
  start,
  stop,
  registerPlugin,
  unregisterPlugin,
  triggerJob,
  tick,
  diagnostics,
};
}

View File

@@ -0,0 +1,465 @@
/**
* Plugin Job Store — persistence layer for scheduled plugin jobs and their
* execution history.
*
* This service manages the `plugin_jobs` and `plugin_job_runs` tables. It is
* the server-side backing store for the `ctx.jobs` SDK surface exposed to
* plugin workers.
*
* ## Responsibilities
*
* 1. **Sync job declarations** — When a plugin is installed or started, the
* host calls `syncJobDeclarations()` to upsert the manifest's declared jobs
* into the `plugin_jobs` table. Jobs removed from the manifest are marked
* `paused` (not deleted) to preserve history.
*
* 2. **Job CRUD** — List, get, pause, and resume jobs for a given plugin.
*
* 3. **Run lifecycle** — Create job run records, update their status, and
* record results (duration, errors, logs).
*
* 4. **Next-run calculation** — After a run completes the host should call
* `updateNextRunAt()` with the next cron tick so the scheduler knows when
* to fire next.
*
* The capability check (`jobs.schedule`) is enforced upstream by the host
* client factory and manifest validator — this store trusts that the caller
* has already been authorised.
*
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
* @see PLUGIN_SPEC.md §21.3 — `plugin_jobs` / `plugin_job_runs` tables
*/
import { and, desc, eq } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { plugins, pluginJobs, pluginJobRuns } from "@paperclipai/db";
import type {
PluginJobDeclaration,
PluginJobRunStatus,
PluginJobRunTrigger,
PluginJobRecord,
} from "@paperclipai/shared";
import { notFound } from "../errors.js";
/**
* The statuses used for job *definitions* in the `plugin_jobs` table.
* Aliased from `PluginJobRecord` to keep the store API aligned with
* the domain type (`"active" | "paused" | "failed"`).
*/
type JobDefinitionStatus = PluginJobRecord["status"]; // "active" | "paused" | "failed"
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
* Input for creating a job run record.
*/
export interface CreateJobRunInput {
  /** FK to the plugin_jobs row. */
  jobId: string;
  /** FK to the plugins row (denormalised so runs can be queried per plugin). */
  pluginId: string;
  /** What triggered this run (schedule, manual, retry, …). */
  trigger: PluginJobRunTrigger;
}
/**
* Input for completing (or failing) a job run.
*/
export interface CompleteJobRunInput {
  /** Final run status (e.g. succeeded / failed / cancelled). */
  status: PluginJobRunStatus;
  /** Error message if the run failed; omitted or null on success. */
  error?: string | null;
  /** Run duration in milliseconds, when measurable. */
  durationMs?: number | null;
}
// ---------------------------------------------------------------------------
// Service
// ---------------------------------------------------------------------------
/**
* Create a PluginJobStore backed by the given Drizzle database instance.
*
* @example
* ```ts
* const jobStore = pluginJobStore(db);
*
* // On plugin install/start — sync declared jobs into the DB
* await jobStore.syncJobDeclarations(pluginId, manifest.jobs ?? []);
*
* // Before dispatching a runJob RPC — create a run record
* const run = await jobStore.createRun({ jobId, pluginId, trigger: "schedule" });
*
* // After the RPC completes — record the result
* await jobStore.completeRun(run.id, {
* status: "succeeded",
* durationMs: Date.now() - startedAt,
* });
* ```
*/
export function pluginJobStore(db: Db) {
  // -----------------------------------------------------------------------
  // Internal helpers
  // -----------------------------------------------------------------------
  /**
   * Throw `notFound` unless a `plugins` row with this id exists.
   * Fails fast with a helpful message before child-row writes that would
   * otherwise surface as opaque FK-constraint errors.
   */
  async function assertPluginExists(pluginId: string): Promise<void> {
    const rows = await db
      .select({ id: plugins.id })
      .from(plugins)
      .where(eq(plugins.id, pluginId));
    if (rows.length === 0) {
      throw notFound(`Plugin not found: ${pluginId}`);
    }
  }
  // -----------------------------------------------------------------------
  // Public API
  // -----------------------------------------------------------------------
  return {
    // =====================================================================
    // Job declarations (plugin_jobs)
    // =====================================================================
    /**
     * Sync declared jobs from a plugin manifest into the `plugin_jobs` table.
     *
     * This is called at plugin install and on each worker startup so the DB
     * always reflects the manifest's declared jobs:
     *
     * - **New jobs** are inserted with status `active`.
     * - **Existing jobs** have their `schedule` updated if it changed.
     * - **Removed jobs** (present in DB but absent from the manifest) are
     *   set to `paused` so their history is preserved.
     *
     * Rows whose schedule and status are already correct are left untouched
     * (no spurious `updatedAt` bumps on every worker restart).
     *
     * @param pluginId - UUID of the owning plugin
     * @param declarations - Job declarations from the plugin manifest
     */
    async syncJobDeclarations(
      pluginId: string,
      declarations: PluginJobDeclaration[],
    ): Promise<void> {
      await assertPluginExists(pluginId);
      // Fetch existing jobs for this plugin and index them by jobKey.
      const existingJobs = await db
        .select()
        .from(pluginJobs)
        .where(eq(pluginJobs.pluginId, pluginId));
      const existingByKey = new Map(existingJobs.map((j) => [j.jobKey, j]));
      const declaredKeys = new Set<string>();
      // Upsert each declared job.
      for (const decl of declarations) {
        declaredKeys.add(decl.jobKey);
        const existing = existingByKey.get(decl.jobKey);
        const schedule = decl.schedule ?? "";
        if (existing) {
          // Update schedule if it changed; re-activate if it was paused.
          const updates: Record<string, unknown> = {};
          if (existing.schedule !== schedule) {
            updates.schedule = schedule;
          }
          if (existing.status === "paused") {
            updates.status = "active";
          }
          // Skip the write entirely when nothing changed — avoids write
          // churn and meaningless `updatedAt` bumps on every sync.
          if (Object.keys(updates).length > 0) {
            updates.updatedAt = new Date();
            await db
              .update(pluginJobs)
              .set(updates)
              .where(eq(pluginJobs.id, existing.id));
          }
        } else {
          // Insert new job.
          await db.insert(pluginJobs).values({
            pluginId,
            jobKey: decl.jobKey,
            schedule,
            status: "active",
          });
        }
      }
      // Pause jobs that are no longer declared in the manifest.
      for (const existing of existingJobs) {
        if (!declaredKeys.has(existing.jobKey) && existing.status !== "paused") {
          await db
            .update(pluginJobs)
            .set({ status: "paused", updatedAt: new Date() })
            .where(eq(pluginJobs.id, existing.id));
        }
      }
    },
    /**
     * List all jobs for a plugin, optionally filtered by status.
     *
     * @param pluginId - UUID of the owning plugin
     * @param status - Optional status filter
     */
    async listJobs(
      pluginId: string,
      status?: JobDefinitionStatus,
    ): Promise<(typeof pluginJobs.$inferSelect)[]> {
      const conditions = [eq(pluginJobs.pluginId, pluginId)];
      if (status) {
        conditions.push(eq(pluginJobs.status, status));
      }
      return db
        .select()
        .from(pluginJobs)
        .where(and(...conditions));
    },
    /**
     * Get a single job by its composite key `(pluginId, jobKey)`.
     *
     * @param pluginId - UUID of the owning plugin
     * @param jobKey - Stable job identifier from the manifest
     * @returns The job row, or `null` if not found
     */
    async getJobByKey(
      pluginId: string,
      jobKey: string,
    ): Promise<(typeof pluginJobs.$inferSelect) | null> {
      const rows = await db
        .select()
        .from(pluginJobs)
        .where(
          and(
            eq(pluginJobs.pluginId, pluginId),
            eq(pluginJobs.jobKey, jobKey),
          ),
        );
      return rows[0] ?? null;
    },
    /**
     * Get a single job by its primary key (UUID).
     *
     * @param jobId - UUID of the job row
     * @returns The job row, or `null` if not found
     */
    async getJobById(
      jobId: string,
    ): Promise<(typeof pluginJobs.$inferSelect) | null> {
      const rows = await db
        .select()
        .from(pluginJobs)
        .where(eq(pluginJobs.id, jobId));
      return rows[0] ?? null;
    },
    /**
     * Fetch a single job by ID, scoped to a specific plugin.
     *
     * Returns `null` if the job does not exist or does not belong to the
     * given plugin — callers should treat both cases as "not found".
     */
    async getJobByIdForPlugin(
      pluginId: string,
      jobId: string,
    ): Promise<(typeof pluginJobs.$inferSelect) | null> {
      const rows = await db
        .select()
        .from(pluginJobs)
        .where(and(eq(pluginJobs.id, jobId), eq(pluginJobs.pluginId, pluginId)));
      return rows[0] ?? null;
    },
    /**
     * Update a job's status.
     *
     * @param jobId - UUID of the job row
     * @param status - New status
     */
    async updateJobStatus(
      jobId: string,
      status: JobDefinitionStatus,
    ): Promise<void> {
      await db
        .update(pluginJobs)
        .set({ status, updatedAt: new Date() })
        .where(eq(pluginJobs.id, jobId));
    },
    /**
     * Update the `lastRunAt` and `nextRunAt` timestamps on a job.
     *
     * Called by the scheduler after a run completes to advance the
     * scheduling pointer.
     *
     * @param jobId - UUID of the job row
     * @param lastRunAt - When the last run started
     * @param nextRunAt - When the next run should fire (null = never)
     */
    async updateRunTimestamps(
      jobId: string,
      lastRunAt: Date,
      nextRunAt: Date | null,
    ): Promise<void> {
      await db
        .update(pluginJobs)
        .set({
          lastRunAt,
          nextRunAt,
          updatedAt: new Date(),
        })
        .where(eq(pluginJobs.id, jobId));
    },
    /**
     * Delete all jobs (and cascaded runs) owned by a plugin.
     *
     * Called during plugin uninstall when `removeData = true`.
     *
     * @param pluginId - UUID of the owning plugin
     */
    async deleteAllJobs(pluginId: string): Promise<void> {
      await db
        .delete(pluginJobs)
        .where(eq(pluginJobs.pluginId, pluginId));
    },
    // =====================================================================
    // Job runs (plugin_job_runs)
    // =====================================================================
    /**
     * Create a new job run record with status `queued`.
     *
     * The caller should create the run record *before* dispatching the
     * `runJob` RPC to the worker, then update it to `running` once the
     * worker begins execution.
     *
     * @param input - Job run input (jobId, pluginId, trigger)
     * @returns The newly created run row
     * @throws if the insert unexpectedly returns no row
     */
    async createRun(
      input: CreateJobRunInput,
    ): Promise<typeof pluginJobRuns.$inferSelect> {
      const rows = await db
        .insert(pluginJobRuns)
        .values({
          jobId: input.jobId,
          pluginId: input.pluginId,
          trigger: input.trigger,
          status: "queued",
        })
        .returning();
      // Explicit guard instead of a non-null assertion — surfaces a clear
      // error if the driver ever returns an empty `returning()` set.
      const run = rows[0];
      if (!run) {
        throw new Error(
          `Failed to create job run for job ${input.jobId}: insert returned no row`,
        );
      }
      return run;
    },
    /**
     * Mark a run as `running` and set its `startedAt` timestamp.
     *
     * @param runId - UUID of the run row
     */
    async markRunning(runId: string): Promise<void> {
      await db
        .update(pluginJobRuns)
        .set({
          status: "running",
          startedAt: new Date(),
        })
        .where(eq(pluginJobRuns.id, runId));
    },
    /**
     * Complete a run — set its final status, error, duration, and
     * `finishedAt` timestamp.
     *
     * @param runId - UUID of the run row
     * @param input - Completion details
     */
    async completeRun(
      runId: string,
      input: CompleteJobRunInput,
    ): Promise<void> {
      await db
        .update(pluginJobRuns)
        .set({
          status: input.status,
          error: input.error ?? null,
          durationMs: input.durationMs ?? null,
          finishedAt: new Date(),
        })
        .where(eq(pluginJobRuns.id, runId));
    },
    /**
     * Get a run by its primary key.
     *
     * @param runId - UUID of the run row
     * @returns The run row, or `null` if not found
     */
    async getRunById(
      runId: string,
    ): Promise<(typeof pluginJobRuns.$inferSelect) | null> {
      const rows = await db
        .select()
        .from(pluginJobRuns)
        .where(eq(pluginJobRuns.id, runId));
      return rows[0] ?? null;
    },
    /**
     * List runs for a specific job, ordered by creation time descending.
     *
     * @param jobId - UUID of the job
     * @param limit - Maximum number of rows to return (default: 50)
     */
    async listRunsByJob(
      jobId: string,
      limit = 50,
    ): Promise<(typeof pluginJobRuns.$inferSelect)[]> {
      return db
        .select()
        .from(pluginJobRuns)
        .where(eq(pluginJobRuns.jobId, jobId))
        .orderBy(desc(pluginJobRuns.createdAt))
        .limit(limit);
    },
    /**
     * List runs for a plugin, optionally filtered by status.
     *
     * @param pluginId - UUID of the owning plugin
     * @param status - Optional status filter
     * @param limit - Maximum number of rows to return (default: 50)
     */
    async listRunsByPlugin(
      pluginId: string,
      status?: PluginJobRunStatus,
      limit = 50,
    ): Promise<(typeof pluginJobRuns.$inferSelect)[]> {
      const conditions = [eq(pluginJobRuns.pluginId, pluginId)];
      if (status) {
        conditions.push(eq(pluginJobRuns.status, status));
      }
      return db
        .select()
        .from(pluginJobRuns)
        .where(and(...conditions))
        .orderBy(desc(pluginJobRuns.createdAt))
        .limit(limit);
    },
  };
}
/** Type alias for the return value of `pluginJobStore()` — the store's public API surface. */
export type PluginJobStore = ReturnType<typeof pluginJobStore>;

View File

@@ -0,0 +1,821 @@
/**
* PluginLifecycleManager — state-machine controller for plugin status
* transitions and worker process coordination.
*
* Each plugin moves through a well-defined state machine:
*
* ```
* installed ──→ ready ──→ disabled
* │ │ │
* │ ├──→ error│
* │ ↓ │
* │ upgrade_pending │
* │ │ │
* ↓ ↓ ↓
* uninstalled
* ```
*
* The lifecycle manager:
*
* 1. **Validates transitions** — Only transitions defined in
* `VALID_TRANSITIONS` are allowed; invalid transitions throw.
*
* 2. **Coordinates workers** — When a plugin moves to `ready`, its
* worker process is started. When it moves out of `ready`, the
* worker is stopped gracefully.
*
* 3. **Emits events** — `plugin.loaded`, `plugin.enabled`,
* `plugin.disabled`, `plugin.unloaded`, `plugin.status_changed`
* events are emitted so that other services (job coordinator,
* tool dispatcher, event bus) can react accordingly.
*
* 4. **Persists state** — Status changes are written to the database
* through the plugin registry service.
*
* @see PLUGIN_SPEC.md §12 — Process Model
* @see PLUGIN_SPEC.md §12.5 — Graceful Shutdown Policy
*/
import { EventEmitter } from "node:events";
import type { Db } from "@paperclipai/db";
import type {
PluginStatus,
PluginRecord,
PaperclipPluginManifestV1,
} from "@paperclipai/shared";
import { pluginRegistryService } from "./plugin-registry.js";
import { pluginLoader, type PluginLoader } from "./plugin-loader.js";
import type { PluginWorkerManager, WorkerStartOptions } from "./plugin-worker-manager.js";
import { badRequest, notFound } from "../errors.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Lifecycle state machine
// ---------------------------------------------------------------------------
/**
* Valid state transitions for the plugin lifecycle.
*
* installed → ready (initial load succeeds)
* installed → error (initial load fails)
* installed → uninstalled (abort installation)
*
* ready → disabled (operator disables plugin)
* ready → error (runtime failure)
* ready → upgrade_pending (upgrade with new capabilities)
* ready → uninstalled (uninstall)
*
* disabled → ready (operator re-enables plugin)
* disabled → uninstalled (uninstall while disabled)
*
* error → ready (retry / recovery)
* error → uninstalled (give up and uninstall)
*
* upgrade_pending → ready (operator approves new capabilities)
* upgrade_pending → error (upgrade worker fails)
* upgrade_pending → uninstalled (reject upgrade and uninstall)
*
* uninstalled → installed (reinstall)
*/
const VALID_TRANSITIONS: Record<string, readonly PluginStatus[]> = {
  installed: ["ready", "error", "uninstalled"],
  // Note: `ready → ready` is deliberately permitted (self-transition), so an
  // already-running plugin can be re-loaded/refreshed without a state error.
  ready: ["ready", "disabled", "error", "upgrade_pending", "uninstalled"],
  disabled: ["ready", "uninstalled"],
  error: ["ready", "uninstalled"],
  upgrade_pending: ["ready", "error", "uninstalled"],
  uninstalled: ["installed"], // reinstall
};
/**
* Check whether a transition from `from` → `to` is valid.
*/
function isValidTransition(from: PluginStatus, to: PluginStatus): boolean {
  // Unknown `from` states have no allowed targets.
  const allowed = VALID_TRANSITIONS[from];
  return allowed ? allowed.includes(to) : false;
}
// ---------------------------------------------------------------------------
// Lifecycle events
// ---------------------------------------------------------------------------
/**
* Events emitted by the PluginLifecycleManager.
* Consumers can subscribe to these for routing-table updates, UI refresh
* notifications, and observability.
*/
export interface PluginLifecycleEvents {
  /** Emitted after a plugin is loaded (installed → ready). */
  "plugin.loaded": { pluginId: string; pluginKey: string };
  /** Emitted after a plugin transitions to ready (enabled). */
  "plugin.enabled": { pluginId: string; pluginKey: string };
  /** Emitted after a plugin is disabled (ready → disabled). */
  "plugin.disabled": { pluginId: string; pluginKey: string; reason?: string };
  /** Emitted after a plugin is unloaded (any → uninstalled). */
  "plugin.unloaded": { pluginId: string; pluginKey: string; removeData: boolean };
  /** Emitted on any status change (in addition to the specific event above). */
  "plugin.status_changed": {
    pluginId: string;
    pluginKey: string;
    previousStatus: PluginStatus;
    newStatus: PluginStatus;
  };
  /** Emitted when a plugin enters an error state. */
  "plugin.error": { pluginId: string; pluginKey: string; error: string };
  /** Emitted when a plugin enters upgrade_pending. */
  "plugin.upgrade_pending": { pluginId: string; pluginKey: string };
  /** Emitted when a plugin worker process has been started. */
  "plugin.worker_started": { pluginId: string; pluginKey: string };
  /** Emitted when a plugin worker process has been stopped. */
  "plugin.worker_stopped": { pluginId: string; pluginKey: string };
}
// Helper aliases: the event-name union and the payload type for a given event.
type LifecycleEventName = keyof PluginLifecycleEvents;
type LifecycleEventPayload<K extends LifecycleEventName> = PluginLifecycleEvents[K];
// ---------------------------------------------------------------------------
// PluginLifecycleManager
// ---------------------------------------------------------------------------
export interface PluginLifecycleManager {
  /**
   * Load a newly installed plugin — transitions `installed` → `ready`.
   *
   * This is called after the registry has persisted the initial install record.
   * The caller should have already spawned the worker and performed health
   * checks before calling this. If the worker fails, call `markError` instead.
   */
  load(pluginId: string): Promise<PluginRecord>;
  /**
   * Enable a plugin that is in `disabled`, `error`, or `upgrade_pending` state.
   * Transitions → `ready`.
   */
  enable(pluginId: string): Promise<PluginRecord>;
  /**
   * Disable a running plugin.
   * Transitions `ready` → `disabled`.
   */
  disable(pluginId: string, reason?: string): Promise<PluginRecord>;
  /**
   * Unload (uninstall) a plugin from any active state.
   * Transitions → `uninstalled`.
   *
   * When `removeData` is true, the plugin row and cascaded config are
   * hard-deleted. Otherwise a soft-delete sets status to `uninstalled`.
   */
  unload(pluginId: string, removeData?: boolean): Promise<PluginRecord | null>;
  /**
   * Mark a plugin as errored (e.g. worker crash, health-check failure).
   * Transitions → `error`.
   */
  markError(pluginId: string, error: string): Promise<PluginRecord>;
  /**
   * Mark a plugin as requiring upgrade approval.
   * Transitions `ready` → `upgrade_pending`.
   */
  markUpgradePending(pluginId: string): Promise<PluginRecord>;
  /**
   * Upgrade a plugin to a newer version.
   * This is a placeholder that handles the lifecycle state transition.
   * The actual package installation is handled by plugin-loader.
   *
   * If the upgrade adds new capabilities, transitions to `upgrade_pending`.
   * Otherwise, transitions to `ready` directly.
   */
  upgrade(pluginId: string, version?: string): Promise<PluginRecord>;
  /**
   * Start the worker process for a plugin that is already in `ready` state.
   *
   * This is used by the server startup orchestration to start workers for
   * plugins that were persisted as `ready`. It requires a `PluginWorkerManager`
   * to have been provided at construction time.
   *
   * @param pluginId - The UUID of the plugin to start
   * @param options - Worker start options (entrypoint path, config, etc.)
   * @throws if no worker manager is configured or the plugin is not ready
   */
  startWorker(pluginId: string, options: WorkerStartOptions): Promise<void>;
  /**
   * Stop the worker process for a plugin without changing lifecycle state.
   *
   * This is used during server shutdown to gracefully stop all workers.
   * It does not transition the plugin state — plugins remain in their
   * current status so they can be restarted on next server boot.
   *
   * @param pluginId - The UUID of the plugin to stop
   */
  stopWorker(pluginId: string): Promise<void>;
  /**
   * Restart the worker process for a running plugin.
   *
   * Stops and re-starts the worker process. The plugin remains in `ready`
   * state throughout. This is typically called after a config change.
   *
   * @param pluginId - The UUID of the plugin to restart
   * @throws if no worker manager is configured or the plugin is not ready
   */
  restartWorker(pluginId: string): Promise<void>;
  /**
   * Get the current lifecycle state for a plugin.
   * Returns `null` when the plugin is unknown.
   */
  getStatus(pluginId: string): Promise<PluginStatus | null>;
  /**
   * Check whether a transition is allowed from the plugin's current state.
   */
  canTransition(pluginId: string, to: PluginStatus): Promise<boolean>;
  /**
   * Subscribe to lifecycle events.
   */
  on<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;
  /**
   * Unsubscribe from lifecycle events.
   */
  off<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;
  /**
   * Subscribe to a lifecycle event once (listener auto-removed after firing).
   */
  once<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;
}
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
/**
* Options for constructing a PluginLifecycleManager.
*/
export interface PluginLifecycleManagerOptions {
  /** Plugin loader instance. Falls back to the default loader if omitted. */
  loader?: PluginLoader;
  /**
   * Worker process manager. When provided, lifecycle transitions that bring
   * a plugin online (load, enable, upgrade-to-ready) will start the worker
   * process, and transitions that take a plugin offline (disable, unload,
   * markError) will stop it.
   *
   * When omitted the lifecycle manager operates in state-only mode — the
   * caller is responsible for managing worker processes externally.
   */
  workerManager?: PluginWorkerManager;
}
/**
* Create a PluginLifecycleManager.
*
* This service orchestrates plugin state transitions on top of the
* `pluginRegistryService` (which handles raw DB persistence). It enforces
* the lifecycle state machine, emits events for downstream consumers
* (routing tables, UI, observability), and manages worker processes via
* the `PluginWorkerManager` when one is provided.
*
* Usage:
* ```ts
* const lifecycle = pluginLifecycleManager(db, {
* workerManager: createPluginWorkerManager(),
* });
* lifecycle.on("plugin.enabled", ({ pluginId }) => { ... });
* await lifecycle.load(pluginId);
* ```
*
* @see PLUGIN_SPEC.md §21.3 — `plugins.status` column
* @see PLUGIN_SPEC.md §12 — Process Model
*/
export function pluginLifecycleManager(
db: Db,
options?: PluginLoader | PluginLifecycleManagerOptions,
): PluginLifecycleManager {
// Support the legacy signature: pluginLifecycleManager(db, loader)
// as well as the new options object form.
let loaderArg: PluginLoader | undefined;
let workerManager: PluginWorkerManager | undefined;
if (options && typeof options === "object" && "discoverAll" in options) {
// Legacy: second arg is a PluginLoader directly
loaderArg = options as PluginLoader;
} else if (options && typeof options === "object") {
const opts = options as PluginLifecycleManagerOptions;
loaderArg = opts.loader;
workerManager = opts.workerManager;
}
const registry = pluginRegistryService(db);
const pluginLoaderInstance = loaderArg ?? pluginLoader(db);
const emitter = new EventEmitter();
emitter.setMaxListeners(100); // plugins may have many listeners; 100 is a safe upper bound
const log = logger.child({ service: "plugin-lifecycle" });
// -----------------------------------------------------------------------
// Internal helpers
// -----------------------------------------------------------------------
async function requirePlugin(pluginId: string): Promise<PluginRecord> {
  // Fetch-or-throw: callers never have to null-check the registry lookup.
  const record = await registry.getById(pluginId);
  if (record) return record as PluginRecord;
  throw notFound(`Plugin not found: ${pluginId}`);
}
function assertTransition(plugin: PluginRecord, to: PluginStatus): void {
  // Reject any move not whitelisted by the lifecycle state machine.
  if (isValidTransition(plugin.status, to)) return;
  throw badRequest(
    `Invalid lifecycle transition: ${plugin.status}${to} for plugin ${plugin.pluginKey}`,
  );
}
// Core transition primitive: validates the move against the state machine,
// persists the new status (and optional lastError) through the registry,
// logs it, and emits the generic `plugin.status_changed` event. The
// domain-specific events (loaded/enabled/disabled/…) are emitted by the
// public methods layered on top of this.
async function transition(
  pluginId: string,
  to: PluginStatus,
  lastError: string | null = null,
  existingPlugin?: PluginRecord,
): Promise<PluginRecord> {
  // `existingPlugin` lets callers that already fetched the row skip a re-read.
  const plugin = existingPlugin ?? await requirePlugin(pluginId);
  assertTransition(plugin, to);
  const previousStatus = plugin.status;
  const updated = await registry.updateStatus(pluginId, {
    status: to,
    lastError,
  });
  // Defensive: the row could have been deleted between the read and update.
  if (!updated) throw notFound(`Plugin not found after status update: ${pluginId}`);
  const result = updated as PluginRecord;
  log.info(
    { pluginId, pluginKey: result.pluginKey, from: previousStatus, to },
    `plugin lifecycle: ${previousStatus}${to}`,
  );
  // Emit the generic status_changed event
  emitter.emit("plugin.status_changed", {
    pluginId,
    pluginKey: result.pluginKey,
    previousStatus,
    newStatus: to,
  });
  return result;
}
// Typed fan-out helper for domain events (loaded/enabled/disabled/…).
// NOTE(review): the payload is typed as the union over all event payloads,
// so the compiler does not tie `event` to its specific payload shape here —
// call sites are responsible for passing the matching payload.
function emitDomain(
  event: LifecycleEventName,
  payload: PluginLifecycleEvents[LifecycleEventName],
): void {
  emitter.emit(event, payload);
}
// -----------------------------------------------------------------------
// Worker management helpers
// -----------------------------------------------------------------------
/**
* Stop the worker for a plugin if one is running.
* This is a best-effort operation — if no worker manager is configured
* or no worker is running, it silently succeeds.
*/
async function stopWorkerIfRunning(
  pluginId: string,
  pluginKey: string,
): Promise<void> {
  // Best-effort: silently succeed when there is no worker manager or no worker.
  if (!workerManager) return;
  const hasWorker =
    workerManager.isRunning(pluginId) || Boolean(workerManager.getWorker(pluginId));
  if (!hasWorker) return;
  try {
    await workerManager.stopWorker(pluginId);
    log.info({ pluginId, pluginKey }, "plugin lifecycle: worker stopped");
    emitDomain("plugin.worker_stopped", { pluginId, pluginKey });
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    log.warn(
      { pluginId, pluginKey, err: message },
      "plugin lifecycle: failed to stop worker (best-effort)",
    );
  }
}
async function activateReadyPlugin(pluginId: string): Promise<void> {
  // Runtime activation is optional — only attempt it when the loader
  // implementation exposes the runtime-service hooks and reports them ready.
  const canActivate =
    typeof pluginLoaderInstance.hasRuntimeServices === "function"
    && typeof pluginLoaderInstance.loadSingle === "function"
    && pluginLoaderInstance.hasRuntimeServices();
  if (!canActivate) return;
  const loadResult = await pluginLoaderInstance.loadSingle(pluginId);
  if (!loadResult.success) {
    throw new Error(
      loadResult.error
        ?? `Failed to activate plugin ${loadResult.plugin.pluginKey}`,
    );
  }
}
async function deactivatePluginRuntime(
  pluginId: string,
  pluginKey: string,
): Promise<void> {
  // Prefer loader-driven deactivation when runtime services are available.
  const canUnloadViaRuntime =
    typeof pluginLoaderInstance.hasRuntimeServices === "function"
    && typeof pluginLoaderInstance.unloadSingle === "function"
    && pluginLoaderInstance.hasRuntimeServices();
  if (canUnloadViaRuntime) {
    await pluginLoaderInstance.unloadSingle(pluginId, pluginKey);
  } else {
    // Fall back to stopping the raw worker process directly.
    await stopWorkerIfRunning(pluginId, pluginKey);
  }
}
// -----------------------------------------------------------------------
// Public API
// -----------------------------------------------------------------------
return {
// -- load -------------------------------------------------------------
/**
* load — Transitions a plugin to 'ready' status and starts its worker.
*
* This method is called after a plugin has been successfully installed and
* validated. It marks the plugin as ready in the database and immediately
* triggers the plugin loader to start the worker process.
*
* @param pluginId - The UUID of the plugin to load.
* @returns The updated plugin record.
*/
async load(pluginId: string): Promise<PluginRecord> {
const result = await transition(pluginId, "ready");
await activateReadyPlugin(pluginId);
emitDomain("plugin.loaded", {
pluginId,
pluginKey: result.pluginKey,
});
emitDomain("plugin.enabled", {
pluginId,
pluginKey: result.pluginKey,
});
return result;
},
// -- enable -----------------------------------------------------------
/**
* enable — Re-enables a plugin that was previously in an error or upgrade state.
*
* Similar to load(), this method transitions the plugin to 'ready' and starts
* its worker, but it specifically targets plugins that are currently disabled.
*
* @param pluginId - The UUID of the plugin to enable.
* @returns The updated plugin record.
*/
async enable(pluginId: string): Promise<PluginRecord> {
const plugin = await requirePlugin(pluginId);
// Only allow enabling from disabled, error, or upgrade_pending states
if (plugin.status !== "disabled" && plugin.status !== "error" && plugin.status !== "upgrade_pending") {
throw badRequest(
`Cannot enable plugin in status '${plugin.status}'. ` +
`Plugin must be in 'disabled', 'error', or 'upgrade_pending' status to be enabled.`,
);
}
const result = await transition(pluginId, "ready", null, plugin);
await activateReadyPlugin(pluginId);
emitDomain("plugin.enabled", {
pluginId,
pluginKey: result.pluginKey,
});
return result;
},
// -- disable ----------------------------------------------------------
/**
 * disable — Takes a 'ready' plugin offline.
 *
 * Deactivates the plugin runtime first, then records the 'disabled'
 * status (with an optional operator-supplied reason) and emits
 * 'plugin.disabled'.
 *
 * @param pluginId - The UUID of the plugin to disable.
 * @param reason - Optional human-readable reason stored with the status.
 * @returns The updated plugin record.
 * @throws BadRequest unless the plugin is currently 'ready'.
 */
async disable(pluginId: string, reason?: string): Promise<PluginRecord> {
  const plugin = await requirePlugin(pluginId);
  if (plugin.status !== "ready") {
    throw badRequest(
      `Cannot disable plugin in status '${plugin.status}'. ` +
      `Plugin must be in 'ready' status to be disabled.`,
    );
  }
  await deactivatePluginRuntime(pluginId, plugin.pluginKey);
  const record = await transition(pluginId, "disabled", reason ?? null, plugin);
  emitDomain("plugin.disabled", {
    pluginId,
    pluginKey: record.pluginKey,
    reason,
  });
  return record;
},
// -- unload -----------------------------------------------------------
/**
 * Uninstall a plugin.
 *
 * Branches:
 * - status 'uninstalled' + removeData: clean install artifacts, hard-delete
 *   the row via the registry, emit 'plugin.unloaded'.
 * - status 'uninstalled' without removeData: 400 — nothing left to do.
 * - any other status: deactivate the runtime, clean artifacts, then let
 *   registry.uninstall perform the soft/hard delete; emits both
 *   'plugin.status_changed' and 'plugin.unloaded'.
 *
 * @param pluginId - The UUID of the plugin to unload.
 * @param removeData - When true, permanently delete the plugin row and data.
 * @returns The resulting record, or null (registry may return null on hard delete).
 */
async unload(
  pluginId: string,
  removeData = false,
): Promise<PluginRecord | null> {
  const plugin = await requirePlugin(pluginId);
  // If already uninstalled and removeData, hard-delete
  if (plugin.status === "uninstalled") {
    if (removeData) {
      await pluginLoaderInstance.cleanupInstallArtifacts(plugin);
      const deleted = await registry.uninstall(pluginId, true);
      log.info(
        { pluginId, pluginKey: plugin.pluginKey },
        "plugin lifecycle: hard-deleted already-uninstalled plugin",
      );
      emitDomain("plugin.unloaded", {
        pluginId,
        pluginKey: plugin.pluginKey,
        removeData: true,
      });
      return deleted as PluginRecord | null;
    }
    throw badRequest(
      `Plugin ${plugin.pluginKey} is already uninstalled. ` +
      `Use removeData=true to permanently delete it.`,
    );
  }
  await deactivatePluginRuntime(pluginId, plugin.pluginKey);
  await pluginLoaderInstance.cleanupInstallArtifacts(plugin);
  // Perform the uninstall via registry (handles soft/hard delete)
  const result = await registry.uninstall(pluginId, removeData);
  log.info(
    { pluginId, pluginKey: plugin.pluginKey, removeData },
    `plugin lifecycle: ${plugin.status} → uninstalled${removeData ? " (hard delete)" : ""}`,
  );
  // status_changed is emitted manually here because the registry (not
  // transition()) performed the state change.
  emitter.emit("plugin.status_changed", {
    pluginId,
    pluginKey: plugin.pluginKey,
    previousStatus: plugin.status,
    newStatus: "uninstalled" as PluginStatus,
  });
  emitDomain("plugin.unloaded", {
    pluginId,
    pluginKey: plugin.pluginKey,
    removeData,
  });
  return result as PluginRecord | null;
},
// -- markError --------------------------------------------------------
/**
 * markError — Flags a plugin as failed and takes it offline.
 *
 * The runtime is deactivated first so a broken plugin does not keep
 * running, then the 'error' status (with message) is persisted and
 * 'plugin.error' is emitted.
 *
 * @param pluginId - The UUID of the plugin.
 * @param error - Human-readable error message stored with the status.
 * @returns The updated plugin record.
 */
async markError(pluginId: string, error: string): Promise<PluginRecord> {
  const plugin = await requirePlugin(pluginId);
  await deactivatePluginRuntime(pluginId, plugin.pluginKey);
  const record = await transition(pluginId, "error", error, plugin);
  emitDomain("plugin.error", {
    pluginId,
    pluginKey: record.pluginKey,
    error,
  });
  return record;
},
// -- markUpgradePending -----------------------------------------------
/**
 * markUpgradePending — Parks a plugin awaiting operator approval.
 *
 * Deactivates the runtime and records 'upgrade_pending', emitting the
 * matching domain event.
 */
async markUpgradePending(pluginId: string): Promise<PluginRecord> {
  const plugin = await requirePlugin(pluginId);
  await deactivatePluginRuntime(pluginId, plugin.pluginKey);
  const record = await transition(pluginId, "upgrade_pending", null, plugin);
  emitDomain("plugin.upgrade_pending", {
    pluginId,
    pluginKey: record.pluginKey,
  });
  return record;
},
// -- upgrade ----------------------------------------------------------
/**
 * Upgrade a plugin to a newer version by performing a package update and
 * managing the lifecycle state transition.
 *
 * Following PLUGIN_SPEC.md §25.3, the upgrade process:
 * 1. Stops the current worker process (if running).
 * 2. Fetches and validates the new plugin package via the `PluginLoader`.
 * 3. Compares the capabilities declared in the new manifest against the old one.
 * 4. If new capabilities are added, transitions the plugin to `upgrade_pending`
 *    to await operator approval (worker stays stopped).
 * 5. If no new capabilities are added, transitions the plugin back to `ready`
 *    with the updated version and manifest metadata.
 *
 * @param pluginId - The UUID of the plugin to upgrade.
 * @param version - Optional target version specifier.
 * @returns The updated `PluginRecord`.
 * @throws {BadRequest} If the plugin is not in a ready or upgrade_pending state.
 */
async upgrade(pluginId: string, version?: string): Promise<PluginRecord> {
  const plugin = await requirePlugin(pluginId);
  // Can only upgrade plugins that are ready or already in upgrade_pending
  if (plugin.status !== "ready" && plugin.status !== "upgrade_pending") {
    throw badRequest(
      `Cannot upgrade plugin in status '${plugin.status}'. ` +
      `Plugin must be in 'ready' or 'upgrade_pending' status to be upgraded.`,
    );
  }
  log.info(
    { pluginId, pluginKey: plugin.pluginKey, targetVersion: version },
    "plugin lifecycle: upgrade requested",
  );
  // Take the runtime down before touching the package on disk.
  await deactivatePluginRuntime(pluginId, plugin.pluginKey);
  // 1. Download and validate new package via loader
  const { oldManifest, newManifest, discovered } =
    await pluginLoaderInstance.upgradePlugin(pluginId, { version });
  log.info(
    {
      pluginId,
      pluginKey: plugin.pluginKey,
      oldVersion: oldManifest.version,
      newVersion: newManifest.version,
    },
    "plugin lifecycle: package upgraded on disk",
  );
  // 2. Compare capabilities
  const addedCaps = newManifest.capabilities.filter(
    (cap) => !oldManifest.capabilities.includes(cap),
  );
  // 3. Transition state
  if (addedCaps.length > 0) {
    // New capabilities require operator approval — worker stays stopped
    log.info(
      { pluginId, pluginKey: plugin.pluginKey, addedCaps },
      "plugin lifecycle: new capabilities detected, transitioning to upgrade_pending",
    );
    // Skip the inner stopWorkerIfRunning since we already stopped above
    const result = await transition(pluginId, "upgrade_pending", null, plugin);
    emitDomain("plugin.upgrade_pending", {
      pluginId,
      pluginKey: result.pluginKey,
    });
    return result;
  } else {
    // No new capabilities — go straight back to ready. The cached record
    // passed to transition() carries the new version/manifest so the
    // emitted record reflects the upgraded package.
    const result = await transition(pluginId, "ready", null, {
      ...plugin,
      version: discovered.version,
      manifestJson: newManifest,
    } as PluginRecord);
    await activateReadyPlugin(pluginId);
    emitDomain("plugin.loaded", {
      pluginId,
      pluginKey: result.pluginKey,
    });
    emitDomain("plugin.enabled", {
      pluginId,
      pluginKey: result.pluginKey,
    });
    return result;
  }
},
// -- startWorker ------------------------------------------------------
/**
 * startWorker — Explicitly starts the worker process for a 'ready' plugin.
 *
 * Emits 'plugin.worker_started' once the worker manager reports success.
 *
 * @throws BadRequest when no worker manager is configured or the plugin
 *   is not in 'ready' status.
 */
async startWorker(
  pluginId: string,
  options: WorkerStartOptions,
): Promise<void> {
  if (!workerManager) {
    throw badRequest(
      "Cannot start worker: no PluginWorkerManager is configured. " +
      "Provide a workerManager option when constructing the lifecycle manager.",
    );
  }
  const plugin = await requirePlugin(pluginId);
  if (plugin.status !== "ready") {
    throw badRequest(
      `Cannot start worker for plugin in status '${plugin.status}'. ` +
      `Plugin must be in 'ready' status.`,
    );
  }
  const ctx = { pluginId, pluginKey: plugin.pluginKey };
  log.info(ctx, "plugin lifecycle: starting worker");
  await workerManager.startWorker(pluginId, options);
  emitDomain("plugin.worker_started", ctx);
  log.info(ctx, "plugin lifecycle: worker started");
},
// -- stopWorker -------------------------------------------------------
/**
 * stopWorker — Best-effort stop of a plugin's worker.
 * No-op when no worker manager is configured.
 */
async stopWorker(pluginId: string): Promise<void> {
  if (!workerManager) return; // No worker manager — nothing to stop
  const { pluginKey } = await requirePlugin(pluginId);
  await stopWorkerIfRunning(pluginId, pluginKey);
},
// -- restartWorker ----------------------------------------------------
/**
 * restartWorker — Restarts an already-running worker for a 'ready' plugin.
 *
 * Emits 'plugin.worker_stopped' followed by 'plugin.worker_started' after
 * the restart completes.
 *
 * @throws BadRequest when no worker manager is configured, the plugin is
 *   not 'ready', or no worker is currently running.
 */
async restartWorker(pluginId: string): Promise<void> {
  if (!workerManager) {
    throw badRequest(
      "Cannot restart worker: no PluginWorkerManager is configured.",
    );
  }
  const plugin = await requirePlugin(pluginId);
  if (plugin.status !== "ready") {
    throw badRequest(
      `Cannot restart worker for plugin in status '${plugin.status}'. ` +
      `Plugin must be in 'ready' status.`,
    );
  }
  const handle = workerManager.getWorker(pluginId);
  if (!handle) {
    throw badRequest(
      `Cannot restart worker for plugin "${plugin.pluginKey}": no worker is running.`,
    );
  }
  const ctx = { pluginId, pluginKey: plugin.pluginKey };
  log.info(ctx, "plugin lifecycle: restarting worker");
  await handle.restart();
  emitDomain("plugin.worker_stopped", ctx);
  emitDomain("plugin.worker_started", ctx);
  log.info(ctx, "plugin lifecycle: worker restarted");
},
// -- getStatus --------------------------------------------------------
/** Current lifecycle status for a plugin, or null when it does not exist. */
async getStatus(pluginId: string): Promise<PluginStatus | null> {
  const record = await registry.getById(pluginId);
  if (!record) return null;
  return record.status;
},
// -- canTransition ----------------------------------------------------
/** Whether the plugin exists and may legally move to status `to`. */
async canTransition(pluginId: string, to: PluginStatus): Promise<boolean> {
  const record = await registry.getById(pluginId);
  return record ? isValidTransition(record.status, to) : false;
},
// -- Event subscriptions ----------------------------------------------
// Thin pass-throughs to the underlying emitter. Parameter types are
// supplied by the returned interface's declaration rather than repeated
// here.
on(event, listener) {
  emitter.on(event, listener);
},
off(event, listener) {
  emitter.off(event, listener);
},
once(event, listener) {
  emitter.once(event, listener);
},
};
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,86 @@
import { lt, sql } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { pluginLogs } from "@paperclipai/db";
import { logger } from "../middleware/logger.js";
/** Default retention period: 7 days. */
const DEFAULT_RETENTION_DAYS = 7;
/** Maximum rows to delete per sweep to avoid long-running transactions. */
const DELETE_BATCH_SIZE = 5_000;
/** Maximum number of batches per sweep to guard against unbounded loops. */
const MAX_ITERATIONS = 100;
/**
* Delete plugin log rows older than `retentionDays`.
*
* Deletes in batches of `DELETE_BATCH_SIZE` to keep transaction sizes
* bounded and avoid holding locks for extended periods.
*
* @returns The total number of rows deleted.
*/
export async function prunePluginLogs(
db: Db,
retentionDays: number = DEFAULT_RETENTION_DAYS,
): Promise<number> {
const cutoff = new Date();
cutoff.setDate(cutoff.getDate() - retentionDays);
let totalDeleted = 0;
let iterations = 0;
// Delete in batches to avoid long-running transactions
while (iterations < MAX_ITERATIONS) {
const deleted = await db
.delete(pluginLogs)
.where(lt(pluginLogs.createdAt, cutoff))
.returning({ id: pluginLogs.id })
.then((rows) => rows.length);
totalDeleted += deleted;
iterations++;
if (deleted < DELETE_BATCH_SIZE) break;
}
if (iterations >= MAX_ITERATIONS) {
logger.warn(
{ totalDeleted, iterations, cutoffDate: cutoff },
"Plugin log retention hit iteration limit; some logs may remain",
);
}
if (totalDeleted > 0) {
logger.info({ totalDeleted, retentionDays }, "Pruned expired plugin logs");
}
return totalDeleted;
}
/**
 * Start a periodic plugin log cleanup interval.
 *
 * Runs one sweep immediately on startup, then repeats every `intervalMs`.
 * Sweep failures are logged and never propagate to the caller.
 *
 * @param db - Database connection
 * @param intervalMs - How often to run (default: 1 hour)
 * @param retentionDays - How many days of logs to keep (default: 7)
 * @returns A cleanup function that stops the interval
 */
export function startPluginLogRetention(
  db: Db,
  intervalMs: number = 60 * 60 * 1_000,
  retentionDays: number = DEFAULT_RETENTION_DAYS,
): () => void {
  const sweep = (failureMessage: string): void => {
    prunePluginLogs(db, retentionDays).catch((err) => {
      logger.warn({ err }, failureMessage);
    });
  };
  const timer = setInterval(
    () => sweep("Plugin log retention sweep failed"),
    intervalMs,
  );
  sweep("Initial plugin log retention sweep failed");
  return () => clearInterval(timer);
}

View File

@@ -0,0 +1,163 @@
/**
* PluginManifestValidator — schema validation for plugin manifest files.
*
* Uses the shared Zod schema (`pluginManifestV1Schema`) to validate
* manifest payloads. Provides both a safe `parse()` variant (returns
* a result union) and a throwing `parseOrThrow()` for HTTP error
* propagation at install time.
*
* @see PLUGIN_SPEC.md §10 — Plugin Manifest
* @see packages/shared/src/validators/plugin.ts — Zod schema definition
*/
import { pluginManifestV1Schema } from "@paperclipai/shared";
import type { PaperclipPluginManifestV1 } from "@paperclipai/shared";
import { PLUGIN_API_VERSION } from "@paperclipai/shared";
import { badRequest } from "../errors.js";
// ---------------------------------------------------------------------------
// Supported manifest API versions
// ---------------------------------------------------------------------------
/**
* The set of plugin API versions this host can accept.
* When a new API version is introduced, add it here. Old versions should be
* retained until the host drops support for them.
*/
const SUPPORTED_VERSIONS = [PLUGIN_API_VERSION] as const;
// ---------------------------------------------------------------------------
// Parse result types
// ---------------------------------------------------------------------------
/**
 * Successful parse result. Discriminated from failure by `success: true`.
 */
export interface ManifestParseSuccess {
  success: true;
  /** The validated, fully-typed manifest payload. */
  manifest: PaperclipPluginManifestV1;
}
/**
 * Failed parse result. `errors` is a human-readable description of what went
 * wrong; `details` is the raw Zod error list for programmatic inspection.
 */
export interface ManifestParseFailure {
  success: false;
  /** All validation messages joined as `"path: message; path: message"`. */
  errors: string;
  /** One entry per Zod issue: the offending path and its message. */
  details: Array<{ path: (string | number)[]; message: string }>;
}
/** Union of parse outcomes; narrow on the `success` discriminant. */
export type ManifestParseResult = ManifestParseSuccess | ManifestParseFailure;
// ---------------------------------------------------------------------------
// PluginManifestValidator interface
// ---------------------------------------------------------------------------
/**
 * Service for parsing and validating plugin manifests.
 *
 * @see PLUGIN_SPEC.md §10 — Plugin Manifest
 */
export interface PluginManifestValidator {
  /**
   * Try to parse `input` as a plugin manifest.
   *
   * Returns a {@link ManifestParseSuccess} when the input passes all
   * validation rules, or a {@link ManifestParseFailure} with human-readable
   * error messages when it does not.
   *
   * This is the "safe" variant — it never throws.
   */
  parse(input: unknown): ManifestParseResult;
  /**
   * Parse `input` as a plugin manifest, throwing a 400 HttpError on failure.
   *
   * Use this at install time when an invalid manifest should surface as an
   * HTTP error to the caller.
   *
   * @throws {HttpError} 400 Bad Request if the manifest is invalid.
   */
  parseOrThrow(input: unknown): PaperclipPluginManifestV1;
  /**
   * Return the list of plugin API versions supported by this host.
   *
   * Callers can use this to present the supported version range to operators
   * or to decide whether a candidate plugin can be installed.
   */
  getSupportedVersions(): readonly number[];
}
// ---------------------------------------------------------------------------
// Factory
// ---------------------------------------------------------------------------
/**
 * Create a {@link PluginManifestValidator}.
 *
 * Usage:
 * ```ts
 * const validator = pluginManifestValidator();
 *
 * // Safe parse — inspect the result
 * const result = validator.parse(rawManifest);
 * if (!result.success) {
 *   console.error(result.errors);
 *   return;
 * }
 * const manifest = result.manifest;
 *
 * // Throwing parse — use at install time
 * const manifest = validator.parseOrThrow(rawManifest);
 *
 * // Check supported versions
 * const versions = validator.getSupportedVersions(); // [1]
 * ```
 */
export function pluginManifestValidator(): PluginManifestValidator {
  // FIX: `parse` is a standalone closure rather than referenced via `this`
  // inside parseOrThrow. The previous `this.parse(...)` broke when callers
  // destructured the methods off the returned object (common with service
  // objects), since `this` would then be undefined.
  /** Safe parse: validate against the shared Zod schema, never throws. */
  function parse(input: unknown): ManifestParseResult {
    const result = pluginManifestV1Schema.safeParse(input);
    if (result.success) {
      return {
        success: true,
        manifest: result.data as PaperclipPluginManifestV1,
      };
    }
    // Collect each Zod issue as (path, message) for programmatic use...
    const details = result.error.errors.map((issue) => ({
      path: issue.path,
      message: issue.message,
    }));
    // ...and join them into one human-readable summary string.
    const errors = details
      .map(({ path, message }) =>
        path.length > 0 ? `${path.join(".")}: ${message}` : message,
      )
      .join("; ");
    return {
      success: false,
      errors,
      details,
    };
  }
  /** Throwing parse: surface validation failure as a 400 HttpError. */
  function parseOrThrow(input: unknown): PaperclipPluginManifestV1 {
    const result = parse(input);
    if (!result.success) {
      throw badRequest(`Invalid plugin manifest: ${result.errors}`, result.details);
    }
    return result.manifest;
  }
  return {
    parse,
    parseOrThrow,
    getSupportedVersions: () => SUPPORTED_VERSIONS,
  };
}

View File

@@ -0,0 +1,682 @@
import { asc, eq, ne, sql, and } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import {
plugins,
pluginConfig,
pluginEntities,
pluginJobs,
pluginJobRuns,
pluginWebhookDeliveries,
} from "@paperclipai/db";
import type {
PaperclipPluginManifestV1,
PluginStatus,
InstallPlugin,
UpdatePluginStatus,
UpsertPluginConfig,
PatchPluginConfig,
PluginEntityRecord,
PluginEntityQuery,
PluginJobRecord,
PluginJobRunRecord,
PluginWebhookDeliveryRecord,
PluginJobStatus,
PluginJobRunStatus,
PluginJobRunTrigger,
PluginWebhookDeliveryStatus,
} from "@paperclipai/shared";
import { conflict, notFound } from "../errors.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Detect if a Postgres error is a unique-constraint violation (SQLSTATE
 * 23505) on the `plugins_plugin_key_idx` unique index.
 *
 * Accepts either the `constraint` or `constraint_name` property, with
 * `constraint` taking precedence when both are present.
 */
function isPluginKeyConflict(error: unknown): boolean {
  if (error === null || typeof error !== "object") return false;
  const { code, constraint, constraint_name: constraintName } = error as {
    code?: string;
    constraint?: string;
    constraint_name?: string;
  };
  if (code !== "23505") return false;
  return (constraint ?? constraintName) === "plugins_plugin_key_idx";
}
// ---------------------------------------------------------------------------
// Service
// ---------------------------------------------------------------------------
/**
* PluginRegistry CRUD operations for the `plugins` and `plugin_config`
* tables. Follows the same factory-function pattern used by the rest of
* the Paperclip service layer.
*
* This is the lowest-level persistence layer for plugins. Higher-level
* concerns such as lifecycle state-machine enforcement and capability
* gating are handled by {@link pluginLifecycleManager} and
* {@link pluginCapabilityValidator} respectively.
*
* @see PLUGIN_SPEC.md §21.3 — Required Tables
*/
export function pluginRegistryService(db: Db) {
// -----------------------------------------------------------------------
// Internal helpers
// -----------------------------------------------------------------------
// Fetch a plugin row by primary key, or null when absent.
async function getById(id: string) {
  const rows = await db.select().from(plugins).where(eq(plugins.id, id));
  return rows[0] ?? null;
}
// Fetch a plugin row by its unique pluginKey, or null when absent.
async function getByKey(pluginKey: string) {
  const rows = await db
    .select()
    .from(plugins)
    .where(eq(plugins.pluginKey, pluginKey));
  return rows[0] ?? null;
}
// Next install_order value: one past the current maximum (1 on empty table).
async function nextInstallOrder(): Promise<number> {
  const [row] = await db
    .select({ maxOrder: sql<number>`coalesce(max(${plugins.installOrder}), 0)` })
    .from(plugins);
  return (row?.maxOrder ?? 0) + 1;
}
// -----------------------------------------------------------------------
// Public API
// -----------------------------------------------------------------------
return {
// ----- Read -----------------------------------------------------------
/**
 * List all registered plugins ordered by install order.
 * NOTE: includes soft-deleted ('uninstalled') rows — use listInstalled
 * when those should be hidden.
 */
list: () =>
  db
    .select()
    .from(plugins)
    .orderBy(asc(plugins.installOrder)),
/**
 * List installed plugins (excludes soft-deleted/uninstalled).
 * Use for Plugin Manager and default API list so uninstalled plugins do not appear.
 */
listInstalled: () =>
  db
    .select()
    .from(plugins)
    .where(ne(plugins.status, "uninstalled"))
    .orderBy(asc(plugins.installOrder)),
/** List plugins filtered by status (any status, including 'uninstalled'). */
listByStatus: (status: PluginStatus) =>
  db
    .select()
    .from(plugins)
    .where(eq(plugins.status, status))
    .orderBy(asc(plugins.installOrder)),
/** Get a single plugin by primary key. */
getById,
/** Get a single plugin by its unique `pluginKey`. */
getByKey,
// ----- Install / Register --------------------------------------------
/**
 * Register (install) a new plugin.
 *
 * The caller is expected to have already resolved and validated the
 * manifest from the package. This method persists the plugin row and
 * assigns the next install order.
 *
 * Reinstall semantics: when a row with the same pluginKey exists in the
 * 'uninstalled' state, it is reactivated in place (same id) so that
 * plugin-scoped data and references survive uninstall/reinstall cycles;
 * any other existing status raises a 409 conflict.
 */
install: async (input: InstallPlugin, manifest: PaperclipPluginManifestV1) => {
  const existing = await getByKey(manifest.id);
  if (existing) {
    if (existing.status !== "uninstalled") {
      throw conflict(`Plugin already installed: ${manifest.id}`);
    }
    // Reinstall after soft-delete: reactivate the existing row so plugin-scoped
    // data and references remain stable across uninstall/reinstall cycles.
    // NOTE: installOrder is intentionally left at its original value here.
    return db
      .update(plugins)
      .set({
        packageName: input.packageName,
        packagePath: input.packagePath ?? null,
        version: manifest.version,
        apiVersion: manifest.apiVersion,
        categories: manifest.categories,
        manifestJson: manifest,
        status: "installed" as PluginStatus,
        lastError: null,
        updatedAt: new Date(),
      })
      .where(eq(plugins.id, existing.id))
      .returning()
      .then((rows) => rows[0] ?? null);
  }
  const installOrder = await nextInstallOrder();
  try {
    const rows = await db
      .insert(plugins)
      .values({
        pluginKey: manifest.id,
        packageName: input.packageName,
        version: manifest.version,
        apiVersion: manifest.apiVersion,
        categories: manifest.categories,
        manifestJson: manifest,
        status: "installed" as PluginStatus,
        installOrder,
        packagePath: input.packagePath ?? null,
      })
      .returning();
    return rows[0];
  } catch (error) {
    // A concurrent install can race past the getByKey check above; translate
    // the unique-index violation into the same 409 the check would raise.
    if (isPluginKeyConflict(error)) {
      throw conflict(`Plugin already installed: ${manifest.id}`);
    }
    throw error;
  }
},
// ----- Update ---------------------------------------------------------
/**
 * Update a plugin's manifest and version (e.g. on upgrade).
 * The plugin must already exist.
 *
 * When a manifest is supplied, apiVersion and categories are refreshed
 * from it alongside manifestJson.
 */
update: async (
  id: string,
  data: {
    packageName?: string;
    version?: string;
    manifest?: PaperclipPluginManifestV1;
  },
) => {
  const existing = await getById(id);
  if (!existing) throw notFound("Plugin not found");
  const changes: Partial<typeof plugins.$inferInsert> & { updatedAt: Date } = {
    updatedAt: new Date(),
  };
  if (data.packageName !== undefined) changes.packageName = data.packageName;
  if (data.version !== undefined) changes.version = data.version;
  const { manifest } = data;
  if (manifest !== undefined) {
    changes.manifestJson = manifest;
    changes.apiVersion = manifest.apiVersion;
    changes.categories = manifest.categories;
  }
  const rows = await db
    .update(plugins)
    .set(changes)
    .where(eq(plugins.id, id))
    .returning();
  return rows[0] ?? null;
},
// ----- Status ---------------------------------------------------------
/** Update a plugin's lifecycle status and optional error message. */
updateStatus: async (id: string, input: UpdatePluginStatus) => {
  const existing = await getById(id);
  if (!existing) throw notFound("Plugin not found");
  const rows = await db
    .update(plugins)
    .set({
      status: input.status,
      lastError: input.lastError ?? null,
      updatedAt: new Date(),
    })
    .where(eq(plugins.id, id))
    .returning();
  return rows[0] ?? null;
},
// ----- Uninstall / Remove --------------------------------------------
/**
 * Uninstall a plugin.
 *
 * When `removeData` is true the plugin row (and cascaded config) is
 * hard-deleted. Otherwise the status is set to `"uninstalled"` for a
 * soft-delete that preserves the record.
 */
uninstall: async (id: string, removeData = false) => {
  const existing = await getById(id);
  if (!existing) throw notFound("Plugin not found");
  if (removeData) {
    // Hard delete — plugin_config rows cascade via FK onDelete.
    const deletedRows = await db
      .delete(plugins)
      .where(eq(plugins.id, id))
      .returning();
    return deletedRows[0] ?? null;
  }
  // Soft delete — mark as uninstalled but keep the row.
  const updatedRows = await db
    .update(plugins)
    .set({
      status: "uninstalled" as PluginStatus,
      updatedAt: new Date(),
    })
    .where(eq(plugins.id, id))
    .returning();
  return updatedRows[0] ?? null;
},
// ----- Config ---------------------------------------------------------
/** Retrieve a plugin's instance configuration row, or null when absent. */
getConfig: async (pluginId: string) => {
  const rows = await db
    .select()
    .from(pluginConfig)
    .where(eq(pluginConfig.pluginId, pluginId));
  return rows[0] ?? null;
},
/**
 * Create or fully replace a plugin's instance configuration.
 * If a config row already exists for the plugin it is replaced
 * (clearing any recorded lastError); otherwise a new row is inserted.
 */
upsertConfig: async (pluginId: string, input: UpsertPluginConfig) => {
  const owner = await getById(pluginId);
  if (!owner) throw notFound("Plugin not found");
  const [current] = await db
    .select()
    .from(pluginConfig)
    .where(eq(pluginConfig.pluginId, pluginId));
  if (!current) {
    const [created] = await db
      .insert(pluginConfig)
      .values({
        pluginId,
        configJson: input.configJson,
      })
      .returning();
    return created;
  }
  const [replaced] = await db
    .update(pluginConfig)
    .set({
      configJson: input.configJson,
      lastError: null,
      updatedAt: new Date(),
    })
    .where(eq(pluginConfig.pluginId, pluginId))
    .returning();
  return replaced;
},
/**
 * Partially update a plugin's instance configuration via shallow merge.
 * If no config row exists yet one is created with the supplied values.
 * A successful patch also clears any recorded lastError.
 */
patchConfig: async (pluginId: string, input: PatchPluginConfig) => {
  const owner = await getById(pluginId);
  if (!owner) throw notFound("Plugin not found");
  const [current] = await db
    .select()
    .from(pluginConfig)
    .where(eq(pluginConfig.pluginId, pluginId));
  if (!current) {
    const [created] = await db
      .insert(pluginConfig)
      .values({
        pluginId,
        configJson: input.configJson,
      })
      .returning();
    return created;
  }
  const [patched] = await db
    .update(pluginConfig)
    .set({
      configJson: { ...current.configJson, ...input.configJson },
      lastError: null,
      updatedAt: new Date(),
    })
    .where(eq(pluginConfig.pluginId, pluginId))
    .returning();
  return patched;
},
/**
 * Record an error against a plugin's config (e.g. validation failure
 * against the plugin's instanceConfigSchema). Pass null to clear it.
 *
 * @throws NotFound when no config row exists for the plugin.
 */
setConfigError: async (pluginId: string, lastError: string | null) => {
  const [row] = await db
    .update(pluginConfig)
    .set({ lastError, updatedAt: new Date() })
    .where(eq(pluginConfig.pluginId, pluginId))
    .returning();
  if (row === undefined) throw notFound("Plugin config not found");
  return row;
},
/** Delete a plugin's config row; returns the deleted row or null. */
deleteConfig: async (pluginId: string) => {
  const [removed] = await db
    .delete(pluginConfig)
    .where(eq(pluginConfig.pluginId, pluginId))
    .returning();
  return removed ?? null;
},
// ----- Entities -------------------------------------------------------
/**
 * List persistent entity mappings owned by a specific plugin, with
 * filtering and pagination.
 *
 * @param pluginId - The UUID of the plugin.
 * @param query - Optional filters (type, externalId) and pagination (limit, offset).
 * @returns A list of matching `PluginEntityRecord` objects.
 */
listEntities: (pluginId: string, query?: PluginEntityQuery) => {
  const filters = [eq(pluginEntities.pluginId, pluginId)];
  if (query?.entityType) {
    filters.push(eq(pluginEntities.entityType, query.entityType));
  }
  if (query?.externalId) {
    filters.push(eq(pluginEntities.externalId, query.externalId));
  }
  return db
    .select()
    .from(pluginEntities)
    .where(and(...filters))
    .orderBy(asc(pluginEntities.createdAt))
    .limit(query?.limit ?? 100)
    .offset(query?.offset ?? 0);
},
/**
 * Look up a plugin-owned entity mapping by its external identifier.
 *
 * @param pluginId - The UUID of the plugin.
 * @param entityType - The type of entity (e.g., 'project', 'issue').
 * @param externalId - The identifier in the external system.
 * @returns The matching `PluginEntityRecord` or null.
 */
getEntityByExternalId: async (
  pluginId: string,
  entityType: string,
  externalId: string,
) => {
  const [row] = await db
    .select()
    .from(pluginEntities)
    .where(
      and(
        eq(pluginEntities.pluginId, pluginId),
        eq(pluginEntities.entityType, entityType),
        eq(pluginEntities.externalId, externalId),
      ),
    );
  return row ?? null;
},
/**
 * Create or update a persistent mapping between a Paperclip object and an
 * external entity.
 *
 * The "upsert" is a manual select-then-insert/update because Drizzle's
 * onConflictDoUpdate does not cleanly cover this composite lookup.
 *
 * NOTE(review): the lookup coalesces a missing externalId to "" while the
 * insert stores input.externalId verbatim — a row created with a null
 * externalId will never be matched by a later upsert and can duplicate.
 * Confirm whether externalId is effectively required for upserted types.
 *
 * @param pluginId - The UUID of the plugin.
 * @param input - The entity data to persist.
 * @returns The newly created or updated `PluginEntityRecord`.
 */
upsertEntity: async (
  pluginId: string,
  input: Omit<typeof pluginEntities.$inferInsert, "id" | "pluginId" | "createdAt" | "updatedAt">,
) => {
  const existing = await db
    .select()
    .from(pluginEntities)
    .where(
      and(
        eq(pluginEntities.pluginId, pluginId),
        eq(pluginEntities.entityType, input.entityType),
        eq(pluginEntities.externalId, input.externalId ?? ""),
      ),
    )
    .then((rows) => rows[0] ?? null);
  if (existing) {
    return db
      .update(pluginEntities)
      .set({
        ...input,
        updatedAt: new Date(),
      })
      .where(eq(pluginEntities.id, existing.id))
      .returning()
      .then((rows) => rows[0]);
  }
  // FIX: typed insert payload replaces the previous `as any` cast. The
  // Omit'd fields (id/createdAt/updatedAt) are defaulted by the schema and
  // pluginId is supplied explicitly, so this satisfies the insert model
  // without disabling type checking.
  const insertValues: typeof pluginEntities.$inferInsert = {
    ...input,
    pluginId,
  };
  return db
    .insert(pluginEntities)
    .values(insertValues)
    .returning()
    .then((rows) => rows[0]);
},
/**
 * Delete a specific plugin-owned entity mapping by its internal UUID.
 *
 * @param id - The UUID of the entity record.
 * @returns The deleted record, or null if not found.
 */
deleteEntity: async (id: string) => {
  const [removed] = await db
    .delete(pluginEntities)
    .where(eq(pluginEntities.id, id))
    .returning();
  return removed ?? null;
},
// ----- Jobs -----------------------------------------------------------
/**
 * List all scheduled jobs registered for a specific plugin, ordered by
 * their stable jobKey.
 *
 * @param pluginId - The UUID of the plugin.
 * @returns A list of `PluginJobRecord` objects.
 */
listJobs: (pluginId: string) =>
  db
    .select()
    .from(pluginJobs)
    .where(eq(pluginJobs.pluginId, pluginId))
    .orderBy(asc(pluginJobs.jobKey)),
/**
 * Look up a plugin job by its unique job key.
 *
 * @param pluginId - The UUID of the plugin.
 * @param jobKey - The key defined in the plugin manifest.
 * @returns The matching `PluginJobRecord` or null.
 */
getJobByKey: (pluginId: string, jobKey: string) =>
  db
    .select()
    .from(pluginJobs)
    .where(and(eq(pluginJobs.pluginId, pluginId), eq(pluginJobs.jobKey, jobKey)))
    .then((rows) => rows[0] ?? null),
/**
 * Register or update a scheduled job for a plugin.
 *
 * On update, an omitted status keeps the job's current status.
 *
 * @param pluginId - The UUID of the plugin.
 * @param jobKey - The unique key for the job.
 * @param input - The schedule (cron) and optional status.
 * @returns The updated or created `PluginJobRecord`.
 */
upsertJob: async (
  pluginId: string,
  jobKey: string,
  input: { schedule: string; status?: PluginJobStatus },
) => {
  const [current] = await db
    .select()
    .from(pluginJobs)
    .where(and(eq(pluginJobs.pluginId, pluginId), eq(pluginJobs.jobKey, jobKey)));
  if (!current) {
    const [created] = await db
      .insert(pluginJobs)
      .values({
        pluginId,
        jobKey,
        schedule: input.schedule,
        status: input.status ?? "active",
      })
      .returning();
    return created;
  }
  const [updated] = await db
    .update(pluginJobs)
    .set({
      schedule: input.schedule,
      status: input.status ?? current.status,
      updatedAt: new Date(),
    })
    .where(eq(pluginJobs.id, current.id))
    .returning();
  return updated;
},
/**
* Record the start of a specific job execution.
*
* @param pluginId - The UUID of the plugin.
* @param jobId - The UUID of the parent job record.
* @param trigger - What triggered this run (e.g., 'schedule', 'manual').
* @returns The newly created `PluginJobRunRecord` in 'pending' status.
*/
createJobRun: async (
pluginId: string,
jobId: string,
trigger: PluginJobRunTrigger,
) => {
return db
.insert(pluginJobRuns)
.values({
pluginId,
jobId,
trigger,
status: "pending",
})
.returning()
.then((rows) => rows[0]);
},
/**
* Update the status, duration, and logs of a job execution record.
*
* @param runId - The UUID of the job run.
* @param input - The update fields (status, error, duration, etc.).
* @returns The updated `PluginJobRunRecord`.
*/
updateJobRun: async (
runId: string,
input: {
status: PluginJobRunStatus;
durationMs?: number;
error?: string;
logs?: string[];
startedAt?: Date;
finishedAt?: Date;
},
) => {
return db
.update(pluginJobRuns)
.set(input)
.where(eq(pluginJobRuns.id, runId))
.returning()
.then((rows) => rows[0] ?? null);
},
// ----- Webhooks -------------------------------------------------------
/**
* Create a record for an incoming webhook delivery.
*
* @param pluginId - The UUID of the receiving plugin.
* @param webhookKey - The endpoint key defined in the manifest.
* @param input - The payload, headers, and optional external ID.
* @returns The newly created `PluginWebhookDeliveryRecord` in 'pending' status.
*/
createWebhookDelivery: async (
pluginId: string,
webhookKey: string,
input: {
externalId?: string;
payload: Record<string, unknown>;
headers?: Record<string, string>;
},
) => {
return db
.insert(pluginWebhookDeliveries)
.values({
pluginId,
webhookKey,
externalId: input.externalId,
payload: input.payload,
headers: input.headers ?? {},
status: "pending",
})
.returning()
.then((rows) => rows[0]);
},
/**
* Update the status and processing metrics of a webhook delivery.
*
* @param deliveryId - The UUID of the delivery record.
* @param input - The update fields (status, error, duration, etc.).
* @returns The updated `PluginWebhookDeliveryRecord`.
*/
updateWebhookDelivery: async (
deliveryId: string,
input: {
status: PluginWebhookDeliveryStatus;
durationMs?: number;
error?: string;
startedAt?: Date;
finishedAt?: Date;
},
) => {
return db
.update(pluginWebhookDeliveries)
.set(input)
.where(eq(pluginWebhookDeliveries.id, deliveryId))
.returning()
.then((rows) => rows[0] ?? null);
},
};
}

View File

@@ -0,0 +1,221 @@
import { existsSync, readFileSync, realpathSync } from "node:fs";
import path from "node:path";
import vm from "node:vm";
import type { PaperclipPluginManifestV1 } from "@paperclipai/shared";
import type { PluginCapabilityValidator } from "./plugin-capability-validator.js";
/**
 * Error raised for any sandbox policy or module-loading failure
 * (denied imports, path escapes, unreadable or ESM modules).
 */
export class PluginSandboxError extends Error {
  /** Stable discriminator so callers can match on `err.name`. */
  name = "PluginSandboxError";
}
/**
 * Sandbox runtime options used when loading a plugin worker module.
 *
 * `allowedModuleSpecifiers` controls which bare module specifiers are permitted.
 * `allowedModules` provides concrete host-provided bindings for those specifiers.
 */
export interface PluginSandboxOptions {
  /** Path to the worker entrypoint; its directory becomes the plugin root. */
  entrypointPath: string;
  /** Bare specifiers a plugin `require` may name; anything else is denied. */
  allowedModuleSpecifiers?: ReadonlySet<string>;
  /** Host-provided export objects, one per allow-listed specifier. */
  allowedModules?: Readonly<Record<string, Record<string, unknown>>>;
  /** Extra globals merged over the safe defaults in the VM context. */
  allowedGlobals?: Record<string, unknown>;
  /** Per-module script evaluation timeout; defaults to 2000 ms. */
  timeoutMs?: number;
}
/**
 * Operation-level runtime gate for plugin host API calls.
 * Every host operation must be checked against manifest capabilities before execution.
 */
export interface CapabilityScopedInvoker {
  /** Run `fn` only after `operation` passes the capability check. */
  invoke<T>(operation: string, fn: () => Promise<T> | T): Promise<T>;
}
/** Result of a sandboxed load: the module's normalized export namespace. */
interface LoadedModule {
  namespace: Record<string, unknown>;
}
/** Per-module script evaluation timeout applied when none is configured. */
const DEFAULT_TIMEOUT_MS = 2_000;
/** Resolution candidates, probed in order, for extensionless imports. */
const MODULE_PATH_SUFFIXES = ["", ".js", ".mjs", ".cjs", "/index.js", "/index.mjs", "/index.cjs"];
/**
 * Globals exposed to every sandboxed module. Deliberately minimal — note
 * the absence of host globals such as `process` and `require`, per the
 * loader's "no implicit host access" guarantee.
 */
const DEFAULT_GLOBALS: Record<string, unknown> = {
  console,
  setTimeout,
  clearTimeout,
  setInterval,
  clearInterval,
  URL,
  URLSearchParams,
  TextEncoder,
  TextDecoder,
  AbortController,
  AbortSignal,
};
/**
 * Build an invoker that gates every host operation behind the plugin's
 * declared manifest capabilities before running it.
 *
 * @param manifest - The plugin's parsed manifest (capability declarations).
 * @param validator - Host-side validator that throws on undeclared operations.
 * @returns An invoker whose `invoke` runs `fn` only after the capability
 *   check passes; validator exceptions propagate unchanged to the caller.
 */
export function createCapabilityScopedInvoker(
  manifest: PaperclipPluginManifestV1,
  validator: PluginCapabilityValidator,
): CapabilityScopedInvoker {
  const invoke = async <T>(operation: string, fn: () => Promise<T> | T): Promise<T> => {
    // Capability gate first — fn must never run for an undeclared operation.
    validator.assertOperation(manifest, operation);
    return fn();
  };
  return { invoke };
}
/**
 * Load a CommonJS plugin module in a VM context with explicit module import allow-listing.
 *
 * Security properties:
 * - no implicit access to host globals like `process`
 * - no unrestricted built-in module imports
 * - relative imports are resolved only inside the plugin root directory
 *
 * @param options - Entrypoint path, allow-lists, extra globals, timeout.
 * @returns The entrypoint's export namespace (shallow copy).
 * @throws {PluginSandboxError} On denied imports, root escapes, ESM
 *   sources, or unreadable/unresolvable module files.
 */
export async function loadPluginModuleInSandbox(
  options: PluginSandboxOptions,
): Promise<LoadedModule> {
  const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const allowedSpecifiers = options.allowedModuleSpecifiers ?? new Set<string>();
  const entrypointPath = path.resolve(options.entrypointPath);
  // The entrypoint's directory defines the boundary for relative imports.
  const pluginRoot = path.dirname(entrypointPath);
  const context = vm.createContext({
    ...DEFAULT_GLOBALS,
    ...options.allowedGlobals,
  });
  const moduleCache = new Map<string, Record<string, unknown>>();
  const allowedModules = options.allowedModules ?? {};
  // Realpath the root so symlinked plugin directories compare correctly
  // against the realpath'd module files checked below.
  const realPluginRoot = realpathSync(pluginRoot);
  const loadModuleSync = (modulePath: string): Record<string, unknown> => {
    const resolvedPath = resolveModulePathSync(path.resolve(modulePath));
    const realPath = realpathSync(resolvedPath);
    if (!isWithinRoot(realPath, realPluginRoot)) {
      throw new PluginSandboxError(
        `Import '${modulePath}' escapes plugin root and is not allowed`,
      );
    }
    const cached = moduleCache.get(realPath);
    if (cached) return cached;
    const code = readModuleSourceSync(realPath);
    if (looksLikeEsm(code)) {
      throw new PluginSandboxError(
        "Sandbox loader only supports CommonJS modules. Build plugin worker entrypoints as CJS for sandboxed loading.",
      );
    }
    const module = { exports: {} as Record<string, unknown> };
    // Cache the module before execution to preserve CommonJS cycle semantics.
    moduleCache.set(realPath, module.exports);
    const requireInSandbox = (specifier: string): Record<string, unknown> => {
      // Bare specifiers go through the allow-list; relative/absolute
      // specifiers are resolved on disk (and re-checked against the root).
      if (!specifier.startsWith(".") && !specifier.startsWith("/")) {
        if (!allowedSpecifiers.has(specifier)) {
          throw new PluginSandboxError(
            `Import denied for module '${specifier}'. Add an explicit sandbox allow-list entry.`,
          );
        }
        const binding = allowedModules[specifier];
        if (!binding) {
          throw new PluginSandboxError(
            `Bare module '${specifier}' is allow-listed but no host binding is registered.`,
          );
        }
        return binding;
      }
      const candidatePath = path.resolve(path.dirname(realPath), specifier);
      return loadModuleSync(candidatePath);
    };
    // Inject the CJS module arguments into the context so the script can call
    // the wrapper immediately. This is critical: the timeout in runInContext
    // only applies during script evaluation. By including the self-invocation
    // `(fn)(exports, module, ...)` in the script text, the timeout also covers
    // the actual module body execution — preventing infinite loops from hanging.
    const sandboxArgs = {
      __paperclip_exports: module.exports,
      __paperclip_module: module,
      __paperclip_require: requireInSandbox,
      __paperclip_filename: realPath,
      __paperclip_dirname: path.dirname(realPath),
    };
    // Temporarily inject args into the context, run, then remove to avoid pollution.
    Object.assign(context, sandboxArgs);
    const wrapped = `(function (exports, module, require, __filename, __dirname) {\n${code}\n})(__paperclip_exports, __paperclip_module, __paperclip_require, __paperclip_filename, __paperclip_dirname)`;
    const script = new vm.Script(wrapped, { filename: realPath });
    try {
      script.runInContext(context, { timeout: timeoutMs });
    } finally {
      for (const key of Object.keys(sandboxArgs)) {
        // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
        delete (context as Record<string, unknown>)[key];
      }
    }
    // Re-cache in normalized form: a module that reassigned `module.exports`
    // (or exported a non-object) replaces the pre-execution cycle placeholder.
    const normalizedExports = normalizeModuleExports(module.exports);
    moduleCache.set(realPath, normalizedExports);
    return normalizedExports;
  };
  const entryExports = loadModuleSync(entrypointPath);
  return {
    namespace: { ...entryExports },
  };
}
/**
 * Resolve a module path by probing the bare candidate plus the standard
 * CommonJS suffixes (.js/.mjs/.cjs and index variants); first hit wins.
 *
 * @param candidatePath - Absolute path, possibly without an extension.
 * @returns The first candidate that exists on disk.
 * @throws {PluginSandboxError} When no candidate exists.
 */
function resolveModulePathSync(candidatePath: string): string {
  const resolved = MODULE_PATH_SUFFIXES
    .map((suffix) => `${candidatePath}${suffix}`)
    .find((fullPath) => existsSync(fullPath));
  if (resolved === undefined) {
    throw new PluginSandboxError(`Unable to resolve module import at path '${candidatePath}'`);
  }
  return resolved;
}
/**
 * Report whether `targetPath` lies inside `rootPath` (or equals it).
 * Comparing via `path.relative` defeats sibling-prefix tricks such as
 * `/root-a` passing a check against `/root`.
 */
function isWithinRoot(targetPath: string, rootPath: string): boolean {
  const rel = path.relative(rootPath, targetPath);
  if (rel === "") return true;
  return !path.isAbsolute(rel) && !rel.startsWith("..");
}
/**
 * Read a module's source text, wrapping any filesystem failure in a
 * PluginSandboxError so sandbox callers deal with one error type.
 */
function readModuleSourceSync(modulePath: string): string {
  try {
    return readFileSync(modulePath, "utf8");
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new PluginSandboxError(`Failed to read sandbox module '${modulePath}': ${reason}`);
  }
}
/**
 * Coerce a CommonJS `module.exports` value into an object namespace.
 * Non-object values are wrapped as `{ default: value }` — note that
 * function exports are not object-typed, so `module.exports = fn`
 * surfaces as `{ default: fn }`.
 */
function normalizeModuleExports(exportsValue: unknown): Record<string, unknown> {
  const isObjectLike = typeof exportsValue === "object" && exportsValue !== null;
  return isObjectLike
    ? (exportsValue as Record<string, unknown>)
    : { default: exportsValue };
}
/**
 * Lightweight guard to reject ESM syntax in the VM CommonJS loader.
 *
 * Matches `import`/`export` statements at the start of a line, including
 * minified forms with no space after the keyword (`import{a}from"x"`,
 * `export*from"y"`). Deliberately does NOT match:
 * - dynamic `import(...)` calls, which are legal in CJS
 * - CommonJS `exports.foo = ...` assignments
 * Heuristic only: keyword-like text inside strings or comments can still
 * produce a false positive.
 */
function looksLikeEsm(code: string): boolean {
  return /(^|\n)\s*(import|export)(\s+|\s*[{*])/.test(code);
}

View File

@@ -0,0 +1,354 @@
/**
* Plugin secrets host-side handler — resolves secret references through the
* Paperclip secret provider system.
*
* When a plugin worker calls `ctx.secrets.resolve(secretRef)`, the JSON-RPC
* request arrives at the host with `{ secretRef }`. This module provides the
* concrete `HostServices.secrets` adapter that:
*
* 1. Parses the `secretRef` string to identify the secret.
* 2. Looks up the secret record and its latest version in the database.
* 3. Delegates to the configured `SecretProviderModule` to decrypt /
* resolve the raw value.
* 4. Returns the resolved plaintext value to the worker.
*
* ## Secret Reference Format
*
* A `secretRef` is a **secret UUID** — the primary key (`id`) of a row in
* the `company_secrets` table. Operators place these UUIDs into plugin
* config values; plugin workers resolve them at execution time via
* `ctx.secrets.resolve(secretId)`.
*
* ## Security Invariants
*
* - Resolved values are **never** logged, persisted, or included in error
* messages (per PLUGIN_SPEC.md §22).
* - The handler is capability-gated: only plugins with `secrets.read-ref`
* declared in their manifest may call it (enforced by `host-client-factory`).
* - The host handler itself does not cache resolved values. Each call goes
* through the secret provider to honour rotation.
*
* @see PLUGIN_SPEC.md §22 — Secrets
* @see host-client-factory.ts — capability gating
* @see services/secrets.ts — secretService used by agent env bindings
*/
import { eq, and, desc } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { companySecrets, companySecretVersions, pluginConfig } from "@paperclipai/db";
import type { SecretProvider } from "@paperclipai/shared";
import { getSecretProvider } from "../secrets/provider-registry.js";
import { pluginRegistryService } from "./plugin-registry.js";
// ---------------------------------------------------------------------------
// Error helpers
// ---------------------------------------------------------------------------
/**
 * Build a sanitised "not found" error. Only the ref identifier is
 * embedded — never any resolved secret material.
 */
function secretNotFound(secretRef: string): Error {
  const error = new Error(`Secret not found: ${secretRef}`);
  error.name = "SecretNotFoundError";
  return error;
}
/** Sanitised error for a secret that exists but has no stored versions. */
function secretVersionNotFound(secretRef: string): Error {
  const error = new Error(`No version found for secret: ${secretRef}`);
  error.name = "SecretVersionNotFoundError";
  return error;
}
/** Sanitised error for a malformed (non-UUID) secret reference. */
function invalidSecretRef(secretRef: string): Error {
  const error = new Error(`Invalid secret reference: ${secretRef}`);
  error.name = "InvalidSecretRefError";
  return error;
}
// ---------------------------------------------------------------------------
// Validation
// ---------------------------------------------------------------------------
/**
 * UUID-shaped string check. Note the version nibble is not constrained,
 * so any RFC-4122-style UUID passes — not only v4.
 */
const UUID_RE =
  /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
/** True when `value` is UUID-shaped. */
function isUuid(value: string): boolean {
  return UUID_RE.test(value);
}
/**
 * Gather dot-separated property paths whose schema node declares
 * `format: "secret-ref"`. Only `properties` of object schemas are walked
 * (recursively), mirroring the shapes `JsonSchemaForm` renders.
 */
function collectSecretRefPaths(
  schema: Record<string, unknown> | null | undefined,
): Set<string> {
  const found = new Set<string>();
  if (!schema || typeof schema !== "object") return found;
  const visit = (node: Record<string, unknown>, prefix: string): void => {
    const props = node.properties as Record<string, Record<string, unknown>> | undefined;
    if (!props || typeof props !== "object") return;
    for (const [key, propSchema] of Object.entries(props)) {
      if (!propSchema || typeof propSchema !== "object") continue;
      const dotPath = prefix === "" ? key : `${prefix}.${key}`;
      if (propSchema.format === "secret-ref") found.add(dotPath);
      // Descend only into nested object schemas.
      if (propSchema.type === "object") visit(propSchema, dotPath);
    }
  };
  visit(schema, "");
  return found;
}
/**
 * Extract secret reference UUIDs from a plugin's configJson.
 *
 * With a schema that annotates `format: "secret-ref"` paths, only values
 * at those paths are taken. Without one, every UUID-shaped string in the
 * config is collected — backwards compatible for plugins that omit an
 * instanceConfigSchema.
 */
export function extractSecretRefsFromConfig(
  configJson: unknown,
  schema?: Record<string, unknown> | null,
): Set<string> {
  const refs = new Set<string>();
  if (configJson == null || typeof configJson !== "object") return refs;
  const secretPaths = collectSecretRefPaths(schema);
  if (secretPaths.size === 0) {
    // Fallback: no schema or no secret-ref annotations — collect every
    // UUID-shaped string anywhere in the config tree.
    const visitAll = (value: unknown): void => {
      if (typeof value === "string") {
        if (isUuid(value)) refs.add(value);
        return;
      }
      if (Array.isArray(value)) {
        value.forEach(visitAll);
        return;
      }
      if (value !== null && typeof value === "object") {
        Object.values(value as Record<string, unknown>).forEach(visitAll);
      }
    };
    visitAll(configJson);
    return refs;
  }
  // Schema-scoped extraction: resolve each declared dot-path.
  for (const dotPath of secretPaths) {
    let cursor: unknown = configJson;
    for (const segment of dotPath.split(".")) {
      if (cursor == null || typeof cursor !== "object") {
        cursor = undefined;
        break;
      }
      cursor = (cursor as Record<string, unknown>)[segment];
    }
    if (typeof cursor === "string" && isUuid(cursor)) refs.add(cursor);
  }
  return refs;
}
// ---------------------------------------------------------------------------
// Handler factory
// ---------------------------------------------------------------------------
/**
 * Input shape for the `secrets.resolve` handler.
 *
 * Matches `WorkerToHostMethods["secrets.resolve"][0]` from `protocol.ts`.
 */
export interface PluginSecretsResolveParams {
  /** The secret reference string (a secret UUID). */
  secretRef: string;
}
/**
 * Options for creating the plugin secrets handler.
 */
export interface PluginSecretsHandlerOptions {
  /** Database connection. */
  db: Db;
  /**
   * The plugin ID using this handler.
   * Scopes the allowed-ref config lookup and keys the rate limiter;
   * never included in error payloads that reach the plugin worker.
   */
  pluginId: string;
}
/**
 * The `HostServices.secrets` adapter for the plugin host-client factory.
 */
export interface PluginSecretsService {
  /**
   * Resolve a secret reference to its current plaintext value.
   *
   * @param params - Contains the `secretRef` (UUID of the secret)
   * @returns The resolved secret value
   * @throws {Error} If the ref is malformed, rate-limited, not referenced
   *   in the plugin's config, not found, has no versions, or the provider
   *   fails to resolve
   */
  resolve(params: PluginSecretsResolveParams): Promise<string>;
}
/** Simple sliding-window rate limiter for secret resolution attempts. */
function createRateLimiter(maxAttempts: number, windowMs: number) {
  const attemptLog = new Map<string, number[]>();
  return {
    /**
     * Record an attempt for `key` and report whether it is allowed.
     * Returns false once `maxAttempts` attempts already landed inside
     * the trailing window; denied attempts are not recorded.
     */
    check(key: string): boolean {
      const now = Date.now();
      const recent = (attemptLog.get(key) ?? []).filter((ts) => ts > now - windowMs);
      if (recent.length >= maxAttempts) return false;
      recent.push(now);
      attemptLog.set(key, recent);
      return true;
    },
  };
}
/**
 * Create a `HostServices.secrets` adapter for a specific plugin.
 *
 * The returned service looks up secrets by UUID, fetches the latest
 * version material, and delegates to the appropriate
 * `SecretProviderModule` for decryption.
 *
 * @example
 * ```ts
 * const secretsHandler = createPluginSecretsHandler({ db, pluginId });
 * const handlers = createHostClientHandlers({
 *   pluginId,
 *   capabilities: manifest.capabilities,
 *   services: {
 *     secrets: secretsHandler,
 *     // ...
 *   },
 * });
 * ```
 *
 * @param options - Database connection and plugin identity
 * @returns A `PluginSecretsService` suitable for `HostServices.secrets`
 */
/**
 * Build the `HostServices.secrets` adapter for one plugin.
 *
 * Resolution pipeline per call: rate limit → UUID format check →
 * config-scope check (the ref must appear in this plugin's stored
 * config) → secret + latest-version lookup → provider resolution.
 * Resolved values are returned to the caller and never logged or
 * cached here.
 */
export function createPluginSecretsHandler(
  options: PluginSecretsHandlerOptions,
): PluginSecretsService {
  const { db, pluginId } = options;
  const registry = pluginRegistryService(db);
  // Rate limit: max 30 resolution attempts per plugin per minute
  const rateLimiter = createRateLimiter(30, 60_000);
  // NOTE(review): the allowed-ref set is cached for 30s, so secrets newly
  // added to the plugin's config may be rejected until the cache expires.
  let cachedAllowedRefs: Set<string> | null = null;
  let cachedAllowedRefsExpiry = 0;
  const CONFIG_CACHE_TTL_MS = 30_000; // 30 seconds, matches event bus TTL
  return {
    async resolve(params: PluginSecretsResolveParams): Promise<string> {
      const { secretRef } = params;
      // ---------------------------------------------------------------
      // 0. Rate limiting — prevent brute-force UUID enumeration
      // ---------------------------------------------------------------
      // All attempts count against the limit, including malformed refs.
      if (!rateLimiter.check(pluginId)) {
        const err = new Error("Rate limit exceeded for secret resolution");
        err.name = "RateLimitExceededError";
        throw err;
      }
      // ---------------------------------------------------------------
      // 1. Validate the ref format
      // ---------------------------------------------------------------
      if (!secretRef || typeof secretRef !== "string" || secretRef.trim().length === 0) {
        throw invalidSecretRef(secretRef ?? "<empty>");
      }
      const trimmedRef = secretRef.trim();
      if (!isUuid(trimmedRef)) {
        throw invalidSecretRef(trimmedRef);
      }
      // ---------------------------------------------------------------
      // 1b. Scope check — only allow secrets referenced in this plugin's config
      // ---------------------------------------------------------------
      const now = Date.now();
      if (!cachedAllowedRefs || now > cachedAllowedRefsExpiry) {
        const [configRow, plugin] = await Promise.all([
          db
            .select()
            .from(pluginConfig)
            .where(eq(pluginConfig.pluginId, pluginId))
            .then((rows) => rows[0] ?? null),
          registry.getById(pluginId),
        ]);
        // The manifest's instanceConfigSchema (when present) narrows the
        // extraction to declared secret-ref fields.
        const schema = (plugin?.manifestJson as unknown as Record<string, unknown> | null)
          ?.instanceConfigSchema as Record<string, unknown> | undefined;
        cachedAllowedRefs = extractSecretRefsFromConfig(configRow?.configJson, schema);
        cachedAllowedRefsExpiry = now + CONFIG_CACHE_TTL_MS;
      }
      if (!cachedAllowedRefs.has(trimmedRef)) {
        // Return "not found" to avoid leaking whether the secret exists
        throw secretNotFound(trimmedRef);
      }
      // ---------------------------------------------------------------
      // 2. Look up the secret record by UUID
      // ---------------------------------------------------------------
      const secret = await db
        .select()
        .from(companySecrets)
        .where(eq(companySecrets.id, trimmedRef))
        .then((rows) => rows[0] ?? null);
      if (!secret) {
        throw secretNotFound(trimmedRef);
      }
      // ---------------------------------------------------------------
      // 3. Fetch the latest version's material
      // ---------------------------------------------------------------
      const versionRow = await db
        .select()
        .from(companySecretVersions)
        .where(
          and(
            eq(companySecretVersions.secretId, secret.id),
            eq(companySecretVersions.version, secret.latestVersion),
          ),
        )
        .then((rows) => rows[0] ?? null);
      if (!versionRow) {
        throw secretVersionNotFound(trimmedRef);
      }
      // ---------------------------------------------------------------
      // 4. Resolve through the appropriate secret provider
      // ---------------------------------------------------------------
      // No caching of resolved values here — each call goes through the
      // provider so rotations are honoured.
      const provider = getSecretProvider(secret.provider as SecretProvider);
      const resolved = await provider.resolveVersion({
        material: versionRow.material as Record<string, unknown>,
        externalRef: secret.externalRef,
      });
      return resolved;
    },
  };
}

View File

@@ -0,0 +1,237 @@
import { and, eq, isNull } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { plugins, pluginState } from "@paperclipai/db";
import type {
PluginStateScopeKind,
SetPluginState,
ListPluginState,
} from "@paperclipai/shared";
import { notFound } from "../errors.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Default namespace used when the plugin does not specify one. */
const DEFAULT_NAMESPACE = "default";
/**
 * Compose the WHERE clause for one scoped state row.
 *
 * Rows are addressed by the five-part composite key
 * `(pluginId, scopeKind, scopeId, namespace, stateKey)`. A null or empty
 * `scopeId` (instance scope) is matched with IS NULL.
 */
function scopeConditions(
  pluginId: string,
  scopeKind: PluginStateScopeKind,
  scopeId: string | undefined | null,
  namespace: string,
  stateKey: string,
) {
  const hasScopeId = scopeId != null && scopeId !== "";
  return and(
    eq(pluginState.pluginId, pluginId),
    eq(pluginState.scopeKind, scopeKind),
    eq(pluginState.namespace, namespace),
    eq(pluginState.stateKey, stateKey),
    hasScopeId ? eq(pluginState.scopeId, scopeId) : isNull(pluginState.scopeId),
  );
}
// ---------------------------------------------------------------------------
// Service
// ---------------------------------------------------------------------------
/**
* Plugin State Store — scoped key-value persistence for plugin workers.
*
* Provides `get`, `set`, `delete`, and `list` operations over the
* `plugin_state` table. Each plugin's data is strictly namespaced by
* `pluginId` so plugins cannot read or write each other's state.
*
* This service implements the server-side backing for the `ctx.state` SDK
* client exposed to plugin workers. The host is responsible for:
* - enforcing `plugin.state.read` capability before calling `get` / `list`
* - enforcing `plugin.state.write` capability before calling `set` / `delete`
*
* @see PLUGIN_SPEC.md §14 — SDK Surface (`ctx.state`)
* @see PLUGIN_SPEC.md §15.1 — Capabilities: Plugin State
* @see PLUGIN_SPEC.md §21.3 — `plugin_state` table
*/
export function pluginStateStore(db: Db) {
// -----------------------------------------------------------------------
// Internal helpers
// -----------------------------------------------------------------------
async function assertPluginExists(pluginId: string): Promise<void> {
const rows = await db
.select({ id: plugins.id })
.from(plugins)
.where(eq(plugins.id, pluginId));
if (rows.length === 0) {
throw notFound(`Plugin not found: ${pluginId}`);
}
}
  // -----------------------------------------------------------------------
  // Public API
  // -----------------------------------------------------------------------
  return {
    /**
     * Read a state value.
     *
     * Returns the stored JSON value, or `null` if no entry exists for the
     * given scope and key. Note: a stored value of JSON `null` is
     * indistinguishable from "no entry" through this API.
     *
     * Requires `plugin.state.read` capability (enforced by the caller).
     *
     * @param pluginId - UUID of the owning plugin
     * @param scopeKind - Granularity of the scope
     * @param stateKey - The key to read
     * @param options - Optional `scopeId` (omit for `instance` scope) and
     *   `namespace` (defaults to `"default"`)
     */
    get: async (
      pluginId: string,
      scopeKind: PluginStateScopeKind,
      stateKey: string,
      {
        scopeId,
        namespace = DEFAULT_NAMESPACE,
      }: { scopeId?: string; namespace?: string } = {},
    ): Promise<unknown> => {
      const rows = await db
        .select()
        .from(pluginState)
        .where(scopeConditions(pluginId, scopeKind, scopeId, namespace, stateKey));
      return rows[0]?.valueJson ?? null;
    },
/**
* Write (create or replace) a state value.
*
* Uses an upsert so the caller does not need to check for prior existence.
* On conflict (same composite key) the existing row's `value_json` and
* `updated_at` are overwritten.
*
* Requires `plugin.state.write` capability (enforced by the caller).
*
* @param pluginId - UUID of the owning plugin
* @param input - Scope key and value to store
*/
set: async (pluginId: string, input: SetPluginState): Promise<void> => {
await assertPluginExists(pluginId);
const namespace = input.namespace ?? DEFAULT_NAMESPACE;
const scopeId = input.scopeId ?? null;
await db
.insert(pluginState)
.values({
pluginId,
scopeKind: input.scopeKind,
scopeId,
namespace,
stateKey: input.stateKey,
valueJson: input.value,
updatedAt: new Date(),
})
.onConflictDoUpdate({
target: [
pluginState.pluginId,
pluginState.scopeKind,
pluginState.scopeId,
pluginState.namespace,
pluginState.stateKey,
],
set: {
valueJson: input.value,
updatedAt: new Date(),
},
});
},
    /**
     * Delete a state value.
     *
     * No-ops silently if the entry does not exist (idempotent by design).
     *
     * Requires `plugin.state.write` capability (enforced by the caller).
     *
     * @param pluginId - UUID of the owning plugin
     * @param scopeKind - Granularity of the scope
     * @param stateKey - The key to delete
     * @param options - Optional `scopeId` (omit for `instance` scope) and
     *   `namespace` (defaults to `"default"`)
     */
    delete: async (
      pluginId: string,
      scopeKind: PluginStateScopeKind,
      stateKey: string,
      {
        scopeId,
        namespace = DEFAULT_NAMESPACE,
      }: { scopeId?: string; namespace?: string } = {},
    ): Promise<void> => {
      await db
        .delete(pluginState)
        .where(scopeConditions(pluginId, scopeKind, scopeId, namespace, stateKey));
    },
/**
* List all state entries for a plugin, optionally filtered by scope.
*
* Returns all matching rows as `PluginStateRecord`-shaped objects.
* The `valueJson` field contains the stored value.
*
* Requires `plugin.state.read` capability (enforced by the caller).
*
* @param pluginId - UUID of the owning plugin
* @param filter - Optional scope filters (scopeKind, scopeId, namespace)
*/
list: async (pluginId: string, filter: ListPluginState = {}): Promise<typeof pluginState.$inferSelect[]> => {
const conditions = [eq(pluginState.pluginId, pluginId)];
if (filter.scopeKind !== undefined) {
conditions.push(eq(pluginState.scopeKind, filter.scopeKind));
}
if (filter.scopeId !== undefined) {
conditions.push(eq(pluginState.scopeId, filter.scopeId));
}
if (filter.namespace !== undefined) {
conditions.push(eq(pluginState.namespace, filter.namespace));
}
return db
.select()
.from(pluginState)
.where(and(...conditions));
},
/**
* Delete all state entries owned by a plugin.
*
* Called during plugin uninstall when `removeData = true`. Also useful
* for resetting a plugin's state during testing.
*
* @param pluginId - UUID of the owning plugin
*/
deleteAll: async (pluginId: string): Promise<void> => {
await db
.delete(pluginState)
.where(eq(pluginState.pluginId, pluginId));
},
};
}
export type PluginStateStore = ReturnType<typeof pluginStateStore>;

View File

@@ -0,0 +1,81 @@
/**
* In-memory pub/sub bus for plugin SSE streams.
*
* Workers emit stream events via JSON-RPC notifications. The bus fans out
* each event to all connected SSE clients that match the (pluginId, channel,
* companyId) tuple.
*
* @see PLUGIN_SPEC.md §19.8 — Real-Time Streaming
*/
/** Valid SSE event types for plugin streams. */
export type StreamEventType = "message" | "open" | "close" | "error";
export type StreamSubscriber = (event: unknown, eventType: StreamEventType) => void;
/**
 * Composite subscription key: `pluginId:channel:companyId`.
 * NOTE(review): components containing ":" would make keys ambiguous —
 * assumed not to occur; confirm id/channel formats upstream.
 */
function streamKey(pluginId: string, channel: string, companyId: string): string {
  return [pluginId, channel, companyId].join(":");
}
export interface PluginStreamBus {
  /**
   * Subscribe to stream events for a specific (pluginId, channel, companyId).
   * Returns an unsubscribe function.
   */
  subscribe(
    pluginId: string,
    channel: string,
    companyId: string,
    listener: StreamSubscriber,
  ): () => void;
  /**
   * Publish an event to all subscribers of (pluginId, channel, companyId).
   * Called by the worker manager when it receives a stream notification.
   * `eventType` defaults to "message" when omitted; publishing with no
   * subscribers is a silent no-op.
   */
  publish(
    pluginId: string,
    channel: string,
    companyId: string,
    event: unknown,
    eventType?: StreamEventType,
  ): void;
}
/**
 * Create a new PluginStreamBus instance.
 *
 * Subscriptions live in a Map keyed by `pluginId:channel:companyId`
 * (same format as `streamKey`). Unsubscribe functions are idempotent,
 * and a stale unsubscribe — one called after its listener set emptied
 * and the key was later re-populated by new subscribers — must not
 * evict the newer set (the original implementation had this bug: the
 * closure deleted the map entry whenever its own set was empty, even
 * when the entry already pointed at a fresh set).
 */
export function createPluginStreamBus(): PluginStreamBus {
  const subscribers = new Map<string, Set<StreamSubscriber>>();
  // Key format mirrors streamKey(); kept local so the bus is self-contained.
  const keyOf = (pluginId: string, channel: string, companyId: string): string =>
    `${pluginId}:${channel}:${companyId}`;
  return {
    subscribe(pluginId, channel, companyId, listener) {
      const key = keyOf(pluginId, channel, companyId);
      let listeners = subscribers.get(key);
      if (!listeners) {
        listeners = new Set();
        subscribers.set(key, listeners);
      }
      listeners.add(listener);
      const owned = listeners;
      return () => {
        owned.delete(listener);
        // Guard against stale unsubscribes: only drop the map entry when
        // it still refers to the set this subscription belongs to.
        if (owned.size === 0 && subscribers.get(key) === owned) {
          subscribers.delete(key);
        }
      };
    },
    publish(pluginId, channel, companyId, event, eventType: StreamEventType = "message") {
      const listeners = subscribers.get(keyOf(pluginId, channel, companyId));
      if (!listeners) return;
      // Snapshot so listeners that (un)subscribe during fan-out cannot
      // perturb the iteration.
      for (const listener of [...listeners]) {
        listener(event, eventType);
      }
    },
  };
}

View File

@@ -0,0 +1,448 @@
/**
* PluginToolDispatcher — orchestrates plugin tool discovery, lifecycle
* integration, and execution routing for the agent service.
*
* This service sits between the agent service and the lower-level
* `PluginToolRegistry` + `PluginWorkerManager`, providing a clean API that:
*
* - Discovers tools from loaded plugin manifests and registers them
* in the tool registry.
* - Hooks into `PluginLifecycleManager` events to automatically register
* and unregister tools when plugins are enabled or disabled.
* - Exposes the tool list in an agent-friendly format (with namespaced
* names, descriptions, parameter schemas).
* - Routes `executeTool` calls to the correct plugin worker and returns
* structured results.
* - Validates tool parameters against declared schemas before dispatch.
*
* The dispatcher is created once at server startup and shared across
* the application.
*
* @see PLUGIN_SPEC.md §11 — Agent Tools
* @see PLUGIN_SPEC.md §13.10 — `executeTool`
*/
import type { Db } from "@paperclipai/db";
import type {
PaperclipPluginManifestV1,
PluginRecord,
} from "@paperclipai/shared";
import type { ToolRunContext, ToolResult } from "@paperclipai/plugin-sdk";
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
import {
createPluginToolRegistry,
type PluginToolRegistry,
type RegisteredTool,
type ToolListFilter,
type ToolExecutionResult,
} from "./plugin-tool-registry.js";
import { pluginRegistryService } from "./plugin-registry.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
 * An agent-facing tool descriptor — the shape returned when agents
 * query for available tools.
 *
 * This is intentionally simpler than `RegisteredTool`, exposing only
 * what agents need to decide whether and how to call a tool.
 */
export interface AgentToolDescriptor {
  /** Fully namespaced tool name (e.g. `"acme.linear:search-issues"`). */
  name: string;
  /** Human-readable display name. */
  displayName: string;
  /** Description for the agent — explains when and how to use this tool. */
  description: string;
  /** JSON Schema describing the tool's input parameters. */
  parametersSchema: Record<string, unknown>;
  /**
   * The plugin that provides this tool. This carries the plugin's database
   * UUID when one was supplied at registration; otherwise it falls back to
   * the plugin key (see `RegisteredTool.pluginDbId`).
   */
  pluginId: string;
}
/**
 * Options for creating the plugin tool dispatcher.
 *
 * All options are optional so the dispatcher can be constructed in tests
 * without infrastructure, but each omission degrades a capability (see the
 * per-field notes below).
 */
export interface PluginToolDispatcherOptions {
  /**
   * The worker manager used to dispatch RPC calls to plugin workers.
   * If omitted, `executeTool` will throw when invoked.
   */
  workerManager?: PluginWorkerManager;
  /**
   * The lifecycle manager to listen for plugin state changes.
   * If omitted, tools will not auto-register/unregister on plugin
   * enable/disable/unload (a warning is logged at initialize()).
   */
  lifecycleManager?: PluginLifecycleManager;
  /**
   * Database connection for looking up plugin records.
   * If omitted, no tools are loaded at initialize() and enable events
   * cannot resolve manifests (a warning is logged).
   */
  db?: Db;
}
// ---------------------------------------------------------------------------
// PluginToolDispatcher interface
// ---------------------------------------------------------------------------
/**
 * The plugin tool dispatcher — the primary integration point between the
 * agent service and the plugin tool system.
 *
 * Agents use this service to:
 * 1. List all available tools (for prompt construction / tool choice)
 * 2. Execute a specific tool by its namespaced name
 *
 * The dispatcher handles lifecycle management internally — when a plugin
 * is loaded or unloaded, its tools are automatically registered or removed.
 */
export interface PluginToolDispatcher {
  /**
   * Initialize the dispatcher — load tools from all currently-ready plugins
   * and start listening for lifecycle events.
   *
   * Must be called once at server startup after the lifecycle manager
   * and worker manager are ready.
   *
   * Calling it a second time is a no-op (a warning is logged).
   */
  initialize(): Promise<void>;
  /**
   * Tear down the dispatcher — unregister lifecycle event listeners
   * and clear all tool registrations.
   *
   * Called during server shutdown. Safe to call before initialize()
   * (no-op). Note: existing tool registrations are intentionally left
   * in place so in-flight tool calls can still resolve.
   */
  teardown(): void;
  /**
   * List all available tools for agents, optionally filtered.
   *
   * Returns tool descriptors in an agent-friendly format.
   *
   * @param filter - Optional filter criteria
   * @returns Array of agent tool descriptors
   */
  listToolsForAgent(filter?: ToolListFilter): AgentToolDescriptor[];
  /**
   * Look up a tool by its namespaced name.
   *
   * @param namespacedName - e.g. `"acme.linear:search-issues"`
   * @returns The registered tool, or `null` if not found
   */
  getTool(namespacedName: string): RegisteredTool | null;
  /**
   * Execute a tool by its namespaced name, routing to the correct
   * plugin worker.
   *
   * NOTE(review): parameters are currently forwarded to the worker without
   * schema validation at this layer — confirm against PLUGIN_SPEC §13.10.
   *
   * @param namespacedName - Fully qualified tool name
   * @param parameters - Input parameters matching the tool's schema
   * @param runContext - Agent run context
   * @returns The execution result with routing metadata
   * @throws {Error} if the tool is not found, the worker is not running,
   *   or the tool execution fails
   */
  executeTool(
    namespacedName: string,
    parameters: unknown,
    runContext: ToolRunContext,
  ): Promise<ToolExecutionResult>;
  /**
   * Register all tools from a plugin manifest.
   *
   * This is called automatically when a plugin transitions to `ready`.
   * Can also be called manually for testing or recovery scenarios.
   *
   * @param pluginId - The plugin's unique identifier
   * @param manifest - The plugin manifest containing tool declarations
   */
  registerPluginTools(
    pluginId: string,
    manifest: PaperclipPluginManifestV1,
  ): void;
  /**
   * Unregister all tools for a plugin.
   *
   * Called automatically when a plugin is disabled or unloaded.
   *
   * @param pluginId - The plugin to unregister
   */
  unregisterPluginTools(pluginId: string): void;
  /**
   * Get the total number of registered tools, optionally scoped to a plugin.
   *
   * @param pluginId - If provided, count only this plugin's tools
   */
  toolCount(pluginId?: string): number;
  /**
   * Access the underlying tool registry for advanced operations.
   *
   * This escape hatch exists for internal use (e.g. diagnostics).
   * Prefer the dispatcher's own methods for normal operations.
   */
  getRegistry(): PluginToolRegistry;
}
// ---------------------------------------------------------------------------
// Factory: createPluginToolDispatcher
// ---------------------------------------------------------------------------
/**
* Create a new `PluginToolDispatcher`.
*
* The dispatcher:
* 1. Creates and owns a `PluginToolRegistry` backed by the given worker manager.
* 2. Listens for lifecycle events (plugin.enabled, plugin.disabled, plugin.unloaded)
* to automatically register and unregister tools.
* 3. On `initialize()`, loads tools from all currently-ready plugins via the DB.
*
* @param options - Configuration options
*
* @example
* ```ts
* // At server startup
* const dispatcher = createPluginToolDispatcher({
* workerManager,
* lifecycleManager,
* db,
* });
* await dispatcher.initialize();
*
* // In agent service — list tools for prompt construction
* const tools = dispatcher.listToolsForAgent();
*
* // In agent service — execute a tool
* const result = await dispatcher.executeTool(
* "acme.linear:search-issues",
* { query: "auth bug" },
* { agentId: "a-1", runId: "r-1", companyId: "c-1", projectId: "p-1" },
* );
* ```
*/
export function createPluginToolDispatcher(
  options: PluginToolDispatcherOptions = {},
): PluginToolDispatcher {
  const { workerManager, lifecycleManager, db } = options;
  const log = logger.child({ service: "plugin-tool-dispatcher" });
  // Create the underlying tool registry, backed by the worker manager.
  // When workerManager is undefined, registry.executeTool will throw.
  const registry = createPluginToolRegistry(workerManager);
  // Track lifecycle event listeners so we can remove them on teardown.
  // These are non-null only while subscribed (between initialize() and teardown()).
  let enabledListener: ((payload: { pluginId: string; pluginKey: string }) => void) | null = null;
  let disabledListener: ((payload: { pluginId: string; pluginKey: string; reason?: string }) => void) | null = null;
  let unloadedListener: ((payload: { pluginId: string; pluginKey: string; removeData: boolean }) => void) | null = null;
  // Guards initialize()/teardown() against double invocation.
  let initialized = false;
  // -----------------------------------------------------------------------
  // Internal helpers
  // -----------------------------------------------------------------------
  /**
   * Attempt to register tools for a plugin by looking up its manifest
   * from the DB. No-ops gracefully (with a warning) if the DB connection,
   * the plugin record, or its manifest is missing.
   *
   * Registers under the plugin's `pluginKey` (the tool namespace) and
   * passes the DB UUID (`plugin.id`) for worker routing.
   */
  async function registerFromDb(pluginId: string): Promise<void> {
    if (!db) {
      log.warn(
        { pluginId },
        "cannot register tools from DB — no database connection configured",
      );
      return;
    }
    const pluginRegistry = pluginRegistryService(db);
    const plugin = await pluginRegistry.getById(pluginId) as PluginRecord | null;
    if (!plugin) {
      log.warn({ pluginId }, "plugin not found in registry, cannot register tools");
      return;
    }
    const manifest = plugin.manifestJson;
    if (!manifest) {
      log.warn({ pluginId }, "plugin has no manifest, cannot register tools");
      return;
    }
    registry.registerPlugin(plugin.pluginKey, manifest, plugin.id);
  }
  /**
   * Convert a `RegisteredTool` to an `AgentToolDescriptor`.
   * Note that the descriptor's `pluginId` is the routing ID
   * (`pluginDbId`), not the namespacing plugin key.
   */
  function toAgentDescriptor(tool: RegisteredTool): AgentToolDescriptor {
    return {
      name: tool.namespacedName,
      displayName: tool.displayName,
      description: tool.description,
      parametersSchema: tool.parametersSchema,
      pluginId: tool.pluginDbId,
    };
  }
  // -----------------------------------------------------------------------
  // Lifecycle event handlers
  // -----------------------------------------------------------------------
  function handlePluginEnabled(payload: { pluginId: string; pluginKey: string }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin enabled — registering tools");
    // Async registration from DB — we fire-and-forget since the lifecycle
    // event handler must be synchronous. Any errors are logged.
    void registerFromDb(payload.pluginId).catch((err) => {
      log.error(
        { pluginId: payload.pluginId, err: err instanceof Error ? err.message : String(err) },
        "failed to register tools after plugin enabled",
      );
    });
  }
  // Disable/unload both unregister synchronously; tools were registered
  // under pluginKey, so that is the key used to clear them.
  function handlePluginDisabled(payload: { pluginId: string; pluginKey: string; reason?: string }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin disabled — unregistering tools");
    registry.unregisterPlugin(payload.pluginKey);
  }
  function handlePluginUnloaded(payload: { pluginId: string; pluginKey: string; removeData: boolean }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin unloaded — unregistering tools");
    registry.unregisterPlugin(payload.pluginKey);
  }
  // -----------------------------------------------------------------------
  // Public API
  // -----------------------------------------------------------------------
  return {
    async initialize(): Promise<void> {
      if (initialized) {
        log.warn("dispatcher already initialized, skipping");
        return;
      }
      log.info("initializing plugin tool dispatcher");
      // Step 1: Load tools from all currently-ready plugins.
      // Plugins without declared tools are skipped entirely here (unlike
      // registerFromDb, which delegates the zero-tool case to the registry).
      if (db) {
        const pluginRegistry = pluginRegistryService(db);
        const readyPlugins = await pluginRegistry.listByStatus("ready") as PluginRecord[];
        let totalTools = 0;
        for (const plugin of readyPlugins) {
          const manifest = plugin.manifestJson;
          if (manifest?.tools && manifest.tools.length > 0) {
            registry.registerPlugin(plugin.pluginKey, manifest, plugin.id);
            totalTools += manifest.tools.length;
          }
        }
        log.info(
          { readyPlugins: readyPlugins.length, registeredTools: totalTools },
          "loaded tools from ready plugins",
        );
      }
      // Step 2: Subscribe to lifecycle events for dynamic updates
      if (lifecycleManager) {
        enabledListener = handlePluginEnabled;
        disabledListener = handlePluginDisabled;
        unloadedListener = handlePluginUnloaded;
        lifecycleManager.on("plugin.enabled", enabledListener);
        lifecycleManager.on("plugin.disabled", disabledListener);
        lifecycleManager.on("plugin.unloaded", unloadedListener);
        log.debug("subscribed to lifecycle events");
      } else {
        log.warn("no lifecycle manager provided — tools will not auto-update on plugin state changes");
      }
      initialized = true;
      log.info(
        { totalTools: registry.toolCount() },
        "plugin tool dispatcher initialized",
      );
    },
    teardown(): void {
      if (!initialized) return;
      // Unsubscribe from lifecycle events and drop the listener refs so a
      // later initialize() starts from a clean slate.
      if (lifecycleManager) {
        if (enabledListener) lifecycleManager.off("plugin.enabled", enabledListener);
        if (disabledListener) lifecycleManager.off("plugin.disabled", disabledListener);
        if (unloadedListener) lifecycleManager.off("plugin.unloaded", unloadedListener);
        enabledListener = null;
        disabledListener = null;
        unloadedListener = null;
      }
      // Note: we do NOT clear the registry here because teardown may be
      // called during graceful shutdown where in-flight tool calls should
      // still be able to resolve their tool entries.
      initialized = false;
      log.info("plugin tool dispatcher torn down");
    },
    listToolsForAgent(filter?: ToolListFilter): AgentToolDescriptor[] {
      return registry.listTools(filter).map(toAgentDescriptor);
    },
    getTool(namespacedName: string): RegisteredTool | null {
      return registry.getTool(namespacedName);
    },
    async executeTool(
      namespacedName: string,
      parameters: unknown,
      runContext: ToolRunContext,
    ): Promise<ToolExecutionResult> {
      // Thin pass-through to the registry; all validation/routing errors
      // are thrown by registry.executeTool. Debug logs bracket the call.
      log.debug(
        {
          tool: namespacedName,
          agentId: runContext.agentId,
          runId: runContext.runId,
        },
        "dispatching tool execution",
      );
      const result = await registry.executeTool(
        namespacedName,
        parameters,
        runContext,
      );
      log.debug(
        {
          tool: namespacedName,
          pluginId: result.pluginId,
          hasContent: !!result.result.content,
          hasError: !!result.result.error,
        },
        "tool execution completed",
      );
      return result;
    },
    registerPluginTools(
      pluginId: string,
      manifest: PaperclipPluginManifestV1,
    ): void {
      // No DB UUID supplied here, so the registry falls back to using
      // pluginId for worker routing (see registerPlugin's pluginDbId param).
      registry.registerPlugin(pluginId, manifest);
    },
    unregisterPluginTools(pluginId: string): void {
      registry.unregisterPlugin(pluginId);
    },
    toolCount(pluginId?: string): number {
      return registry.toolCount(pluginId);
    },
    getRegistry(): PluginToolRegistry {
      return registry;
    },
  };
}

View File

@@ -0,0 +1,449 @@
/**
* PluginToolRegistry — host-side registry for plugin-contributed agent tools.
*
* Responsibilities:
* - Store tool declarations (from plugin manifests) alongside routing metadata
* so the host can resolve namespaced tool names to the owning plugin worker.
* - Namespace tools automatically: a tool `"search-issues"` from plugin
* `"acme.linear"` is exposed to agents as `"acme.linear:search-issues"`.
* - Route `executeTool` calls to the correct plugin worker via the
* `PluginWorkerManager`.
* - Provide tool discovery queries so agents can list available tools.
* - Clean up tool registrations when a plugin is unloaded or its worker stops.
*
* The registry is an in-memory structure — tool declarations are derived from
* the plugin manifest at load time and do not need persistence. When a plugin
* worker restarts, the host re-registers its manifest tools.
*
* @see PLUGIN_SPEC.md §11 — Agent Tools
* @see PLUGIN_SPEC.md §13.10 — `executeTool`
*/
import type {
PaperclipPluginManifestV1,
PluginToolDeclaration,
} from "@paperclipai/shared";
import type { ToolRunContext, ToolResult, ExecuteToolParams } from "@paperclipai/plugin-sdk";
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
import { logger } from "../middleware/logger.js";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/**
 * Separator between plugin ID and tool name in the namespaced tool identifier.
 *
 * Example: `"acme.linear:search-issues"`
 *
 * Namespaced names are split at the LAST occurrence of this separator
 * (see `parseNamespacedName`), so a plugin ID may itself contain the
 * separator, but a bare tool name may not.
 */
export const TOOL_NAMESPACE_SEPARATOR = ":";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/**
 * A registered tool entry stored in the registry.
 *
 * Combines the manifest-level declaration with routing metadata so the host
 * can resolve a namespaced tool name → plugin worker in O(1).
 */
export interface RegisteredTool {
  /** The plugin key used for namespacing (e.g. `"acme.linear"`). */
  pluginId: string;
  /**
   * The plugin's database UUID, used for worker routing and availability
   * checks. Falls back to `pluginId` when not provided (e.g. in tests
   * where `id === pluginKey`).
   */
  pluginDbId: string;
  /** The tool's bare name (without namespace prefix). */
  name: string;
  /** Fully namespaced identifier: `"<pluginId>:<toolName>"`. */
  namespacedName: string;
  /** Human-readable display name. */
  displayName: string;
  /** Description provided to the agent so it knows when to use this tool. */
  description: string;
  /** JSON Schema describing the tool's input parameters, as declared in the manifest. */
  parametersSchema: Record<string, unknown>;
}
/**
 * Filter criteria for listing available tools.
 */
export interface ToolListFilter {
  /** Only return tools owned by this plugin (matched against the plugin key used at registration). */
  pluginId?: string;
}
/**
 * Result of executing a tool, extending `ToolResult` with routing metadata.
 */
export interface ToolExecutionResult {
  /** The plugin that handled the tool call (the namespacing plugin key). */
  pluginId: string;
  /** The bare tool name that was executed. */
  toolName: string;
  /** The result returned by the plugin's tool handler. */
  result: ToolResult;
}
// ---------------------------------------------------------------------------
// PluginToolRegistry interface
// ---------------------------------------------------------------------------
/**
 * The host-side tool registry — held by the host process.
 *
 * Created once at server startup and shared across the application. Plugins
 * register their tools when their worker starts, and unregister when the
 * worker stops or the plugin is uninstalled.
 */
export interface PluginToolRegistry {
  /**
   * Register all tools declared in a plugin's manifest.
   *
   * Called when a plugin worker starts and its manifest is loaded. Any
   * previously registered tools for the same plugin are replaced (idempotent).
   *
   * @param pluginId - The plugin's unique identifier (e.g. `"acme.linear"`)
   * @param manifest - The plugin manifest containing the `tools` array
   * @param pluginDbId - The plugin's database UUID, used for worker routing
   *   and availability checks. If omitted, `pluginId` is used (backwards-compat).
   */
  registerPlugin(pluginId: string, manifest: PaperclipPluginManifestV1, pluginDbId?: string): void;
  /**
   * Remove all tool registrations for a plugin.
   *
   * Called when a plugin worker stops, crashes, or is uninstalled.
   * No-op if the plugin has no registered tools.
   *
   * @param pluginId - The plugin to clear
   */
  unregisterPlugin(pluginId: string): void;
  /**
   * Look up a registered tool by its namespaced name.
   *
   * @param namespacedName - Fully qualified name, e.g. `"acme.linear:search-issues"`
   * @returns The registered tool entry, or `null` if not found
   */
  getTool(namespacedName: string): RegisteredTool | null;
  /**
   * Look up a registered tool by plugin ID and bare tool name.
   *
   * @param pluginId - The owning plugin
   * @param toolName - The bare tool name (without namespace prefix)
   * @returns The registered tool entry, or `null` if not found
   */
  getToolByPlugin(pluginId: string, toolName: string): RegisteredTool | null;
  /**
   * List all registered tools, optionally filtered.
   *
   * @param filter - Optional filter criteria
   * @returns Array of registered tool entries
   */
  listTools(filter?: ToolListFilter): RegisteredTool[];
  /**
   * Parse a namespaced tool name into plugin ID and bare tool name.
   * Splits at the LAST separator, so plugin IDs may contain separators.
   *
   * @param namespacedName - e.g. `"acme.linear:search-issues"`
   * @returns `{ pluginId, toolName }` or `null` if the format is invalid
   */
  parseNamespacedName(namespacedName: string): { pluginId: string; toolName: string } | null;
  /**
   * Build a namespaced tool name from a plugin ID and bare tool name.
   *
   * @param pluginId - e.g. `"acme.linear"`
   * @param toolName - e.g. `"search-issues"`
   * @returns The namespaced name, e.g. `"acme.linear:search-issues"`
   */
  buildNamespacedName(pluginId: string, toolName: string): string;
  /**
   * Execute a tool by its namespaced name, routing to the correct plugin worker.
   *
   * Resolves the namespaced name to the owning plugin, validates the tool
   * exists, and dispatches the `executeTool` RPC call to the worker.
   *
   * NOTE(review): parameters are forwarded to the worker without being
   * checked against the tool's declared schema at this layer — confirm
   * whether validation is expected here or in the worker.
   *
   * @param namespacedName - Fully qualified tool name (e.g. `"acme.linear:search-issues"`)
   * @param parameters - The parsed parameters matching the tool's schema
   * @param runContext - Agent run context
   * @returns The execution result with routing metadata
   * @throws {Error} if the tool is not found or the worker is not running
   */
  executeTool(
    namespacedName: string,
    parameters: unknown,
    runContext: ToolRunContext,
  ): Promise<ToolExecutionResult>;
  /**
   * Get the number of registered tools, optionally scoped to a plugin.
   *
   * @param pluginId - If provided, count only this plugin's tools
   */
  toolCount(pluginId?: string): number;
}
// ---------------------------------------------------------------------------
// Factory: createPluginToolRegistry
// ---------------------------------------------------------------------------
/**
 * Create a new `PluginToolRegistry`.
 *
 * Internally the registry keeps two in-memory indexes:
 * - `toolsByName`: namespaced name → `RegisteredTool`, for O(1) resolution.
 * - `namesByPlugin`: plugin ID → set of namespaced names, for efficient
 *   bulk (un)registration of a plugin's tools.
 *
 * @param workerManager - The worker manager used to dispatch `executeTool`
 *   RPC calls to plugin workers. If not provided, `executeTool` will throw.
 *
 * @example
 * ```ts
 * const toolRegistry = createPluginToolRegistry(workerManager);
 * toolRegistry.registerPlugin("acme.linear", linearManifest);
 * const tools = toolRegistry.listTools();
 * // → [{ namespacedName: "acme.linear:search-issues", ... }]
 * const result = await toolRegistry.executeTool(
 *   "acme.linear:search-issues",
 *   { query: "auth bug" },
 *   { agentId: "agent-1", runId: "run-1", companyId: "co-1", projectId: "proj-1" },
 * );
 * ```
 */
export function createPluginToolRegistry(
  workerManager?: PluginWorkerManager,
): PluginToolRegistry {
  const log = logger.child({ service: "plugin-tool-registry" });
  // Primary index: namespaced name → registered tool entry.
  const toolsByName = new Map<string, RegisteredTool>();
  // Secondary index: plugin ID → namespaced names owned by that plugin.
  const namesByPlugin = new Map<string, Set<string>>();

  /** Join a plugin ID and a bare tool name into the namespaced identifier. */
  const joinName = (pluginId: string, toolName: string): string =>
    `${pluginId}${TOOL_NAMESPACE_SEPARATOR}${toolName}`;

  /**
   * Split a namespaced name at the LAST separator. Returns `null` when no
   * separator is present or when either side would be empty.
   */
  function splitName(
    namespacedName: string,
  ): { pluginId: string; toolName: string } | null {
    const at = namespacedName.lastIndexOf(TOOL_NAMESPACE_SEPARATOR);
    if (at <= 0 || at >= namespacedName.length - 1) return null;
    return {
      pluginId: namespacedName.slice(0, at),
      toolName: namespacedName.slice(at + 1),
    };
  }

  /** Insert a single declared tool into both indexes. */
  function indexTool(
    pluginId: string,
    decl: PluginToolDeclaration,
    pluginDbId: string,
  ): void {
    const fullName = joinName(pluginId, decl.name);
    toolsByName.set(fullName, {
      pluginId,
      pluginDbId,
      name: decl.name,
      namespacedName: fullName,
      displayName: decl.displayName,
      description: decl.description,
      parametersSchema: decl.parametersSchema,
    });
    const owned = namesByPlugin.get(pluginId) ?? new Set<string>();
    owned.add(fullName);
    namesByPlugin.set(pluginId, owned);
  }

  /** Drop every tool owned by `pluginId`; returns how many were removed. */
  function dropPluginTools(pluginId: string): number {
    const owned = namesByPlugin.get(pluginId);
    if (owned === undefined) return 0;
    for (const fullName of owned) toolsByName.delete(fullName);
    namesByPlugin.delete(pluginId);
    return owned.size;
  }

  return {
    registerPlugin(
      pluginId: string,
      manifest: PaperclipPluginManifestV1,
      pluginDbId?: string,
    ): void {
      const dbId = pluginDbId ?? pluginId;
      // Idempotent: clear any earlier registrations for this plugin first.
      const previousCount = dropPluginTools(pluginId);
      if (previousCount > 0) {
        log.debug(
          { pluginId, previousCount },
          "cleared previous tool registrations before re-registering",
        );
      }
      const tools = manifest.tools ?? [];
      if (tools.length === 0) {
        log.debug({ pluginId }, "plugin declares no tools");
        return;
      }
      for (const decl of tools) indexTool(pluginId, decl, dbId);
      log.info(
        {
          pluginId,
          toolCount: tools.length,
          tools: tools.map((t) => joinName(pluginId, t.name)),
        },
        `registered ${tools.length} tool(s) for plugin`,
      );
    },
    unregisterPlugin(pluginId: string): void {
      const removed = dropPluginTools(pluginId);
      if (removed === 0) return;
      log.info(
        { pluginId, removedCount: removed },
        `unregistered ${removed} tool(s) for plugin`,
      );
    },
    getTool(namespacedName: string): RegisteredTool | null {
      return toolsByName.get(namespacedName) ?? null;
    },
    getToolByPlugin(pluginId: string, toolName: string): RegisteredTool | null {
      return toolsByName.get(joinName(pluginId, toolName)) ?? null;
    },
    listTools(filter?: ToolListFilter): RegisteredTool[] {
      const scope = filter?.pluginId;
      if (!scope) return Array.from(toolsByName.values());
      const owned = namesByPlugin.get(scope);
      if (!owned) return [];
      const matches: RegisteredTool[] = [];
      for (const fullName of owned) {
        const entry = toolsByName.get(fullName);
        if (entry) matches.push(entry);
      }
      return matches;
    },
    parseNamespacedName(
      namespacedName: string,
    ): { pluginId: string; toolName: string } | null {
      return splitName(namespacedName);
    },
    buildNamespacedName(pluginId: string, toolName: string): string {
      return joinName(pluginId, toolName);
    },
    async executeTool(
      namespacedName: string,
      parameters: unknown,
      runContext: ToolRunContext,
    ): Promise<ToolExecutionResult> {
      // Resolve and validate the namespaced name.
      const parsed = splitName(namespacedName);
      if (parsed === null) {
        throw new Error(
          `Invalid tool name "${namespacedName}". Expected format: "<pluginId>${TOOL_NAMESPACE_SEPARATOR}<toolName>"`,
        );
      }
      const { pluginId, toolName } = parsed;
      // The tool must currently be registered.
      const tool = toolsByName.get(namespacedName);
      if (tool === undefined) {
        throw new Error(
          `Tool "${namespacedName}" is not registered. ` +
            `The plugin may not be installed or its worker may not be running.`,
        );
      }
      // Routing requires a worker manager…
      if (!workerManager) {
        throw new Error(
          `Cannot execute tool "${namespacedName}" — no worker manager configured. ` +
            `Tool execution requires a PluginWorkerManager.`,
        );
      }
      // …and a running worker for the owning plugin (looked up by DB UUID).
      const dbId = tool.pluginDbId;
      if (!workerManager.isRunning(dbId)) {
        throw new Error(
          `Cannot execute tool "${namespacedName}" — ` +
            `worker for plugin "${pluginId}" is not running.`,
        );
      }
      log.debug(
        { pluginId, pluginDbId: dbId, toolName, namespacedName, agentId: runContext.agentId, runId: runContext.runId },
        "executing tool via plugin worker",
      );
      const rpcParams: ExecuteToolParams = {
        toolName,
        parameters,
        runContext,
      };
      // Dispatch the executeTool RPC to the plugin worker.
      const result = await workerManager.call(dbId, "executeTool", rpcParams);
      log.debug(
        {
          pluginId,
          toolName,
          namespacedName,
          hasContent: !!result.content,
          hasData: result.data !== undefined,
          hasError: !!result.error,
        },
        "tool execution completed",
      );
      return { pluginId, toolName, result };
    },
    toolCount(pluginId?: string): number {
      return pluginId === undefined
        ? toolsByName.size
        : namesByPlugin.get(pluginId)?.size ?? 0;
    },
  };
}

File diff suppressed because it is too large Load Diff