Fix cursor model defaults and add dynamic model discovery

This commit is contained in:
Dotta
2026-03-05 07:52:23 -06:00
parent db54f77b73
commit e835c5cee9
4 changed files with 251 additions and 7 deletions

View File

@@ -1,14 +1,51 @@
export const type = "cursor";
export const label = "Cursor CLI (local)";
export const DEFAULT_CURSOR_LOCAL_MODEL = "gpt-5";
export const DEFAULT_CURSOR_LOCAL_MODEL = "auto";
export const models = [
{ id: DEFAULT_CURSOR_LOCAL_MODEL, label: DEFAULT_CURSOR_LOCAL_MODEL },
{ id: "gpt-5-mini", label: "gpt-5-mini" },
{ id: "sonnet-4", label: "sonnet-4" },
{ id: "sonnet-4-thinking", label: "sonnet-4-thinking" },
// Static fallback list of Cursor model ids, used when live discovery via
// the `agent models` CLI is unavailable. "auto" is listed first and is the
// default model (see DEFAULT_CURSOR_LOCAL_MODEL above).
const CURSOR_FALLBACK_MODEL_IDS = [
"auto",
"composer-1.5",
"composer-1",
"gpt-5.3-codex-low",
"gpt-5.3-codex-low-fast",
"gpt-5.3-codex",
"gpt-5.3-codex-fast",
"gpt-5.3-codex-high",
"gpt-5.3-codex-high-fast",
"gpt-5.3-codex-xhigh",
"gpt-5.3-codex-xhigh-fast",
"gpt-5.3-codex-spark-preview",
"gpt-5.2",
"gpt-5.2-codex-low",
"gpt-5.2-codex-low-fast",
"gpt-5.2-codex",
"gpt-5.2-codex-fast",
"gpt-5.2-codex-high",
"gpt-5.2-codex-high-fast",
"gpt-5.2-codex-xhigh",
"gpt-5.2-codex-xhigh-fast",
"gpt-5.1-codex-max",
"gpt-5.1-codex-max-high",
"gpt-5.2-high",
"gpt-5.1-high",
"gpt-5.1-codex-mini",
"opus-4.6-thinking",
"opus-4.6",
"opus-4.5",
"opus-4.5-thinking",
"sonnet-4.6",
"sonnet-4.6-thinking",
"sonnet-4.5",
"sonnet-4.5-thinking",
"gemini-3.1-pro",
"gemini-3-pro",
"gemini-3-flash",
"grok",
"kimi-k2.5",
];
// Expose the fallback ids in the adapter's model shape (label === id).
export const models = CURSOR_FALLBACK_MODEL_IDS.map((id) => ({ id, label: id }));
export const agentConfigurationDoc = `# cursor agent configuration
Adapter: cursor
@@ -27,7 +64,7 @@ Core fields:
- cwd (string, optional): default absolute working directory fallback for the agent process (created if missing when possible)
- instructionsFilePath (string, optional): absolute path to a markdown instructions file prepended to the run prompt
- promptTemplate (string, optional): run prompt template
- model (string, optional): Cursor model id (for example gpt-5)
- model (string, optional): Cursor model id (for example auto or gpt-5.3-codex)
- mode (string, optional): Cursor execution mode passed as --mode (plan|ask)
- command (string, optional): defaults to "agent"
- extraArgs (string[], optional): additional CLI args

View File

@@ -1,12 +1,16 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { models as codexFallbackModels } from "@paperclipai/adapter-codex-local";
import { models as cursorFallbackModels } from "@paperclipai/adapter-cursor-local";
import { listAdapterModels } from "../adapters/index.js";
import { resetCodexModelsCacheForTests } from "../adapters/codex-models.js";
import { resetCursorModelsCacheForTests, setCursorModelsRunnerForTests } from "../adapters/cursor-models.js";
describe("adapter model listing", () => {
beforeEach(() => {
// Start each test without ambient credentials, with both model caches
// cleared, and with the real cursor CLI runner restored.
delete process.env.OPENAI_API_KEY;
resetCodexModelsCacheForTests();
resetCursorModelsCacheForTests();
setCursorModelsRunnerForTests(null);
vi.restoreAllMocks();
});
@@ -55,4 +59,35 @@ describe("adapter model listing", () => {
const models = await listAdapterModels("codex_local");
expect(models).toEqual(codexFallbackModels);
});
// Spawn failure (no exit status, no output at all) must degrade to the
// static fallback list rather than throwing or returning an empty list.
it("returns cursor fallback models when CLI discovery is unavailable", async () => {
setCursorModelsRunnerForTests(() => ({
status: null,
stdout: "",
stderr: "",
hasError: true,
}));
const models = await listAdapterModels("cursor");
expect(models).toEqual(cursorFallbackModels);
});
// Successful discovery should parse the CLI's "Available models:" line,
// merge in the fallback ids, and cache the result so the runner is only
// invoked once across repeated listings.
it("loads cursor models dynamically and caches them", async () => {
const runner = vi.fn(() => ({
status: 0,
stdout: "Available models: auto, composer-1.5, gpt-5.3-codex-high, sonnet-4.6",
stderr: "",
hasError: false,
}));
setCursorModelsRunnerForTests(runner);
const first = await listAdapterModels("cursor");
const second = await listAdapterModels("cursor");
expect(runner).toHaveBeenCalledTimes(1);
expect(first).toEqual(second);
expect(first.some((model) => model.id === "auto")).toBe(true);
expect(first.some((model) => model.id === "gpt-5.3-codex-high")).toBe(true);
// "composer-1" is not in the stub stdout, so its presence proves the
// fallback list was merged with the discovered models.
expect(first.some((model) => model.id === "composer-1")).toBe(true);
});
});

View File

@@ -0,0 +1,170 @@
import { spawnSync } from "node:child_process";
import { models as cursorFallbackModels } from "@paperclipai/adapter-cursor-local";
import type { AdapterModel } from "./types.js";
// How long to wait for the `agent models` command before giving up.
const CURSOR_MODELS_TIMEOUT_MS = 5_000;
// Successful discovery results are cached for this long.
const CURSOR_MODELS_CACHE_TTL_MS = 60_000;
// Cap on captured CLI stdout/stderr handed to spawnSync.
const MAX_BUFFER_BYTES = 512 * 1024;
// Module-level cache of the most recently discovered model list.
let cached: { expiresAt: number; models: AdapterModel[] } | null = null;
// Normalized result of running the discovery command. `hasError` mirrors
// spawnSync's `error` field (spawn-level failure), not a non-zero exit.
type CursorModelsCommandResult = {
status: number | null;
stdout: string;
stderr: string;
hasError: boolean;
};
/**
 * Removes blank and duplicate model ids while preserving first-seen order.
 * Ids and labels are trimmed; a blank label falls back to the id.
 */
function dedupeModels(models: AdapterModel[]): AdapterModel[] {
  const byId = new Map<string, AdapterModel>();
  for (const entry of models) {
    const id = entry.id.trim();
    if (!id || byId.has(id)) continue;
    byId.set(id, { id, label: entry.label.trim() || id });
  }
  return [...byId.values()];
}
/**
 * Normalizes a raw CLI token into a bare model id: trims whitespace,
 * strips wrapping quotes/backticks, and drops a trailing parenthesized
 * annotation such as "auto (default)".
 */
function sanitizeModelId(raw: string): string {
  let value = raw.trim();
  value = value.replace(/^["'`]+/, "").replace(/["'`]+$/, "");
  value = value.replace(/\(.*\)\s*$/, "");
  return value.trim();
}
/**
 * True when the sanitized token looks like a plausible model id:
 * non-empty, starting with an alphanumeric, and containing only
 * alphanumerics, dots, underscores, slashes, and hyphens.
 */
function isLikelyModelId(raw: string): boolean {
  const candidate = sanitizeModelId(raw);
  if (candidate.length === 0) return false;
  return /^[A-Za-z0-9][A-Za-z0-9._/-]*$/.test(candidate);
}
/** Appends the sanitized token to `target` if it looks like a model id. */
function pushModelId(target: AdapterModel[], raw: string) {
  const id = sanitizeModelId(raw);
  if (isLikelyModelId(id)) {
    target.push({ id, label: id });
  }
}
/**
 * Harvests model ids from a parsed JSON value. Accepts a bare string,
 * or an array whose entries are strings or objects with a string `id`
 * field; anything else is silently ignored.
 */
function collectFromJsonValue(value: unknown, target: AdapterModel[]) {
  if (typeof value === "string") {
    pushModelId(target, value);
  } else if (Array.isArray(value)) {
    for (const entry of value) {
      if (typeof entry === "string") {
        pushModelId(target, entry);
      } else if (entry !== null && typeof entry === "object") {
        const candidate = (entry as { id?: unknown }).id;
        if (typeof candidate === "string") {
          pushModelId(target, candidate);
        }
      }
    }
  }
}
/**
 * Parses model ids out of `agent models` output. Three formats are
 * recognized, in this order (order determines the position of ids in the
 * deduped result): a JSON document on stdout (array of ids or objects
 * with an `id` field, or an object carrying `models`/`data` arrays),
 * "Available models: a, b, c" lines anywhere in the combined output,
 * and bullet/bare single-token lines. Duplicates are removed while
 * preserving first-seen order.
 */
export function parseCursorModelsOutput(stdout: string, stderr: string): AdapterModel[] {
const models: AdapterModel[] = [];
const combined = `${stdout}\n${stderr}`;
const trimmedStdout = stdout.trim();
// JSON parsing is only attempted when stdout looks like a JSON document.
if (trimmedStdout.startsWith("{") || trimmedStdout.startsWith("[")) {
try {
const parsed = JSON.parse(trimmedStdout) as unknown;
if (Array.isArray(parsed)) {
collectFromJsonValue(parsed, models);
} else if (typeof parsed === "object" && parsed !== null) {
const rec = parsed as Record<string, unknown>;
collectFromJsonValue(rec.models, models);
collectFromJsonValue(rec.data, models);
}
} catch {
// Ignore malformed JSON and continue parsing plain text formats.
}
}
// Inline comma-separated list: "Available models: a, b, c".
for (const match of combined.matchAll(/available models?:\s*([^\n]+)/gi)) {
const list = match[1] ?? "";
for (const token of list.split(",")) {
pushModelId(models, token);
}
}
// Bullet ("- id" / "* id") or bare single-token lines. A line that still
// contains a space after bullet stripping is treated as prose and skipped.
for (const lineRaw of combined.split(/\r?\n/)) {
const line = lineRaw.trim();
if (!line) continue;
const bullet = line.replace(/^[-*]\s+/, "").trim();
if (!bullet || bullet.includes(" ")) continue;
pushModelId(models, bullet);
}
return dedupeModels(models);
}
/**
 * Unions discovered models with the static fallback list. Discovered
 * entries come first, so they win any id collision during dedupe.
 */
function mergedWithFallback(models: AdapterModel[]): AdapterModel[] {
  return dedupeModels(models.concat(cursorFallbackModels));
}
/**
 * Runs `agent models` synchronously and normalizes spawnSync's result
 * into a CursorModelsCommandResult. `hasError` is set when the process
 * could not be spawned (spawnSync populated its `error` field).
 */
function defaultCursorModelsRunner(): CursorModelsCommandResult {
  const spawned = spawnSync("agent", ["models"], {
    encoding: "utf8",
    timeout: CURSOR_MODELS_TIMEOUT_MS,
    maxBuffer: MAX_BUFFER_BYTES,
  });
  const stdout = typeof spawned.stdout === "string" ? spawned.stdout : "";
  const stderr = typeof spawned.stderr === "string" ? spawned.stderr : "";
  return { status: spawned.status, stdout, stderr, hasError: spawned.error !== undefined };
}
// Swappable command runner; tests replace it via setCursorModelsRunnerForTests.
let cursorModelsRunner: () => CursorModelsCommandResult = defaultCursorModelsRunner;
/**
 * Invokes the current runner and parses its output into models.
 * Returns [] when the spawn failed with no captured output, or when the
 * process exited abnormally without still advertising a model list.
 */
function fetchCursorModelsFromCli(): AdapterModel[] {
  const { status, stdout, stderr, hasError } = cursorModelsRunner();
  // Spawn-level failure with nothing captured: there is nothing to parse.
  if (hasError && stdout.trim().length === 0 && stderr.trim().length === 0) {
    return [];
  }
  // A non-zero (or unknown) exit is tolerated only when the output still
  // contains an "Available models:" listing.
  const exitedCleanly = status === 0;
  if (!exitedCleanly && !/available models?:/i.test(`${stdout}\n${stderr}`)) {
    return [];
  }
  return parseCursorModelsOutput(stdout, stderr);
}
/**
 * Lists Cursor models, preferring live CLI discovery over the static
 * fallback list.
 *
 * Successful discovery results (merged with the fallback list) are cached
 * for CURSOR_MODELS_CACHE_TTL_MS. When discovery fails, a stale cache is
 * served if present; otherwise the static fallback is returned AND
 * negative-cached briefly — without that, every listing while the CLI is
 * missing would re-run a blocking spawnSync with a 5s timeout.
 *
 * @returns the deduplicated model list for the cursor adapter.
 */
export async function listCursorModels(): Promise<AdapterModel[]> {
  const now = Date.now();
  if (cached && cached.expiresAt > now) {
    return cached.models;
  }
  const discovered = fetchCursorModelsFromCli();
  if (discovered.length > 0) {
    const merged = mergedWithFallback(discovered);
    cached = {
      expiresAt: now + CURSOR_MODELS_CACHE_TTL_MS,
      models: merged,
    };
    return merged;
  }
  if (cached && cached.models.length > 0) {
    // Discovery failed but stale data exists; keep serving it. The expired
    // entry is left in place so the next call retries the CLI.
    return cached.models;
  }
  // Negative cache: remember the fallback for a short window so a missing
  // or broken CLI does not trigger a blocking spawn on every request.
  const fallback = dedupeModels(cursorFallbackModels);
  cached = {
    expiresAt: now + Math.min(CURSOR_MODELS_CACHE_TTL_MS, 10_000),
    models: fallback,
  };
  return fallback;
}
// Test hook: clears the module-level model cache so each test starts cold.
export function resetCursorModelsCacheForTests() {
cached = null;
}
/**
 * Test hook: overrides the command runner used for model discovery.
 * Passing null restores the real spawnSync-backed runner.
 */
export function setCursorModelsRunnerForTests(runner: (() => CursorModelsCommandResult) | null) {
  if (runner === null) {
    cursorModelsRunner = defaultCursorModelsRunner;
  } else {
    cursorModelsRunner = runner;
  }
}

View File

@@ -32,6 +32,7 @@ import {
models as openclawModels,
} from "@paperclipai/adapter-openclaw";
import { listCodexModels } from "./codex-models.js";
import { listCursorModels } from "./cursor-models.js";
import { processAdapter } from "./process/index.js";
import { httpAdapter } from "./http/index.js";
@@ -72,6 +73,7 @@ const cursorLocalAdapter: ServerAdapterModule = {
testEnvironment: cursorTestEnvironment,
sessionCodec: cursorSessionCodec,
models: cursorModels,
listModels: listCursorModels,
supportsLocalAgentJwt: true,
agentConfigurationDoc: cursorAgentConfigurationDoc,
};