Add cli/ package with initial scaffolding. Add config-schema to shared package for typed configuration. Add server config-file loader for paperclip.config.ts support. Register cli in pnpm workspace. Add .paperclip/ and .pnpm-store/ to gitignore. Minor Companies page fix. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
87 lines
2.5 KiB
TypeScript
import type { PaperclipConfig } from "../config/schema.js";
|
|
import type { CheckResult } from "./index.js";
|
|
|
|
export async function llmCheck(config: PaperclipConfig): Promise<CheckResult> {
|
|
if (!config.llm) {
|
|
return {
|
|
name: "LLM provider",
|
|
status: "warn",
|
|
message: "No LLM provider configured",
|
|
canRepair: false,
|
|
repairHint: "Run `paperclip configure --section llm` to set one up",
|
|
};
|
|
}
|
|
|
|
if (!config.llm.apiKey) {
|
|
return {
|
|
name: "LLM provider",
|
|
status: "warn",
|
|
message: `${config.llm.provider} configured but no API key set`,
|
|
canRepair: false,
|
|
repairHint: "Run `paperclip configure --section llm`",
|
|
};
|
|
}
|
|
|
|
try {
|
|
if (config.llm.provider === "claude") {
|
|
const res = await fetch("https://api.anthropic.com/v1/messages", {
|
|
method: "POST",
|
|
headers: {
|
|
"x-api-key": config.llm.apiKey,
|
|
"anthropic-version": "2023-06-01",
|
|
"content-type": "application/json",
|
|
},
|
|
body: JSON.stringify({
|
|
model: "claude-sonnet-4-5-20250929",
|
|
max_tokens: 1,
|
|
messages: [{ role: "user", content: "hi" }],
|
|
}),
|
|
});
|
|
if (res.ok || res.status === 400) {
|
|
return { name: "LLM provider", status: "pass", message: "Claude API key is valid" };
|
|
}
|
|
if (res.status === 401) {
|
|
return {
|
|
name: "LLM provider",
|
|
status: "fail",
|
|
message: "Claude API key is invalid (401)",
|
|
canRepair: false,
|
|
repairHint: "Run `paperclip configure --section llm`",
|
|
};
|
|
}
|
|
return {
|
|
name: "LLM provider",
|
|
status: "warn",
|
|
message: `Claude API returned status ${res.status}`,
|
|
};
|
|
} else {
|
|
const res = await fetch("https://api.openai.com/v1/models", {
|
|
headers: { Authorization: `Bearer ${config.llm.apiKey}` },
|
|
});
|
|
if (res.ok) {
|
|
return { name: "LLM provider", status: "pass", message: "OpenAI API key is valid" };
|
|
}
|
|
if (res.status === 401) {
|
|
return {
|
|
name: "LLM provider",
|
|
status: "fail",
|
|
message: "OpenAI API key is invalid (401)",
|
|
canRepair: false,
|
|
repairHint: "Run `paperclip configure --section llm`",
|
|
};
|
|
}
|
|
return {
|
|
name: "LLM provider",
|
|
status: "warn",
|
|
message: `OpenAI API returned status ${res.status}`,
|
|
};
|
|
}
|
|
} catch {
|
|
return {
|
|
name: "LLM provider",
|
|
status: "warn",
|
|
message: "Could not reach API to validate key",
|
|
};
|
|
}
|
|
}
|