Fix portability import and org chart test blockers

This commit is contained in:
dotta
2026-03-20 14:06:37 -05:00
parent 1830216078
commit 553e7b6b30
5 changed files with 251 additions and 12 deletions

View File

@@ -623,6 +623,124 @@ describe("company portability", () => {
]); ]);
}); });
it("imports a vendor-neutral package without .paperclip.yaml", async () => {
const portability = companyPortabilityService({} as any);
companySvc.create.mockResolvedValue({
id: "company-imported",
name: "Imported Paperclip",
});
accessSvc.ensureMembership.mockResolvedValue(undefined);
agentSvc.create.mockResolvedValue({
id: "agent-created",
name: "ClaudeCoder",
});
const preview = await portability.previewImport({
source: {
type: "inline",
rootPath: "paperclip-demo",
files: {
"COMPANY.md": [
"---",
'schema: "agentcompanies/v1"',
'name: "Imported Paperclip"',
'description: "Portable company package"',
"---",
"",
"# Imported Paperclip",
"",
].join("\n"),
"agents/claudecoder/AGENTS.md": [
"---",
'name: "ClaudeCoder"',
'title: "Software Engineer"',
"---",
"",
"# ClaudeCoder",
"",
"You write code.",
"",
].join("\n"),
},
},
include: {
company: true,
agents: true,
projects: false,
issues: false,
},
target: {
mode: "new_company",
newCompanyName: "Imported Paperclip",
},
agents: "all",
collisionStrategy: "rename",
});
expect(preview.errors).toEqual([]);
expect(preview.manifest.company?.name).toBe("Imported Paperclip");
expect(preview.manifest.agents).toEqual([
expect.objectContaining({
slug: "claudecoder",
name: "ClaudeCoder",
adapterType: "process",
}),
]);
expect(preview.envInputs).toEqual([]);
await portability.importBundle({
source: {
type: "inline",
rootPath: "paperclip-demo",
files: {
"COMPANY.md": [
"---",
'schema: "agentcompanies/v1"',
'name: "Imported Paperclip"',
'description: "Portable company package"',
"---",
"",
"# Imported Paperclip",
"",
].join("\n"),
"agents/claudecoder/AGENTS.md": [
"---",
'name: "ClaudeCoder"',
'title: "Software Engineer"',
"---",
"",
"# ClaudeCoder",
"",
"You write code.",
"",
].join("\n"),
},
},
include: {
company: true,
agents: true,
projects: false,
issues: false,
},
target: {
mode: "new_company",
newCompanyName: "Imported Paperclip",
},
agents: "all",
collisionStrategy: "rename",
}, "user-1");
expect(companySvc.create).toHaveBeenCalledWith(expect.objectContaining({
name: "Imported Paperclip",
description: "Portable company package",
}));
expect(agentSvc.create).toHaveBeenCalledWith("company-imported", expect.objectContaining({
name: "ClaudeCoder",
adapterType: "process",
}));
});
it("treats no-separator auth and api key env names as secrets during export", async () => { it("treats no-separator auth and api key env names as secrets during export", async () => {
const portability = companyPortabilityService({} as any); const portability = companyPortabilityService({} as any);

View File

@@ -3,7 +3,6 @@
* Supports 5 visual styles: monochrome, nebula, circuit, warmth, schematic. * Supports 5 visual styles: monochrome, nebula, circuit, warmth, schematic.
* Pure SVG output — no browser/Playwright needed. PNG via sharp. * Pure SVG output — no browser/Playwright needed. PNG via sharp.
*/ */
import sharp from "sharp";
export interface OrgNode { export interface OrgNode {
id: string; id: string;
@@ -546,6 +545,8 @@ export function renderOrgChartSvg(orgTree: OrgNode[], style: OrgChartStyle = "wa
export async function renderOrgChartPng(orgTree: OrgNode[], style: OrgChartStyle = "warmth"): Promise<Buffer> { export async function renderOrgChartPng(orgTree: OrgNode[], style: OrgChartStyle = "warmth"): Promise<Buffer> {
const svg = renderOrgChartSvg(orgTree, style); const svg = renderOrgChartSvg(orgTree, style);
const sharpModule = await import("sharp");
const sharp = sharpModule.default;
// Render at 2x density for retina quality, resize to exact target dimensions // Render at 2x density for retina quality, resize to exact target dimensions
return sharp(Buffer.from(svg), { density: 144 }) return sharp(Buffer.from(svg), { density: 144 })
.resize(TARGET_W, TARGET_H) .resize(TARGET_W, TARGET_H)

View File

@@ -1,5 +1,6 @@
// @vitest-environment node // @vitest-environment node
import { deflateRawSync } from "node:zlib";
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { createZipArchive, readZipArchive } from "./zip"; import { createZipArchive, readZipArchive } from "./zip";
@@ -20,6 +21,95 @@ function readString(bytes: Uint8Array, offset: number, length: number) {
return new TextDecoder().decode(bytes.slice(offset, offset + length)); return new TextDecoder().decode(bytes.slice(offset, offset + length));
} }
// Write `value` into `target` at `offset` as a little-endian 16-bit integer
// (the byte order used for every zip header field).
function writeUint16(target: Uint8Array, offset: number, value: number) {
  for (let byteIndex = 0; byteIndex < 2; byteIndex += 1) {
    target[offset + byteIndex] = (value >>> (8 * byteIndex)) & 0xff;
  }
}
// Write `value` into `target` at `offset` as a little-endian 32-bit integer
// (the byte order used for every zip header field).
function writeUint32(target: Uint8Array, offset: number, value: number) {
  for (let byteIndex = 0; byteIndex < 4; byteIndex += 1) {
    target[offset + byteIndex] = (value >>> (8 * byteIndex)) & 0xff;
  }
}
// CRC-32 (the polynomial zip uses, reflected 0xedb88320), computed bit by bit.
// Returns the checksum as an unsigned 32-bit number.
function crc32(bytes: Uint8Array) {
  let state = ~0; // all-ones initial value (0xffffffff as a signed int32)
  for (let i = 0; i < bytes.length; i += 1) {
    state ^= bytes[i];
    // Branchless form of "shift right; xor the polynomial when the LSB was set".
    for (let bit = 8; bit > 0; bit -= 1) {
      state = (state >>> 1) ^ (0xedb88320 & -(state & 1));
    }
  }
  return ~state >>> 0; // final complement, coerced back to unsigned
}
// Test fixture builder: produces a minimal zip archive whose entries use
// compression method 8 (DEFLATE), unlike createZipArchive under test which
// emits STORE entries. Lets the suite prove readZipArchive can consume
// archives produced by standard external tools.
function createDeflatedZipArchive(files: Record<string, string>, rootPath: string) {
  const encoder = new TextEncoder();
  const localChunks: Uint8Array[] = [];   // local file headers + compressed bodies
  const centralChunks: Uint8Array[] = []; // central directory records
  let localOffset = 0;                    // byte offset of the next local header
  let entryCount = 0;
  // Sort by path so the archive layout is deterministic across runs.
  for (const [relativePath, content] of Object.entries(files).sort(([a], [b]) => a.localeCompare(b))) {
    const fileName = encoder.encode(`${rootPath}/${relativePath}`);
    const rawBody = encoder.encode(content);
    const deflatedBody = new Uint8Array(deflateRawSync(rawBody));
    // CRC-32 is computed over the *uncompressed* data, per the zip spec.
    const checksum = crc32(rawBody);
    // Local file header: 30 fixed bytes + file name. Signature 0x04034b50.
    const localHeader = new Uint8Array(30 + fileName.length);
    writeUint32(localHeader, 0, 0x04034b50);
    writeUint16(localHeader, 4, 20);     // version needed to extract (2.0)
    writeUint16(localHeader, 6, 0x0800); // general-purpose flag: UTF-8 names
    writeUint16(localHeader, 8, 8);      // compression method: DEFLATE
    writeUint32(localHeader, 14, checksum);
    writeUint32(localHeader, 18, deflatedBody.length); // compressed size
    writeUint32(localHeader, 22, rawBody.length);      // uncompressed size
    writeUint16(localHeader, 26, fileName.length);
    localHeader.set(fileName, 30);
    // Central directory record: 46 fixed bytes + file name. Signature 0x02014b50.
    const centralHeader = new Uint8Array(46 + fileName.length);
    writeUint32(centralHeader, 0, 0x02014b50);
    writeUint16(centralHeader, 4, 20);     // version made by
    writeUint16(centralHeader, 6, 20);     // version needed to extract
    writeUint16(centralHeader, 8, 0x0800); // general-purpose flag: UTF-8 names
    writeUint16(centralHeader, 10, 8);     // compression method: DEFLATE
    writeUint32(centralHeader, 16, checksum);
    writeUint32(centralHeader, 20, deflatedBody.length);
    writeUint32(centralHeader, 24, rawBody.length);
    writeUint16(centralHeader, 28, fileName.length);
    writeUint32(centralHeader, 42, localOffset); // where this entry's local header lives
    centralHeader.set(fileName, 46);
    localChunks.push(localHeader, deflatedBody);
    centralChunks.push(centralHeader);
    localOffset += localHeader.length + deflatedBody.length;
    entryCount += 1;
  }
  // Assemble: [local sections][central directory][22-byte end-of-central-directory].
  const centralDirectoryLength = centralChunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const archive = new Uint8Array(
    localChunks.reduce((sum, chunk) => sum + chunk.length, 0) + centralDirectoryLength + 22,
  );
  let offset = 0;
  for (const chunk of localChunks) {
    archive.set(chunk, offset);
    offset += chunk.length;
  }
  const centralDirectoryOffset = offset;
  for (const chunk of centralChunks) {
    archive.set(chunk, offset);
    offset += chunk.length;
  }
  // End-of-central-directory record. Signature 0x06054b50; disk-number fields
  // (offsets 4-7) are left zero, meaning single-disk archive.
  writeUint32(archive, offset, 0x06054b50);
  writeUint16(archive, offset + 8, entryCount);  // entries on this disk
  writeUint16(archive, offset + 10, entryCount); // entries total
  writeUint32(archive, offset + 12, centralDirectoryLength);
  writeUint32(archive, offset + 16, centralDirectoryOffset);
  return archive;
}
describe("createZipArchive", () => { describe("createZipArchive", () => {
it("writes a zip archive with the export root path prefixed into each entry", () => { it("writes a zip archive with the export root path prefixed into each entry", () => {
const archive = createZipArchive( const archive = createZipArchive(
@@ -51,7 +141,7 @@ describe("createZipArchive", () => {
expect(readUint16(archive, endOffset + 10)).toBe(2); expect(readUint16(archive, endOffset + 10)).toBe(2);
}); });
it("reads a Paperclip zip archive back into rootPath and file contents", () => { it("reads a Paperclip zip archive back into rootPath and file contents", async () => {
const archive = createZipArchive( const archive = createZipArchive(
{ {
"COMPANY.md": "# Company\n", "COMPANY.md": "# Company\n",
@@ -61,7 +151,7 @@ describe("createZipArchive", () => {
"paperclip-demo", "paperclip-demo",
); );
expect(readZipArchive(archive)).toEqual({ await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo", rootPath: "paperclip-demo",
files: { files: {
"COMPANY.md": "# Company\n", "COMPANY.md": "# Company\n",
@@ -71,7 +161,7 @@ describe("createZipArchive", () => {
}); });
}); });
it("round-trips binary image files without coercing them to text", () => { it("round-trips binary image files without coercing them to text", async () => {
const archive = createZipArchive( const archive = createZipArchive(
{ {
"images/company-logo.png": { "images/company-logo.png": {
@@ -83,7 +173,7 @@ describe("createZipArchive", () => {
"paperclip-demo", "paperclip-demo",
); );
expect(readZipArchive(archive)).toEqual({ await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo", rootPath: "paperclip-demo",
files: { files: {
"images/company-logo.png": { "images/company-logo.png": {
@@ -94,4 +184,22 @@ describe("createZipArchive", () => {
}, },
}); });
}); });
it("reads standard DEFLATE zip archives created outside Paperclip", async () => {
const archive = createDeflatedZipArchive(
{
"COMPANY.md": "# Company\n",
"agents/ceo/AGENTS.md": "# CEO\n",
},
"paperclip-demo",
);
await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo",
files: {
"COMPANY.md": "# Company\n",
"agents/ceo/AGENTS.md": "# CEO\n",
},
});
});
}); });

View File

@@ -136,10 +136,24 @@ function portableFileEntryToBytes(entry: CompanyPortabilityFileEntry): Uint8Arra
return base64ToBytes(entry.data); return base64ToBytes(entry.data);
} }
export function readZipArchive(source: ArrayBuffer | Uint8Array): { async function inflateZipEntry(compressionMethod: number, bytes: Uint8Array) {
if (compressionMethod === 0) return bytes;
if (compressionMethod !== 8) {
throw new Error("Unsupported zip archive: only STORE and DEFLATE entries are supported.");
}
if (typeof DecompressionStream !== "function") {
throw new Error("Unsupported zip archive: this browser cannot read compressed zip entries.");
}
const body = new Uint8Array(bytes.byteLength);
body.set(bytes);
const stream = new Blob([body]).stream().pipeThrough(new DecompressionStream("deflate-raw"));
return new Uint8Array(await new Response(stream).arrayBuffer());
}
export async function readZipArchive(source: ArrayBuffer | Uint8Array): Promise<{
rootPath: string | null; rootPath: string | null;
files: Record<string, CompanyPortabilityFileEntry>; files: Record<string, CompanyPortabilityFileEntry>;
} { }> {
const bytes = source instanceof Uint8Array ? source : new Uint8Array(source); const bytes = source instanceof Uint8Array ? source : new Uint8Array(source);
const entries: Array<{ path: string; body: CompanyPortabilityFileEntry }> = []; const entries: Array<{ path: string; body: CompanyPortabilityFileEntry }> = [];
let offset = 0; let offset = 0;
@@ -164,9 +178,6 @@ export function readZipArchive(source: ArrayBuffer | Uint8Array): {
if ((generalPurposeFlag & 0x0008) !== 0) { if ((generalPurposeFlag & 0x0008) !== 0) {
throw new Error("Unsupported zip archive: data descriptors are not supported."); throw new Error("Unsupported zip archive: data descriptors are not supported.");
} }
if (compressionMethod !== 0) {
throw new Error("Unsupported zip archive: only uncompressed entries are supported.");
}
const nameOffset = offset + 30; const nameOffset = offset + 30;
const bodyOffset = nameOffset + fileNameLength + extraFieldLength; const bodyOffset = nameOffset + fileNameLength + extraFieldLength;
@@ -179,9 +190,10 @@ export function readZipArchive(source: ArrayBuffer | Uint8Array): {
textDecoder.decode(bytes.slice(nameOffset, nameOffset + fileNameLength)), textDecoder.decode(bytes.slice(nameOffset, nameOffset + fileNameLength)),
); );
if (archivePath && !archivePath.endsWith("/")) { if (archivePath && !archivePath.endsWith("/")) {
const entryBytes = await inflateZipEntry(compressionMethod, bytes.slice(bodyOffset, bodyEnd));
entries.push({ entries.push({
path: archivePath, path: archivePath,
body: bytesToPortableFileEntry(archivePath, bytes.slice(bodyOffset, bodyEnd)), body: bytesToPortableFileEntry(archivePath, entryBytes),
}); });
} }

View File

@@ -588,7 +588,7 @@ async function readLocalPackageZip(file: File): Promise<{
if (!/\.zip$/i.test(file.name)) { if (!/\.zip$/i.test(file.name)) {
throw new Error("Select a .zip company package."); throw new Error("Select a .zip company package.");
} }
const archive = readZipArchive(await file.arrayBuffer()); const archive = await readZipArchive(await file.arrayBuffer());
if (Object.keys(archive.files).length === 0) { if (Object.keys(archive.files).length === 0) {
throw new Error("No package files were found in the selected zip archive."); throw new Error("No package files were found in the selected zip archive.");
} }