Fix portability import and org chart test blockers

This commit is contained in:
dotta
2026-03-20 14:06:37 -05:00
parent 1830216078
commit 553e7b6b30
5 changed files with 251 additions and 12 deletions

View File

@@ -1,5 +1,6 @@
// @vitest-environment node
import { deflateRawSync } from "node:zlib";
import { describe, expect, it } from "vitest";
import { createZipArchive, readZipArchive } from "./zip";
@@ -20,6 +21,95 @@ function readString(bytes: Uint8Array, offset: number, length: number) {
return new TextDecoder().decode(bytes.slice(offset, offset + length));
}
// Store a 16-bit value into `target` at `offset` in little-endian byte order.
function writeUint16(target: Uint8Array, offset: number, value: number) {
  for (let i = 0; i < 2; i += 1) {
    target[offset + i] = (value >>> (8 * i)) & 0xff;
  }
}
// Store a 32-bit value into `target` at `offset` in little-endian byte order.
function writeUint32(target: Uint8Array, offset: number, value: number) {
  for (let i = 0; i < 4; i += 1) {
    target[offset + i] = (value >>> (8 * i)) & 0xff;
  }
}
// CRC-32 (IEEE 802.3, reflected polynomial 0xEDB88320) over `bytes`,
// computed bit-by-bit without a lookup table. Matches the checksum zip
// headers expect. Returns an unsigned 32-bit integer.
function crc32(bytes: Uint8Array) {
  let state = ~0;
  for (let i = 0; i < bytes.length; i += 1) {
    state ^= bytes[i];
    for (let round = 0; round < 8; round += 1) {
      // Branchless form: the mask is all-ones when the low bit is set.
      state = (state >>> 1) ^ (0xedb88320 & -(state & 1));
    }
  }
  return ~state >>> 0;
}
function createDeflatedZipArchive(files: Record<string, string>, rootPath: string) {
const encoder = new TextEncoder();
const localChunks: Uint8Array[] = [];
const centralChunks: Uint8Array[] = [];
let localOffset = 0;
let entryCount = 0;
for (const [relativePath, content] of Object.entries(files).sort(([a], [b]) => a.localeCompare(b))) {
const fileName = encoder.encode(`${rootPath}/${relativePath}`);
const rawBody = encoder.encode(content);
const deflatedBody = new Uint8Array(deflateRawSync(rawBody));
const checksum = crc32(rawBody);
const localHeader = new Uint8Array(30 + fileName.length);
writeUint32(localHeader, 0, 0x04034b50);
writeUint16(localHeader, 4, 20);
writeUint16(localHeader, 6, 0x0800);
writeUint16(localHeader, 8, 8);
writeUint32(localHeader, 14, checksum);
writeUint32(localHeader, 18, deflatedBody.length);
writeUint32(localHeader, 22, rawBody.length);
writeUint16(localHeader, 26, fileName.length);
localHeader.set(fileName, 30);
const centralHeader = new Uint8Array(46 + fileName.length);
writeUint32(centralHeader, 0, 0x02014b50);
writeUint16(centralHeader, 4, 20);
writeUint16(centralHeader, 6, 20);
writeUint16(centralHeader, 8, 0x0800);
writeUint16(centralHeader, 10, 8);
writeUint32(centralHeader, 16, checksum);
writeUint32(centralHeader, 20, deflatedBody.length);
writeUint32(centralHeader, 24, rawBody.length);
writeUint16(centralHeader, 28, fileName.length);
writeUint32(centralHeader, 42, localOffset);
centralHeader.set(fileName, 46);
localChunks.push(localHeader, deflatedBody);
centralChunks.push(centralHeader);
localOffset += localHeader.length + deflatedBody.length;
entryCount += 1;
}
const centralDirectoryLength = centralChunks.reduce((sum, chunk) => sum + chunk.length, 0);
const archive = new Uint8Array(
localChunks.reduce((sum, chunk) => sum + chunk.length, 0) + centralDirectoryLength + 22,
);
let offset = 0;
for (const chunk of localChunks) {
archive.set(chunk, offset);
offset += chunk.length;
}
const centralDirectoryOffset = offset;
for (const chunk of centralChunks) {
archive.set(chunk, offset);
offset += chunk.length;
}
writeUint32(archive, offset, 0x06054b50);
writeUint16(archive, offset + 8, entryCount);
writeUint16(archive, offset + 10, entryCount);
writeUint32(archive, offset + 12, centralDirectoryLength);
writeUint32(archive, offset + 16, centralDirectoryOffset);
return archive;
}
describe("createZipArchive", () => {
it("writes a zip archive with the export root path prefixed into each entry", () => {
const archive = createZipArchive(
@@ -51,7 +141,7 @@ describe("createZipArchive", () => {
expect(readUint16(archive, endOffset + 10)).toBe(2);
});
it("reads a Paperclip zip archive back into rootPath and file contents", () => {
it("reads a Paperclip zip archive back into rootPath and file contents", async () => {
const archive = createZipArchive(
{
"COMPANY.md": "# Company\n",
@@ -61,7 +151,7 @@ describe("createZipArchive", () => {
"paperclip-demo",
);
expect(readZipArchive(archive)).toEqual({
await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo",
files: {
"COMPANY.md": "# Company\n",
@@ -71,7 +161,7 @@ describe("createZipArchive", () => {
});
});
it("round-trips binary image files without coercing them to text", () => {
it("round-trips binary image files without coercing them to text", async () => {
const archive = createZipArchive(
{
"images/company-logo.png": {
@@ -83,7 +173,7 @@ describe("createZipArchive", () => {
"paperclip-demo",
);
expect(readZipArchive(archive)).toEqual({
await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo",
files: {
"images/company-logo.png": {
@@ -94,4 +184,22 @@ describe("createZipArchive", () => {
},
});
});
// Interop check: this archive is built by the local createDeflatedZipArchive
// helper, which writes compression method 8 (DEFLATE) entries, so the test
// proves readZipArchive can inflate entries rather than accepting only
// uncompressed ones.
it("reads standard DEFLATE zip archives created outside Paperclip", async () => {
const archive = createDeflatedZipArchive(
{
"COMPANY.md": "# Company\n",
"agents/ceo/AGENTS.md": "# CEO\n",
},
"paperclip-demo",
);
// readZipArchive is async (it awaits decompression), hence resolves.toEqual.
await expect(readZipArchive(archive)).resolves.toEqual({
rootPath: "paperclip-demo",
files: {
"COMPANY.md": "# Company\n",
"agents/ceo/AGENTS.md": "# CEO\n",
},
});
});
});

View File

@@ -136,10 +136,24 @@ function portableFileEntryToBytes(entry: CompanyPortabilityFileEntry): Uint8Arra
return base64ToBytes(entry.data);
}
export function readZipArchive(source: ArrayBuffer | Uint8Array): {
// Decode one zip entry body according to its compression method.
// Method 0 (STORE) is returned as-is; method 8 (DEFLATE) is inflated via the
// platform DecompressionStream. Any other method, or a runtime without
// DecompressionStream, raises an error.
async function inflateZipEntry(compressionMethod: number, bytes: Uint8Array) {
  if (compressionMethod === 0) return bytes;
  if (compressionMethod !== 8) {
    throw new Error("Unsupported zip archive: only STORE and DEFLATE entries are supported.");
  }
  if (typeof DecompressionStream !== "function") {
    throw new Error("Unsupported zip archive: this browser cannot read compressed zip entries.");
  }
  // Copy into a standalone buffer so the Blob does not capture a view into
  // the (potentially much larger) source archive buffer.
  const compressed = new Uint8Array(bytes.byteLength);
  compressed.set(bytes);
  const inflated = new Blob([compressed])
    .stream()
    .pipeThrough(new DecompressionStream("deflate-raw"));
  const buffer = await new Response(inflated).arrayBuffer();
  return new Uint8Array(buffer);
}
export async function readZipArchive(source: ArrayBuffer | Uint8Array): Promise<{
rootPath: string | null;
files: Record<string, CompanyPortabilityFileEntry>;
} {
}> {
const bytes = source instanceof Uint8Array ? source : new Uint8Array(source);
const entries: Array<{ path: string; body: CompanyPortabilityFileEntry }> = [];
let offset = 0;
@@ -164,9 +178,6 @@ export function readZipArchive(source: ArrayBuffer | Uint8Array): {
if ((generalPurposeFlag & 0x0008) !== 0) {
throw new Error("Unsupported zip archive: data descriptors are not supported.");
}
if (compressionMethod !== 0) {
throw new Error("Unsupported zip archive: only uncompressed entries are supported.");
}
const nameOffset = offset + 30;
const bodyOffset = nameOffset + fileNameLength + extraFieldLength;
@@ -179,9 +190,10 @@ export function readZipArchive(source: ArrayBuffer | Uint8Array): {
textDecoder.decode(bytes.slice(nameOffset, nameOffset + fileNameLength)),
);
if (archivePath && !archivePath.endsWith("/")) {
const entryBytes = await inflateZipEntry(compressionMethod, bytes.slice(bodyOffset, bodyEnd));
entries.push({
path: archivePath,
body: bytesToPortableFileEntry(archivePath, bytes.slice(bodyOffset, bodyEnd)),
body: bytesToPortableFileEntry(archivePath, entryBytes),
});
}

View File

@@ -588,7 +588,7 @@ async function readLocalPackageZip(file: File): Promise<{
if (!/\.zip$/i.test(file.name)) {
throw new Error("Select a .zip company package.");
}
const archive = readZipArchive(await file.arrayBuffer());
const archive = await readZipArchive(await file.arrayBuffer());
if (Object.keys(archive.files).length === 0) {
throw new Error("No package files were found in the selected zip archive.");
}