🧮 fix: Count Rejected Skill Import Bytes (#13063)

This commit is contained in:
Danny Avila 2026-05-11 08:40:31 -04:00 committed by GitHub
parent 822ad6c36a
commit 9797c85e23
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 147 additions and 10 deletions

View file

@ -1,4 +1,94 @@
import { parseFrontmatter } from '../import';
import JSZip from 'jszip';
import { Types } from 'mongoose';
import type { ISkill, ISkillFile, CreateSkillResult } from '@librechat/data-schemas';
import type { Response } from 'express';
import type { ImportSkillDeps } from '../import';
import { createImportHandler, parseFrontmatter } from '../import';
/** Request type accepted by the handler that createImportHandler returns (its first parameter). */
type ImportRequest = Parameters<ReturnType<typeof createImportHandler>>[0];
/** Express Response extended with a captured `body` so tests can inspect what was sent via res.json(). */
interface MockResponse extends Response {
  // Last payload passed to res.json(); populated by the mockResponse() double below.
  body?: unknown;
}
/** Shape of the `_importSummary` object the import handler attaches to its JSON response body. */
interface ImportSummary {
  /** Count of archive entries the handler attempted to import (SKILL.md excluded — see the createImportHandler test). */
  filesProcessed: number;
  /** Count of entries imported successfully. */
  filesSucceeded: number;
  /** Count of entries rejected, e.g. for exceeding a size limit. */
  filesFailed: number;
  /** Per-entry outcome records; `error` is expected to be present for failed entries. */
  errors: Array<{ path: string; status: 'ok' | 'error'; error?: string }>;
}
/**
 * Builds a minimal Express-like response double that records the status code
 * and JSON payload for later assertions. Both mocks return the response
 * object so the handler's `res.status(...).json(...)` chaining keeps working.
 */
function mockResponse(): MockResponse {
  const response = {} as MockResponse;
  const statusMock = jest.fn((code: number) => {
    response.statusCode = code;
    return response;
  });
  const jsonMock = jest.fn((payload: unknown) => {
    response.body = payload;
    return response;
  });
  response.status = statusMock as MockResponse['status'];
  response.json = jsonMock as MockResponse['json'];
  return response;
}
/**
 * Creates a fully-stubbed ImportSkillDeps where every dependency resolves
 * successfully, so tests can focus purely on the handler's limit enforcement.
 *
 * @param limits - Optional limit overrides forwarded to the handler unchanged.
 */
function mockImportDeps(limits?: ImportSkillDeps['limits']): ImportSkillDeps {
  const stubSkill = {
    _id: new Types.ObjectId(),
    name: 'tiny-limit-skill',
    description: 'A skill used by import handler tests.',
  } as ISkill & { _id: Types.ObjectId };
  const stubSkillFile = { _id: new Types.ObjectId() } as ISkillFile & { _id: Types.ObjectId };

  return {
    limits,
    createSkill: jest.fn(async () => ({ skill: stubSkill }) as unknown as CreateSkillResult),
    getSkillById: jest.fn(async () => stubSkill),
    deleteSkill: jest.fn(async () => ({ deleted: true })),
    upsertSkillFile: jest.fn(async () => stubSkillFile),
    saveBuffer: jest.fn(async () => ({ filepath: '/tmp/imported-file', source: 'local' })),
    grantPermission: jest.fn(async () => undefined),
  };
}
/**
 * Wraps a zip buffer in the minimal upload-request shape the import handler
 * reads: an authenticated user plus a multer-style `file` with the raw bytes.
 */
function mockZipRequest(buffer: Buffer): ImportRequest {
  const user = {
    id: 'user-1',
    _id: new Types.ObjectId(),
    username: 'tester',
  };
  const file = {
    originalname: 'tiny-limit-skill.skill',
    buffer,
  };
  return { user, file } as unknown as ImportRequest;
}
/** Extracts the `_importSummary` payload from the handler's JSON response body. */
function importSummary(body: unknown): ImportSummary {
  const wrapper = body as { _importSummary: ImportSummary };
  return wrapper._importSummary;
}
async function zipWithAdditionalFiles(fileCount: number, fileBytes: number): Promise<Buffer> {
const zip = new JSZip();
zip.file(
'SKILL.md',
[
'---',
'name: tiny-limit-skill',
'description: A skill used by import handler tests.',
'---',
'# Test skill',
].join('\n'),
);
const content = 'a'.repeat(fileBytes);
for (let i = 0; i < fileCount; i++) {
zip.file(`files/${i}.txt`, content);
}
return zip.generateAsync({ type: 'nodebuffer', compression: 'DEFLATE' });
}
describe('parseFrontmatter', () => {
it('extracts name + description from a minimal frontmatter block', () => {
@ -125,3 +215,27 @@ describe('parseFrontmatter', () => {
expect(result.invalidBooleans).toEqual([]);
});
});
describe('createImportHandler', () => {
it('counts rejected oversized zip entries toward the cumulative decompressed limit', async () => {
const kib = 1024;
const deps = mockImportDeps({
maxZipBytes: 1024 * kib,
maxEntries: 10,
maxSingleFileBytes: 10 * kib,
maxDecompressedBytes: 32 * kib,
});
const handler = createImportHandler(deps);
const buffer = await zipWithAdditionalFiles(4, 11 * kib);
const res = mockResponse();
await handler(mockZipRequest(buffer), res);
expect(res.status).toHaveBeenCalledWith(201);
const summary = importSummary(res.body);
expect(summary.filesProcessed).toBe(3);
expect(summary.filesSucceeded).toBe(0);
expect(summary.filesFailed).toBe(3);
expect(summary.errors).toHaveLength(3);
});
});

View file

@ -20,6 +20,13 @@ const MAX_ENTRIES = 500;
const MAX_SINGLE_FILE_BYTES = 10 * 1024 * 1024; // 10 MB per file
const SKILL_MD = 'SKILL.md';
/** Size and count caps enforced while importing a skill archive. */
export interface ImportLimits {
  /** Maximum size of the uploaded zip file itself, in bytes. */
  maxZipBytes: number;
  /** Cumulative cap on decompressed bytes across all archive entries. */
  maxDecompressedBytes: number;
  /** Maximum number of entries permitted in the archive. */
  maxEntries: number;
  /** Per-entry cap on decompressed size, in bytes (also applied to SKILL.md). */
  maxSingleFileBytes: number;
}
/** Strip surrounding YAML quotes (single or double) from a scalar value. */
function unquoteYaml(value: string): string {
if (
@ -156,6 +163,7 @@ function isDuplicateKeyError(error: unknown): boolean {
}
export interface ImportSkillDeps {
limits?: Partial<ImportLimits>;
createSkill: (data: CreateSkillInput) => Promise<CreateSkillResult>;
getSkillById: (id: string | Types.ObjectId) => Promise<(ISkill & { _id: Types.ObjectId }) | null>;
deleteSkill: (id: string) => Promise<{ deleted: boolean }>;
@ -238,6 +246,15 @@ export function createImportHandler(deps: ImportSkillDeps) {
};
}
function getImportLimits(limits?: Partial<ImportLimits>): ImportLimits {
return {
maxZipBytes: limits?.maxZipBytes ?? MAX_ZIP_BYTES,
maxDecompressedBytes: limits?.maxDecompressedBytes ?? MAX_DECOMPRESSED_BYTES,
maxEntries: limits?.maxEntries ?? MAX_ENTRIES,
maxSingleFileBytes: limits?.maxSingleFileBytes ?? MAX_SINGLE_FILE_BYTES,
};
}
/** Resolve author metadata from the request user. */
function getAuthorInfo(req: ServerRequest) {
const user = req.user;
@ -334,11 +351,14 @@ async function handleZip(
file: Express.Multer.File,
) {
const userId = req.user.id;
const limits = getImportLimits(deps.limits);
const zipBuffer = file.buffer;
if (zipBuffer.length > MAX_ZIP_BYTES) {
return res.status(400).json({ error: `File too large (max ${MAX_ZIP_BYTES / 1024 / 1024}MB)` });
if (zipBuffer.length > limits.maxZipBytes) {
return res
.status(400)
.json({ error: `File too large (max ${limits.maxZipBytes / 1024 / 1024}MB)` });
}
let zip: JSZip;
@ -349,8 +369,8 @@ async function handleZip(
}
const entries = Object.keys(zip.files);
if (entries.length > MAX_ENTRIES) {
return res.status(400).json({ error: `Too many files in archive (max ${MAX_ENTRIES})` });
if (entries.length > limits.maxEntries) {
return res.status(400).json({ error: `Too many files in archive (max ${limits.maxEntries})` });
}
// Find SKILL.md — at root or one level deep
@ -377,7 +397,7 @@ async function handleZip(
const declaredSize =
(skillMdEntry as unknown as { _data?: { uncompressedSize?: number } })?._data
?.uncompressedSize ?? 0;
if (declaredSize > MAX_SINGLE_FILE_BYTES) {
if (declaredSize > limits.maxSingleFileBytes) {
return res
.status(400)
.json({ error: `SKILL.md too large (${Math.round(declaredSize / 1024 / 1024)}MB)` });
@ -386,7 +406,7 @@ async function handleZip(
if (!skillMdContent) {
return res.status(400).json({ error: 'Could not read SKILL.md from archive' });
}
if (Buffer.byteLength(skillMdContent, 'utf-8') > MAX_SINGLE_FILE_BYTES) {
if (Buffer.byteLength(skillMdContent, 'utf-8') > limits.maxSingleFileBytes) {
return res.status(400).json({ error: 'SKILL.md exceeds maximum file size' });
}
@ -461,8 +481,8 @@ async function handleZip(
try {
// Stream-decompress with hard byte cap. JSZip's nodeStream decompresses
// incrementally so we can abort mid-entry without buffering the full file.
const perFileLimit = MAX_SINGLE_FILE_BYTES;
const cumulativeLimit = MAX_DECOMPRESSED_BYTES - totalDecompressed;
const perFileLimit = limits.maxSingleFileBytes;
const cumulativeLimit = limits.maxDecompressedBytes - totalDecompressed;
const effectiveLimit = Math.min(perFileLimit, cumulativeLimit);
if (effectiveLimit <= 0) {
@ -482,7 +502,11 @@ async function handleZip(
await new Promise<void>((resolve, reject) => {
entryStream.on('data', (chunk: Buffer) => {
if (exceededLimit) {
return;
}
entryBytes += chunk.length;
totalDecompressed += chunk.length;
if (entryBytes > effectiveLimit) {
exceededLimit = true;
if ('destroy' in entryStream && typeof entryStream.destroy === 'function') {
@ -510,7 +534,6 @@ async function handleZip(
}
const fileBuffer = Buffer.concat(chunks);
totalDecompressed += fileBuffer.length;
const fileId = crypto.randomUUID();
const filename = path.basename(relativePath);