44 changes: 44 additions & 0 deletions .github/workflows/bench-pr.yml
@@ -0,0 +1,44 @@
name: PR Benchmarks

on:
  pull_request:
    branches: [main]
    paths:
      - "src/**/*.ts"
      - "benchmarks/**/*.ts"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read
  pull-requests: write

jobs:
  bench:
    name: Run Benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2

      - name: Install dependencies
        run: bun install --frozen-lockfile

      - name: Run benchmarks
        run: bun run bench -- --outputJson bench-results.json

      - name: Generate comment
        run: bun run scripts/bench-comment.ts bench-results.json bench-comment.md
        env:
          BENCH_RUNNER: ${{ runner.os }} (${{ runner.arch }})

      - name: Post or update PR comment
        uses: marocchino/sticky-pull-request-comment@v2
        with:
          header: benchmark-results
          path: bench-comment.md
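
Note: the `bench` script invoked above is not part of this diff. Based on the `--outputJson` flag and the result shape consumed by `scripts/bench-comment.ts` below, it plausibly wraps Vitest's benchmark runner; a hypothetical `package.json` entry under that assumption:

  "scripts": {
    "bench": "vitest bench --run"
  }

With `bun run bench -- --outputJson bench-results.json`, the arguments after `--` are forwarded to the underlying runner, which writes the JSON file that the comment script formats.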
5 changes: 4 additions & 1 deletion bun.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -67,6 +67,7 @@
    "@noble/ciphers": "^2.1.1",
    "@noble/hashes": "^2.0.1",
    "@scure/base": "^2.0.0",
    "lru-cache": "^11.2.6",
    "pako": "^2.1.0",
    "pkijs": "^3.3.3"
  },
120 changes: 120 additions & 0 deletions scripts/bench-comment.ts
@@ -0,0 +1,120 @@
/**
 * Format benchmark JSON results as a markdown comment for PRs.
 *
 * Each benchmark file gets its own collapsible section.
 *
 * Usage:
 *   bun run scripts/bench-comment.ts <results.json> <output.md>
 */

import { readFileSync } from "node:fs";

interface Bench {
  name: string;
  mean: number;
  hz: number;
  p99: number;
  rme: number;
  sampleCount: number;
}

interface Group {
  fullName: string;
  benchmarks: Bench[];
}

interface File {
  filepath: string;
  groups: Group[];
}

interface Output {
  files: File[];
}

function formatMs(ms: number): string {
  if (ms >= 1000) {
    return `${(ms / 1000).toFixed(2)}s`;
  }

  if (ms >= 1) {
    return `${ms.toFixed(2)}ms`;
  }

  return `${(ms * 1000).toFixed(0)}μs`;
}

function formatRme(rme: number): string {
  return `±${rme.toFixed(1)}%`;
}

function fileLabel(filepath: string): string {
  const match = filepath.match(/([^/]+)\.bench\.ts$/);

  if (!match) {
    return filepath;
  }

  const name = match[1];

  return name.charAt(0).toUpperCase() + name.slice(1);
}

// ─────────────────────────────────────────────────────────────────────────────

const [inputPath, outputPath] = process.argv.slice(2);

if (!inputPath || !outputPath) {
  console.error("Usage: bun run scripts/bench-comment.ts <results.json> <output.md>");
  process.exit(1);
}

const data: Output = JSON.parse(readFileSync(inputPath, "utf-8"));

const lines: string[] = [];
lines.push("## Benchmark Results");
lines.push("");

for (const file of data.files) {
  const label = fileLabel(file.filepath);

  lines.push(`<details>`);
  lines.push(`<summary><strong>${label}</strong></summary>`);
  lines.push("");

  for (const group of file.groups) {
    const groupName = group.fullName.includes(" > ")
      ? group.fullName.split(" > ").slice(1).join(" > ")
      : group.fullName;

    lines.push(`**${groupName}**`);
    lines.push("");
    lines.push("| Benchmark | Mean | p99 | RME | Samples |");
    lines.push("|:---|---:|---:|---:|---:|");

    for (const b of group.benchmarks) {
      lines.push(
        `| ${b.name} | ${formatMs(b.mean)} | ${formatMs(b.p99)} | ${formatRme(b.rme)} | ${b.sampleCount} |`,
      );
    }

    lines.push("");
  }

  lines.push(`</details>`);
  lines.push("");
}

const runner = process.env.BENCH_RUNNER ?? "local";

lines.push(
  `<details><summary>Environment</summary>\n\n` +
    `- Runner: \`${runner}\`\n` +
    `- Runtime: Bun ${process.versions.bun}\n\n` +
    `*Results are machine-dependent.*\n` +
    `</details>`,
);

const body = lines.join("\n");
await Bun.write(outputPath, body);
console.log(body);
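
For reference, a minimal input file in the shape this script expects, matching the `Output`, `File`, `Group`, and `Bench` interfaces above (the file path and numbers are illustrative, not taken from a real run):

{
  "files": [
    {
      "filepath": "benchmarks/parse.bench.ts",
      "groups": [
        {
          "fullName": "benchmarks/parse.bench.ts > parse",
          "benchmarks": [
            { "name": "small document", "mean": 0.42, "hz": 2380, "p99": 0.61, "rme": 1.2, "sampleCount": 1200 }
          ]
        }
      ]
    }
  ]
}

This would render as a collapsible "Parse" section containing the table row `| small document | 420μs | 610μs | ±1.2% | 1200 |`.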
30 changes: 28 additions & 2 deletions src/api/pdf.ts
@@ -125,6 +125,26 @@ export interface SaveOptions {
   * @default false
   */
  subsetFonts?: boolean;

  /**
   * Compress uncompressed streams with FlateDecode (default: true).
   *
   * When enabled, streams without a /Filter entry will be compressed
   * before writing. Streams that already have filters (including image
   * formats like DCTDecode/JPXDecode) are left unchanged.
   */
  compressStreams?: boolean;

  /**
   * Minimum stream size in bytes to attempt compression (default: 512).
   *
   * Streams smaller than this threshold are written uncompressed.
   * Deflate initialization has a fixed overhead that dominates for small
   * payloads, and tiny streams rarely achieve meaningful compression.
   *
   * Set to 0 to compress all streams regardless of size.
   */
  compressionThreshold?: number;
}

/**
@@ -1698,6 +1718,7 @@ export class PDF {
   * const [duplicate] = await pdf.copyPagesFrom(pdf, [0], { insertAt: 1 });
   * ```
   */
  // oxlint-disable-next-line typescript/require-await -- Public async API kept for backward compat; ObjectCopier is sync.
  async copyPagesFrom(
    source: PDF,
    indices: number[],
@@ -1729,7 +1750,7 @@ export class PDF {
        throw new Error(`Source page ${index} not found`);
      }

      const copiedPageRef = await copier.copyPage(srcPage.ref);
      const copiedPageRef = copier.copyPage(srcPage.ref);
      copiedRefs.push(copiedPageRef);
    }

@@ -1824,6 +1845,7 @@ export class PDF {
   * }
   * ```
   */
  // oxlint-disable-next-line typescript/require-await -- Public async API kept for backward compat; ObjectCopier is sync.
  async embedPage(source: PDF, pageIndex: number): Promise<PDFEmbeddedPage> {
    const srcPage = source.getPage(pageIndex);

@@ -1842,7 +1864,7 @@ export class PDF {
    let resources: PdfDict;

    if (srcResources) {
      const copied = await copier.copyObject(srcResources);
      const copied = copier.copyObject(srcResources);

      // This is guaranteed by our checks above
      resources = copied instanceof PdfDict ? copied : new PdfDict();
@@ -3139,6 +3161,8 @@ export class PDF {
      id: fileId,
      useXRefStream,
      securityHandler,
      compressStreams: options.compressStreams,
      compressionThreshold: options.compressionThreshold,
    });

    // Reset pending security state after successful save
@@ -3156,6 +3180,8 @@
      id: fileId,
      useXRefStream,
      securityHandler,
      compressStreams: options.compressStreams,
      compressionThreshold: options.compressionThreshold,
    });

    // Reset pending security state after successful save
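
Together with the new `SaveOptions` fields, callers can tune stream compression at save time. A hypothetical usage sketch — the `PDF.load`/`pdf.save` entry points and their exact signatures are assumptions, not shown in this diff; only the option names come from the PR:

// Hypothetical API names; adjust the import path or package entry point as appropriate.
import { PDF } from "./src/api/pdf";

const pdf = await PDF.load(await Bun.file("input.pdf").arrayBuffer());

const bytes = await pdf.save({
  compressStreams: true,      // FlateDecode streams that have no /Filter yet (default: true)
  compressionThreshold: 1024, // leave streams under 1 KiB uncompressed (default: 512)
});

await Bun.write("output.pdf", bytes);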