chore: add benchmark script for comparing install performance against main (#10632)

This commit is contained in:
Zoltan Kochan
2026-02-16 23:55:53 +01:00
committed by GitHub
parent 7116f35027
commit 9ae2e03450
5 changed files with 455 additions and 3 deletions

52
benchmarks/README.md Normal file
View File

@@ -0,0 +1,52 @@
# pnpm Benchmarks
Compares `pnpm install` performance between the current branch and `main`.
## Prerequisites
- [hyperfine](https://github.com/sharkdp/hyperfine) — install via `brew install hyperfine`
- The current branch must be compiled (`pnpm run compile`)
- If providing a pre-existing main checkout path, it must also be compiled
## Usage
```sh
pnpm run compile
./benchmarks/bench.sh
```
If a git worktree with `main` already exists, the script finds and reuses it. Otherwise it creates one at `../.pnpm-bench-main` (a sibling of the repo). In either case it then installs dependencies and compiles that worktree.
You can also point to a specific checkout of main:
```sh
./benchmarks/bench.sh /path/to/main
```
## Scenarios
| # | Name | Lockfile | Store + Cache | Description |
|---|---|---|---|---|
| 1 | Headless | ✔ frozen | warm | Repeat install with warm store |
| 2 | Re-resolution | ✔ + add dep | warm | Add a new dependency to an existing lockfile |
| 3 | Full resolution | ✗ | warm | Resolve everything from scratch with warm store and cache |
| 4 | Headless cold | ✔ frozen | cold | Typical CI install — fetch all packages with lockfile |
| 5 | Cold install | ✗ | cold | True cold start — nothing cached |
All scenarios use `--ignore-scripts` and isolated store/cache directories per variant.
## Output
Results are printed to the terminal and saved as:
- `results.md` — consolidated markdown table
- `<scenario>-main.json` / `<scenario>-branch.json` — raw hyperfine data
All files are written to a temp directory printed at the end of the run.
## Configuration
Edit the variables at the top of `bench.sh`:
- `WARMUP` — number of warmup runs before timing (default: 1)
- `RUNS` — number of timed runs per benchmark (default: 10)

236
benchmarks/bench.sh Executable file
View File

@@ -0,0 +1,236 @@
#!/bin/bash
set -euo pipefail
# Benchmark script for pnpm install performance.
# Compares the current (active) branch against a baseline checkout of main.
#
# Prerequisites:
# - hyperfine (https://github.com/sharkdp/hyperfine)
# - The current branch must be compiled (pnpm run compile)
#
# Usage:
# ./benchmarks/bench.sh [path-to-main-checkout]
#
# If no path is given, a git worktree for main is created automatically,
# dependencies are installed, and pnpm is compiled in it.
#
# Examples:
# pnpm run compile
# ./benchmarks/bench.sh
# ./benchmarks/bench.sh /Volumes/src/pnpm/pnpm/main

# Absolute path to the repository root (this script lives in benchmarks/).
BRANCH_DIR="$(cd "$(dirname "$0")/.." && pwd)"
if [ -n "${1:-}" ]; then
  # An explicit baseline checkout was supplied; per the README it is assumed
  # to already be installed and compiled.
  MAIN_DIR="$1"
else
  # Look for an existing worktree that has main checked out.
  # In the porcelain format each entry is "worktree <path>"; substr($0, 10)
  # keeps the full path even when it contains spaces ("worktree " is 9
  # characters, so the path starts at column 10). head -n 1 guards against
  # multiple worktrees pointing at main.
  EXISTING=$(git -C "$BRANCH_DIR" worktree list --porcelain \
    | awk '/^worktree /{wt=substr($0, 10)} /^branch refs\/heads\/main$/{print wt}' \
    | head -n 1)
  if [ -n "$EXISTING" ]; then
    MAIN_DIR="$EXISTING"
    echo "── Using existing main worktree at $MAIN_DIR ──"
  else
    MAIN_DIR="$BRANCH_DIR/../.pnpm-bench-main"
    echo "── Creating main worktree at $MAIN_DIR ──"
    git -C "$BRANCH_DIR" worktree add "$MAIN_DIR" main
  fi
  # Make sure the baseline is installed and compiled before benchmarking.
  cd "$MAIN_DIR"
  echo "Installing dependencies..."
  pnpm install
  echo "Compiling..."
  pnpm run compile
  echo ""
  cd "$BRANCH_DIR"
fi
# Scratch area for per-variant projects, stores, caches, and result files.
BENCH_DIR="$(mktemp -d "${TMPDIR:-/tmp}/pnpm-bench.XXXXXX")"
# Number of untimed warmup runs and timed runs per hyperfine benchmark.
WARMUP=1
RUNS=10
# ── Per-variant configuration ─────────────────────────────────────────────
# Parallel arrays indexed together: index 0 is the main baseline,
# index 1 is the current branch.
VARIANTS=("main" "branch")
VARIANT_DIRS=("$MAIN_DIR" "$BRANCH_DIR")
VARIANT_BINS=("$MAIN_DIR/pnpm/dist/pnpm.mjs" "$BRANCH_DIR/pnpm/dist/pnpm.mjs")
VARIANT_PROJECTS=("$BENCH_DIR/project-main" "$BENCH_DIR/project-branch")
VARIANT_STORES=("$BENCH_DIR/store-main" "$BENCH_DIR/store-branch")
VARIANT_CACHES=("$BENCH_DIR/cache-main" "$BENCH_DIR/cache-branch")
# ── Validation ──────────────────────────────────────────────────────────────
if ! command -v hyperfine &>/dev/null; then
echo "error: hyperfine is required. Install via: brew install hyperfine" >&2
exit 1
fi
# Both variants must already be compiled; fail fast if either bundle is missing.
for bin in "${VARIANT_BINS[@]}"; do
if [ ! -f "$bin" ]; then
echo "error: compiled pnpm not found at $bin" >&2
echo "Run 'pnpm run compile' in both repos first." >&2
exit 1
fi
done
# Log the version and location of each variant for the benchmark record.
for i in "${!VARIANTS[@]}"; do
echo "${VARIANTS[$i]}: $(node "${VARIANT_BINS[$i]}" --version) (${VARIANT_DIRS[$i]})"
done
echo "workdir: $BENCH_DIR"
echo ""
# ── Project setup ───────────────────────────────────────────────────────────
# Each variant gets its own project directory with isolated store and cache
# so there is no shared state between them.
for i in "${!VARIANTS[@]}"; do
dir="${VARIANT_PROJECTS[$i]}"
mkdir -p "$dir" "${VARIANT_CACHES[$i]}"
cp "$BRANCH_DIR/benchmarks/fixture.package.json" "$dir/package.json"
# The workspace file pins this project to its own store and cache dirs.
printf "storeDir: %s\ncacheDir: %s\n" "${VARIANT_STORES[$i]}" "${VARIANT_CACHES[$i]}" > "$dir/pnpm-workspace.yaml"
done
# Keep a pristine copy of package.json for the peek benchmark
# (benchmark 2 mutates package.json via `pnpm add`, so each run restores it).
cp "$BRANCH_DIR/benchmarks/fixture.package.json" "$BENCH_DIR/original-package.json"
# ── Populate stores and caches ─────────────────────────────────────────────
# A full install populates both the content-addressable store and the
# registry metadata cache for each variant.
for i in "${!VARIANTS[@]}"; do
label="${VARIANTS[$i]}"
dir="${VARIANT_PROJECTS[$i]}"
bin="${VARIANT_BINS[$i]}"
echo "Populating store and cache for $label..."
rm -rf "$dir/node_modules" "$dir/pnpm-lock.yaml"
cd "$dir" && node "$bin" install --ignore-scripts --no-frozen-lockfile >/dev/null 2>&1
# The lockfile produced here is saved and reused by the frozen-lockfile
# benchmarks below.
if [ ! -f "$dir/pnpm-lock.yaml" ]; then
echo "error: pnpm-lock.yaml was not created for $label in $dir" >&2
exit 1
fi
cp "$dir/pnpm-lock.yaml" "$BENCH_DIR/saved-lockfile-$label.yaml"
done
# ── Helper ──────────────────────────────────────────────────────────────────
# run_bench <name> <prepare_template> <cmd_template>
#
# Runs one hyperfine benchmark per variant and exports the raw JSON results
# to $BENCH_DIR/<name>-<variant>.json (consumed later by generate-results.js).
#
# Templates use placeholders that are substituted per variant:
# {project} → project directory
# {bin} → compiled pnpm binary
# {store} → store directory
# {cache} → cache directory
# {lockfile} → saved lockfile path
run_bench() {
local bench_name=$1
local prepare_tpl=$2
local cmd_tpl=$3
for i in "${!VARIANTS[@]}"; do
local variant="${VARIANTS[$i]}"
local project="${VARIANT_PROJECTS[$i]}"
local bin="${VARIANT_BINS[$i]}"
local store="${VARIANT_STORES[$i]}"
local cache="${VARIANT_CACHES[$i]}"
local lockfile="$BENCH_DIR/saved-lockfile-$variant.yaml"
# Expand every placeholder in the prepare template for this variant.
local prepare="$prepare_tpl"
prepare="${prepare//\{project\}/$project}"
prepare="${prepare//\{bin\}/$bin}"
prepare="${prepare//\{store\}/$store}"
prepare="${prepare//\{cache\}/$cache}"
prepare="${prepare//\{lockfile\}/$lockfile}"
# Same substitution for the timed command itself.
local cmd="$cmd_tpl"
cmd="${cmd//\{project\}/$project}"
cmd="${cmd//\{bin\}/$bin}"
cmd="${cmd//\{store\}/$store}"
cmd="${cmd//\{cache\}/$cache}"
cmd="${cmd//\{lockfile\}/$lockfile}"
echo ""
echo " $variant:"
# --ignore-failure plus the trailing `|| true` keep the suite running even
# if one variant fails; its missing JSON file later renders as "n/a".
hyperfine \
--warmup "$WARMUP" \
--runs "$RUNS" \
--ignore-failure \
--prepare "$prepare" \
--command-name "$variant" \
"$cmd" \
--export-json "$BENCH_DIR/${bench_name}-${variant}.json" \
|| true
done
}
# ── Benchmark 1: Headless install ──────────────────────────────────────────
# Lockfile present, node_modules deleted, store and cache warm.
# This is the common "CI install" or "fresh clone + install" path.
echo ""
echo "━━━ Benchmark 1: Headless install (frozen lockfile, warm store+cache) ━━━"
run_bench "headless" \
"rm -rf {project}/node_modules && cp {lockfile} {project}/pnpm-lock.yaml" \
"cd {project} && node {bin} install --frozen-lockfile --ignore-scripts >/dev/null 2>&1"
# ── Benchmark 2: Re-resolution with existing lockfile ─────────────────────
# Lockfile present, add a new dependency to trigger re-resolution.
# Store and cache warm. This exercises the peekManifestFromStore path.
# package.json is restored from the pristine copy each run because the
# timed `pnpm add` command mutates it.
echo ""
echo "━━━ Benchmark 2: Re-resolution (add dep to existing lockfile, warm store+cache) ━━━"
run_bench "peek" \
"rm -rf {project}/node_modules && cp {lockfile} {project}/pnpm-lock.yaml && cp $BENCH_DIR/original-package.json {project}/package.json" \
"cd {project} && node {bin} add is-odd --ignore-scripts >/dev/null 2>&1"
# ── Benchmark 3: Full resolution (warm store+cache) ──────────────────────
# No lockfile, no node_modules, store and cache warm.
# Resolution runs for all packages using cached registry metadata.
echo ""
echo "━━━ Benchmark 3: Full resolution (no lockfile, warm store+cache) ━━━"
run_bench "nolockfile" \
"rm -rf {project}/node_modules {project}/pnpm-lock.yaml && cp $BENCH_DIR/original-package.json {project}/package.json" \
"cd {project} && node {bin} install --ignore-scripts --no-frozen-lockfile >/dev/null 2>&1"
# ── Benchmark 4: Headless cold (lockfile, no store, no cache) ─────────────
# Lockfile present, but store and cache are deleted before each run.
# This tests the fetch-from-registry + link path guided by a lockfile.
echo ""
echo "━━━ Benchmark 4: Headless install (frozen lockfile, cold store+cache) ━━━"
run_bench "headless-cold" \
"rm -rf {project}/node_modules {store} {cache} && cp {lockfile} {project}/pnpm-lock.yaml" \
"cd {project} && node {bin} install --frozen-lockfile --ignore-scripts >/dev/null 2>&1"
# ── Benchmark 5: Cold install (no store, no cache, no lockfile) ───────────
# Everything is deleted before each run. This is the true cold start.
echo ""
echo "━━━ Benchmark 5: Cold install (no store, no cache, no lockfile) ━━━"
run_bench "cold" \
"rm -rf {project}/node_modules {project}/pnpm-lock.yaml {store} {cache} && cp $BENCH_DIR/original-package.json {project}/package.json" \
"cd {project} && node {bin} install --ignore-scripts --no-frozen-lockfile >/dev/null 2>&1"
# ── Summary ─────────────────────────────────────────────────────────────────
RESULTS_MD="$BENCH_DIR/results.md"
echo ""
echo "━━━ Results ━━━"
# Aggregates the per-benchmark hyperfine JSON files into a markdown table,
# printing it to stdout and writing it to $RESULTS_MD.
node "$BRANCH_DIR/benchmarks/generate-results.js" "$BENCH_DIR" "$RESULTS_MD"
echo ""
echo "Results saved to: $RESULTS_MD"
# Cleanup
# Remove only the bulky node_modules trees; lockfiles, raw JSON, and
# results.md are kept in $BENCH_DIR for inspection.
for project in "${VARIANT_PROJECTS[@]}"; do
rm -rf "$project/node_modules"
done
echo ""
echo "Temp directory kept at: $BENCH_DIR"
echo "Remove with: rm -rf $BENCH_DIR"

View File

@@ -0,0 +1,110 @@
{
"name": "bench-project",
"version": "1.0.0",
"dependencies": {
"express": "^4.21.0",
"lodash": "^4.17.21",
"axios": "^1.7.0",
"chalk": "^4.1.2",
"debug": "^4.3.4",
"commander": "^12.0.0",
"glob": "^10.3.0",
"minimatch": "^9.0.0",
"semver": "^7.6.0",
"yargs": "^17.7.0",
"inquirer": "^9.2.0",
"ora": "^5.4.1",
"fs-extra": "^11.2.0",
"rimraf": "^5.0.0",
"mkdirp": "^3.0.0",
"cross-spawn": "^7.0.3",
"execa": "^5.1.1",
"which": "^4.0.0",
"dotenv": "^16.4.0",
"uuid": "^9.0.0",
"moment": "^2.30.0",
"dayjs": "^1.11.0",
"date-fns": "^3.0.0",
"fast-glob": "^3.3.0",
"chokidar": "^3.6.0",
"ws": "^8.16.0",
"node-fetch": "^2.7.0",
"form-data": "^4.0.0",
"http-proxy-agent": "^7.0.0",
"https-proxy-agent": "^7.0.0",
"js-yaml": "^4.1.0",
"ini": "^4.1.0",
"toml": "^3.0.0",
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0",
"string-width": "^4.2.3",
"cli-table3": "^0.6.3",
"figures": "^3.2.0",
"log-symbols": "^4.1.0",
"boxen": "^5.1.2",
"p-limit": "^3.1.0",
"p-map": "^4.0.0",
"p-queue": "^6.6.2",
"retry": "^0.13.1",
"graceful-fs": "^4.2.11",
"jsonfile": "^6.1.0",
"tar": "^7.0.0",
"archiver": "^7.0.0",
"decompress": "^4.2.1",
"mime-types": "^2.1.35",
"content-type": "^1.0.5",
"accepts": "^1.3.8",
"negotiator": "^0.6.3",
"cors": "^2.8.5",
"helmet": "^7.1.0",
"compression": "^1.7.4",
"cookie-parser": "^1.4.6",
"body-parser": "^1.20.2",
"multer": "^1.4.5-lts.1",
"morgan": "^1.10.0",
"winston": "^3.11.0",
"pino": "^8.18.0",
"bunyan": "^1.8.15",
"ajv": "^8.12.0",
"zod": "^3.22.0",
"joi": "^17.12.0",
"yup": "^1.3.0",
"ramda": "^0.29.0",
"underscore": "^1.13.6",
"rxjs": "^7.8.1",
"eventemitter3": "^5.0.1",
"bluebird": "^3.7.2",
"async": "^3.2.5",
"lru-cache": "^10.2.0",
"node-cache": "^5.1.2",
"keyv": "^4.5.4",
"got": "^11.8.6",
"superagent": "^8.1.2",
"cheerio": "^1.0.0-rc.12",
"marked": "^12.0.0",
"highlight.js": "^11.9.0",
"sharp": "^0.33.0",
"jimp": "^0.22.12",
"canvas": "^2.11.2",
"socket.io": "^4.7.0",
"redis": "^4.6.0",
"ioredis": "^5.3.0",
"mongoose": "^8.1.0",
"typeorm": "^0.3.20",
"knex": "^3.1.0",
"pg": "^8.11.0",
"mysql2": "^3.9.0",
"better-sqlite3": "^9.4.0",
"bcrypt": "^5.1.1",
"jsonwebtoken": "^9.0.2",
"passport": "^0.7.0",
"nanoid": "^3.3.7",
"cuid": "^3.0.0",
"shortid": "^2.2.16",
"color": "^4.2.3",
"pluralize": "^8.0.0",
"change-case": "^4.1.2",
"camelcase": "^6.3.0",
"escape-string-regexp": "^4.0.0"
}
}

View File

@@ -0,0 +1,44 @@
const fs = require('fs')

// CLI arguments: the directory holding the hyperfine JSON files, and the
// markdown file to write.
const [, , benchDir, outputFile] = process.argv

// [file-name prefix, human-readable label] per benchmark scenario, in the
// order the rows should appear in the results table.
const benchmarks = [
  ['headless', 'Headless (warm store+cache)'],
  ['peek', 'Re-resolution (add dep, warm)'],
  ['nolockfile', 'Full resolution (warm, no lockfile)'],
  ['headless-cold', 'Headless (cold store+cache)'],
  ['cold', 'Cold install (nothing warm)'],
]
// Read one hyperfine result file and format it as "mean ± stddev".
// Returns 'n/a' when the file is missing (benchmark did not run) or
// unreadable; only unexpected errors are logged.
function readResult (benchDir, name, variant) {
  const file = `${benchDir}/${name}-${variant}.json`
  try {
    const { results } = JSON.parse(fs.readFileSync(file, 'utf8'))
    const { mean, stddev } = results[0]
    return `${mean.toFixed(3)}s ± ${stddev.toFixed(3)}s`
  } catch (err) {
    // A missing file is expected when a benchmark was skipped or failed;
    // anything else is worth surfacing.
    if (err && err.code !== 'ENOENT') {
      console.error(`Warning: failed to read ${name}-${variant}: ${err.message}`)
    }
    return 'n/a'
  }
}
// Fixed markdown table header.
const header = [
  '# Benchmark Results',
  '',
  '| # | Scenario | main | branch |',
  '|---|---|---|---|',
]

// One table row per scenario; main is read before branch for each row.
const rows = benchmarks.map(([name, label], i) => {
  const mainCell = readResult(benchDir, name, 'main')
  const branchCell = readResult(benchDir, name, 'branch')
  return `| ${i + 1} | ${label} | ${mainCell} | ${branchCell} |`
})

// Trailing '' yields a final newline when joined.
const output = [...header, ...rows, ''].join('\n')
fs.writeFileSync(outputFile, output)
console.log(output)

View File

@@ -1,6 +1,16 @@
import eslintConfig from "@pnpm/eslint-config";
import * as regexpPlugin from "eslint-plugin-regexp";
export default [{
ignores: ["**/fixtures", "**/__fixtures__", "**/node_modules", "**/lib"],
}, ...eslintConfig, regexpPlugin.configs['flat/recommended']];
// Flat ESLint config: global ignores, the shared @pnpm config, the regexp
// plugin preset, and one override for the CLI sources.
export default [
{
ignores: ["**/fixtures", "**/__fixtures__", "**/node_modules", "**/lib"],
},
...eslintConfig,
regexpPlugin.configs['flat/recommended'],
{
// NOTE(review): disabling this rule for pnpm/src presumably reflects that
// the CLI's dependencies are bundled at compile time — confirm rationale.
files: ["pnpm/src/**/*.ts"],
rules: {
"import-x/no-extraneous-dependencies": "off",
},
},
]