mirror of
https://github.com/pnpm/pnpm.git
synced 2026-04-10 18:18:56 -04:00
* fix: skip re-importing packages when global virtual store is warm
When node_modules is deleted but the global virtual store directories
survive, pnpm previously re-fetched every package because the skip
logic required currentLockfile to be present. Add a fast-path that
checks pathExists(dir) for GVS directories even when currentLockfile
is missing, since the GVS directory hash encodes engine, integrity,
and full dependency subgraph.
* fix: remove includeUnchangedDeps guard from GVS fast-path
The includeUnchangedDeps flag is true whenever currentHoistPattern
differs from the desired hoistPattern. After deleting node_modules,
currentHoistPattern is always undefined (read from .modules.yaml),
so the flag is always true when hoisting is configured — defeating
the optimization in the exact scenario it targets.
The guard is unnecessary because the fast-path only skips fetch/import
(fetchResponse = {}), not graph inclusion. The package is still added
to the graph with children populated, so hoisting recalculation works.
* perf: add GVS warm reinstall benchmark scenario
Adds benchmark 6: frozen lockfile reinstall with a warm global virtual
store after deleting node_modules. This measures the reattach fast-path
where all packages are skipped (no fetch/import) because their GVS
hash directories already exist.
* fix: use proper types in fetchPackage spy to pass tsgo strict checks
46 lines
1.2 KiB
JavaScript
46 lines
1.2 KiB
JavaScript
// Renders hyperfine benchmark JSON files into a markdown comparison table.
// Usage: node <this-script> <benchDir> <outputFile>
const fs = require('fs')

// process.argv[0] is the node binary and argv[1] the script path;
// the two positional CLI arguments follow.
const [, , benchDir, outputFile] = process.argv
// Benchmark scenarios in table order, as [resultFilePrefix, humanLabel]
// pairs. The prefix is combined with a variant ('main' / 'branch') to
// locate `<prefix>-<variant>.json` inside benchDir.
const benchmarks = [
  ['headless', 'Headless (warm store+cache)'],
  ['peek', 'Re-resolution (add dep, warm)'],
  ['nolockfile', 'Full resolution (warm, no lockfile)'],
  ['headless-cold', 'Headless (cold store+cache)'],
  ['cold', 'Cold install (nothing warm)'],
  // Scenario 6: frozen-lockfile reinstall after deleting node_modules
  // while the global virtual store stays warm — measures the reattach
  // fast-path where fetch/import is skipped for every package.
  ['gvs-warm', 'GVS warm reinstall (warm global store)'],
]
/**
 * Look up one hyperfine result file and format it as a table cell.
 *
 * Reads `<benchDir>/<name>-<variant>.json`, takes the first entry of its
 * `results` array, and renders "mean ± stddev" in seconds with three
 * decimal places. Any absent or unreadable file degrades to 'n/a'; only
 * unexpected failures (anything other than ENOENT) print a warning.
 *
 * @param {string} benchDir - directory containing the JSON result files
 * @param {string} name - benchmark scenario prefix
 * @param {string} variant - which side of the comparison ('main'/'branch')
 * @returns {string} formatted timing cell, or 'n/a'
 */
function readResult (benchDir, name, variant) {
  const resultFile = `${benchDir}/${name}-${variant}.json`
  try {
    const parsed = JSON.parse(fs.readFileSync(resultFile, 'utf8'))
    const { mean, stddev } = parsed.results[0]
    return `${mean.toFixed(3)}s ± ${stddev.toFixed(3)}s`
  } catch (err) {
    // A missing file is expected (the scenario may simply not have run);
    // anything else is surprising enough to warn about. Either way the
    // cell falls back to 'n/a' instead of failing the whole report.
    if (err && err.code !== 'ENOENT') {
      console.error(`Warning: failed to read ${name}-${variant}: ${err.message}`)
    }
    return 'n/a'
  }
}
// Markdown report scaffolding: title, a blank spacer line, then the
// header and separator rows of a four-column comparison table. Scenario
// rows are appended below before the report is written out.
const lines = [
  '# Benchmark Results',
  '',
  '| # | Scenario | main | branch |',
  '|---|---|---|---|',
]
// Emit one table row per scenario: 1-based row number, human label,
// then the formatted timing cell for each side of the comparison.
for (const [index, [name, label]] of benchmarks.entries()) {
  const mainTiming = readResult(benchDir, name, 'main')
  const branchTiming = readResult(benchDir, name, 'branch')
  lines.push(`| ${index + 1} | ${label} | ${mainTiming} | ${branchTiming} |`)
}
// Trailing empty entry so join() leaves the file newline-terminated.
lines.push('')

// Persist the markdown table and echo it for CI logs.
const report = lines.join('\n')
fs.writeFileSync(outputFile, report)
console.log(report)