Mirror of https://github.com/pnpm/pnpm.git
feat(dlx): cache (#7835)
close #5277

Co-authored-by: Zoltan Kochan <z@kochan.io>
.changeset/thin-rice-promise.md (new file, 9 lines)
@@ -0,0 +1,9 @@
---
"@pnpm/plugin-commands-script-runners": minor
"@pnpm/config": minor
"pnpm": minor
"@pnpm/plugin-commands-outdated": patch
"@pnpm/plugin-commands-store": patch
---

Added cache for `pnpm dlx` [#5277](https://github.com/pnpm/pnpm/issues/5277).
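The tests added further down in this commit drive the new cache through the dlx command handler. As a quick orientation before the diff itself, here is a minimal sketch of such a call; the `shx` package, the directories, and `DEFAULT_OPTS` (the test defaults exported from the test `utils` module changed in this diff) are illustrative only:

import path from 'path'
import { dlx } from '@pnpm/plugin-commands-script-runners'
import { DLX_DEFAULT_OPTS as DEFAULT_OPTS } from './utils' // test defaults; see the utils hunk below

// Runs "shx touch foo" through dlx; installs are reused from <cacheDir>/dlx for up to dlxCacheMaxAge minutes.
await dlx.handler({
  ...DEFAULT_OPTS,
  dir: path.resolve('project'),
  storeDir: path.resolve('store'),
  cacheDir: path.resolve('cache'),
  dlxCacheMaxAge: 24 * 60, // minutes; Infinity disables expiry, 0 disables reuse
}, ['shx', 'touch', 'foo'])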
@@ -6,7 +6,6 @@
  "dependencies": {
    "@pnpm/assert-project": "workspace:*",
    "@pnpm/types": "workspace:*",
    "unique-string": "^2.0.0",
    "write-json5-file": "^3.1.0",
    "write-pkg": "4.0.0",
    "write-yaml-file": "^5.0.0"
@@ -2,7 +2,6 @@ import fs from 'fs'
import path from 'path'
import { assertProject, type Modules, type Project } from '@pnpm/assert-project'
import { type ProjectManifest } from '@pnpm/types'
import uniqueString from 'unique-string'
import { sync as writeJson5File } from 'write-json5-file'
import { sync as writeYamlFile } from 'write-yaml-file'
import writePkg from 'write-pkg'
@@ -12,7 +11,17 @@ export type ManifestFormat = 'JSON' | 'JSON5' | 'YAML'

// The testing folder should be outside of the project to avoid lookup in the project's node_modules
// Not using the OS temp directory due to issues on Windows CI.
const tmpPath = path.join(__dirname, `../../../../pnpm_tmp/${uniqueString()}`)
const tmpBaseDir = path.join(__dirname, '../../../../pnpm_tmp')

function getFilesCountInDir (dir: string): number {
  try {
    return fs.readdirSync(dir).length
  } catch {
    return 0
  }
}

const tmpPath = path.join(tmpBaseDir, `${getFilesCountInDir(tmpBaseDir).toString()}_${process.pid.toString()}`)

let dirNumber = 0
@@ -156,6 +156,7 @@ export interface Config {
  enablePnp?: boolean
  enableModulesDir: boolean
  modulesCacheMaxAge: number
  dlxCacheMaxAge: number
  embedReadme?: boolean
  gitShallowHosts?: string[]
  legacyDirFiltering?: boolean
@@ -92,6 +92,7 @@ export const types = Object.assign({
  loglevel: ['silent', 'error', 'warn', 'info', 'debug'],
  maxsockets: Number,
  'modules-cache-max-age': Number,
  'dlx-cache-max-age': Number,
  'modules-dir': String,
  'network-concurrency': Number,
  'node-linker': ['pnp', 'isolated', 'hoisted'],
@@ -238,6 +239,7 @@ export async function getConfig (
    'link-workspace-packages': false,
    'lockfile-include-tarball-url': false,
    'modules-cache-max-age': 7 * 24 * 60, // 7 days
    'dlx-cache-max-age': 24 * 60, // 1 day
    'node-linker': 'isolated',
    'package-lock': npmDefaults['package-lock'],
    pending: false,
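Both cache ages above are given in minutes. A small sketch of how a minute-based setting turns into an expiry check (this mirrors the arithmetic of `isCacheValid` in dlx.ts further down; the helper name is hypothetical and not part of the diff):

// Hypothetical helper: is an entry with this mtime still fresh under a max age in minutes?
// (minutes * 60_000 = milliseconds, same conversion as in dlx.ts)
function isStillFresh (mtime: Date, maxAgeMinutes: number, now: Date): boolean {
  return mtime.getTime() + maxAgeMinutes * 60_000 >= now.getTime()
}

// a 30-minute-old entry under the 1-day 'dlx-cache-max-age' default => still fresh
isStillFresh(new Date(Date.now() - 30 * 60_000), 24 * 60, new Date()) // true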
@@ -46,6 +46,7 @@
    "@pnpm/cli-utils": "workspace:*",
    "@pnpm/command": "workspace:*",
    "@pnpm/common-cli-options-help": "workspace:*",
    "@pnpm/crypto.base32-hash": "workspace:*",
    "@pnpm/config": "workspace:*",
    "@pnpm/error": "workspace:*",
    "@pnpm/lifecycle": "workspace:*",
@@ -66,6 +67,7 @@
    "ramda": "npm:@pnpm/ramda@0.28.1",
    "realpath-missing": "^1.1.0",
    "render-help": "^1.0.3",
    "symlink-dir": "^5.2.1",
    "which": "npm:@pnpm/which@^3.0.1",
    "write-json-file": "^4.3.0"
  },
@@ -1,18 +1,19 @@
import fs from 'fs'
import fs, { type Stats } from 'fs'
import path from 'path'
import util from 'util'
import { docsUrl } from '@pnpm/cli-utils'
import { OUTPUT_OPTIONS } from '@pnpm/common-cli-options-help'
import { type Config, types } from '@pnpm/config'
import { createBase32Hash } from '@pnpm/crypto.base32-hash'
import { PnpmError } from '@pnpm/error'
import { add } from '@pnpm/plugin-commands-installation'
import { readPackageJsonFromDir } from '@pnpm/read-package-json'
import { getBinsFromPackageManifest } from '@pnpm/package-bins'
import { getStorePath } from '@pnpm/store-path'
import execa from 'execa'
import omit from 'ramda/src/omit'
import pick from 'ramda/src/pick'
import renderHelp from 'render-help'
import symlinkDir from 'symlink-dir'
import { makeEnv } from './makeEnv'

export const commandNames = ['dlx']
@@ -63,47 +64,40 @@ export function help () {
export type DlxCommandOptions = {
  package?: string[]
  shellMode?: boolean
} & Pick<Config, 'reporter' | 'userAgent'> & add.AddCommandOptions
} & Pick<Config, 'reporter' | 'userAgent' | 'cacheDir' | 'dlxCacheMaxAge' > & add.AddCommandOptions

export async function handler (
  opts: DlxCommandOptions,
  [command, ...args]: string[]
) {
  const dlxDir = await getDlxDir({
    dir: opts.dir,
    pnpmHomeDir: opts.pnpmHomeDir,
    storeDir: opts.storeDir,
  })
  const prefix = path.join(dlxDir, `dlx-${process.pid.toString()}`)
  const modulesDir = path.join(prefix, 'node_modules')
  const binsDir = path.join(modulesDir, '.bin')
  fs.mkdirSync(prefix, { recursive: true })
  process.on('exit', () => {
    try {
      fs.rmdirSync(prefix, {
        recursive: true,
        maxRetries: 3,
      })
    } catch (err) {}
  })
  const pkgs = opts.package ?? [command]
  const { cacheLink, prepareDir } = findCache(pkgs, {
    dlxCacheMaxAge: opts.dlxCacheMaxAge,
    cacheDir: opts.cacheDir,
  })
  if (prepareDir) {
    fs.mkdirSync(prepareDir, { recursive: true })
    await add.handler({
      // Ideally the config reader should ignore these settings when the dlx command is executed.
      // This is a temporary solution until "@pnpm/config" is refactored.
      ...omit(['workspaceDir', 'rootProjectManifest'], opts),
      bin: path.join(prepareDir, 'node_modules/.bin'),
      dir: prepareDir,
      lockfileDir: prepareDir,
      rootProjectManifestDir: prepareDir, // This property won't be used as rootProjectManifest will be undefined
      saveProd: true, // dlx will be looking for the package in the "dependencies" field!
      saveDev: false,
      saveOptional: false,
      savePeer: false,
    }, pkgs)
    await symlinkDir(prepareDir, cacheLink, { overwrite: true })
  }
  const modulesDir = path.join(cacheLink, 'node_modules')
  const binsDir = path.join(modulesDir, '.bin')
  const env = makeEnv({ userAgent: opts.userAgent, prependPaths: [binsDir] })
  await add.handler({
    // Ideally the config reader should ignore these settings when the dlx command is executed.
    // This is a temporary solution until "@pnpm/config" is refactored.
    ...omit(['workspaceDir', 'rootProjectManifest'], opts),
    bin: binsDir,
    dir: prefix,
    lockfileDir: prefix,
    rootProjectManifestDir: prefix, // This property won't be used as rootProjectManifest will be undefined
    saveProd: true, // dlx will be looking for the package in the "dependencies" field!
    saveDev: false,
    saveOptional: false,
    savePeer: false,
  }, pkgs)
  const binName = opts.package
    ? command
    : await getBinName(modulesDir, await getPkgName(prefix))
    : await getBinName(modulesDir, await getPkgName(cacheLink))
  try {
    await execa(binName, args, {
      cwd: process.cwd(),
@@ -159,17 +153,40 @@ function scopeless (pkgName: string) {
  return pkgName
}

async function getDlxDir (
  opts: {
    dir: string
    storeDir?: string
    pnpmHomeDir: string
  }
): Promise<string> {
  const storeDir = await getStorePath({
    pkgRoot: opts.dir,
    storePath: opts.storeDir,
    pnpmHomeDir: opts.pnpmHomeDir,
  })
  return path.join(storeDir, 'tmp')
function findCache (pkgs: string[], opts: {
  cacheDir: string
  dlxCacheMaxAge: number
}) {
  const dlxCommandCacheDir = createDlxCommandCacheDir(opts.cacheDir, pkgs)
  const cacheLink = path.join(dlxCommandCacheDir, 'pkg')
  const valid = isCacheValid(cacheLink, opts.dlxCacheMaxAge)
  const prepareDir = valid ? null : getPrepareDir(dlxCommandCacheDir)
  return { cacheLink, prepareDir }
}

function createDlxCommandCacheDir (cacheDir: string, pkgs: string[]) {
  const dlxCacheDir = path.resolve(cacheDir, 'dlx')
  const hashStr = pkgs.join('\n') // '\n' is not a URL-friendly character, and therefore not a valid package name, which can be used as separator
  const cacheKey = createBase32Hash(hashStr)
  const cachePath = path.join(dlxCacheDir, cacheKey)
  fs.mkdirSync(cachePath, { recursive: true })
  return cachePath
}

function isCacheValid (cacheLink: string, dlxCacheMaxAge: number): boolean {
  let stats: Stats
  try {
    stats = fs.lstatSync(cacheLink)
  } catch (err) {
    if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') {
      return false
    }
    throw err
  }
  return stats.mtime.getTime() + dlxCacheMaxAge * 60_000 >= new Date().getTime()
}

function getPrepareDir (cachePath: string): string {
  const name = `${new Date().getTime().toString(16)}-${process.pid.toString(16)}`
  return path.join(cachePath, name)
}
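To make the cache layout concrete, here is a hedged usage sketch of `findCache` as the handler above calls it; the package list and cache directory are illustrative, and the path shapes follow `createDlxCommandCacheDir` and `getPrepareDir`:

// The cache for a given package list lives at <cacheDir>/dlx/<base32 hash of the joined list>/,
// with a 'pkg' symlink pointing at the most recent install directory.
const { cacheLink, prepareDir } = findCache(['shx'], {
  cacheDir: '/home/user/.cache/pnpm', // assumed cache dir
  dlxCacheMaxAge: 24 * 60, // minutes
})
if (prepareDir == null) {
  // cache hit: cacheLink (.../dlx/<hash>/pkg) still points at a fresh enough install
} else {
  // miss or expired: install into prepareDir (.../dlx/<hash>/<timestamp-hex>-<pid-hex>),
  // then repoint the 'pkg' symlink at it (the symlinkDir call in the handler above)
}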
@@ -1,11 +1,56 @@
import fs from 'fs'
import path from 'path'
import { createBase32Hash } from '@pnpm/crypto.base32-hash'
import { add } from '@pnpm/plugin-commands-installation'
import { dlx } from '@pnpm/plugin-commands-script-runners'
import { prepareEmpty } from '@pnpm/prepare'
import { DLX_DEFAULT_OPTS as DEFAULT_OPTS } from './utils'

const testOnWindowsOnly = process.platform === 'win32' ? test : test.skip

function sanitizeDlxCacheComponent (cacheName: string): string {
  if (cacheName === 'pkg') return cacheName
  const segments = cacheName.split('-')
  if (segments.length !== 2) {
    throw new Error(`Unexpected name: ${cacheName}`)
  }
  const [date, pid] = segments
  if (!/[0-9a-f]+/.test(date) && !/[0-9a-f]+/.test(pid)) {
    throw new Error(`Name ${cacheName} doesn't end with 2 hex numbers`)
  }
  return '***********-*****'
}

function verifyDlxCache (cacheName: string): void {
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', cacheName))
      .map(sanitizeDlxCacheComponent)
      .sort()
  ).toStrictEqual([
    'pkg',
    '***********-*****',
  ].sort())
  verifyDlxCacheLink(cacheName)
}

function verifyDlxCacheLink (cacheName: string): void {
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', cacheName, 'pkg'))
      .sort()
  ).toStrictEqual([
    'node_modules',
    'package.json',
    'pnpm-lock.yaml',
  ].sort())
  expect(
    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', cacheName, 'pkg')))
  ).toBe(path.resolve('cache', 'dlx', cacheName))
}

afterEach(() => {
  jest.restoreAllMocks()
})

test('dlx', async () => {
  prepareEmpty()
@@ -13,6 +58,7 @@ test('dlx', async () => {
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
  }, ['shx', 'touch', 'foo'])

  expect(fs.existsSync('foo')).toBeTruthy()
@@ -24,7 +70,9 @@ test('dlx install from git', async () => {
  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: process.cwd(),
  }, ['shelljs/shx#61aca968cd7afc712ca61a4fc4ec3201e3770dc7', 'touch', 'foo'])
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
  }, ['shelljs/shx#0dcbb9d1022037268959f8b706e0f06a6fd43fde', 'touch', 'foo'])

  expect(fs.existsSync('foo')).toBeTruthy()
})
@@ -36,6 +84,7 @@ test('dlx should work when the package name differs from the bin name', async ()
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
  }, ['@pnpm.e2e/touch-file-one-bin'])

  expect(fs.existsSync('touch.txt')).toBeTruthy()
@@ -72,6 +121,7 @@ test('dlx --package <pkg1> [--package <pkg2>]', async () => {
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    package: [
      'zkochan/for-testing-pnpm-dlx',
      'is-positive',
@@ -132,4 +182,102 @@ testOnWindowsOnly('dlx should work when running in the root of a Windows Drive',
      dir: 'C:\\',
      storeDir: path.resolve('store'),
    }, ['cowsay', 'hello'])
  })
})

test('dlx with cache', async () => {
  prepareEmpty()

  const spy = jest.spyOn(add, 'handler')

  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    dlxCacheMaxAge: Infinity,
  }, ['shx', 'touch', 'foo'])

  expect(fs.existsSync('foo')).toBe(true)
  verifyDlxCache(createBase32Hash('shx'))
  expect(spy).toHaveBeenCalled()

  spy.mockReset()

  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    dlxCacheMaxAge: Infinity,
  }, ['shx', 'touch', 'bar'])

  expect(fs.existsSync('bar')).toBe(true)
  verifyDlxCache(createBase32Hash('shx'))
  expect(spy).not.toHaveBeenCalled()

  spy.mockRestore()
})

test('dlx does not reuse expired cache', async () => {
  prepareEmpty()

  const now = new Date()

  // first execution to initialize the cache
  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    dlxCacheMaxAge: Infinity,
  }, ['shx', 'echo', 'hello world'])
  verifyDlxCache(createBase32Hash('shx'))

  // change the date attributes of the cache to 30 minutes older than now
  const newDate = new Date(now.getTime() - 30 * 60_000)
  fs.lutimesSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'), newDate, newDate)

  const spy = jest.spyOn(add, 'handler')

  // main dlx execution
  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    dlxCacheMaxAge: 10, // 10 minutes should make 30 minutes old cache expired
  }, ['shx', 'touch', 'BAR'])

  expect(fs.existsSync('BAR')).toBe(true)
  expect(spy).toHaveBeenCalledWith(expect.anything(), ['shx'])

  spy.mockRestore()

  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx')))
      .map(sanitizeDlxCacheComponent)
      .sort()
  ).toStrictEqual([
    'pkg',
    '***********-*****',
    '***********-*****',
  ].sort())
  verifyDlxCacheLink(createBase32Hash('shx'))
})

test('dlx still saves cache even if execution fails', async () => {
  prepareEmpty()

  fs.writeFileSync(path.resolve('not-a-dir'), 'to make `shx mkdir` fails')

  await dlx.handler({
    ...DEFAULT_OPTS,
    dir: path.resolve('project'),
    storeDir: path.resolve('store'),
    cacheDir: path.resolve('cache'),
    dlxCacheMaxAge: Infinity,
  }, ['shx', 'mkdir', path.resolve('not-a-dir')])

  expect(fs.readFileSync(path.resolve('not-a-dir'), 'utf-8')).toEqual(expect.anything())
  verifyDlxCache(createBase32Hash('shx'))
})
@@ -66,6 +66,7 @@ export const DLX_DEFAULT_OPTS = {
  cacheDir: path.join(tmp, 'cache'),
  extraEnv: {},
  cliOptions: {},
  dlxCacheMaxAge: Infinity,
  include: {
    dependencies: true,
    devDependencies: true,
@@ -27,6 +27,9 @@
    {
      "path": "../../config/config"
    },
    {
      "path": "../../packages/crypto.base32-hash"
    },
    {
      "path": "../../packages/error"
    },
pnpm-lock.yaml (generated, 12 lines changed)
@@ -298,9 +298,6 @@ importers:
      '@pnpm/types':
        specifier: workspace:*
        version: link:../../packages/types
      unique-string:
        specifier: ^2.0.0
        version: 2.0.0
      write-json5-file:
        specifier: ^3.1.0
        version: 3.1.0
@@ -1381,6 +1378,9 @@ importers:
      '@pnpm/config':
        specifier: workspace:*
        version: link:../../config/config
      '@pnpm/crypto.base32-hash':
        specifier: workspace:*
        version: link:../../packages/crypto.base32-hash
      '@pnpm/error':
        specifier: workspace:*
        version: link:../../packages/error
@@ -1441,6 +1441,9 @@ importers:
      render-help:
        specifier: ^1.0.3
        version: 1.0.3
      symlink-dir:
        specifier: ^5.2.1
        version: 5.2.1
      which:
        specifier: npm:@pnpm/which@^3.0.1
        version: '@pnpm/which@3.0.1'
@@ -6058,6 +6061,9 @@ importers:
      '@pnpm/assert-store':
        specifier: workspace:*
        version: link:../../__utils__/assert-store
      '@pnpm/crypto.base32-hash':
        specifier: workspace:*
        version: link:../../packages/crypto.base32-hash
      '@pnpm/lockfile-file':
        specifier: workspace:*
        version: link:../../lockfile/lockfile-file
pnpm/test/run.ts (140 lines changed)
@@ -1,6 +1,7 @@
import fs from 'fs'
import path from 'path'
import PATH_NAME from 'path-name'
import { createBase32Hash } from '@pnpm/crypto.base32-hash'
import { prepare, prepareEmpty, preparePackages } from '@pnpm/prepare'
import isWindows from 'is-windows'
import { execPnpm, execPnpmSync } from './utils'
@@ -284,3 +285,142 @@ test('dlx should work with npm_config_save_dev env variable', async () => {
  })
  expect(result.status).toBe(0)
})

test('parallel dlx calls of the same package', async () => {
  prepareEmpty()

  // parallel dlx calls without cache
  await Promise.all(['foo', 'bar', 'baz'].map(
    name => execPnpm([
      `--config.store-dir=${path.resolve('store')}`,
      `--config.cache-dir=${path.resolve('cache')}`,
      '--config.dlx-cache-max-age=Infinity',
      'dlx', 'shx', 'touch', name])
  ))

  expect(['foo', 'bar', 'baz'].filter(name => fs.existsSync(name))).toStrictEqual(['foo', 'bar', 'baz'])
  expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).length).toBe(4)
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
  ).toStrictEqual([
    'node_modules',
    'package.json',
    'pnpm-lock.yaml',
  ])
  expect(
    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))

  // parallel dlx calls with cache
  await Promise.all(['abc', 'def', 'ghi'].map(
    name => execPnpm(['dlx', 'shx', 'mkdir', name])
  ))

  expect(['abc', 'def', 'ghi'].filter(name => fs.existsSync(name))).toStrictEqual(['abc', 'def', 'ghi'])
  expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).length).toBe(4)
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
  ).toStrictEqual([
    'node_modules',
    'package.json',
    'pnpm-lock.yaml',
  ])
  expect(
    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))

  // parallel dlx calls with expired cache
  await Promise.all(['a/b/c', 'd/e/f', 'g/h/i'].map(
    dirPath => execPnpm([
      `--config.store-dir=${path.resolve('store')}`,
      `--config.cache-dir=${path.resolve('cache')}`,
      '--config.dlx-cache-max-age=0',
      'dlx', 'shx', 'mkdir', '-p', dirPath])
  ))

  expect(['a/b/c', 'd/e/f', 'g/h/i'].filter(name => fs.existsSync(name))).toStrictEqual(['a/b/c', 'd/e/f', 'g/h/i'])
  expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).length).toBe(7)
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
  ).toStrictEqual([
    'node_modules',
    'package.json',
    'pnpm-lock.yaml',
  ])
  expect(
    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))
})

test('dlx creates cache and store prune cleans cache', async () => {
  prepareEmpty()

  const commands = {
    shx: ['echo', 'hello from shx'],
    'shelljs/shx#61aca968cd7afc712ca61a4fc4ec3201e3770dc7': ['echo', 'hello from shx.git'],
    '@pnpm.e2e/touch-file-good-bin-name': [],
    '@pnpm.e2e/touch-file-one-bin': [],
  } satisfies Record<string, string[]>

  const settings = [
    `--config.store-dir=${path.resolve('store')}`,
    `--config.cache-dir=${path.resolve('cache')}`,
    '--config.dlx-cache-max-age=50', // big number to avoid false negative should test unexpectedly takes too long to run
  ]

  await Promise.all(Object.entries(commands).map(([cmd, args]) => execPnpm([...settings, 'dlx', cmd, ...args])))

  // ensure that the dlx cache has certain structure
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx'))
      .sort()
  ).toStrictEqual(
    Object.keys(commands)
      .map(createBase32Hash)
      .sort()
  )
  for (const cmd of Object.keys(commands)) {
    expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash(cmd))).length).toBe(2)
  }

  // modify the dates of the cache items
  const ageTable = {
    shx: 20,
    'shelljs/shx#61aca968cd7afc712ca61a4fc4ec3201e3770dc7': 75,
    '@pnpm.e2e/touch-file-good-bin-name': 33,
    '@pnpm.e2e/touch-file-one-bin': 123,
  } satisfies Record<keyof typeof commands, number>
  const now = new Date()
  await Promise.all(Object.entries(ageTable).map(async ([cmd, age]) => {
    const newDate = new Date(now.getTime() - age * 60_000)
    const dlxCacheLink = path.resolve('cache', 'dlx', createBase32Hash(cmd), 'pkg')
    await fs.promises.lutimes(dlxCacheLink, newDate, newDate)
  }))

  await execPnpm([...settings, 'store', 'prune'])

  // test to see if dlx cache items are deleted or kept as expected
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx'))
      .sort()
  ).toStrictEqual(
    ['shx', '@pnpm.e2e/touch-file-good-bin-name']
      .map(createBase32Hash)
      .sort()
  )
  for (const cmd of ['shx', '@pnpm.e2e/touch-file-good-bin-name']) {
    expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash(cmd))).length).toBe(2)
  }

  await execPnpm([
    `--config.store-dir=${path.resolve('store')}`,
    `--config.cache-dir=${path.resolve('cache')}`,
    '--config.dlx-cache-max-age=0',
    'store', 'prune'])

  // test to see if all dlx cache items are deleted
  expect(
    fs.readdirSync(path.resolve('cache', 'dlx'))
      .sort()
  ).toStrictEqual([])
})
@@ -31,6 +31,7 @@
  "homepage": "https://github.com/pnpm/pnpm/blob/main/store/plugin-commands-store#readme",
  "devDependencies": {
    "@pnpm/assert-store": "workspace:*",
    "@pnpm/crypto.base32-hash": "workspace:*",
    "@pnpm/lockfile-file": "workspace:*",
    "@pnpm/plugin-commands-store": "workspace:*",
    "@pnpm/prepare": "workspace:*",
store/plugin-commands-store/src/cleanExpiredDlxCache.test.ts (new file, 197 lines)
@@ -0,0 +1,197 @@
import fs from 'fs'
import path from 'path'
import { createBase32Hash } from '@pnpm/crypto.base32-hash'
import { prepareEmpty } from '@pnpm/prepare'
import { cleanExpiredDlxCache, cleanOrphans } from './cleanExpiredDlxCache'

function createSampleDlxCacheLinkTarget (dirPath: string): void {
  fs.mkdirSync(path.join(dirPath, 'node_modules', '.pnpm'), { recursive: true })
  fs.mkdirSync(path.join(dirPath, 'node_modules', '.bin'), { recursive: true })
  fs.writeFileSync(path.join(dirPath, 'node_modules', '.modules.yaml'), '')
  fs.writeFileSync(path.join(dirPath, 'package.json'), '')
  fs.writeFileSync(path.join(dirPath, 'pnpm-lock.yaml'), '')
}

function createSampleDlxCacheItem (cacheDir: string, cmd: string, now: Date, age: number): void {
  const hash = createBase32Hash(cmd)
  const newDate = new Date(now.getTime() - age * 60_000)
  const timeError = 432 // just an arbitrary amount, nothing is special about this number
  const pid = 71014 // just an arbitrary number to represent pid
  const targetName = `${(newDate.getTime() - timeError).toString(16)}-${pid.toString(16)}`
  const linkTarget = path.join(cacheDir, 'dlx', hash, targetName)
  const linkPath = path.join(cacheDir, 'dlx', hash, 'pkg')
  createSampleDlxCacheLinkTarget(linkTarget)
  fs.symlinkSync(linkTarget, linkPath, 'junction')
  fs.lutimesSync(linkPath, newDate, newDate)
}

function createSampleDlxCacheFsTree (cacheDir: string, now: Date, ageTable: Record<string, number>): void {
  for (const [cmd, age] of Object.entries(ageTable)) {
    createSampleDlxCacheItem(cacheDir, cmd, now, age)
  }
}

afterEach(() => {
  jest.restoreAllMocks()
})

test('cleanExpiredCache removes items that outlive dlxCacheMaxAge', async () => {
  prepareEmpty()

  const cacheDir = path.resolve('cache')
  const dlxCacheMaxAge = 7
  const now = new Date()

  createSampleDlxCacheFsTree(cacheDir, now, {
    foo: 1,
    bar: 5,
    baz: 20,
  })

  const readdirSyncSpy = jest.spyOn(fs, 'readdirSync')
  const lstatSpy = jest.spyOn(fs.promises, 'lstat')
  const rmSpy = jest.spyOn(fs.promises, 'rm')

  await cleanExpiredDlxCache({
    cacheDir,
    dlxCacheMaxAge,
    now,
  })

  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(2)
  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('bar'))).length).toBe(2)
  expect(fs.existsSync(path.join(cacheDir, 'dlx', createBase32Hash('baz')))).toBeFalsy()

  expect(readdirSyncSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx'), expect.anything())
  for (const key of ['foo', 'bar', 'baz']) {
    expect(lstatSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createBase32Hash(key), 'pkg'))
  }
  expect(rmSpy).not.toHaveBeenCalledWith(
    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('foo'))),
    expect.anything()
  )
  expect(rmSpy).not.toHaveBeenCalledWith(
    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('bar'))),
    expect.anything()
  )
  expect(rmSpy).toHaveBeenCalledWith(
    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('baz'))),
    { recursive: true }
  )

  readdirSyncSpy.mockRestore()
  lstatSpy.mockRestore()
  rmSpy.mockRestore()
})

test('cleanExpiredCache removes all directories without checking stat if dlxCacheMaxAge is 0', async () => {
  prepareEmpty()

  const cacheDir = path.resolve('cache')
  const dlxCacheMaxAge = 0
  const now = new Date()

  createSampleDlxCacheFsTree(cacheDir, now, {
    foo: 1,
    bar: 5,
    baz: 20,
  })

  const readdirSyncSpy = jest.spyOn(fs, 'readdirSync')
  const lstatSpy = jest.spyOn(fs.promises, 'lstat')
  const rmSpy = jest.spyOn(fs.promises, 'rm')

  await cleanExpiredDlxCache({
    cacheDir,
    dlxCacheMaxAge,
    now,
  })

  expect(
    fs.readdirSync(path.join(cacheDir, 'dlx'))
      .sort()
  ).toStrictEqual([])

  expect(readdirSyncSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx'), expect.anything())
  expect(lstatSpy).not.toHaveBeenCalled()
  for (const key of ['foo', 'bar', 'baz']) {
    expect(rmSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createBase32Hash(key)), { recursive: true })
  }

  readdirSyncSpy.mockRestore()
  lstatSpy.mockRestore()
  rmSpy.mockRestore()
})

test('cleanExpiredCache does nothing if dlxCacheMaxAge is Infinity', async () => {
  prepareEmpty()

  const cacheDir = path.resolve('cache')
  const dlxCacheMaxAge = Infinity
  const now = new Date()

  createSampleDlxCacheFsTree(cacheDir, now, {
    foo: 1,
    bar: 5,
    baz: 20,
  })

  const readdirSpy = jest.spyOn(fs.promises, 'readdir')
  const lstatSpy = jest.spyOn(fs.promises, 'lstat')
  const rmSpy = jest.spyOn(fs.promises, 'rm')

  await cleanExpiredDlxCache({
    cacheDir,
    dlxCacheMaxAge,
    now,
  })

  const dlxCacheDir = path.join(cacheDir, 'dlx')

  const entries = fs.readdirSync(dlxCacheDir).sort()
  expect(entries).toStrictEqual(
    ['foo', 'bar', 'baz']
      .map(createBase32Hash)
      .sort()
  )

  for (const entry of entries) {
    expect(fs.readdirSync(path.join(dlxCacheDir, entry)).length).toBe(2)
  }

  expect(readdirSpy).not.toHaveBeenCalled()
  expect(lstatSpy).not.toHaveBeenCalled()
  expect(rmSpy).not.toHaveBeenCalled()

  readdirSpy.mockRestore()
  lstatSpy.mockRestore()
  rmSpy.mockRestore()
})

test("cleanOrphans deletes dirs that don't contain `link` and subdirs that aren't pointed to by `link` from the same parent", async () => {
  prepareEmpty()

  const cacheDir = path.resolve('cache')
  const now = new Date()

  // has link and orphans
  createSampleDlxCacheItem(cacheDir, 'foo', now, 0)
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(5)

  // has no link, only orphans
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('bar'))).length).toBe(3)

  await cleanOrphans(path.join(cacheDir, 'dlx'))

  // expecting all subdirectories that aren't pointed to by `link` to be deleted.
  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(2)

  // expecting directory that doesn't contain `link` to be deleted.
  expect(fs.existsSync(path.join(cacheDir, 'dlx', createBase32Hash('bar')))).toBe(false)
})
store/plugin-commands-store/src/cleanExpiredDlxCache.ts (new file, 96 lines)
@@ -0,0 +1,96 @@
import { readdirSync, type Stats } from 'fs'
import fs from 'fs/promises'
import path from 'path'
import util from 'util'

export async function cleanExpiredDlxCache ({
  cacheDir,
  dlxCacheMaxAge,
  now,
}: {
  cacheDir: string
  dlxCacheMaxAge: number
  now: Date
}): Promise<void> {
  if (dlxCacheMaxAge === Infinity) return

  const dlxCacheDir = path.join(cacheDir, 'dlx')
  const dlxCacheNames = readOptDir(dlxCacheDir)
  if (!dlxCacheNames) return

  await Promise.all(dlxCacheNames.map(async (dlxCacheName) => {
    const dlxCachePath = path.join(dlxCacheDir, dlxCacheName)
    const dlxCacheLink = path.join(dlxCachePath, 'pkg')
    let shouldClean: boolean
    if (dlxCacheMaxAge <= 0) {
      shouldClean = true
    } else {
      const dlxCacheLinkStats = await getStats(dlxCacheLink)
      shouldClean = dlxCacheLinkStats !== 'ENOENT' && isOutdated(dlxCacheLinkStats, dlxCacheMaxAge, now)
    }
    if (shouldClean) {
      // delete the symlink, the symlink's target, and orphans (if any)
      await fs.rm(dlxCachePath, { recursive: true })
    }
  }))

  await cleanOrphans(dlxCacheDir)
}

export async function cleanOrphans (dlxCacheDir: string): Promise<void> {
  const dlxCacheNames = readOptDir(dlxCacheDir)
  if (!dlxCacheNames) return
  await Promise.all(dlxCacheNames.map(async dlxCacheName => {
    const dlxCachePath = path.join(dlxCacheDir, dlxCacheName)
    const dlxCacheLink = path.join(dlxCachePath, 'pkg')
    const dlxCacheLinkStats = await getStats(dlxCacheLink)
    if (dlxCacheLinkStats === 'ENOENT') {
      return fs.rm(dlxCachePath, { recursive: true })
    }
    const dlxCacheLinkTarget = await getRealPath(dlxCacheLink)
    const children = await fs.readdir(dlxCachePath)
    await Promise.all(children.map(async name => {
      if (name === 'pkg') return
      const fullPath = path.join(dlxCachePath, name)
      if (fullPath === dlxCacheLinkTarget) return
      await fs.rm(fullPath, { recursive: true })
    }))
  }))
}

function isOutdated (stats: Stats, dlxCacheMaxAge: number, now: Date): boolean {
  return stats.mtime.getTime() + dlxCacheMaxAge * 60_000 < now.getTime()
}

async function getStats (path: string): Promise<Stats | 'ENOENT'> {
  try {
    return await fs.lstat(path)
  } catch (err) {
    if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') {
      return 'ENOENT'
    }
    throw err
  }
}

function readOptDir (dirPath: string): string[] | null {
  try {
    return readdirSync(dirPath, 'utf-8')
  } catch (err) {
    if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') {
      return null
    }
    throw err
  }
}

async function getRealPath (linkPath: string): Promise<string | null> {
  try {
    return await fs.realpath(linkPath)
  } catch (err) {
    if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') {
      return null
    }
    throw err
  }
}
@@ -73,7 +73,7 @@ class StoreStatusError extends PnpmError {
  }
}

export type StoreCommandOptions = Pick<Config, 'dir' | 'registries' | 'tag' | 'storeDir' | 'force'> & CreateStoreControllerOptions & {
export type StoreCommandOptions = Pick<Config, 'dir' | 'registries' | 'tag' | 'storeDir' | 'force' | 'dlxCacheMaxAge'> & CreateStoreControllerOptions & {
  reporter?: (logObj: LogBase) => void
}
@@ -94,6 +94,8 @@ export async function handler (opts: StoreCommandOptions, params: string[]) {
    storeController: store.ctrl,
    storeDir: store.dir,
    removeAlienFiles: opts.force,
    cacheDir: opts.cacheDir,
    dlxCacheMaxAge: opts.dlxCacheMaxAge,
  })
  return storePrune(storePruneOptions)
}
@@ -1,12 +1,15 @@
import { streamParser } from '@pnpm/logger'
import { type StoreController } from '@pnpm/store-controller-types'
import { type ReporterFunction } from './types'
import { cleanExpiredDlxCache } from './cleanExpiredDlxCache'

export async function storePrune (
  opts: {
    reporter?: ReporterFunction
    storeController: StoreController
    removeAlienFiles?: boolean
    cacheDir: string
    dlxCacheMaxAge: number
  }
) {
  const reporter = opts?.reporter
@@ -16,6 +19,12 @@ export async function storePrune (
  await opts.storeController.prune(opts.removeAlienFiles)
  await opts.storeController.close()

  await cleanExpiredDlxCache({
    cacheDir: opts.cacheDir,
    dlxCacheMaxAge: opts.dlxCacheMaxAge,
    now: new Date(),
  })

  if ((reporter != null) && typeof reporter === 'function') {
    streamParser.removeListener('data', reporter)
  }
@@ -23,6 +23,7 @@ test('pnpm store add express@4.16.3', async () => {
    registries: { default: `http://localhost:${REGISTRY_MOCK_PORT}/` },
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: 0,
  }, ['add', 'express@4.16.3'])

  const { cafsHas } = assertStore(path.join(storeDir, STORE_VERSION))
@@ -48,6 +49,7 @@ test('pnpm store add scoped package that uses not the standard registry', async
    },
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: 0,
  }, ['add', '@foo/no-deps@1.0.0'])

  const { cafsHas } = assertStore(path.join(storeDir, STORE_VERSION))
@@ -76,6 +78,7 @@ test('should fail if some packages can not be added', async () => {
      },
      storeDir,
      userConfig: {},
      dlxCacheMaxAge: 0,
    }, ['add', '@pnpm/this-does-not-exist'])
  } catch (e: any) { // eslint-disable-line
    thrown = true
@@ -19,6 +19,7 @@ test('CLI prints the current store path', async () => {
    registries: { default: REGISTRY },
    storeDir: '/home/example/.pnpm-store',
    userConfig: {},
    dlxCacheMaxAge: 0,
  }, ['path'])

  const expectedStorePath = os.platform() === 'win32'
@@ -1,8 +1,9 @@
import fs from 'fs'
import path from 'path'
import { assertStore } from '@pnpm/assert-store'
import { createBase32Hash } from '@pnpm/crypto.base32-hash'
import { store } from '@pnpm/plugin-commands-store'
import { prepare } from '@pnpm/prepare'
import { prepare, prepareEmpty } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import { sync as rimraf } from '@zkochan/rimraf'
import execa from 'execa'
@@ -47,6 +48,7 @@ test('remove unreferenced packages', async () => {
    reporter,
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  expect(reporter).toHaveBeenCalledWith(
@@ -70,6 +72,7 @@ test('remove unreferenced packages', async () => {
    reporter,
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  expect(reporter).not.toHaveBeenCalledWith(
@@ -105,6 +108,7 @@ test.skip('remove packages that are used by project that no longer exist', async
    reporter,
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  expect(reporter).toHaveBeenCalledWith(
@@ -143,6 +147,7 @@ test('keep dependencies used by others', async () => {
    registries: { default: REGISTRY },
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  project.storeHasNot('camelcase-keys', '3.0.0')
@@ -167,6 +172,7 @@ test('keep dependency used by package', async () => {
    registries: { default: REGISTRY },
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  project.storeHas('is-positive', '3.1.0')
@@ -189,6 +195,7 @@ test('prune will skip scanning non-directory in storeDir', async () => {
    registries: { default: REGISTRY },
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])
})
@@ -215,6 +222,7 @@ test('prune does not fail if the store contains an unexpected directory', async
    reporter,
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: Infinity,
  }, ['prune'])

  expect(reporter).toHaveBeenCalledWith(
@@ -252,6 +260,7 @@ test('prune removes alien files from the store if the --force flag is used', asy
    storeDir,
    userConfig: {},
    force: true,
    dlxCacheMaxAge: Infinity,
  }, ['prune'])
  expect(reporter).toHaveBeenCalledWith(
    expect.objectContaining({
@@ -261,3 +270,70 @@ test('prune removes alien files from the store if the --force flag is used', asy
  )
  expect(fs.existsSync(alienDir)).toBeFalsy()
})

function createSampleDlxCacheLinkTarget (dirPath: string): void {
  fs.mkdirSync(path.join(dirPath, 'node_modules', '.pnpm'), { recursive: true })
  fs.mkdirSync(path.join(dirPath, 'node_modules', '.bin'), { recursive: true })
  fs.writeFileSync(path.join(dirPath, 'node_modules', '.modules.yaml'), '')
  fs.writeFileSync(path.join(dirPath, 'package.json'), '')
  fs.writeFileSync(path.join(dirPath, 'pnpm-lock.yaml'), '')
}

function createSampleDlxCacheItem (cacheDir: string, cmd: string, now: Date, age: number): void {
  const hash = createBase32Hash(cmd)
  const newDate = new Date(now.getTime() - age * 60_000)
  const timeError = 432 // just an arbitrary amount, nothing is special about this number
  const pid = 71014 // just an arbitrary number to represent pid
  const targetName = `${(newDate.getTime() - timeError).toString(16)}-${pid.toString(16)}`
  const linkTarget = path.join(cacheDir, 'dlx', hash, targetName)
  const linkPath = path.join(cacheDir, 'dlx', hash, 'pkg')
  createSampleDlxCacheLinkTarget(linkTarget)
  fs.symlinkSync(linkTarget, linkPath, 'junction')
  fs.lutimesSync(linkPath, newDate, newDate)
}

function createSampleDlxCacheFsTree (cacheDir: string, now: Date, ageTable: Record<string, number>): void {
  for (const [cmd, age] of Object.entries(ageTable)) {
    createSampleDlxCacheItem(cacheDir, cmd, now, age)
  }
}

test('prune removes cache directories that outlives dlx-cache-max-age', async () => {
  prepareEmpty()
  const cacheDir = path.resolve('cache')
  const storeDir = path.resolve('store')

  fs.mkdirSync(path.join(storeDir, 'v3', 'files'), { recursive: true })
  fs.mkdirSync(path.join(storeDir, 'v3', 'tmp'), { recursive: true })

  const now = new Date()

  createSampleDlxCacheFsTree(cacheDir, now, {
    foo: 1,
    bar: 5,
    baz: 20,
  })

  await store.handler({
    cacheDir,
    dir: process.cwd(),
    pnpmHomeDir: '',
    rawConfig: {
      registry: REGISTRY,
    },
    registries: { default: REGISTRY },
    reporter () {},
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: 7,
  }, ['prune'])

  expect(
    fs.readdirSync(path.join(cacheDir, 'dlx'))
      .sort()
  ).toStrictEqual(
    ['foo', 'bar']
      .map(createBase32Hash)
      .sort()
  )
})
@@ -40,6 +40,7 @@ test('CLI fails when store status finds modified packages', async () => {
      registries: modulesState!.registries!,
      storeDir,
      userConfig: {},
      dlxCacheMaxAge: 0,
    }, ['status'])
  } catch (_err: any) { // eslint-disable-line
    err = _err
@@ -91,5 +92,6 @@ test('CLI does not fail when store status does not find modified packages', asyn
    registries: modulesState!.registries!,
    storeDir,
    userConfig: {},
    dlxCacheMaxAge: 0,
  }, ['status'])
})
@@ -33,6 +33,9 @@
    {
      "path": "../../lockfile/lockfile-utils"
    },
    {
      "path": "../../packages/crypto.base32-hash"
    },
    {
      "path": "../../packages/dependency-path"
    },