mirror of https://github.com/pnpm/pnpm.git (synced 2026-01-12 00:48:21 -05:00)
fix(dlx/cache): account for customized registries (#8299)
* fix(dlx/cache): account for customized registries

  Different registries potentially return different packages for the same name, so reusing the dlx cache for packages that came from a different registry would be incorrect.

* style: eslint
* refactor: dlx

Co-authored-by: Zoltan Kochan <z@kochan.io>
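For context, a minimal sketch of what the new cache-key input changes in practice. It assumes the dlx.createCacheKey export introduced below; the custom registry URL is purely illustrative:

import { dlx } from '@pnpm/plugin-commands-script-runners'

const pkgs = ['shx']
// Same package list, two different registry configurations.
const keyFromDefaultRegistry = dlx.createCacheKey(pkgs, { default: 'https://registry.npmjs.com/' })
const keyFromCustomRegistry = dlx.createCacheKey(pkgs, { default: 'https://example.com/npm-registry/' })

// The registry map is now part of the hashed input, so the keys differ and
// each registry configuration gets its own dlx cache directory.
console.log(keyFromDefaultRegistry === keyFromCustomRegistry) // false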
.changeset/wild-mayflies-compete.md (new file, 7 lines)
@@ -0,0 +1,7 @@
+---
+"@pnpm/plugin-commands-script-runners": major
+"@pnpm/plugin-commands-store": patch
+"pnpm": patch
+---
+
+Add registries information to the calculation of dlx cache hash.
@@ -64,7 +64,7 @@ export function help (): string {
 export type DlxCommandOptions = {
   package?: string[]
   shellMode?: boolean
-} & Pick<Config, 'extraBinPaths' | 'reporter' | 'userAgent' | 'cacheDir' | 'dlxCacheMaxAge' | 'useNodeVersion' > & add.AddCommandOptions
+} & Pick<Config, 'extraBinPaths' | 'registries' | 'reporter' | 'userAgent' | 'cacheDir' | 'dlxCacheMaxAge' | 'useNodeVersion'> & add.AddCommandOptions
 
 export async function handler (
   opts: DlxCommandOptions,
@@ -74,6 +74,7 @@ export async function handler (
   const { cacheLink, prepareDir } = findCache(pkgs, {
     dlxCacheMaxAge: opts.dlxCacheMaxAge,
     cacheDir: opts.cacheDir,
+    registries: opts.registries,
   })
   if (prepareDir) {
     fs.mkdirSync(prepareDir, { recursive: true })
@@ -159,23 +160,36 @@ function scopeless (pkgName: string): string {
 function findCache (pkgs: string[], opts: {
   cacheDir: string
   dlxCacheMaxAge: number
+  registries: Record<string, string>
 }): { cacheLink: string, prepareDir: string | null } {
-  const dlxCommandCacheDir = createDlxCommandCacheDir(opts.cacheDir, pkgs)
+  const dlxCommandCacheDir = createDlxCommandCacheDir(pkgs, opts)
   const cacheLink = path.join(dlxCommandCacheDir, 'pkg')
   const valid = isCacheValid(cacheLink, opts.dlxCacheMaxAge)
   const prepareDir = valid ? null : getPrepareDir(dlxCommandCacheDir)
   return { cacheLink, prepareDir }
 }
 
-function createDlxCommandCacheDir (cacheDir: string, pkgs: string[]): string {
-  const dlxCacheDir = path.resolve(cacheDir, 'dlx')
-  const hashStr = pkgs.join('\n') // '\n' is not a URL-friendly character, and therefore not a valid package name, which can be used as separator
-  const cacheKey = createBase32Hash(hashStr)
+function createDlxCommandCacheDir (
+  pkgs: string[],
+  opts: {
+    registries: Record<string, string>
+    cacheDir: string
+  }
+): string {
+  const dlxCacheDir = path.resolve(opts.cacheDir, 'dlx')
+  const cacheKey = createCacheKey(pkgs, opts.registries)
   const cachePath = path.join(dlxCacheDir, cacheKey)
   fs.mkdirSync(cachePath, { recursive: true })
   return cachePath
 }
 
+export function createCacheKey (pkgs: string[], registries: Record<string, string>): string {
+  const sortedPkgs = [...pkgs].sort((a, b) => a.localeCompare(b))
+  const sortedRegistries = Object.entries(registries).sort(([k1], [k2]) => k1.localeCompare(k2))
+  const hashStr = JSON.stringify([sortedPkgs, sortedRegistries])
+  return createBase32Hash(hashStr)
+}
+
 function isCacheValid (cacheLink: string, dlxCacheMaxAge: number): boolean {
   let stats: Stats
   try {
@@ -0,0 +1,30 @@
+import { createBase32Hash } from '@pnpm/crypto.base32-hash'
+import { createCacheKey } from '../src/dlx'
+
+test('creates a hash', () => {
+  const received = createCacheKey(['shx', '@foo/bar'], {
+    default: 'https://registry.npmjs.com/',
+    '@foo': 'https://example.com/npm-registry/foo/',
+  })
+  const expected = createBase32Hash(JSON.stringify([['@foo/bar', 'shx'], [
+    ['@foo', 'https://example.com/npm-registry/foo/'],
+    ['default', 'https://registry.npmjs.com/'],
+  ]]))
+  expect(received).toBe(expected)
+})
+
+test('is agnostic to package order', () => {
+  const registries = { default: 'https://registry.npmjs.com/' }
+  expect(createCacheKey(['a', 'c', 'b'], registries)).toBe(createCacheKey(['a', 'b', 'c'], registries))
+  expect(createCacheKey(['b', 'a', 'c'], registries)).toBe(createCacheKey(['a', 'b', 'c'], registries))
+  expect(createCacheKey(['b', 'c', 'a'], registries)).toBe(createCacheKey(['a', 'b', 'c'], registries))
+  expect(createCacheKey(['c', 'a', 'b'], registries)).toBe(createCacheKey(['a', 'b', 'c'], registries))
+  expect(createCacheKey(['c', 'b', 'a'], registries)).toBe(createCacheKey(['a', 'b', 'c'], registries))
+})
+
+test('is agnostic to registry key order', () => {
+  const packages = ['a', 'b', 'c']
+  const foo = 'https://example.com/foo/'
+  const bar = 'https://example.com/bar/'
+  expect(createCacheKey(packages, { '@foo': foo, '@bar': bar })).toBe(createCacheKey(packages, { '@bar': bar, '@foo': foo }))
+})
@@ -1,6 +1,5 @@
 import fs from 'fs'
 import path from 'path'
-import { createBase32Hash } from '@pnpm/crypto.base32-hash'
 import { add } from '@pnpm/plugin-commands-installation'
 import { dlx } from '@pnpm/plugin-commands-script-runners'
 import { prepareEmpty } from '@pnpm/prepare'
@@ -21,6 +20,8 @@ function sanitizeDlxCacheComponent (cacheName: string): string {
   return '***********-*****'
 }
 
+const createCacheKey = (...pkgs: string[]): string => dlx.createCacheKey(pkgs, DEFAULT_OPTS.registries)
+
 function verifyDlxCache (cacheName: string): void {
   expect(
     fs.readdirSync(path.resolve('cache', 'dlx', cacheName))
@@ -198,7 +199,7 @@ test('dlx with cache', async () => {
   }, ['shx', 'touch', 'foo'])
 
   expect(fs.existsSync('foo')).toBe(true)
-  verifyDlxCache(createBase32Hash('shx'))
+  verifyDlxCache(createCacheKey('shx'))
   expect(spy).toHaveBeenCalled()
 
   spy.mockReset()
@@ -212,7 +213,7 @@ test('dlx with cache', async () => {
   }, ['shx', 'touch', 'bar'])
 
   expect(fs.existsSync('bar')).toBe(true)
-  verifyDlxCache(createBase32Hash('shx'))
+  verifyDlxCache(createCacheKey('shx'))
   expect(spy).not.toHaveBeenCalled()
 
   spy.mockRestore()
@@ -231,11 +232,11 @@ test('dlx does not reuse expired cache', async () => {
     cacheDir: path.resolve('cache'),
     dlxCacheMaxAge: Infinity,
   }, ['shx', 'echo', 'hello world'])
-  verifyDlxCache(createBase32Hash('shx'))
+  verifyDlxCache(createCacheKey('shx'))
 
   // change the date attributes of the cache to 30 minutes older than now
   const newDate = new Date(now.getTime() - 30 * 60_000)
-  fs.lutimesSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'), newDate, newDate)
+  fs.lutimesSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg'), newDate, newDate)
 
   const spy = jest.spyOn(add, 'handler')
 
@@ -254,7 +255,7 @@ test('dlx does not reuse expired cache', async () => {
   spy.mockRestore()
 
   expect(
-    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx')))
+    fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx')))
       .map(sanitizeDlxCacheComponent)
       .sort()
   ).toStrictEqual([
@@ -262,7 +263,7 @@ test('dlx does not reuse expired cache', async () => {
     '***********-*****',
     '***********-*****',
   ].sort())
-  verifyDlxCacheLink(createBase32Hash('shx'))
+  verifyDlxCacheLink(createCacheKey('shx'))
 })
 
 test('dlx still saves cache even if execution fails', async () => {
@@ -279,5 +280,5 @@ test('dlx still saves cache even if execution fails', async () => {
   }, ['shx', 'mkdir', path.resolve('not-a-dir')])
 
   expect(fs.readFileSync(path.resolve('not-a-dir'), 'utf-8')).toEqual(expect.anything())
-  verifyDlxCache(createBase32Hash('shx'))
+  verifyDlxCache(createCacheKey('shx'))
 })
pnpm-lock.yaml (generated, 6 lines changed)
@@ -6825,12 +6825,12 @@ importers:
       '@pnpm/assert-store':
         specifier: workspace:*
        version: link:../../__utils__/assert-store
-      '@pnpm/crypto.base32-hash':
-        specifier: workspace:*
-        version: link:../../packages/crypto.base32-hash
       '@pnpm/lockfile-file':
         specifier: workspace:*
         version: link:../../lockfile/lockfile-file
+      '@pnpm/plugin-commands-script-runners':
+        specifier: workspace:*
+        version: link:../../exec/plugin-commands-script-runners
       '@pnpm/plugin-commands-store':
         specifier: workspace:*
         version: 'link:'
@@ -1,11 +1,13 @@
 import fs from 'fs'
 import path from 'path'
 import PATH_NAME from 'path-name'
-import { createBase32Hash } from '@pnpm/crypto.base32-hash'
 import { prepare, prepareEmpty } from '@pnpm/prepare'
 import { readModulesManifest } from '@pnpm/modules-yaml'
 import { addUser, REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
-import { execPnpm, execPnpmSync } from './utils'
+import { dlx } from '@pnpm/plugin-commands-script-runners'
+import { execPnpm, execPnpmSync, testDefaults } from './utils'
 
+const createCacheKey = (...pkgs: string[]): string => dlx.createCacheKey(pkgs, { default: testDefaults({}).registry })
+
 test('silent dlx prints the output of the child process only', async () => {
   prepare({})
@@ -72,17 +74,17 @@ test('parallel dlx calls of the same package', async () => {
 
   expect(['foo', 'bar', 'baz'].filter(name => fs.existsSync(name))).toStrictEqual(['foo', 'bar', 'baz'])
   expect(
-    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
+    fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg'))
   ).toStrictEqual([
     'node_modules',
     'package.json',
     'pnpm-lock.yaml',
   ])
   expect(
-    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
-  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))
+    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg')))
+  ).toBe(path.resolve('cache', 'dlx', createCacheKey('shx')))
 
-  const cacheContentAfterFirstRun = fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).sort()
+  const cacheContentAfterFirstRun = fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'))).sort()
 
   // parallel dlx calls with cache
   await Promise.all(['abc', 'def', 'ghi'].map(
@@ -90,17 +92,17 @@ test('parallel dlx calls of the same package', async () => {
   ))
 
   expect(['abc', 'def', 'ghi'].filter(name => fs.existsSync(name))).toStrictEqual(['abc', 'def', 'ghi'])
-  expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).sort()).toStrictEqual(cacheContentAfterFirstRun)
+  expect(fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'))).sort()).toStrictEqual(cacheContentAfterFirstRun)
   expect(
-    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
+    fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg'))
   ).toStrictEqual([
     'node_modules',
     'package.json',
     'pnpm-lock.yaml',
   ])
   expect(
-    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
-  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))
+    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg')))
+  ).toBe(path.resolve('cache', 'dlx', createCacheKey('shx')))
 
   // parallel dlx calls with expired cache
   await Promise.all(['a/b/c', 'd/e/f', 'g/h/i'].map(
@@ -112,17 +114,17 @@ test('parallel dlx calls of the same package', async () => {
   ))
 
   expect(['a/b/c', 'd/e/f', 'g/h/i'].filter(name => fs.existsSync(name))).toStrictEqual(['a/b/c', 'd/e/f', 'g/h/i'])
-  expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'))).length).toBeGreaterThan(cacheContentAfterFirstRun.length)
+  expect(fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'))).length).toBeGreaterThan(cacheContentAfterFirstRun.length)
   expect(
-    fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg'))
+    fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg'))
   ).toStrictEqual([
     'node_modules',
     'package.json',
     'pnpm-lock.yaml',
   ])
   expect(
-    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createBase32Hash('shx'), 'pkg')))
-  ).toBe(path.resolve('cache', 'dlx', createBase32Hash('shx')))
+    path.dirname(fs.realpathSync(path.resolve('cache', 'dlx', createCacheKey('shx'), 'pkg')))
+  ).toBe(path.resolve('cache', 'dlx', createCacheKey('shx')))
 })
 
 test('dlx creates cache and store prune cleans cache', async () => {
@@ -149,11 +151,11 @@ test('dlx creates cache and store prune cleans cache', async () => {
       .sort()
   ).toStrictEqual(
     Object.keys(commands)
-      .map(createBase32Hash)
+      .map(cmd => createCacheKey(cmd))
       .sort()
   )
   for (const cmd of Object.keys(commands)) {
-    expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash(cmd))).length).toBe(2)
+    expect(fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey(cmd))).length).toBe(2)
   }
 
   // modify the dates of the cache items
@@ -166,7 +168,7 @@ test('dlx creates cache and store prune cleans cache', async () => {
   const now = new Date()
   await Promise.all(Object.entries(ageTable).map(async ([cmd, age]) => {
     const newDate = new Date(now.getTime() - age * 60_000)
-    const dlxCacheLink = path.resolve('cache', 'dlx', createBase32Hash(cmd), 'pkg')
+    const dlxCacheLink = path.resolve('cache', 'dlx', createCacheKey(cmd), 'pkg')
     await fs.promises.lutimes(dlxCacheLink, newDate, newDate)
   }))
 
@@ -178,11 +180,11 @@ test('dlx creates cache and store prune cleans cache', async () => {
       .sort()
   ).toStrictEqual(
     ['shx', '@pnpm.e2e/touch-file-good-bin-name']
-      .map(createBase32Hash)
+      .map(cmd => createCacheKey(cmd))
       .sort()
   )
   for (const cmd of ['shx', '@pnpm.e2e/touch-file-good-bin-name']) {
-    expect(fs.readdirSync(path.resolve('cache', 'dlx', createBase32Hash(cmd))).length).toBe(2)
+    expect(fs.readdirSync(path.resolve('cache', 'dlx', createCacheKey(cmd))).length).toBe(2)
   }
 
   await execPnpm([
@@ -208,7 +210,7 @@ test('dlx should ignore non-auth info from .npmrc in the current directory', asy
     `--config.cache-dir=${cacheDir}`,
     'dlx', 'shx', 'echo', 'hi'])
 
-  const modulesManifest = await readModulesManifest(path.join(cacheDir, 'dlx', createBase32Hash('shx'), 'pkg/node_modules'))
+  const modulesManifest = await readModulesManifest(path.join(cacheDir, 'dlx', createCacheKey('shx'), 'pkg/node_modules'))
   expect(modulesManifest?.hoistPattern).toStrictEqual(['*'])
 })
 
@@ -31,8 +31,8 @@
   "homepage": "https://github.com/pnpm/pnpm/blob/main/store/plugin-commands-store#readme",
   "devDependencies": {
     "@pnpm/assert-store": "workspace:*",
-    "@pnpm/crypto.base32-hash": "workspace:*",
     "@pnpm/lockfile-file": "workspace:*",
+    "@pnpm/plugin-commands-script-runners": "workspace:*",
     "@pnpm/plugin-commands-store": "workspace:*",
     "@pnpm/prepare": "workspace:*",
     "@pnpm/registry-mock": "catalog:",
@@ -1,9 +1,11 @@
 import fs from 'fs'
 import path from 'path'
-import { createBase32Hash } from '@pnpm/crypto.base32-hash'
+import { dlx } from '@pnpm/plugin-commands-script-runners'
 import { prepareEmpty } from '@pnpm/prepare'
 import { cleanExpiredDlxCache, cleanOrphans } from './cleanExpiredDlxCache'
 
+const createCacheKey = (...pkgs: string[]): string => dlx.createCacheKey(pkgs, { default: 'https://registry.npmjs.com/' })
+
 function createSampleDlxCacheLinkTarget (dirPath: string): void {
   fs.mkdirSync(path.join(dirPath, 'node_modules', '.pnpm'), { recursive: true })
   fs.mkdirSync(path.join(dirPath, 'node_modules', '.bin'), { recursive: true })
@@ -13,7 +15,7 @@ function createSampleDlxCacheLinkTarget (dirPath: string): void {
 }
 
 function createSampleDlxCacheItem (cacheDir: string, cmd: string, now: Date, age: number): void {
-  const hash = createBase32Hash(cmd)
+  const hash = createCacheKey(cmd)
   const newDate = new Date(now.getTime() - age * 60_000)
   const timeError = 432 // just an arbitrary amount, nothing is special about this number
   const pid = 71014 // just an arbitrary number to represent pid
@@ -58,24 +60,24 @@ test('cleanExpiredCache removes items that outlive dlxCacheMaxAge', async () =>
     now,
   })
 
-  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(2)
-  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('bar'))).length).toBe(2)
-  expect(fs.existsSync(path.join(cacheDir, 'dlx', createBase32Hash('baz')))).toBeFalsy()
+  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createCacheKey('foo'))).length).toBe(2)
+  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createCacheKey('bar'))).length).toBe(2)
+  expect(fs.existsSync(path.join(cacheDir, 'dlx', createCacheKey('baz')))).toBeFalsy()
 
   expect(readdirSyncSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx'), expect.anything())
   for (const key of ['foo', 'bar', 'baz']) {
-    expect(lstatSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createBase32Hash(key), 'pkg'))
+    expect(lstatSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createCacheKey(key), 'pkg'))
   }
   expect(rmSpy).not.toHaveBeenCalledWith(
-    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('foo'))),
+    expect.stringContaining(path.join(cacheDir, 'dlx', createCacheKey('foo'))),
     expect.anything()
   )
   expect(rmSpy).not.toHaveBeenCalledWith(
-    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('bar'))),
+    expect.stringContaining(path.join(cacheDir, 'dlx', createCacheKey('bar'))),
     expect.anything()
   )
   expect(rmSpy).toHaveBeenCalledWith(
-    expect.stringContaining(path.join(cacheDir, 'dlx', createBase32Hash('baz'))),
+    expect.stringContaining(path.join(cacheDir, 'dlx', createCacheKey('baz'))),
     { recursive: true }
   )
 
@@ -115,7 +117,7 @@ test('cleanExpiredCache removes all directories without checking stat if dlxCach
   expect(readdirSyncSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx'), expect.anything())
   expect(lstatSpy).not.toHaveBeenCalled()
   for (const key of ['foo', 'bar', 'baz']) {
-    expect(rmSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createBase32Hash(key)), { recursive: true })
+    expect(rmSpy).toHaveBeenCalledWith(path.join(cacheDir, 'dlx', createCacheKey(key)), { recursive: true })
   }
 
   readdirSyncSpy.mockRestore()
@@ -151,7 +153,7 @@ test('cleanExpiredCache does nothing if dlxCacheMaxAge is Infinity', async () =>
   const entries = fs.readdirSync(dlxCacheDir).sort()
   expect(entries).toStrictEqual(
     ['foo', 'bar', 'baz']
-      .map(createBase32Hash)
+      .map(cmd => createCacheKey(cmd))
       .sort()
   )
 
@@ -176,22 +178,22 @@ test("cleanOrphans deletes dirs that don't contain `link` and subdirs that aren'
 
   // has link and orphans
   createSampleDlxCacheItem(cacheDir, 'foo', now, 0)
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('foo'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
-  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(5)
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('foo'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('foo'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('foo'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
+  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createCacheKey('foo'))).length).toBe(5)
 
   // has no link, only orphans
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
-  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createBase32Hash('bar'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
-  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('bar'))).length).toBe(3)
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('bar'), `${now.getTime().toString(16)}-${(7000).toString(16)}`))
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('bar'), `${now.getTime().toString(16)}-${(7005).toString(16)}`))
+  createSampleDlxCacheLinkTarget(path.join(cacheDir, 'dlx', createCacheKey('bar'), `${now.getTime().toString(16)}-${(7102).toString(16)}`))
+  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createCacheKey('bar'))).length).toBe(3)
 
   await cleanOrphans(path.join(cacheDir, 'dlx'))
 
   // expecting all subdirectories that aren't pointed to by `link` to be deleted.
-  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createBase32Hash('foo'))).length).toBe(2)
+  expect(fs.readdirSync(path.join(cacheDir, 'dlx', createCacheKey('foo'))).length).toBe(2)
 
   // expecting directory that doesn't contain `link` to be deleted.
-  expect(fs.existsSync(path.join(cacheDir, 'dlx', createBase32Hash('bar')))).toBe(false)
+  expect(fs.existsSync(path.join(cacheDir, 'dlx', createCacheKey('bar')))).toBe(false)
 })
@@ -1,7 +1,7 @@
 import fs from 'fs'
 import path from 'path'
 import { assertStore } from '@pnpm/assert-store'
-import { createBase32Hash } from '@pnpm/crypto.base32-hash'
+import { dlx } from '@pnpm/plugin-commands-script-runners'
 import { store } from '@pnpm/plugin-commands-store'
 import { prepare, prepareEmpty } from '@pnpm/prepare'
 import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
@@ -13,6 +13,8 @@ const STORE_VERSION = 'v3'
 const REGISTRY = `http://localhost:${REGISTRY_MOCK_PORT}/`
 const pnpmBin = path.join(__dirname, '../../../pnpm/bin/pnpm.cjs')
 
+const createCacheKey = (...pkgs: string[]): string => dlx.createCacheKey(pkgs, { default: REGISTRY })
+
 test('remove unreferenced packages', async () => {
   const project = prepare()
   const cacheDir = path.resolve('cache')
@@ -288,7 +290,7 @@ function createSampleDlxCacheLinkTarget (dirPath: string): void {
 }
 
 function createSampleDlxCacheItem (cacheDir: string, cmd: string, now: Date, age: number): void {
-  const hash = createBase32Hash(cmd)
+  const hash = createCacheKey(cmd)
   const newDate = new Date(now.getTime() - age * 60_000)
   const timeError = 432 // just an arbitrary amount, nothing is special about this number
   const pid = 71014 // just an arbitrary number to represent pid
@@ -342,7 +344,7 @@ test('prune removes cache directories that outlives dlx-cache-max-age', async ()
       .sort()
   ).toStrictEqual(
     ['foo', 'bar']
-      .map(createBase32Hash)
+      .map(cmd => createCacheKey(cmd))
       .sort()
   )
 })
@@ -27,15 +27,15 @@
     {
      "path": "../../config/pick-registry-for-package"
     },
+    {
+      "path": "../../exec/plugin-commands-script-runners"
+    },
     {
       "path": "../../lockfile/lockfile-file"
     },
     {
       "path": "../../lockfile/lockfile-utils"
     },
-    {
-      "path": "../../packages/crypto.base32-hash"
-    },
     {
       "path": "../../packages/dependency-path"
     },