perf: reduce directory nesting in the store
Store the package index files in the CAFS. Ref #2521, PR #2527.
.changeset/breezy-maps-double.md (new file, 6 lines)
@@ -0,0 +1,6 @@
---
"@pnpm/package-requester": major
"@pnpm/plugin-commands-store": major
---

Store the package index files in the CAFS to reduce directory nesting.
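In practice the change flattens the store layout: instead of an integrity.json kept under a per-registry, per-name, per-version directory, the package index is written into the content-addressable file store (the `files` directory) at a path derived from the package's tarball integrity. A minimal sketch of that derivation, assuming the ssri package; the real helper is the new getFilePathInCafs module added below, and the function and path names here are illustrative:

import path = require('path')
import ssri = require('ssri')

// Derive where a package's index file lands in the CAFS from the
// package's tarball integrity (e.g. the "integrity" field in the lockfile).
function indexPathInCafs (cafsDir: string, integrity: string): string {
  const hex = ssri.parse(integrity, { single: true }).hexDigest()
  // A two-character prefix directory keeps any single directory small,
  // and the ".json" suffix marks the file as a package index.
  return path.join(cafsDir, hex.slice(0, 2), `${hex.slice(2)}.json`)
}

// e.g. indexPathInCafs('/store/v3/files', 'sha512-…') resolves to
// something like '/store/v3/files/ab/cdef….json' (digest abbreviated).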
@@ -3,7 +3,7 @@ import rimraf = require('@zkochan/rimraf')
import fs = require('mz/fs')
import pLimit from 'p-limit'
import ssri = require('ssri')
import { getFilePathInCafs } from '.'
import { getFilePathByModeInCafs } from './getFilePathInCafs'
import { parseJsonBuffer } from './parseJson'

const limit = pLimit(20)
@@ -25,7 +25,7 @@ export default async function (
  }
  if (
    !await verifyFile(
      getFilePathInCafs(cafsDir, fstat),
      getFilePathByModeInCafs(cafsDir, fstat.integrity, fstat.mode),
      fstat,
      f === 'package.json' ? manifest : undefined,
    )
packages/cafs/src/getFilePathInCafs.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
import path = require('path')
import { Hash } from 'ssri'
import ssri = require('ssri')

export const modeIsExecutable = (mode: number) => (mode & 0o111) === 0o111

export type FileType = 'exec' | 'nonexec' | 'index'

export function getFilePathByModeInCafs (
  cafsDir: string,
  integrity: string | Hash,
  mode: number,
) {
  const fileType = modeIsExecutable(mode) ? 'exec' : 'nonexec'
  return path.join(cafsDir, contentPathFromIntegrity(integrity, fileType))
}

export default function getFilePathInCafs (
  cafsDir: string,
  integrity: string | Hash,
  fileType: FileType,
) {
  return path.join(cafsDir, contentPathFromIntegrity(integrity, fileType))
}

function contentPathFromIntegrity (
  integrity: string | Hash,
  fileType: FileType,
) {
  const sri = ssri.parse(integrity, { single: true })
  return contentPathFromHex(fileType, sri.hexDigest())
}

export function contentPathFromHex (fileType: FileType, hex: string) {
  const p = path.join(hex.slice(0, 2), hex.slice(2))
  switch (fileType) {
    case 'exec':
      return `x${path.sep}${p}`
    case 'nonexec':
      return p
    case 'index':
      return `${p}.json`
  }
}
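To make the layout concrete, a small usage sketch of the helpers above (the store path is illustrative; the sha1 digest is the one used by this PR's tests):

import { getFilePathByModeInCafs, getFilePathInCafs } from '@pnpm/cafs'
import ssri = require('ssri')

const cafsDir = '/store/v3/files'
// Integrity built from a raw sha1 digest, the same way the tests in this PR do.
const integrity = ssri.fromHex('f0d86377aa15a64c34961f38ac2a9be2b40a1187', 'sha1').toString()

// Non-executable file content:
//   /store/v3/files/f0/d86377aa15a64c34961f38ac2a9be2b40a1187
getFilePathByModeInCafs(cafsDir, integrity, 0o644)

// Executable file content lives under an extra "x" directory:
//   /store/v3/files/x/f0/d86377aa15a64c34961f38ac2a9be2b40a1187
getFilePathByModeInCafs(cafsDir, integrity, 0o755)

// A package index file gets a ".json" suffix:
//   /store/v3/files/f0/d86377aa15a64c34961f38ac2a9be2b40a1187.json
getFilePathInCafs(cafsDir, integrity, 'index')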
@@ -4,13 +4,23 @@ import exists = require('path-exists')
import pathTemp = require('path-temp')
import renameOverwrite = require('rename-overwrite')
import ssri = require('ssri')
import { Hash } from 'ssri'
import addFilesFromDir from './addFilesFromDir'
import addFilesFromTarball from './addFilesFromTarball'
import checkFilesIntegrity from './checkFilesIntegrity'
import getFilePathInCafs, {
  contentPathFromHex,
  FileType,
  getFilePathByModeInCafs,
  modeIsExecutable,
} from './getFilePathInCafs'
import writeFile from './writeFile'

export { checkFilesIntegrity }
export {
  checkFilesIntegrity,
  FileType,
  getFilePathByModeInCafs,
  getFilePathInCafs,
}

export default function createCafs (cafsDir: string, ignore?: ((filename: string) => Boolean)) {
  const locker = new Map()
@@ -32,7 +42,6 @@ async function addStreamToCafs (
  return addBufferToCafs(writeBufferToCafs, buffer, mode)
}

const modeIsExecutable = (mode: number) => (mode & 0o111) === 0o111
type WriteBufferToCafs = (buffer: Buffer, fileDest: string, mode: number | undefined) => Promise<void>

async function addBufferToCafs (
@@ -42,7 +51,7 @@ async function addBufferToCafs (
): Promise<ssri.Integrity> {
  const integrity = ssri.fromData(buffer)
  const isExecutable = modeIsExecutable(mode)
  const fileDest = contentPathFromHex(isExecutable, integrity.hexDigest())
  const fileDest = contentPathFromHex(isExecutable ? 'exec' : 'nonexec', integrity.hexDigest())
  await writeBufferToCafs(buffer, fileDest, isExecutable ? 0o755 : undefined)
  return integrity
}
@@ -78,27 +87,3 @@ async function writeBufferToCafs (
  locker.set(fileDest, p)
  await p
}

export function getFilePathInCafs (
  cafsDir: string,
  file: {
    integrity: string | Hash,
    mode: number,
  },
) {
  return path.join(cafsDir, contentPathFromIntegrity(file.integrity, file.mode))
}

function contentPathFromIntegrity (
  integrity: string | Hash,
  mode: number,
) {
  const sri = ssri.parse(integrity, { single: true })
  const isExecutable = modeIsExecutable(mode)
  return contentPathFromHex(isExecutable, sri.hexDigest())
}

function contentPathFromHex (isExecutable: boolean, hex: string) {
  return (isExecutable ? `x${path.sep}` : '') +
    path.join(hex.slice(0, 2), hex.slice(2))
}
@@ -1,5 +1,7 @@
import createCafs, {
  checkFilesIntegrity as _checkFilesIntegrity,
  FileType,
  getFilePathByModeInCafs as _getFilePathByModeInCafs,
  getFilePathInCafs as _getFilePathInCafs,
} from '@pnpm/cafs'
import { fetchingProgressLogger } from '@pnpm/core-loggers'
@@ -94,6 +96,7 @@ export default function (
    checkFilesIntegrity: _checkFilesIntegrity.bind(null, cafsDir),
    fetch,
    fetchingLocker: new Map(),
    getFilePathByModeInCafs: _getFilePathByModeInCafs.bind(null, cafsDir),
    getFilePathInCafs,
    requestsQueue,
    storeDir: opts.storeDir,
@@ -258,7 +261,8 @@ function fetchToStore (
      bundledManifest?: Promise<BundledManifest>,
      inStoreLocation: string,
    }>,
    getFilePathInCafs: (file: { mode: number, integrity: string }) => string,
    getFilePathInCafs: (integrity: string, fileType: FileType) => string,
    getFilePathByModeInCafs: (integrity: string, mode: number) => string,
    requestsQueue: {add: <T>(fn: () => Promise<T>, opts: {priority: number}) => Promise<T>},
    storeIndex: StoreIndex,
    storeDir: string,
@@ -349,7 +353,11 @@ function fetchToStore (

  if (opts.fetchRawManifest && !result.bundledManifest) {
    result.bundledManifest = removeKeyOnFail(
      result.files.then(({ filesIndex }) => readBundledManifest(ctx.getFilePathInCafs(filesIndex['package.json']))),
      result.files.then(({ filesIndex }) => {
        const { integrity, mode } = filesIndex['package.json']
        const manifestPath = ctx.getFilePathByModeInCafs(integrity, mode)
        return readBundledManifest(manifestPath)
      }),
    )
  }
@@ -376,6 +384,9 @@ function fetchToStore (
  ) {
    try {
      const isLocalTarballDep = opts.pkgId.startsWith('file:')
      const pkgIndexFilePath = opts.resolution['integrity']
        ? ctx.getFilePathInCafs(opts.resolution['integrity'], 'index')
        : path.join(target, 'integrity.json')

      if (
        !opts.force &&
@@ -386,7 +397,7 @@ function fetchToStore (
      ) {
        let integrity
        try {
          integrity = await loadJsonFile<Record<string, { size: number, mode: number, integrity: string }>>(path.join(target, 'integrity.json'))
          integrity = await loadJsonFile<Record<string, { size: number, mode: number, integrity: string }>>(pkgIndexFilePath)
        } catch (err) {
          // ignoring. It is fine if the integrity file is not present. Just refetch the package
        }
@@ -476,10 +487,11 @@ function fetchToStore (
          }
        }),
      )
      await writeJsonFile(path.join(target, 'integrity.json'), integrity, { indent: undefined })
      await writeJsonFile(pkgIndexFilePath, integrity)
      finishing.resolve(undefined)

      if (isLocalTarballDep && opts.resolution['integrity']) { // tslint:disable-line:no-string-literal
        await fs.mkdir(target, { recursive: true })
        await fs.writeFile(path.join(target, TARBALL_INTEGRITY_FILENAME), opts.resolution['integrity'], 'utf8') // tslint:disable-line:no-string-literal
      }
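The object written by writeJsonFile here is the package index: a map from each file's path inside the package to its size, mode, and integrity, the same shape the old integrity.json used (see the loadJsonFile type parameter in the hunk above). Roughly, with illustrative values:

// Illustrative contents of a package index file stored in the CAFS
// at <cafsDir>/<first 2 hex chars>/<rest of hex>.json; sizes, modes,
// and digests below are made up.
const pkgIndex: Record<string, { size: number, mode: number, integrity: string }> = {
  'package.json': { size: 1083, mode: 420, integrity: 'sha512-…' }, // 420 = 0o644
  'index.js': { size: 2371, mode: 420, integrity: 'sha512-…' },
  'bin/cli.js': { size: 512, mode: 493, integrity: 'sha512-…' },    // 493 = 0o755
}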
@@ -38,6 +38,7 @@ const fetch = createFetcher({

test('request package', async t => {
  const storeDir = tempy.directory()
  t.comment(storeDir)
  const storeIndex = {}
  const requestPackage = createPackageRequester(resolve, fetch, {
    networkConcurrency: 1,
@@ -458,7 +459,7 @@ test('fetchPackageToStore() concurrency check', async (t) => {
    const fetchResult = fetchResults[0]
    const files = await fetchResult.files()

    ino1 = fs.statSync(getFilePathInCafs(cafsDir, files.filesIndex['package.json'])).ino
    ino1 = fs.statSync(getFilePathInCafs(cafsDir, files.filesIndex['package.json'].integrity, 'nonexec')).ino

    t.deepEqual(Object.keys(files.filesIndex).sort(),
      ['package.json', 'index.js', 'license', 'readme.md'].sort(),
@@ -473,7 +474,7 @@ test('fetchPackageToStore() concurrency check', async (t) => {
    const fetchResult = fetchResults[1]
    const files = await fetchResult.files()

    ino2 = fs.statSync(getFilePathInCafs(cafsDir, files.filesIndex['package.json'])).ino
    ino2 = fs.statSync(getFilePathInCafs(cafsDir, files.filesIndex['package.json'].integrity, 'nonexec')).ino

    t.deepEqual(Object.keys(files.filesIndex).sort(),
      ['package.json', 'index.js', 'license', 'readme.md'].sort(),
@@ -682,7 +683,7 @@ test('refetch package to store if it has been modified', async (t) => {
    })

    const { filesIndex } = await fetchResult.files()
    indexJsFile = getFilePathInCafs(cafsDir, filesIndex['index.js'])
    indexJsFile = getFilePathInCafs(cafsDir, filesIndex['index.js'].integrity, 'nonexec')
  }

  // Adding some content to the file to change its integrity
@@ -1,4 +1,4 @@
import { getFilePathInCafs as _getFilePathInCafs } from '@pnpm/cafs'
import { getFilePathByModeInCafs as _getFilePathByModeInCafs } from '@pnpm/cafs'
import { FetchFunction } from '@pnpm/fetcher-base'
import lock from '@pnpm/fs-locker'
import { globalInfo, globalWarn } from '@pnpm/logger'
@@ -59,11 +59,11 @@ export default async function (

  const impPkg = createImportPackage(initOpts.packageImportMethod)
  const cafsDir = path.join(storeDir, 'files')
  const getFilePathInCafs = _getFilePathInCafs.bind(null, cafsDir)
  const getFilePathByModeInCafs = _getFilePathByModeInCafs.bind(null, cafsDir)
  const importPackage: ImportPackageFunction = (to, opts) => {
    const filesMap = {} as Record<string, string>
    for (const [fileName, fileMeta] of Object.entries(opts.filesResponse.filesIndex)) {
      filesMap[fileName] = getFilePathInCafs(fileMeta)
      filesMap[fileName] = getFilePathByModeInCafs(fileMeta.integrity, fileMeta.mode)
    }
    return impPkg(to, { filesMap, fromStore: opts.filesResponse.fromStore, force: opts.force })
  }
@@ -42,14 +42,17 @@
    "@types/common-tags": "^1.8.0",
    "@types/ramda": "^0.27.4",
    "@types/sinon": "^9.0.0",
    "@types/ssri": "^6.0.2",
    "@zkochan/rimraf": "1.0.0",
    "execa": "4.0.0",
    "load-json-file": "6.2.0",
    "path-exists": "4.0.0",
    "sinon": "9.0.2",
    "ssri": "^8.0.0",
    "tempy": "0.5.0"
  },
  "dependencies": {
    "@pnpm/cafs": "workspace:^1.0.0-alpha.2",
    "@pnpm/check-package": "3.0.1",
    "@pnpm/cli-utils": "workspace:0.4.4",
    "@pnpm/config": "workspace:8.3.0",
@@ -1,3 +1,4 @@
import { getFilePathInCafs } from '@pnpm/cafs'
import { getContextForSingleImporter } from '@pnpm/get-context'
import { nameVerFromPkgSnapshot } from '@pnpm/lockfile-utils'
import { streamParser } from '@pnpm/logger'
@@ -31,15 +32,21 @@ export default async function (maybeOpts: StoreStatusOptions) {
  const pkgs = Object.keys(wantedLockfile.packages || {})
    .filter((relDepPath) => !skipped.has(relDepPath))
    .map((relDepPath) => {
      const pkg = wantedLockfile.packages![relDepPath]
      return {
        integrity: pkg.resolution['integrity'],
        pkgPath: dp.resolve(registries, relDepPath),
        ...nameVerFromPkgSnapshot(relDepPath, wantedLockfile.packages![relDepPath]),
        ...nameVerFromPkgSnapshot(relDepPath, pkg),
      }
    })

  const modified = await pFilter(pkgs, async ({ pkgPath, name }) => {
    const integrity = await loadJsonFile(path.join(storeDir, pkgPath, 'integrity.json'))
    return (await dint.check(path.join(virtualStoreDir, pkgPath, 'node_modules', name), integrity)) === false
  const cafsDir = path.join(storeDir, 'files')
  const modified = await pFilter(pkgs, async ({ integrity, pkgPath, name }) => {
    const pkgIndexFilePath = integrity
      ? getFilePathInCafs(cafsDir, integrity, 'index')
      : path.join(storeDir, pkgPath, 'integrity.json')
    const pkgIndex = await loadJsonFile(pkgIndexFilePath)
    return (await dint.check(path.join(virtualStoreDir, pkgPath, 'node_modules', name), pkgIndex)) === false
  })

  if (reporter) {
@@ -1,3 +1,4 @@
import assertStore from '@pnpm/assert-store'
import { store } from '@pnpm/plugin-commands-store'
import { tempDir } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
@@ -24,7 +25,7 @@ test('pnpm store add express@4.16.3', async function (t) {
    storeDir,
  }, ['add', 'express@4.16.3'])

  const pathToCheck = path.join(storeDir, STORE_VERSION, `localhost+${REGISTRY_MOCK_PORT}`, 'express', '4.16.3')
  const pathToCheck = path.join(storeDir, STORE_VERSION, 'files/6a/f8a502350db3246ecc4becf6b5a34d22f7ed53.json')
  t.ok(await exists(pathToCheck), `express@4.16.3 is in store (at ${pathToCheck})`)

  const storeIndex = await loadJsonFile(path.join(storeDir, STORE_VERSION, 'store.json'))
@@ -56,8 +57,8 @@ test('pnpm store add scoped package that uses not the standard registry', async
    storeDir,
  }, ['add', '@foo/no-deps@1.0.0'])

  const pathToCheck = path.join(storeDir, STORE_VERSION, `localhost+${REGISTRY_MOCK_PORT}`, '@foo', 'no-deps', '1.0.0')
  t.ok(await exists(pathToCheck), `@foo/no-deps@1.0.0 is in store (at ${pathToCheck})`)
  const { cafsHas } = assertStore(t, path.join(storeDir, STORE_VERSION))
  await cafsHas('@foo/no-deps', '1.0.0')

  const storeIndex = await loadJsonFile(path.join(storeDir, STORE_VERSION, 'store.json'))
  t.deepEqual(
@@ -8,6 +8,7 @@ import execa = require('execa')
import path = require('path')
import R = require('ramda')
import sinon = require('sinon')
import ssri = require('ssri')
import test = require('tape')

const STORE_VERSION = 'v3'
@@ -60,16 +61,16 @@ test('remove unreferenced packages', async (t) => {
  t.end()
})

test('remove packages that are used by project that no longer exist', async (t) => {
test.skip('remove packages that are used by project that no longer exist', async (t) => {
  prepare(t)
  const storeDir = path.resolve('store', STORE_VERSION)
  const { storeHas, storeHasNot } = assertStore(t, storeDir)
  const { cafsHas, cafsHasNot } = assertStore(t, storeDir)

  await execa('pnpm', ['add', 'is-negative@2.1.0', '--store-dir', storeDir, '--registry', REGISTRY])

  await rimraf('node_modules')

  await storeHas('is-negative', '2.1.0')
  await cafsHas(ssri.fromHex('f0d86377aa15a64c34961f38ac2a9be2b40a1187', 'sha1').toString())

  const reporter = sinon.spy()
  await store.handler({
@@ -88,7 +89,7 @@ test('remove packages that are used by project that no longer exist', async (t)
    message: `- localhost+${REGISTRY_MOCK_PORT}/is-negative/2.1.0`,
  }))

  await storeHasNot('is-negative', '2.1.0')
  await cafsHasNot(ssri.fromHex('f0d86377aa15a64c34961f38ac2a9be2b40a1187', 'sha1').toString())
  t.end()
})
@@ -9,12 +9,13 @@ import test = require('tape')
import tempy = require('tempy')

const REGISTRY = `http://localhost:${REGISTRY_MOCK_PORT}/`
const pnpmBin = path.join(__dirname, '../../pnpm/bin/pnpm.js')

test('CLI fails when store status finds modified packages', async function (t) {
  const project = prepare(t)
  const storeDir = tempy.directory()

  await execa('pnpm', ['add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
  await execa('node', [pnpmBin, 'add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])

  await rimraf(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive/3.1.0/node_modules/is-positive/index.js`)
@@ -42,9 +43,9 @@ test('CLI does not fail when store status does not find modified packages', asyn
  const project = prepare(t)
  const storeDir = tempy.directory()

  await execa('pnpm', ['add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
  await execa('node', [pnpmBin, 'add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
  // store status does not fail on not installed optional dependencies
  await execa('pnpm', ['add', 'not-compatible-with-any-os', '--save-optional', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
  await execa('node', [pnpmBin, 'add', 'not-compatible-with-any-os', '--save-optional', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])

  await store.handler({
    dir: process.cwd(),
@@ -5,6 +5,8 @@ import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import { stripIndent } from 'common-tags'
import execa = require('execa')
import path = require('path')
import exists = require('path-exists')
import ssri = require('ssri')
import test = require('tape')

const STORE_VERSION = 'v3'
@@ -16,14 +18,15 @@ const DEFAULT_OPTS = {
  },
  registries: { default: REGISTRY },
}
const pnpmBin = path.join(__dirname, '../../pnpm/bin/pnpm.js')

test('find usages for single package in store and in a project', async (t) => {
  const project = prepare(t)
  const storeDir = path.resolve('store')

  // Install deps
  await execa('pnpm', ['add', 'is-negative@2.1.0', 'is-odd@3.0.0', '--store-dir', storeDir, '--registry', REGISTRY])
  await project.storeHas('is-negative', '2.1.0')
  await execa('node', [pnpmBin, 'add', 'is-negative@2.1.0', 'is-odd@3.0.0', '--store-dir', storeDir, '--registry', REGISTRY])
  await project.cafsHas(ssri.fromHex('f0d86377aa15a64c34961f38ac2a9be2b40a1187', 'sha1').toString())

  {
    const output = await store.handler({
@@ -96,7 +99,7 @@ test('find usages for single package in store and in a project', async (t) => {
test('find usages for package(s) in store but not in any projects', async (t) => {
  prepareEmpty(t)
  const storeDir = path.resolve('store')
  const { storeHas } = assertStore(t, path.join(storeDir, STORE_VERSION))
  const { cafsHas } = assertStore(t, path.join(storeDir, STORE_VERSION))

  // Add dependency directly to store (not to the project)
  await store.handler({
@@ -104,7 +107,7 @@ test('find usages for package(s) in store but not in any projects', async (t) =>
    dir: process.cwd(),
    storeDir,
  }, ['add', 'is-negative@2.1.0'])
  await storeHas('is-negative', '2.1.0')
  await cafsHas(ssri.fromHex('f0d86377aa15a64c34961f38ac2a9be2b40a1187', 'sha1').toString())

  {
    const output = await store.handler({
@@ -126,7 +129,7 @@ test('find usages for package(s) in store but not in any projects', async (t) =>
    dir: process.cwd(),
    storeDir,
  }, ['add', 'is-negative@2.0.0'])
  await storeHas('is-negative', '2.0.0')
  await cafsHas(ssri.fromHex('09f4cb20dd1bddff37cb6630c618a9bc57915fd6', 'sha1').toString())
  {
    const output = await store.handler({
      ...DEFAULT_OPTS,
@@ -160,12 +160,12 @@ test("don't fail on case insensitive filesystems when package has 2 files with s

  await project.has('with-same-file-in-different-cases')

  const storeDir = await project.getStorePath()
  const integrityFile = await import(path.join(storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'with-same-file-in-different-cases', '1.0.0', 'integrity.json'))
  const integrityFile = await loadJsonFile(await project.getPkgIndexFilePath('with-same-file-in-different-cases', '1.0.0'))
  const packageFiles = Object.keys(integrityFile).sort()

  t.deepEqual(packageFiles, ['Foo.js', 'foo.js', 'package.json'])
  const files = await fs.readdir('node_modules/with-same-file-in-different-cases')
  const storeDir = await project.getStorePath()
  if (await dirIsCaseSensitive(storeDir)) {
    t.deepEqual(files, ['Foo.js', 'foo.js', 'package.json'])
  } else {
@@ -1,3 +1,4 @@
import assertStore from '@pnpm/assert-store'
import { WANTED_LOCKFILE } from '@pnpm/constants'
import { prepareEmpty } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
@@ -19,9 +20,12 @@ test('install with lockfileOnly = true', async (t: tape.Test) => {

  const opts = await testDefaults({ lockfileOnly: true, pinnedVersion: 'patch' as const })
  const manifest = await addDependenciesToPackage({}, ['pkg-with-1-dep@100.0.0'], opts)
  const { cafsHas } = assertStore(t, opts.storeDir)

  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'pkg-with-1-dep')), ['100.0.0', 'index.json'])
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'dep-of-pkg-with-1-dep')), ['100.1.0', 'index.json'])
  await cafsHas('pkg-with-1-dep', '100.0.0')
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'pkg-with-1-dep')), ['index.json'])
  await cafsHas('dep-of-pkg-with-1-dep', '100.1.0')
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'dep-of-pkg-with-1-dep')), ['index.json'])
  await project.hasNot('pkg-with-1-dep')

  t.ok(manifest.dependencies!['pkg-with-1-dep'], 'the new dependency added to package.json')
@@ -37,8 +41,10 @@ test('install with lockfileOnly = true', async (t: tape.Test) => {
  t.comment(`doing repeat install when ${WANTED_LOCKFILE} is available already`)
  await install(manifest, opts)

  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'pkg-with-1-dep')), ['100.0.0', 'index.json'])
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'dep-of-pkg-with-1-dep')), ['100.1.0', 'index.json'])
  await cafsHas('pkg-with-1-dep', '100.0.0')
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'pkg-with-1-dep')), ['index.json'])
  await cafsHas('dep-of-pkg-with-1-dep', '100.1.0')
  t.deepEqual(await fs.readdir(path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}`, 'dep-of-pkg-with-1-dep')), ['index.json'])
  await project.hasNot('pkg-with-1-dep')

  t.notOk(await project.readCurrentLockfile(), 'current lockfile not created')
@@ -1,5 +1,5 @@
///<reference path="../../../typings/index.d.ts" />
import createCafs, { getFilePathInCafs as _getFilePathInCafs } from '@pnpm/cafs'
import createCafs, { getFilePathByModeInCafs as _getFilePathByModeInCafs } from '@pnpm/cafs'
import { LogBase, streamParser } from '@pnpm/logger'
import readPackage from '@pnpm/read-package-json'
import createFetcher from '@pnpm/tarball-fetcher'
@@ -15,7 +15,7 @@ import tempy = require('tempy')
const cafsDir = tempy.directory()
console.log(cafsDir)
const cafs = createCafs(cafsDir)
const getFilePathInCafs = _getFilePathInCafs.bind(_getFilePathInCafs, cafsDir)
const getFilePathByModeInCafs = _getFilePathByModeInCafs.bind(_getFilePathByModeInCafs, cafsDir)

const tarballPath = path.join(__dirname, 'tars', 'babel-helper-hoist-variables-6.24.1.tgz')
const tarballSize = 1279
@@ -106,7 +106,7 @@ test('redownload the tarball when the one in cache does not satisfy integrity',
  streamParser.removeListener('data', reporter as any) // tslint:disable-line:no-any

  const pkgJsonIntegrity = await filesIndex['package.json'].generatingIntegrity
  t.equal((await readPackage(getFilePathInCafs({ integrity: pkgJsonIntegrity, ...filesIndex['package.json'] }))).version, '6.24.1')
  t.equal((await readPackage(getFilePathByModeInCafs(pkgJsonIntegrity, filesIndex['package.json'].mode))).version, '6.24.1')

  t.ok(scope.isDone())
  t.end()
pnpm-lock.yaml (generated, 28 lines)
@@ -1994,6 +1994,7 @@ importers:
      tree-kill: ^1.2.2
  packages/plugin-commands-store:
    dependencies:
      '@pnpm/cafs': 'link:../cafs'
      '@pnpm/check-package': 3.0.1
      '@pnpm/cli-utils': 'link:../cli-utils'
      '@pnpm/config': 'link:../config'
@@ -2024,14 +2025,17 @@ importers:
      '@types/common-tags': 1.8.0
      '@types/ramda': 0.27.4
      '@types/sinon': 9.0.0
      '@types/ssri': 6.0.2
      '@zkochan/rimraf': 1.0.0
      execa: 4.0.0
      load-json-file: 6.2.0
      path-exists: 4.0.0
      sinon: 9.0.2
      ssri: 8.0.0
      tempy: 0.5.0
    specifiers:
      '@pnpm/assert-store': 'workspace:1.0.0'
      '@pnpm/cafs': 'workspace:^1.0.0-alpha.2'
      '@pnpm/check-package': 3.0.1
      '@pnpm/cli-utils': 'workspace:0.4.4'
      '@pnpm/config': 'workspace:8.3.0'
@@ -2053,6 +2057,7 @@ importers:
      '@types/common-tags': ^1.8.0
      '@types/ramda': ^0.27.4
      '@types/sinon': ^9.0.0
      '@types/ssri': ^6.0.2
      '@zkochan/rimraf': 1.0.0
      archy: 1.0.0
      common-tags: 1.8.0
@@ -2065,6 +2070,7 @@ importers:
      ramda: 0.27.0
      render-help: 1.0.0
      sinon: 9.0.2
      ssri: ^8.0.0
      tempy: 0.5.0
  packages/pnpm:
    dependencies:
@@ -2864,11 +2870,13 @@ importers:
      write-pkg: 4.0.0
  privatePackages/assert-store:
    dependencies:
      '@pnpm/cafs': 'link:../../packages/cafs'
      path-exists: 4.0.0
    devDependencies:
      '@pnpm/assert-store': 'link:'
    specifiers:
      '@pnpm/assert-store': 'link:'
      '@pnpm/cafs': 'workspace:^1.0.0-alpha.2'
      path-exists: 4.0.0
  privatePackages/prepare:
    dependencies:
@@ -8754,6 +8762,14 @@ packages:
    dev: false
    resolution:
      integrity: sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
  /minipass/3.1.1:
    dependencies:
      yallist: 4.0.0
    dev: true
    engines:
      node: '>=8'
    resolution:
      integrity: sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==
  /minizlib/1.3.3:
    dependencies:
      minipass: 2.9.0
@@ -11228,6 +11244,14 @@ packages:
    dev: false
    resolution:
      integrity: sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==
  /ssri/8.0.0:
    dependencies:
      minipass: 3.1.1
    dev: true
    engines:
      node: '>= 8'
    resolution:
      integrity: sha512-aq/pz989nxVYwn16Tsbj1TqFpD5LLrQxHf5zaHuieFV+R0Bbr4y8qUsOA45hXT/N4/9UNXTarBjnjVmjSOVaAA==
  /stacktracey/1.2.127:
    dependencies:
      as-table: 1.0.55
@@ -12663,6 +12687,10 @@ packages:
    dev: false
    resolution:
      integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
  /yallist/4.0.0:
    dev: true
    resolution:
      integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
  /yaml-tag/1.1.0:
    dependencies:
      js-yaml: 3.13.1
@@ -20,6 +20,9 @@ export interface Project {
  hasNot (pkgName: string, modulesDir?: string): Promise<void>
  getStorePath (): Promise<string>
  resolve (pkgName: string, version?: string, relativePath?: string): Promise<string>
  getPkgIndexFilePath (pkgName: string, version?: string): Promise<string>
  cafsHas (pkgName: string, version?: string): Promise<void>
  cafsHasNot (pkgName: string, version?: string): Promise<void>
  storeHas (pkgName: string, version?: string): Promise<string>
  storeHasNot (pkgName: string, version?: string): Promise<void>
  isExecutable (pathToExe: string): Promise<void>
@@ -45,6 +48,9 @@ export default (t: Test, projectPath: string, encodedRegistryName?: string): Pro

  let cachedStore: {
    storePath: string;
    getPkgIndexFilePath (pkgName: string, version?: string): Promise<string>;
    cafsHas (pkgName: string, version?: string | undefined): Promise<void>;
    cafsHasNot (pkgName: string, version?: string | undefined): Promise<void>;
    storeHas (pkgName: string, version?: string | undefined): Promise<void>;
    storeHasNot (pkgName: string, version?: string | undefined): Promise<void>;
    resolve (pkgName: string, version?: string | undefined, relativePath?: string | undefined): Promise<string>
@@ -91,6 +97,18 @@ export default (t: Test, projectPath: string, encodedRegistryName?: string): Pro
      const store = await getStoreInstance()
      return store.resolve(pkgName, version, relativePath)
    },
    async getPkgIndexFilePath (pkgName: string, version?: string): Promise<string> {
      const store = await getStoreInstance()
      return store.getPkgIndexFilePath(pkgName, version)
    },
    async cafsHas (pkgName: string, version?: string) {
      const store = await getStoreInstance()
      return store.cafsHas(pkgName, version)
    },
    async cafsHasNot (pkgName: string, version?: string) {
      const store = await getStoreInstance()
      return store.cafsHasNot(pkgName, version)
    },
    async storeHas (pkgName: string, version?: string) {
      const store = await getStoreInstance()
      return store.resolve(pkgName, version)
@@ -43,6 +43,7 @@
    "test": "pnpm run tsc && ts-node test"
  },
  "dependencies": {
    "@pnpm/cafs": "workspace:^1.0.0-alpha.2",
    "path-exists": "4.0.0"
  },
  "devDependencies": {
@@ -1,11 +1,29 @@
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import { getFilePathInCafs } from '@pnpm/cafs'
import { getIntegrity, REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import path = require('path')
import exists = require('path-exists')
import { Test } from 'tape'

export default (t: Test, storePath: string | Promise<string>, encodedRegistryName?: string) => {
export default (
  t: Test,
  storePath: string | Promise<string>,
  encodedRegistryName?: string,
) => {
  const ern = encodedRegistryName || `localhost+${REGISTRY_MOCK_PORT}`
  const store = {
    async getPkgIndexFilePath (pkgName: string, version?: string): Promise<string> {
      const cafsDir = path.join(await storePath, 'files')
      const integrity = version ? getIntegrity(pkgName, version) : pkgName
      return getFilePathInCafs(cafsDir, integrity, 'index')
    },
    async cafsHas (pkgName: string, version?: string): Promise<void> {
      const pathToCheck = await store.getPkgIndexFilePath(pkgName, version)
      t.ok(await exists(pathToCheck), `${pkgName}@${version} is in store (at ${pathToCheck})`)
    },
    async cafsHasNot (pkgName: string, version?: string): Promise<void> {
      const pathToCheck = await store.getPkgIndexFilePath(pkgName, version)
      t.notOk(await exists(pathToCheck), `${pkgName}@${version} is not in store (at ${pathToCheck})`)
    },
    async storeHas (pkgName: string, version?: string): Promise<void> {
      const pathToCheck = await store.resolve(pkgName, version)
      t.ok(await exists(pathToCheck), `${pkgName}@${version} is in store (at ${pathToCheck})`)
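For reference, a sketch of how a test might use these new helpers (the store location and the package versions are illustrative; it assumes packages were installed from the mock registry as in the tests above):

import assertStore from '@pnpm/assert-store'
import path = require('path')
import test = require('tape')

test('example: package index lives in the CAFS', async (t) => {
  const storeDir = path.resolve('store', 'v3') // illustrative store location
  const { cafsHas, cafsHasNot, getPkgIndexFilePath } = assertStore(t, storeDir)

  await cafsHas('is-negative', '2.1.0')    // asserts the package's index file exists in the CAFS
  await cafsHasNot('is-positive', '1.0.0') // asserts no index file exists for this package/version
  // Resolves to <storeDir>/files/<first 2 hex chars>/<rest of hex>.json for the package.
  t.comment(await getPkgIndexFilePath('is-negative', '2.1.0'))
  t.end()
})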