feat!: reduce nesting in the virtual store

PR #2546
This commit is contained in:
Zoltan Kochan
2020-05-11 21:31:21 +03:00
committed by GitHub
parent 7179cc560b
commit 9fbb74ecb5
26 changed files with 233 additions and 236 deletions

View File

@@ -0,0 +1,14 @@
---
"@pnpm/headless": major
"@pnpm/hoist": major
"@pnpm/modules-cleaner": major
"@pnpm/plugin-commands-rebuild": major
"@pnpm/plugin-commands-store": major
"pnpm": major
"@pnpm/resolve-dependencies": major
"supi": minor
---
The structure of the virtual store directory has changed. No subdirectory is created with the registry name anymore.
So instead of storing packages inside `node_modules/.pnpm/<registry>/<pkg>`, packages are stored
inside `node_modules/.pnpm/<pkg>`.

View File

@@ -297,7 +297,7 @@ export default async (opts: HeadlessOptions) => {
.concat( .concat(
depNodes depNodes
.filter(({ requiresBuild }) => requiresBuild) .filter(({ requiresBuild }) => requiresBuild)
.map(({ relDepPath }) => relDepPath), .map(({ depPath }) => depPath),
) )
} else { } else {
const directNodes = new Set<string>() const directNodes = new Set<string>()
@@ -430,7 +430,6 @@ async function linkRootPackages (
}) })
return return
} }
const depPath = dp.refToAbsolute(allDeps[alias], alias, opts.registries)
const peripheralLocation = opts.rootDependencies[alias] const peripheralLocation = opts.rootDependencies[alias]
// Skipping linked packages // Skipping linked packages
if (!peripheralLocation) { if (!peripheralLocation) {
@@ -442,12 +441,12 @@ async function linkRootPackages (
const isDev = projectSnapshot.devDependencies?.[alias] const isDev = projectSnapshot.devDependencies?.[alias]
const isOptional = projectSnapshot.optionalDependencies?.[alias] const isOptional = projectSnapshot.optionalDependencies?.[alias]
const relDepPath = dp.refToRelative(allDeps[alias], alias) const depPath = dp.refToRelative(allDeps[alias], alias)
if (relDepPath === null) return if (depPath === null) return
const pkgSnapshot = lockfile.packages?.[relDepPath] const pkgSnapshot = lockfile.packages?.[depPath]
if (!pkgSnapshot) return // this won't ever happen. Just making typescript happy if (!pkgSnapshot) return // this won't ever happen. Just making typescript happy
const pkgId = pkgSnapshot.id || depPath || undefined const pkgId = pkgSnapshot.id || dp.refToAbsolute(allDeps[alias], alias, opts.registries) || undefined
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot) const pkgInfo = nameVerFromPkgSnapshot(depPath, pkgSnapshot)
rootLogger.debug({ rootLogger.debug({
added: { added: {
dependencyType: isDev && 'dev' || isOptional && 'optional' || 'prod', dependencyType: isDev && 'dev' || isOptional && 'optional' || 'prod',
@@ -488,13 +487,12 @@ async function lockfileToDepGraph (
if (lockfile.packages) { if (lockfile.packages) {
const pkgSnapshotByLocation = {} const pkgSnapshotByLocation = {}
await Promise.all( await Promise.all(
Object.keys(lockfile.packages).map(async (relDepPath) => { Object.keys(lockfile.packages).map(async (depPath) => {
const depPath = dp.resolve(opts.registries, relDepPath) const pkgSnapshot = lockfile.packages![depPath]
const pkgSnapshot = lockfile.packages![relDepPath]
// TODO: optimize. This info can be already returned by pkgSnapshotToResolution() // TODO: optimize. This info can be already returned by pkgSnapshotToResolution()
const pkgName = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot).name const pkgName = nameVerFromPkgSnapshot(depPath, pkgSnapshot).name
const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules') const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules')
const packageId = packageIdFromSnapshot(relDepPath, pkgSnapshot, opts.registries) const packageId = packageIdFromSnapshot(depPath, pkgSnapshot, opts.registries)
const pkgLocation = await opts.storeController.getPackageLocation(packageId, pkgName, { const pkgLocation = await opts.storeController.getPackageLocation(packageId, pkgName, {
lockfileDir: opts.lockfileDir, lockfileDir: opts.lockfileDir,
targetEngine: opts.sideEffectsCacheRead && !opts.force && ENGINE_NAME || undefined, targetEngine: opts.sideEffectsCacheRead && !opts.force && ENGINE_NAME || undefined,
@@ -505,8 +503,8 @@ async function lockfileToDepGraph (
? path.join(modules, pkgName) ? path.join(modules, pkgName)
: pkgLocation.dir : pkgLocation.dir
if ( if (
currentPackages[relDepPath] && R.equals(currentPackages[relDepPath].dependencies, lockfile.packages![relDepPath].dependencies) && currentPackages[depPath] && R.equals(currentPackages[depPath].dependencies, lockfile.packages![depPath].dependencies) &&
R.equals(currentPackages[relDepPath].optionalDependencies, lockfile.packages![relDepPath].optionalDependencies) R.equals(currentPackages[depPath].optionalDependencies, lockfile.packages![depPath].optionalDependencies)
) { ) {
if (await fs.exists(peripheralLocation)) { if (await fs.exists(peripheralLocation)) {
return return
@@ -516,7 +514,7 @@ async function lockfileToDepGraph (
missing: peripheralLocation, missing: peripheralLocation,
}) })
} }
const resolution = pkgSnapshotToResolution(relDepPath, pkgSnapshot, opts.registries) const resolution = pkgSnapshotToResolution(depPath, pkgSnapshot, opts.registries)
progressLogger.debug({ progressLogger.debug({
packageId, packageId,
requester: opts.lockfileDir, requester: opts.lockfileDir,
@@ -543,6 +541,7 @@ async function lockfileToDepGraph (
}) })
graph[peripheralLocation] = { graph[peripheralLocation] = {
children: {}, children: {},
depPath,
fetchingFiles: fetchResponse.files, fetchingFiles: fetchResponse.files,
finishing: fetchResponse.finishing, finishing: fetchResponse.finishing,
hasBin: pkgSnapshot.hasBin === true, hasBin: pkgSnapshot.hasBin === true,
@@ -556,7 +555,6 @@ async function lockfileToDepGraph (
packageId, packageId,
peripheralLocation, peripheralLocation,
prepare: pkgSnapshot.prepare === true, prepare: pkgSnapshot.prepare === true,
relDepPath,
requiresBuild: pkgSnapshot.requiresBuild === true, requiresBuild: pkgSnapshot.requiresBuild === true,
} }
pkgSnapshotByLocation[peripheralLocation] = pkgSnapshot pkgSnapshotByLocation[peripheralLocation] = pkgSnapshot
@@ -567,7 +565,7 @@ async function lockfileToDepGraph (
graph, graph,
independentLeaves: opts.independentLeaves, independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir, lockfileDir: opts.lockfileDir,
pkgSnapshotsByRelDepPaths: lockfile.packages, pkgSnapshotsByDepPaths: lockfile.packages,
registries: opts.registries, registries: opts.registries,
sideEffectsCacheRead: opts.sideEffectsCacheRead, sideEffectsCacheRead: opts.sideEffectsCacheRead,
skipped: opts.skipped, skipped: opts.skipped,
@@ -606,7 +604,7 @@ async function getChildrenPaths (
independentLeaves: boolean, independentLeaves: boolean,
storeDir: string, storeDir: string,
skipped: Set<string>, skipped: Set<string>,
pkgSnapshotsByRelDepPaths: {[relDepPath: string]: PackageSnapshot}, pkgSnapshotsByDepPaths: Record<string, PackageSnapshot>,
lockfileDir: string, lockfileDir: string,
sideEffectsCacheRead: boolean, sideEffectsCacheRead: boolean,
storeController: StoreController, storeController: StoreController,
@@ -621,9 +619,9 @@ async function getChildrenPaths (
continue continue
} }
const childRelDepPath = dp.refToRelative(allDeps[alias], alias) as string const childRelDepPath = dp.refToRelative(allDeps[alias], alias) as string
const childPkgSnapshot = ctx.pkgSnapshotsByRelDepPaths[childRelDepPath] const childPkgSnapshot = ctx.pkgSnapshotsByDepPaths[childRelDepPath]
if (ctx.graph[childDepPath]) { if (ctx.graph[childRelDepPath]) {
children[alias] = ctx.graph[childDepPath].peripheralLocation children[alias] = ctx.graph[childRelDepPath].peripheralLocation
} else if (childPkgSnapshot) { } else if (childPkgSnapshot) {
if (ctx.independentLeaves && packageIsIndependent(childPkgSnapshot)) { if (ctx.independentLeaves && packageIsIndependent(childPkgSnapshot)) {
const pkgId = childPkgSnapshot.id || childDepPath const pkgId = childPkgSnapshot.id || childDepPath
@@ -635,7 +633,7 @@ async function getChildrenPaths (
children[alias] = pkgLocation.dir children[alias] = pkgLocation.dir
} else { } else {
const pkgName = nameVerFromPkgSnapshot(childRelDepPath, childPkgSnapshot).name const pkgName = nameVerFromPkgSnapshot(childRelDepPath, childPkgSnapshot).name
children[alias] = path.join(ctx.virtualStoreDir, pkgIdToFilename(childDepPath, ctx.lockfileDir), 'node_modules', pkgName) children[alias] = path.join(ctx.virtualStoreDir, pkgIdToFilename(childRelDepPath, ctx.lockfileDir), 'node_modules', pkgName)
} }
} else if (allDeps[alias].indexOf('file:') === 0) { } else if (allDeps[alias].indexOf('file:') === 0) {
children[alias] = path.resolve(ctx.lockfileDir, allDeps[alias].substr(5)) children[alias] = path.resolve(ctx.lockfileDir, allDeps[alias].substr(5))
@@ -659,7 +657,7 @@ export interface DependenciesGraphNode {
independent: boolean, independent: boolean,
optionalDependencies: Set<string>, optionalDependencies: Set<string>,
optional: boolean, optional: boolean,
relDepPath: string, // this option is only needed for saving pendingBuild when running with --ignore-scripts flag depPath: string, // this option is only needed for saving pendingBuild when running with --ignore-scripts flag
packageId: string, // TODO: this option is currently only needed when running postinstall scripts but even there it should be not used packageId: string, // TODO: this option is currently only needed when running postinstall scripts but even there it should be not used
isBuilt: boolean, isBuilt: boolean,
requiresBuild: boolean, requiresBuild: boolean,

View File

@@ -43,7 +43,7 @@ test('installing a simple project', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed') t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default // test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf') await project.isExecutable('.bin/rimraf')
@@ -274,10 +274,10 @@ test('installing with independent-leaves and hoistPattern=*', async (t) => {
await project.has('.pnpm/node_modules/path-is-absolute') await project.has('.pnpm/node_modules/path-is-absolute')
// wrappy is linked directly from the store // wrappy is linked directly from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/wrappy@1.0.2`) await project.hasNot(`.pnpm/wrappy@1.0.2`)
await project.storeHas('wrappy', '1.0.2') await project.storeHas('wrappy', '1.0.2')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1`) await project.has(`.pnpm/rimraf@2.5.1`)
await project.isExecutable('.bin/rimraf') await project.isExecutable('.bin/rimraf')
@@ -556,8 +556,8 @@ test('independent-leaves: installing a simple project', async (t) => {
t.ok(project.requireModule('rimraf'), 'prod dep installed') t.ok(project.requireModule('rimraf'), 'prod dep installed')
t.ok(project.requireModule('is-negative'), 'dev dep installed') t.ok(project.requireModule('is-negative'), 'dev dep installed')
t.ok(project.requireModule('colors'), 'optional dep installed') t.ok(project.requireModule('colors'), 'optional dep installed')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.7.1`) // rimraf is not symlinked from the store await project.has(`.pnpm/rimraf@2.7.1`) // rimraf is not symlinked from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is symlinked from the store await project.hasNot(`.pnpm/colors@1.2.0`) // colors is symlinked from the store
await project.isExecutable('.bin/rimraf') await project.isExecutable('.bin/rimraf')
@@ -605,7 +605,7 @@ test('installing with hoistPattern=*', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed') t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default // test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf') await project.isExecutable('.bin/rimraf')
await project.isExecutable('.pnpm/node_modules/.bin/hello-world-js-bin') await project.isExecutable('.pnpm/node_modules/.bin/hello-world-js-bin')
@@ -645,7 +645,7 @@ test('installing with hoistPattern=*', async (t) => {
const modules = await project.readModulesManifest() const modules = await project.readModulesManifest()
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/balanced-match/1.0.0`], ['balanced-match'], 'hoisted field populated in .modules.yaml') t.deepEqual(modules!.hoistedAliases['/balanced-match/1.0.0'], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.end() t.end()
}) })
@@ -665,7 +665,7 @@ test('installing with hoistPattern=* and shamefullyHoist=true', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed') t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default // test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf') await project.isExecutable('.bin/rimraf')
await project.isExecutable('.bin/hello-world-js-bin') await project.isExecutable('.bin/hello-world-js-bin')
@@ -705,7 +705,7 @@ test('installing with hoistPattern=* and shamefullyHoist=true', async (t) => {
const modules = await project.readModulesManifest() const modules = await project.readModulesManifest()
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/balanced-match/1.0.0`], ['balanced-match'], 'hoisted field populated in .modules.yaml') t.deepEqual(modules!.hoistedAliases['/balanced-match/1.0.0'], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.end() t.end()
}) })
@@ -853,7 +853,7 @@ test('independent-leaves: installing in a workspace', async (t) => {
const projectBar = assertProject(t, path.join(workspaceFixture, 'bar')) const projectBar = assertProject(t, path.join(workspaceFixture, 'bar'))
await projectBar.has('foo') await projectBar.has('foo')
t.ok(await exists(path.join(workspaceFixture, `node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/express@4.16.4/node_modules/array-flatten`)), 'independent package linked') t.ok(await exists(path.join(workspaceFixture, `node_modules/.pnpm/express@4.16.4/node_modules/array-flatten`)), 'independent package linked')
t.end() t.end()
}) })

View File

@@ -33,13 +33,14 @@ export default async function hoistByLockfile (
) )
const deps = [ const deps = [
{ {
absolutePath: '',
children: directDeps children: directDeps
.reduce((acc, dep) => { .reduce((acc, { alias, relDepPath }) => {
if (acc[dep.alias]) return acc if (!acc[alias]) {
acc[dep.alias] = dp.resolve(opts.registries, dep.relDepPath) acc[alias] = relDepPath
}
return acc return acc
}, {}), }, {}),
depPath: '',
depth: -1, depth: -1,
location: '', location: '',
}, },
@@ -93,18 +94,18 @@ async function getDependencies (
for (const { pkgSnapshot, relDepPath, next } of step.dependencies) { for (const { pkgSnapshot, relDepPath, next } of step.dependencies) {
const absolutePath = dp.resolve(opts.registries, relDepPath) const absolutePath = dp.resolve(opts.registries, relDepPath)
const pkgName = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot).name const pkgName = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot).name
const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(absolutePath, opts.lockfileDir), 'node_modules') const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const independent = opts.getIndependentPackageLocation && packageIsIndependent(pkgSnapshot) const independent = opts.getIndependentPackageLocation && packageIsIndependent(pkgSnapshot)
const allDeps = { const allDeps = {
...pkgSnapshot.dependencies, ...pkgSnapshot.dependencies,
...pkgSnapshot.optionalDependencies, ...pkgSnapshot.optionalDependencies,
} }
deps.push({ deps.push({
absolutePath,
children: Object.keys(allDeps).reduce((children, alias) => { children: Object.keys(allDeps).reduce((children, alias) => {
children[alias] = dp.refToAbsolute(allDeps[alias], alias, opts.registries) children[alias] = dp.refToRelative(allDeps[alias], alias)
return children return children
}, {}), }, {}),
depPath: relDepPath,
depth, depth,
location: !independent location: !independent
? path.join(modules, pkgName) ? path.join(modules, pkgName)
@@ -130,8 +131,8 @@ async function getDependencies (
export interface Dependency { export interface Dependency {
location: string, location: string,
children: {[alias: string]: string}, children: {[alias: string]: string},
depPath: string,
depth: number, depth: number,
absolutePath: string,
} }
async function hoistGraph ( async function hoistGraph (
@@ -150,7 +151,7 @@ async function hoistGraph (
// sort by depth and then alphabetically // sort by depth and then alphabetically
.sort((a, b) => { .sort((a, b) => {
const depthDiff = a.depth - b.depth const depthDiff = a.depth - b.depth
return depthDiff === 0 ? a.absolutePath.localeCompare(b.absolutePath) : depthDiff return depthDiff === 0 ? a.depPath.localeCompare(b.depPath) : depthDiff
}) })
// build the alias map and the id map // build the alias map and the id map
.map((depNode) => { .map((depNode) => {
@@ -170,7 +171,7 @@ async function hoistGraph (
return depNode return depNode
}) })
.map(async (depNode) => { .map(async (depNode) => {
const pkgAliases = aliasesByDependencyPath[depNode.absolutePath] const pkgAliases = aliasesByDependencyPath[depNode.depPath]
if (!pkgAliases) { if (!pkgAliases) {
return return
} }

View File

@@ -165,11 +165,10 @@ function mergeDependencies (projectSnapshot: ProjectSnapshot): { [depName: strin
function getPkgsDepPaths ( function getPkgsDepPaths (
registries: Registries, registries: Registries,
packages: PackageSnapshots, packages: PackageSnapshots,
): {[depPath: string]: string} { ): {[relDepPath: string]: string} {
const pkgIdsByDepPath = {} const pkgIdsByDepPath = {}
for (const relDepPath of Object.keys(packages)) { for (const relDepPath of Object.keys(packages)) {
const depPath = dp.resolve(registries, relDepPath) pkgIdsByDepPath[relDepPath] = packageIdFromSnapshot(relDepPath, packages[relDepPath], registries)
pkgIdsByDepPath[depPath] = packageIdFromSnapshot(relDepPath, packages[relDepPath], registries)
} }
return pkgIdsByDepPath return pkgIdsByDepPath
} }

View File

@@ -270,7 +270,7 @@ async function _rebuild (
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot) const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot)
const independent = ctx.independentLeaves && packageIsIndependent(pkgSnapshot) const independent = ctx.independentLeaves && packageIsIndependent(pkgSnapshot)
const pkgRoot = !independent const pkgRoot = !independent
? path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules', pkgInfo.name) ? path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules', pkgInfo.name)
: await ( : await (
async () => { async () => {
const { dir } = await opts.storeController.getPackageLocation(pkgSnapshot.id || depPath, pkgInfo.name, { const { dir } = await opts.storeController.getPackageLocation(pkgSnapshot.id || depPath, pkgInfo.name, {
@@ -282,7 +282,7 @@ async function _rebuild (
)() )()
try { try {
if (!independent) { if (!independent) {
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules') const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const binPath = path.join(pkgRoot, 'node_modules', '.bin') const binPath = path.join(pkgRoot, 'node_modules', '.bin')
await linkBins(modules, binPath, { warn }) await linkBins(modules, binPath, { warn })
} }
@@ -328,7 +328,7 @@ async function _rebuild (
const depPath = dp.resolve(opts.registries, relDepPath) const depPath = dp.resolve(opts.registries, relDepPath)
const pkgSnapshot = pkgSnapshots[relDepPath] const pkgSnapshot = pkgSnapshots[relDepPath]
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot) const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot)
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules') const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const binPath = path.join(modules, pkgInfo.name, 'node_modules', '.bin') const binPath = path.join(modules, pkgInfo.name, 'node_modules', '.bin')
return linkBins(modules, binPath, { warn }) return linkBins(modules, binPath, { warn })
})), })),

View File

@@ -217,7 +217,7 @@ test('rebuild dependencies in correct order', async (t) => {
t.ok(modules) t.ok(modules)
t.doesNotEqual(modules!.pendingBuilds.length, 0) t.doesNotEqual(modules!.pendingBuilds.length, 0)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`) await project.hasNot(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot('with-postinstall-a/output.json') await project.hasNot('with-postinstall-a/output.json')
await rebuild.handler({ await rebuild.handler({
@@ -231,7 +231,7 @@ test('rebuild dependencies in correct order', async (t) => {
t.ok(modules) t.ok(modules)
t.equal(modules!.pendingBuilds.length, 0) t.equal(modules!.pendingBuilds.length, 0)
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0]) t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.end() t.end()
}) })
@@ -256,7 +256,7 @@ test('rebuild dependencies in correct order when node_modules uses independent-l
t.ok(modules) t.ok(modules)
t.doesNotEqual(modules!.pendingBuilds.length, 0) t.doesNotEqual(modules!.pendingBuilds.length, 0)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`) await project.hasNot(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot('with-postinstall-a/output.json') await project.hasNot('with-postinstall-a/output.json')
await rebuild.handler({ await rebuild.handler({
@@ -271,7 +271,7 @@ test('rebuild dependencies in correct order when node_modules uses independent-l
t.ok(modules) t.ok(modules)
t.equal(modules!.pendingBuilds.length, 0) t.equal(modules!.pendingBuilds.length, 0)
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0]) t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.end() t.end()
}) })

View File

@@ -37,17 +37,18 @@ export default async function (maybeOpts: StoreStatusOptions) {
return { return {
integrity: pkg.resolution['integrity'], integrity: pkg.resolution['integrity'],
pkgPath: dp.resolve(registries, relDepPath), pkgPath: dp.resolve(registries, relDepPath),
relDepPath,
...nameVerFromPkgSnapshot(relDepPath, pkg), ...nameVerFromPkgSnapshot(relDepPath, pkg),
} }
}) })
const cafsDir = path.join(storeDir, 'files') const cafsDir = path.join(storeDir, 'files')
const modified = await pFilter(pkgs, async ({ integrity, pkgPath, name }) => { const modified = await pFilter(pkgs, async ({ integrity, pkgPath, relDepPath, name }) => {
const pkgIndexFilePath = integrity const pkgIndexFilePath = integrity
? getFilePathInCafs(cafsDir, integrity, 'index') ? getFilePathInCafs(cafsDir, integrity, 'index')
: path.join(storeDir, pkgPath, 'integrity.json') : path.join(storeDir, pkgPath, 'integrity.json')
const { files } = await loadJsonFile(pkgIndexFilePath) const { files } = await loadJsonFile(pkgIndexFilePath)
return (await dint.check(path.join(virtualStoreDir, pkgIdToFilename(pkgPath, opts.dir), 'node_modules', name), files)) === false return (await dint.check(path.join(virtualStoreDir, pkgIdToFilename(relDepPath, opts.dir), 'node_modules', name), files)) === false
}) })
if (reporter) { if (reporter) {

View File

@@ -17,7 +17,7 @@ test('CLI fails when store status finds modified packages', async function (t) {
await execa('node', [pnpmBin, 'add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity']) await execa('node', [pnpmBin, 'add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
await rimraf(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@3.1.0/node_modules/is-positive/index.js`) await rimraf(`node_modules/.pnpm/is-positive@3.1.0/node_modules/is-positive/index.js`)
let err!: PnpmError let err!: PnpmError
try { try {

View File

@@ -448,7 +448,7 @@ test('using a custom virtual-store-dir location', async (t: tape.Test) => {
await execPnpm(['install', '--virtual-store-dir=.pnpm']) await execPnpm(['install', '--virtual-store-dir=.pnpm'])
t.ok(await exists(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1/node_modules/rimraf/package.json`)) t.ok(await exists(`.pnpm/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists('.pnpm/lock.yaml')) t.ok(await exists('.pnpm/lock.yaml'))
t.ok(await exists('.pnpm/node_modules/once/package.json')) t.ok(await exists('.pnpm/node_modules/once/package.json'))
@@ -457,7 +457,7 @@ test('using a custom virtual-store-dir location', async (t: tape.Test) => {
await execPnpm(['install', '--virtual-store-dir=.pnpm', '--frozen-lockfile']) await execPnpm(['install', '--virtual-store-dir=.pnpm', '--frozen-lockfile'])
t.ok(await exists(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1/node_modules/rimraf/package.json`)) t.ok(await exists(`.pnpm/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists('.pnpm/lock.yaml')) t.ok(await exists('.pnpm/lock.yaml'))
t.ok(await exists('.pnpm/node_modules/once/package.json')) t.ok(await exists('.pnpm/node_modules/once/package.json'))
}) })

View File

@@ -168,6 +168,7 @@ export interface ResolvedPackage {
hasBundledDependencies: boolean, hasBundledDependencies: boolean,
independent: boolean, independent: boolean,
prepare: boolean, prepare: boolean,
depPath: string,
requiresBuild: boolean | undefined, // added to fix issue #1201 requiresBuild: boolean | undefined, // added to fix issue #1201
additionalInfo: { additionalInfo: {
deprecated?: string, deprecated?: string,
@@ -504,7 +505,7 @@ async function resolveDependency (
// we can safely assume that it doesn't exist in `node_modules` // we can safely assume that it doesn't exist in `node_modules`
currentLockfileContainsTheDep && currentLockfileContainsTheDep &&
options.relDepPath && options.dependencyLockfile && options.relDepPath && options.dependencyLockfile &&
await exists(path.join(ctx.virtualStoreDir, `${pkgIdToFilename(options.depPath, ctx.prefix)}/node_modules/${nameVerFromPkgSnapshot(options.relDepPath, options.dependencyLockfile).name}/package.json`)) && await exists(path.join(ctx.virtualStoreDir, `${pkgIdToFilename(options.relDepPath, ctx.prefix)}/node_modules/${nameVerFromPkgSnapshot(options.relDepPath, options.dependencyLockfile).name}/package.json`)) &&
(options.currentDepth > 0 || wantedDependency.alias && await exists(path.join(ctx.modulesDir, wantedDependency.alias)))) (options.currentDepth > 0 || wantedDependency.alias && await exists(path.join(ctx.modulesDir, wantedDependency.alias))))
if (!proceed && depIsLinked) { if (!proceed && depIsLinked) {
@@ -695,6 +696,7 @@ async function resolveDependency (
ctx.resolvedPackagesByPackageId[pkgResponse.body.id] = getResolvedPackage({ ctx.resolvedPackagesByPackageId[pkgResponse.body.id] = getResolvedPackage({
dependencyLockfile: options.dependencyLockfile, dependencyLockfile: options.dependencyLockfile,
depPath: dp.relative(ctx.registries, pkg.name, pkgResponse.body.id),
force: ctx.force, force: ctx.force,
hasBin, hasBin,
pkg, pkg,
@@ -735,6 +737,7 @@ async function resolveDependency (
function getResolvedPackage ( function getResolvedPackage (
options: { options: {
dependencyLockfile?: PackageSnapshot, dependencyLockfile?: PackageSnapshot,
depPath: string,
force: boolean, force: boolean,
hasBin: boolean, hasBin: boolean,
pkg: PackageManifest, pkg: PackageManifest,
@@ -756,6 +759,7 @@ function getResolvedPackage (
peerDependencies, peerDependencies,
peerDependenciesMeta: options.pkg.peerDependenciesMeta, peerDependenciesMeta: options.pkg.peerDependenciesMeta,
}, },
depPath: options.depPath,
dev: options.wantedDependency.dev, dev: options.wantedDependency.dev,
engineCache: !options.force && options.pkgResponse.body.cacheByEngine?.[ENGINE_NAME], engineCache: !options.force && options.pkgResponse.body.cacheByEngine?.[ENGINE_NAME],
fetchingBundledManifest: options.pkgResponse.bundledManifest, fetchingBundledManifest: options.pkgResponse.bundledManifest,

View File

@@ -43,7 +43,6 @@ import {
Registries, Registries,
} from '@pnpm/types' } from '@pnpm/types'
import rimraf = require('@zkochan/rimraf') import rimraf = require('@zkochan/rimraf')
import * as dp from 'dependency-path'
import isInnerLink = require('is-inner-link') import isInnerLink = require('is-inner-link')
import isSubdir = require('is-subdir') import isSubdir = require('is-subdir')
import pFilter = require('p-filter') import pFilter = require('p-filter')
@@ -72,7 +71,7 @@ import linkPackages, {
DependenciesGraphNode, DependenciesGraphNode,
Project as ProjectToLink, Project as ProjectToLink,
} from './link' } from './link'
import { absolutePathToRef } from './lockfile' import { depPathToRef } from './lockfile'
export type DependenciesMutation = ( export type DependenciesMutation = (
{ {
@@ -742,15 +741,14 @@ async function installInContext (
) )
ctx.pendingBuilds = ctx.pendingBuilds ctx.pendingBuilds = ctx.pendingBuilds
.filter((relDepPath) => !result.removedDepPaths.has(dp.resolve(ctx.registries, relDepPath))) .filter((relDepPath) => !result.removedDepPaths.has(relDepPath))
if (opts.ignoreScripts) { if (opts.ignoreScripts) {
// we can use concat here because we always only append new packages, which are guaranteed to not be there by definition // we can use concat here because we always only append new packages, which are guaranteed to not be there by definition
ctx.pendingBuilds = ctx.pendingBuilds ctx.pendingBuilds = ctx.pendingBuilds
.concat( .concat(
result.newDepPaths result.newDepPaths
.filter((depPath) => result.depGraph[depPath].requiresBuild) .filter((depPath) => result.depGraph[depPath].requiresBuild),
.map((depPath) => dp.relative(ctx.registries, result.depGraph[depPath].name, depPath)),
) )
} }
@@ -962,7 +960,7 @@ function addDirectDependenciesToLockfile (
for (const alias of allDeps) { for (const alias of allDeps) {
if (directDependenciesByAlias[alias]) { if (directDependenciesByAlias[alias]) {
const dep = directDependenciesByAlias[alias] const dep = directDependenciesByAlias[alias]
const ref = absolutePathToRef(dep.id, { const ref = depPathToRef(dep.id, {
alias: dep.alias, alias: dep.alias,
realName: dep.name, realName: dep.name,
registries, registries,

View File

@@ -21,12 +21,11 @@ import { DependenciesTree, LinkedDependency } from '@pnpm/resolve-dependencies'
import { StoreController } from '@pnpm/store-controller-types' import { StoreController } from '@pnpm/store-controller-types'
import symlinkDependency, { symlinkDirectRootDependency } from '@pnpm/symlink-dependency' import symlinkDependency, { symlinkDirectRootDependency } from '@pnpm/symlink-dependency'
import { ProjectManifest, Registries } from '@pnpm/types' import { ProjectManifest, Registries } from '@pnpm/types'
import * as dp from 'dependency-path'
import fs = require('mz/fs') import fs = require('mz/fs')
import pLimit from 'p-limit' import pLimit from 'p-limit'
import path = require('path') import path = require('path')
import R = require('ramda') import R = require('ramda')
import { absolutePathToRef } from './lockfile' import { depPathToRef } from './lockfile'
import resolvePeers, { import resolvePeers, {
DependenciesGraph, DependenciesGraph,
DependenciesGraphNode, DependenciesGraphNode,
@@ -93,7 +92,7 @@ export default async function linkPackages (
// The `Creating dependency graph` is not good to report in all cases as // The `Creating dependency graph` is not good to report in all cases as
// sometimes node_modules is alread up-to-date // sometimes node_modules is alread up-to-date
// logger.info(`Creating dependency graph`) // logger.info(`Creating dependency graph`)
const { depGraph, projectsDirectAbsolutePathsByAlias } = resolvePeers({ const { depGraph, projectsDirectPathsByAlias } = resolvePeers({
dependenciesTree, dependenciesTree,
independentLeaves: opts.independentLeaves, independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir, lockfileDir: opts.lockfileDir,
@@ -102,12 +101,12 @@ export default async function linkPackages (
virtualStoreDir: opts.virtualStoreDir, virtualStoreDir: opts.virtualStoreDir,
}) })
for (const { id } of projects) { for (const { id } of projects) {
for (const [alias, depPath] of R.toPairs(projectsDirectAbsolutePathsByAlias[id])) { for (const [alias, depPath] of R.toPairs(projectsDirectPathsByAlias[id])) {
const depNode = depGraph[depPath] const depNode = depGraph[depPath]
if (depNode.isPure) continue if (depNode.isPure) continue
const projectSnapshot = opts.wantedLockfile.importers[id] const projectSnapshot = opts.wantedLockfile.importers[id]
const ref = absolutePathToRef(depPath, { const ref = depPathToRef(depPath, {
alias, alias,
realName: depNode.name, realName: depNode.name,
registries: opts.registries, registries: opts.registries,
@@ -127,17 +126,16 @@ export default async function linkPackages (
? opts.afterAllResolvedHook(newLockfile) ? opts.afterAllResolvedHook(newLockfile)
: newLockfile : newLockfile
let depNodes = R.values(depGraph).filter(({ absolutePath, name, packageId }) => { let depNodes = R.values(depGraph).filter(({ depPath, packageId }) => {
const relDepPath = dp.relative(opts.registries, name, absolutePath) if (newWantedLockfile.packages?.[depPath] && !newWantedLockfile.packages[depPath].optional) {
if (newWantedLockfile.packages?.[relDepPath] && !newWantedLockfile.packages[relDepPath].optional) { opts.skipped.delete(depPath)
opts.skipped.delete(relDepPath)
return true return true
} }
if (opts.wantedToBeSkippedPackageIds.has(packageId)) { if (opts.wantedToBeSkippedPackageIds.has(packageId)) {
opts.skipped.add(relDepPath) opts.skipped.add(depPath)
return false return false
} }
opts.skipped.delete(relDepPath) opts.skipped.delete(depPath)
return true return true
}) })
if (!opts.include.dependencies) { if (!opts.include.dependencies) {
@@ -205,15 +203,15 @@ export default async function linkPackages (
const rootDepsByDepPath = depNodes const rootDepsByDepPath = depNodes
.filter(({ depth }) => depth === 0) .filter(({ depth }) => depth === 0)
.reduce((acc, depNode) => { .reduce((acc, depNode) => {
acc[depNode.absolutePath] = depNode acc[depNode.depPath] = depNode
return acc return acc
}, {}) as {[absolutePath: string]: DependenciesGraphNode} }, {})
await Promise.all(projects.map(({ id, manifest, modulesDir, rootDir }) => { await Promise.all(projects.map(({ id, manifest, modulesDir, rootDir }) => {
const directAbsolutePathsByAlias = projectsDirectAbsolutePathsByAlias[id] const directPathsByAlias = projectsDirectPathsByAlias[id]
return Promise.all( return Promise.all(
Object.keys(directAbsolutePathsByAlias) Object.keys(directPathsByAlias)
.map((rootAlias) => ({ rootAlias, depGraphNode: rootDepsByDepPath[directAbsolutePathsByAlias[rootAlias]] })) .map((rootAlias) => ({ rootAlias, depGraphNode: rootDepsByDepPath[directPathsByAlias[rootAlias]] }))
.filter(({ depGraphNode }) => depGraphNode) .filter(({ depGraphNode }) => depGraphNode)
.map(async ({ rootAlias, depGraphNode }) => { .map(async ({ rootAlias, depGraphNode }) => {
if ( if (
@@ -242,11 +240,11 @@ export default async function linkPackages (
newWantedLockfile.lockfileVersion = LOCKFILE_VERSION newWantedLockfile.lockfileVersion = LOCKFILE_VERSION
} }
await Promise.all(pendingRequiresBuilds.map(async ({ absoluteDepPath, relativeDepPath }) => { await Promise.all(pendingRequiresBuilds.map(async (depPath) => {
const depNode = depGraph[absoluteDepPath] const depNode = depGraph[depPath]
if (!depNode.fetchingBundledManifest) { if (!depNode.fetchingBundledManifest) {
// This should never ever happen // This should never ever happen
throw new Error(`Cannot create ${WANTED_LOCKFILE} because raw manifest (aka package.json) wasn't fetched for "${absoluteDepPath}"`) throw new Error(`Cannot create ${WANTED_LOCKFILE} because raw manifest (aka package.json) wasn't fetched for "${depPath}"`)
} }
const filesResponse = await depNode.fetchingFiles() const filesResponse = await depNode.fetchingFiles()
// The npm team suggests to always read the package.json for deciding whether the package has lifecycle scripts // The npm team suggests to always read the package.json for deciding whether the package has lifecycle scripts
@@ -259,8 +257,8 @@ export default async function linkPackages (
// TODO: try to cover with unit test the case when entry is no longer available in lockfile // TODO: try to cover with unit test the case when entry is no longer available in lockfile
// It is an edge that probably happens if the entry is removed during lockfile prune // It is an edge that probably happens if the entry is removed during lockfile prune
if (depNode.requiresBuild && newWantedLockfile.packages![relativeDepPath]) { if (depNode.requiresBuild && newWantedLockfile.packages![depPath]) {
newWantedLockfile.packages![relativeDepPath].requiresBuild = true newWantedLockfile.packages![depPath].requiresBuild = true
} }
})) }))
@@ -272,10 +270,9 @@ export default async function linkPackages (
) { ) {
const packages = opts.currentLockfile.packages || {} const packages = opts.currentLockfile.packages || {}
if (newWantedLockfile.packages) { if (newWantedLockfile.packages) {
for (const relDepPath in newWantedLockfile.packages) { // tslint:disable-line:forin for (const depPath in newWantedLockfile.packages) { // tslint:disable-line:forin
const depPath = dp.resolve(opts.registries, relDepPath)
if (depGraph[depPath]) { if (depGraph[depPath]) {
packages[relDepPath] = newWantedLockfile.packages[relDepPath] packages[depPath] = newWantedLockfile.packages[depPath]
} }
} }
} }
@@ -378,7 +375,6 @@ async function linkNewPackages (
if (opts.force) { if (opts.force) {
newDepPathsSet = new Set( newDepPathsSet = new Set(
wantedRelDepPaths wantedRelDepPaths
.map((relDepPath) => dp.resolve(opts.registries, relDepPath))
// when installing a new package, not all the nodes are analyzed // when installing a new package, not all the nodes are analyzed
// just skip the ones that are in the lockfile but were not analyzed // just skip the ones that are in the lockfile but were not analyzed
.filter((depPath) => depGraph[depPath]), .filter((depPath) => depGraph[depPath]),
@@ -396,11 +392,10 @@ async function linkNewPackages (
if (!opts.force && currentLockfile.packages && wantedLockfile.packages) { if (!opts.force && currentLockfile.packages && wantedLockfile.packages) {
// add subdependencies that have been updated // add subdependencies that have been updated
// TODO: no need to relink everything. Can be relinked only what was changed // TODO: no need to relink everything. Can be relinked only what was changed
for (const relDepPath of wantedRelDepPaths) { for (const depPath of wantedRelDepPaths) {
if (currentLockfile.packages[relDepPath] && if (currentLockfile.packages[depPath] &&
(!R.equals(currentLockfile.packages[relDepPath].dependencies, wantedLockfile.packages[relDepPath].dependencies) || (!R.equals(currentLockfile.packages[depPath].dependencies, wantedLockfile.packages[depPath].dependencies) ||
!R.equals(currentLockfile.packages[relDepPath].optionalDependencies, wantedLockfile.packages[relDepPath].optionalDependencies))) { !R.equals(currentLockfile.packages[depPath].optionalDependencies, wantedLockfile.packages[depPath].optionalDependencies))) {
const depPath = dp.resolve(opts.registries, relDepPath)
// TODO: come up with a test that triggers the usecase of depGraph[depPath] undefined // TODO: come up with a test that triggers the usecase of depGraph[depPath] undefined
// see related issue: https://github.com/pnpm/pnpm/issues/870 // see related issue: https://github.com/pnpm/pnpm/issues/870
@@ -446,11 +441,10 @@ async function selectNewFromWantedDeps (
const prevRelDepPaths = new Set(R.keys(currentLockfile.packages)) const prevRelDepPaths = new Set(R.keys(currentLockfile.packages))
await Promise.all( await Promise.all(
wantedRelDepPaths.map( wantedRelDepPaths.map(
async (wantedRelDepPath: string) => { async (depPath: string) => {
const depPath = dp.resolve(opts.registries, wantedRelDepPath)
const depNode = depGraph[depPath] const depNode = depGraph[depPath]
if (!depNode) return if (!depNode) return
if (prevRelDepPaths.has(wantedRelDepPath)) { if (prevRelDepPaths.has(depPath)) {
if (await fs.exists(depNode.peripheralLocation)) { if (await fs.exists(depNode.peripheralLocation)) {
return return
} }

View File

@@ -3,8 +3,8 @@ import { Registries } from '@pnpm/types'
import { getRegistryByPackageName } from 'dependency-path' import { getRegistryByPackageName } from 'dependency-path'
import encodeRegistry = require('encode-registry') import encodeRegistry = require('encode-registry')
export function absolutePathToRef ( export function depPathToRef (
absolutePath: string, depPath: string,
opts: { opts: {
alias: string, alias: string,
realName: string, realName: string,
@@ -12,15 +12,15 @@ export function absolutePathToRef (
resolution: Resolution, resolution: Resolution,
}, },
) { ) {
if (opts.resolution.type) return absolutePath if (opts.resolution.type) return depPath
const registryName = encodeRegistry(getRegistryByPackageName(opts.registries, opts.realName)) const registryName = encodeRegistry(getRegistryByPackageName(opts.registries, opts.realName))
if (absolutePath.startsWith(`${registryName}/`) && !absolutePath.includes('/-/')) { if (depPath.startsWith(`${registryName}/`) && !depPath.includes('/-/')) {
if (opts.alias === opts.realName) { depPath = depPath.replace(`${registryName}/`, '/')
const ref = absolutePath.replace(`${registryName}/${opts.realName}/`, '')
if (!ref.includes('/')) return ref
}
return absolutePath.replace(`${registryName}/`, '/')
} }
return absolutePath if (opts.alias === opts.realName) {
const ref = depPath.replace(`/${opts.realName}/`, '')
if (!ref.includes('/')) return ref
}
return depPath
} }

View File

@@ -34,7 +34,7 @@ export interface DependenciesGraphNode {
independent: boolean, independent: boolean,
optionalDependencies: Set<string>, optionalDependencies: Set<string>,
depth: number, depth: number,
absolutePath: string, depPath: string,
prod: boolean, prod: boolean,
dev: boolean, dev: boolean,
optional: boolean, optional: boolean,
@@ -80,10 +80,10 @@ export default function (
}, },
): { ): {
depGraph: DependenciesGraph, depGraph: DependenciesGraph,
projectsDirectAbsolutePathsByAlias: {[id: string]: {[alias: string]: string}}, projectsDirectPathsByAlias: {[id: string]: {[alias: string]: string}},
} { } {
const depGraph: DependenciesGraph = {} const depGraph: DependenciesGraph = {}
const absolutePathsByNodeId = {} const pathsByNodeId = {}
for (const { directNodeIdsByAlias, topParents, rootDir } of opts.projects) { for (const { directNodeIdsByAlias, topParents, rootDir } of opts.projects) {
const pkgsByName = Object.assign( const pkgsByName = Object.assign(
@@ -108,11 +108,11 @@ export default function (
) )
resolvePeersOfChildren(directNodeIdsByAlias, pkgsByName, { resolvePeersOfChildren(directNodeIdsByAlias, pkgsByName, {
absolutePathsByNodeId,
dependenciesTree: opts.dependenciesTree, dependenciesTree: opts.dependenciesTree,
depGraph, depGraph,
independentLeaves: opts.independentLeaves, independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir, lockfileDir: opts.lockfileDir,
pathsByNodeId,
purePkgs: new Set(), purePkgs: new Set(),
rootDir, rootDir,
strictPeerDependencies: opts.strictPeerDependencies, strictPeerDependencies: opts.strictPeerDependencies,
@@ -122,21 +122,21 @@ export default function (
R.values(depGraph).forEach((node) => { R.values(depGraph).forEach((node) => {
node.children = R.keys(node.children).reduce((acc, alias) => { node.children = R.keys(node.children).reduce((acc, alias) => {
acc[alias] = absolutePathsByNodeId[node.children[alias]] acc[alias] = pathsByNodeId[node.children[alias]]
return acc return acc
}, {}) }, {})
}) })
const projectsDirectAbsolutePathsByAlias: {[id: string]: {[alias: string]: string}} = {} const projectsDirectPathsByAlias: {[id: string]: {[alias: string]: string}} = {}
for (const { directNodeIdsByAlias, id } of opts.projects) { for (const { directNodeIdsByAlias, id } of opts.projects) {
projectsDirectAbsolutePathsByAlias[id] = R.keys(directNodeIdsByAlias).reduce((rootAbsolutePathsByAlias, alias) => { projectsDirectPathsByAlias[id] = R.keys(directNodeIdsByAlias).reduce((rootPathsByAlias, alias) => {
rootAbsolutePathsByAlias[alias] = absolutePathsByNodeId[directNodeIdsByAlias[alias]] rootPathsByAlias[alias] = pathsByNodeId[directNodeIdsByAlias[alias]]
return rootAbsolutePathsByAlias return rootPathsByAlias
}, {}) }, {})
} }
return { return {
depGraph, depGraph,
projectsDirectAbsolutePathsByAlias, projectsDirectPathsByAlias,
} }
} }
@@ -145,7 +145,7 @@ function resolvePeersOfNode (
parentParentPkgs: ParentRefs, parentParentPkgs: ParentRefs,
ctx: { ctx: {
dependenciesTree: DependenciesTree, dependenciesTree: DependenciesTree,
absolutePathsByNodeId: {[nodeId: string]: string}, pathsByNodeId: {[nodeId: string]: string},
depGraph: DependenciesGraph, depGraph: DependenciesGraph,
independentLeaves: boolean, independentLeaves: boolean,
virtualStoreDir: string, virtualStoreDir: string,
@@ -156,8 +156,8 @@ function resolvePeersOfNode (
}, },
): {[alias: string]: string} { ): {[alias: string]: string} {
const node = ctx.dependenciesTree[nodeId] const node = ctx.dependenciesTree[nodeId]
if (ctx.purePkgs.has(node.resolvedPackage.id) && ctx.depGraph[node.resolvedPackage.id].depth <= node.depth) { if (ctx.purePkgs.has(node.resolvedPackage.depPath) && ctx.depGraph[node.resolvedPackage.depPath].depth <= node.depth) {
ctx.absolutePathsByNodeId[nodeId] = node.resolvedPackage.id ctx.pathsByNodeId[nodeId] = node.resolvedPackage.depPath
return {} return {}
} }
@@ -184,14 +184,14 @@ function resolvePeersOfNode (
const allResolvedPeers = Object.assign(unknownResolvedPeersOfChildren, resolvedPeers) const allResolvedPeers = Object.assign(unknownResolvedPeersOfChildren, resolvedPeers)
let modules: string let modules: string
let absolutePath: string let depPath: string
const localLocation = path.join(ctx.virtualStoreDir, pkgIdToFilename(node.resolvedPackage.id, ctx.lockfileDir)) const localLocation = path.join(ctx.virtualStoreDir, pkgIdToFilename(node.resolvedPackage.depPath, ctx.lockfileDir))
const isPure = R.isEmpty(allResolvedPeers) const isPure = R.isEmpty(allResolvedPeers)
if (isPure) { if (isPure) {
modules = path.join(localLocation, 'node_modules') modules = path.join(localLocation, 'node_modules')
absolutePath = node.resolvedPackage.id depPath = node.resolvedPackage.depPath
if (R.isEmpty(node.resolvedPackage.peerDependencies)) { if (R.isEmpty(node.resolvedPackage.peerDependencies)) {
ctx.purePkgs.add(node.resolvedPackage.id) ctx.purePkgs.add(node.resolvedPackage.depPath)
} }
} else { } else {
const peersFolderSuffix = createPeersFolderSuffix( const peersFolderSuffix = createPeersFolderSuffix(
@@ -200,11 +200,11 @@ function resolvePeersOfNode (
version: ctx.dependenciesTree[allResolvedPeers[alias]].resolvedPackage.version, version: ctx.dependenciesTree[allResolvedPeers[alias]].resolvedPackage.version,
}))) })))
modules = path.join(`${localLocation}${peersFolderSuffix}`, 'node_modules') modules = path.join(`${localLocation}${peersFolderSuffix}`, 'node_modules')
absolutePath = `${node.resolvedPackage.id}${peersFolderSuffix}` depPath = `${node.resolvedPackage.depPath}${peersFolderSuffix}`
} }
ctx.absolutePathsByNodeId[nodeId] = absolutePath ctx.pathsByNodeId[nodeId] = depPath
if (!ctx.depGraph[absolutePath] || ctx.depGraph[absolutePath].depth > node.depth) { if (!ctx.depGraph[depPath] || ctx.depGraph[depPath].depth > node.depth) {
const independent = ctx.independentLeaves && node.resolvedPackage.independent const independent = ctx.independentLeaves && node.resolvedPackage.independent
const centralLocation = node.resolvedPackage.engineCache || path.join(node.resolvedPackage.path, 'node_modules', node.resolvedPackage.name) const centralLocation = node.resolvedPackage.engineCache || path.join(node.resolvedPackage.path, 'node_modules', node.resolvedPackage.name)
const peripheralLocation = !independent const peripheralLocation = !independent
@@ -222,10 +222,10 @@ function resolvePeersOfNode (
} }
} }
} }
ctx.depGraph[absolutePath] = { ctx.depGraph[depPath] = {
absolutePath,
additionalInfo: node.resolvedPackage.additionalInfo, additionalInfo: node.resolvedPackage.additionalInfo,
children: Object.assign(children, resolvedPeers), children: Object.assign(children, resolvedPeers),
depPath,
depth: node.depth, depth: node.depth,
dev: node.resolvedPackage.dev, dev: node.resolvedPackage.dev,
fetchingBundledManifest: node.resolvedPackage.fetchingBundledManifest, fetchingBundledManifest: node.resolvedPackage.fetchingBundledManifest,
@@ -258,7 +258,7 @@ function resolvePeersOfChildren (
}, },
parentPkgs: ParentRefs, parentPkgs: ParentRefs,
ctx: { ctx: {
absolutePathsByNodeId: {[nodeId: string]: string}, pathsByNodeId: {[nodeId: string]: string},
independentLeaves: boolean, independentLeaves: boolean,
virtualStoreDir: string, virtualStoreDir: string,
purePkgs: Set<string>, purePkgs: Set<string>,

View File

@@ -15,7 +15,7 @@ import {
import * as dp from 'dependency-path' import * as dp from 'dependency-path'
import getNpmTarballUrl from 'get-npm-tarball-url' import getNpmTarballUrl from 'get-npm-tarball-url'
import R = require('ramda') import R = require('ramda')
import { absolutePathToRef } from './lockfile' import { depPathToRef } from './lockfile'
import { DependenciesGraph } from './resolvePeers' import { DependenciesGraph } from './resolvePeers'
export default function ( export default function (
@@ -25,26 +25,24 @@ export default function (
registries: Registries, registries: Registries,
): { ): {
newLockfile: Lockfile, newLockfile: Lockfile,
pendingRequiresBuilds: PendingRequiresBuild[], pendingRequiresBuilds: string[],
} { } {
lockfile.packages = lockfile.packages || {} lockfile.packages = lockfile.packages || {}
const pendingRequiresBuilds = [] as PendingRequiresBuild[] const pendingRequiresBuilds = [] as string[]
for (const depPath of Object.keys(depGraph)) { for (const depPath of Object.keys(depGraph)) {
const depNode = depGraph[depPath] const depNode = depGraph[depPath]
const relDepPath = dp.relative(registries, depNode.name, depPath) const [updatedOptionalDeps, updatedDeps] = R.partition(
const result = R.partition(
(child) => depNode.optionalDependencies.has(depGraph[child.depPath].name), (child) => depNode.optionalDependencies.has(depGraph[child.depPath].name),
Object.keys(depNode.children).map((alias) => ({ alias, depPath: depNode.children[alias] })), Object.keys(depNode.children).map((alias) => ({ alias, depPath: depNode.children[alias] })),
) )
lockfile.packages[relDepPath] = toLockfileDependency(pendingRequiresBuilds, depNode.additionalInfo, { lockfile.packages[depPath] = toLockfileDependency(pendingRequiresBuilds, depNode.additionalInfo, {
depGraph, depGraph,
depPath, depPath,
prevSnapshot: lockfile.packages[relDepPath], prevSnapshot: lockfile.packages[depPath],
registries, registries,
registry: dp.getRegistryByPackageName(registries, depNode.name), registry: dp.getRegistryByPackageName(registries, depNode.name),
relDepPath, updatedDeps,
updatedDeps: result[1], updatedOptionalDeps,
updatedOptionalDeps: result[0],
}) })
} }
const warn = (message: string) => logger.warn({ message, prefix }) const warn = (message: string) => logger.warn({ message, prefix })
@@ -54,13 +52,8 @@ export default function (
} }
} }
export interface PendingRequiresBuild {
relativeDepPath: string,
absoluteDepPath: string,
}
function toLockfileDependency ( function toLockfileDependency (
pendingRequiresBuilds: PendingRequiresBuild[], pendingRequiresBuilds: string[],
pkg: { pkg: {
deprecated?: string, deprecated?: string,
peerDependencies?: Dependencies, peerDependencies?: Dependencies,
@@ -76,7 +69,6 @@ function toLockfileDependency (
}, },
opts: { opts: {
depPath: string, depPath: string,
relDepPath: string,
registry: string, registry: string,
registries: Registries, registries: Registries,
updatedDeps: Array<{alias: string, depPath: string}>, updatedDeps: Array<{alias: string, depPath: string}>,
@@ -88,7 +80,7 @@ function toLockfileDependency (
const depNode = opts.depGraph[opts.depPath] const depNode = opts.depGraph[opts.depPath]
const lockfileResolution = toLockfileResolution( const lockfileResolution = toLockfileResolution(
{ name: depNode.name, version: depNode.version }, { name: depNode.name, version: depNode.version },
opts.relDepPath, opts.depPath,
depNode.resolution, depNode.resolution,
opts.registry, opts.registry,
) )
@@ -108,7 +100,7 @@ function toLockfileDependency (
resolution: lockfileResolution, resolution: lockfileResolution,
} }
// tslint:disable:no-string-literal // tslint:disable:no-string-literal
if (dp.isAbsolute(opts.relDepPath)) { if (dp.isAbsolute(opts.depPath)) {
result['name'] = depNode.name result['name'] = depNode.name
// There is no guarantee that a non-npmjs.org-hosted package // There is no guarantee that a non-npmjs.org-hosted package
@@ -131,7 +123,7 @@ function toLockfileDependency (
if (depNode.optional) { if (depNode.optional) {
result['optional'] = true result['optional'] = true
} }
if (opts.relDepPath[0] !== '/' && opts.depPath !== depNode.packageId) { if (opts.depPath[0] !== '/' && !depNode.packageId.endsWith(opts.depPath)) {
result['id'] = depNode.packageId result['id'] = depNode.packageId
} }
if (pkg.peerDependencies) { if (pkg.peerDependencies) {
@@ -185,10 +177,7 @@ function toLockfileDependency (
result['requiresBuild'] = true result['requiresBuild'] = true
} }
} else { } else {
pendingRequiresBuilds.push({ pendingRequiresBuilds.push(opts.depPath)
absoluteDepPath: opts.depPath,
relativeDepPath: opts.relDepPath,
})
} }
depNode.requiresBuild = result['requiresBuild'] depNode.requiresBuild = result['requiresBuild']
// tslint:enable:no-string-literal // tslint:enable:no-string-literal
@@ -210,7 +199,7 @@ function updateResolvedDeps (
const depNode = depGraph[depPath] const depNode = depGraph[depPath]
return [ return [
alias, alias,
absolutePathToRef(depNode.absolutePath, { depPathToRef(depNode.depPath, {
alias, alias,
realName: depNode.name, realName: depNode.name,
registries, registries,
@@ -230,12 +219,12 @@ function toLockfileResolution (
name: string, name: string,
version: string, version: string,
}, },
relDepPath: string, depPath: string,
resolution: Resolution, resolution: Resolution,
registry: string, registry: string,
): LockfileResolution { ): LockfileResolution {
// tslint:disable:no-string-literal // tslint:disable:no-string-literal
if (dp.isAbsolute(relDepPath) || resolution.type !== undefined || !resolution['integrity']) { if (dp.isAbsolute(depPath) || resolution.type !== undefined || !resolution['integrity']) {
return resolution as LockfileResolution return resolution as LockfileResolution
} }
const base = registry !== resolution['registry'] ? { registry: resolution['registry'] } : {} const base = registry !== resolution['registry'] ? { registry: resolution['registry'] } : {}

View File

@@ -119,7 +119,7 @@ test('a subdependency is from a github repo with different name', async (t: tape
await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/hi') await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/hi')
await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/szia') await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/szia')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/has-say-hi-peer@1.0.0_say-hi@1.0.0/node_modules/has-say-hi-peer`)), t.ok(await exists(path.resolve(`node_modules/.pnpm/has-say-hi-peer@1.0.0_say-hi@1.0.0/node_modules/has-say-hi-peer`)),
'aliased name used to resolve a peer dependency') 'aliased name used to resolve a peer dependency')
}) })

View File

@@ -1,6 +1,5 @@
import assertProject from '@pnpm/assert-project' import assertProject from '@pnpm/assert-project'
import { prepareEmpty, preparePackages } from '@pnpm/prepare' import { prepareEmpty, preparePackages } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import rimraf = require('@zkochan/rimraf') import rimraf = require('@zkochan/rimraf')
import fs = require('fs') import fs = require('fs')
import path = require('path') import path = require('path')
@@ -114,7 +113,7 @@ test('should rehoist when uninstalling a package', async (t: tape.Test) => {
const modules = await project.readModulesManifest() const modules = await project.readModulesManifest()
t.ok(modules) t.ok(modules)
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/debug/2.6.9`], ['debug'], 'new hoisted debug added to .modules.yaml') t.deepEqual(modules!.hoistedAliases[`/debug/2.6.9`], ['debug'], 'new hoisted debug added to .modules.yaml')
}) })
test('should rehoist after running a general install', async (t) => { test('should rehoist after running a general install', async (t) => {
@@ -211,7 +210,7 @@ test('hoist by alias', async (t: tape.Test) => {
const modules = await project.readModulesManifest() const modules = await project.readModulesManifest()
t.ok(modules) t.ok(modules)
t.deepEqual(modules!.hoistedAliases, { [`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.1.0`]: [ 'dep' ] }, '.modules.yaml updated correctly') t.deepEqual(modules!.hoistedAliases, { [`/dep-of-pkg-with-1-dep/100.1.0`]: [ 'dep' ] }, '.modules.yaml updated correctly')
}) })
test('should remove aliased hoisted dependencies', async (t) => { test('should remove aliased hoisted dependencies', async (t) => {
@@ -434,8 +433,8 @@ test('hoist when updating in one of the workspace projects', async (t) => {
{ {
const modulesManifest = await rootModules.readModulesManifest() const modulesManifest = await rootModules.readModulesManifest()
t.deepEqual(modulesManifest?.hoistedAliases, { t.deepEqual(modulesManifest?.hoistedAliases, {
[`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'], [`/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
[`localhost+${REGISTRY_MOCK_PORT}/foo/100.0.0`]: ['foo'], [`/foo/100.0.0`]: ['foo'],
}) })
} }
@@ -462,7 +461,7 @@ test('hoist when updating in one of the workspace projects', async (t) => {
{ {
const modulesManifest = await rootModules.readModulesManifest() const modulesManifest = await rootModules.readModulesManifest()
t.deepEqual(modulesManifest?.hoistedAliases, { t.deepEqual(modulesManifest?.hoistedAliases, {
[`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'], [`/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
}) })
} }
}) })

View File

@@ -77,8 +77,8 @@ test('installing with independent-leaves and hoistPattern', async (t) => {
await project.has('.pnpm/node_modules/dep-of-pkg-with-1-dep') await project.has('.pnpm/node_modules/dep-of-pkg-with-1-dep')
// wrappy is linked directly from the store // wrappy is linked directly from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.0.0`) await project.hasNot(`.pnpm/dep-of-pkg-with-1-dep@100.0.0`)
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0') await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0`) await project.has(`.pnpm/pkg-with-1-dep@100.0.0`)
}) })

View File

@@ -178,14 +178,14 @@ test("reports child's output", async (t: tape.Test) => {
await addDependenciesToPackage({}, ['count-to-10'], await testDefaults({ fastUnpack: false, reporter })) await addDependenciesToPackage({}, ['count-to-10'], await testDefaults({ fastUnpack: false, reporter }))
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`, depPath: '/count-to-10/1.0.0',
level: 'debug', level: 'debug',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
script: 'node postinstall', script: 'node postinstall',
stage: 'postinstall', stage: 'postinstall',
} as LifecycleLog)) } as LifecycleLog))
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`, depPath: '/count-to-10/1.0.0',
level: 'debug', level: 'debug',
line: '1', line: '1',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
@@ -193,7 +193,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stdout', stdio: 'stdout',
} as LifecycleLog)) } as LifecycleLog))
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`, depPath: '/count-to-10/1.0.0',
level: 'debug', level: 'debug',
line: '2', line: '2',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
@@ -201,7 +201,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stdout', stdio: 'stdout',
} as LifecycleLog)) } as LifecycleLog))
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`, depPath: '/count-to-10/1.0.0',
level: 'debug', level: 'debug',
line: '6', line: '6',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
@@ -209,7 +209,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stderr', stdio: 'stderr',
} as LifecycleLog)) } as LifecycleLog))
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`, depPath: '/count-to-10/1.0.0',
exitCode: 0, exitCode: 0,
level: 'debug', level: 'debug',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
@@ -227,7 +227,7 @@ test("reports child's close event", async (t: tape.Test) => {
t.fail() t.fail()
} catch (err) { } catch (err) {
t.ok(reporter.calledWithMatch({ t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/failing-postinstall/1.0.0`, depPath: '/failing-postinstall/1.0.0',
exitCode: 1, exitCode: 1,
level: 'debug', level: 'debug',
name: 'pnpm:lifecycle', name: 'pnpm:lifecycle',
@@ -262,7 +262,7 @@ test('run lifecycle scripts of dependent packages after running scripts of their
await addDependenciesToPackage({}, ['with-postinstall-a'], await testDefaults({ fastUnpack: false })) await addDependenciesToPackage({}, ['with-postinstall-a'], await testDefaults({ fastUnpack: false }))
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0]) t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
}) })
test('run prepare script for git-hosted dependencies', async (t: tape.Test) => { test('run prepare script for git-hosted dependencies', async (t: tape.Test) => {

View File

@@ -457,10 +457,10 @@ test('concurrent circular deps', async (t: tape.Test) => {
const m = project.requireModule('es6-iterator') const m = project.requireModule('es6-iterator')
t.ok(m, 'es6-iterator is installed') t.ok(m, 'es6-iterator is installed')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es6-iterator@2.0.0/node_modules/es5-ext`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/es6-iterator@2.0.0/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es6-iterator@2.0.1/node_modules/es5-ext`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/es6-iterator@2.0.1/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es5-ext@0.10.31/node_modules/es6-iterator`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/es5-ext@0.10.31/node_modules/es6-iterator`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es5-ext@0.10.31/node_modules/es6-symbol`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/es5-ext@0.10.31/node_modules/es6-symbol`)))
}) })
test('concurrent installation of the same packages', async (t) => { test('concurrent installation of the same packages', async (t) => {
@@ -737,7 +737,7 @@ test('lockfile locks npm dependencies', async (t: tape.Test) => {
status: 'found_in_store', status: 'found_in_store',
} as ProgressLog), 'logged that package was found in store') } as ProgressLog), 'logged that package was found in store')
const m = project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`) const m = project.requireModule(`.pnpm/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)
t.equal(m.version, '100.0.0', `dependency specified in ${WANTED_LOCKFILE} is installed`) t.equal(m.version, '100.0.0', `dependency specified in ${WANTED_LOCKFILE} is installed`)
}) })
@@ -837,7 +837,7 @@ test("don't fail on case insensitive filesystems when package has 2 files with s
test('reinstalls missing packages to node_modules', async (t) => { test('reinstalls missing packages to node_modules', async (t) => {
prepareEmpty(t) prepareEmpty(t)
const reporter = sinon.spy() const reporter = sinon.spy()
const depLocation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`) const depLocation = path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)
const missingDepLog = { const missingDepLog = {
level: 'debug', level: 'debug',
missing: depLocation, missing: depLocation,
@@ -873,7 +873,7 @@ test('reinstalls missing packages to node_modules', async (t) => {
test('reinstalls missing packages to node_modules during headless install', async (t) => { test('reinstalls missing packages to node_modules during headless install', async (t) => {
prepareEmpty(t) prepareEmpty(t)
const reporter = sinon.spy() const reporter = sinon.spy()
const depLocation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`) const depLocation = path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)
const missingDepLog = { const missingDepLog = {
level: 'debug', level: 'debug',
missing: depLocation, missing: depLocation,
@@ -1002,7 +1002,7 @@ test('all the subdeps of dependencies are linked when a node_modules is partiall
], await testDefaults({ preferFrozenLockfile: false })) ], await testDefaults({ preferFrozenLockfile: false }))
t.deepEqual( t.deepEqual(
await fs.readdir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobarqar@1.0.1/node_modules`)), await fs.readdir(path.resolve(`node_modules/.pnpm/foobarqar@1.0.1/node_modules`)),
[ [
'bar', 'bar',
'foo', 'foo',
@@ -1089,7 +1089,7 @@ test('subdep symlinks are updated if the lockfile has new subdep versions specif
}, },
], await testDefaults({ preferFrozenLockfile: false })) ], await testDefaults({ preferFrozenLockfile: false }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)))
}) })
test('fail if none of the available resolvers support a version spec', async (t: tape.Test) => { test('fail if none of the available resolvers support a version spec', async (t: tape.Test) => {

View File

@@ -66,8 +66,8 @@ test('install only the dependencies of the specified importer', async (t) => {
await projects['project-2'].hasNot('is-negative') await projects['project-2'].hasNot('is-negative')
const rootModules = assertProject(t, process.cwd()) const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`) await rootModules.has(`.pnpm/is-positive@1.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`) await rootModules.hasNot(`.pnpm/is-negative@1.0.0`)
}) })
test('install only the dependencies of the specified importer. The current lockfile has importers that do not exist anymore', async (t) => { test('install only the dependencies of the specified importer. The current lockfile has importers that do not exist anymore', async (t) => {
@@ -194,9 +194,9 @@ test('dependencies of other importers are not pruned when installing for a subse
await projects['project-2'].has('is-negative') await projects['project-2'].has('is-negative')
const rootModules = assertProject(t, process.cwd()) const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0`) await rootModules.has(`.pnpm/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`) await rootModules.hasNot(`.pnpm/is-positive@1.0.0`)
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`) await rootModules.has(`.pnpm/is-negative@1.0.0`)
const lockfile = await rootModules.readCurrentLockfile() const lockfile = await rootModules.readCurrentLockfile()
t.deepEqual(Object.keys(lockfile.importers), ['project-1', 'project-2']) t.deepEqual(Object.keys(lockfile.importers), ['project-1', 'project-2'])
@@ -259,9 +259,9 @@ test('dependencies of other importers are not pruned when (headless) installing
await projects['project-2'].has('is-negative') await projects['project-2'].has('is-negative')
const rootModules = assertProject(t, process.cwd()) const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0`) await rootModules.has(`.pnpm/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`) await rootModules.hasNot(`.pnpm/is-positive@1.0.0`)
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`) await rootModules.has(`.pnpm/is-negative@1.0.0`)
}) })
test('adding a new dev dependency to project that uses a shared lockfile', async (t) => { test('adding a new dev dependency to project that uses a shared lockfile', async (t) => {
@@ -532,9 +532,9 @@ test('partial installation in a monorepo does not remove dependencies of other w
}, },
], await testDefaults()) ], await testDefaults())
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0/node_modules/is-positive`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/is-positive@2.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
}) })
test('partial installation in a monorepo does not remove dependencies of other workspace projects when lockfile is frozen', async (t: tape.Test) => { test('partial installation in a monorepo does not remove dependencies of other workspace projects when lockfile is frozen', async (t: tape.Test) => {
@@ -625,9 +625,9 @@ test('partial installation in a monorepo does not remove dependencies of other w
}, },
], await testDefaults({ frozenLockfile: true })) ], await testDefaults({ frozenLockfile: true }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
}) })
test('adding a new dependency with the workspace: protocol', async (t) => { test('adding a new dependency with the workspace: protocol', async (t) => {
@@ -829,8 +829,8 @@ test('remove dependencies of a project that was removed from the workspace (duri
t.deepEqual(Object.keys(currentLockfile.importers), ['project-1', 'project-2']) t.deepEqual(Object.keys(currentLockfile.importers), ['project-1', 'project-2'])
t.deepEqual(Object.keys(currentLockfile.packages), ['/is-negative/1.0.0', '/is-positive/1.0.0']) t.deepEqual(Object.keys(currentLockfile.packages), ['/is-negative/1.0.0', '/is-positive/1.0.0'])
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`) await project.has(`.pnpm/is-positive@1.0.0`)
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`) await project.has(`.pnpm/is-negative@1.0.0`)
} }
await mutateModules(importers.slice(0, 1), await testDefaults({ preferFrozenLockfile: false })) await mutateModules(importers.slice(0, 1), await testDefaults({ preferFrozenLockfile: false }))
@@ -839,7 +839,7 @@ test('remove dependencies of a project that was removed from the workspace (duri
t.deepEqual(Object.keys(currentLockfile.importers), ['project-1']) t.deepEqual(Object.keys(currentLockfile.importers), ['project-1'])
t.deepEqual(Object.keys(currentLockfile.packages), ['/is-positive/1.0.0']) t.deepEqual(Object.keys(currentLockfile.packages), ['/is-positive/1.0.0'])
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`) await project.has(`.pnpm/is-positive@1.0.0`)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`) await project.hasNot(`.pnpm/is-negative@1.0.0`)
} }
}) })

View File

@@ -70,7 +70,7 @@ test('skip optional dependency that does not support the current OS', async (t:
await project.hasNot('not-compatible-with-any-os') await project.hasNot('not-compatible-with-any-os')
await project.storeHas('not-compatible-with-any-os', '1.0.0') await project.storeHas('not-compatible-with-any-os', '1.0.0')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-optional-pkg@1.0.0`)), "isn't linked into node_modules") t.notOk(await exists(path.resolve(`node_modules/.pnpm/dep-of-optional-pkg@1.0.0`)), "isn't linked into node_modules")
const lockfile = await project.readLockfile() const lockfile = await project.readLockfile()
t.ok(lockfile.packages['/not-compatible-with-any-os/1.0.0'], 'lockfile contains optional dependency') t.ok(lockfile.packages['/not-compatible-with-any-os/1.0.0'], 'lockfile contains optional dependency')
@@ -213,8 +213,8 @@ test('optional subdependency is skipped', async (t: tape.Test) => {
t.deepEqual(modulesInfo.skipped, ['/not-compatible-with-any-os/1.0.0'], 'optional subdep skipped') t.deepEqual(modulesInfo.skipped, ['/not-compatible-with-any-os/1.0.0'], 'optional subdep skipped')
} }
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-optional@1.0.0`), 'regular dependency linked') t.ok(await exists(`node_modules/.pnpm/pkg-with-optional@1.0.0`), 'regular dependency linked')
t.notOk(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/not-compatible-with-any-os@1.0.0`), 'optional dependency not linked') t.notOk(await exists(`node_modules/.pnpm/not-compatible-with-any-os@1.0.0`), 'optional dependency not linked')
const logMatcher = sinon.match({ const logMatcher = sinon.match({
package: { package: {
@@ -240,7 +240,7 @@ test('optional subdependency is skipped', async (t: tape.Test) => {
await testDefaults({ force: true, frozenLockfile: true }), await testDefaults({ force: true, frozenLockfile: true }),
) )
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/not-compatible-with-any-os@1.0.0`), 'optional dependency linked after forced headless install') t.ok(await exists(`node_modules/.pnpm/not-compatible-with-any-os@1.0.0`), 'optional dependency linked after forced headless install')
{ {
const modulesInfo = await readYamlFile<{ skipped: string[] }>(path.join('node_modules', '.modules.yaml')) const modulesInfo = await readYamlFile<{ skipped: string[] }>(path.join('node_modules', '.modules.yaml'))
@@ -354,10 +354,10 @@ test('only skip optional dependencies', async (t: tape.Test) => {
}, },
}, await testDefaults({ fastUnpack: false, preferredVersions })) }, await testDefaults({ fastUnpack: false, preferredVersions }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/duplexify@3.6.0`)), 'duplexify is linked into node_modules') t.ok(await exists(path.resolve(`node_modules/.pnpm/duplexify@3.6.0`)), 'duplexify is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/stream-shift@1.0.0`)), 'stream-shift is linked into node_modules') t.ok(await exists(path.resolve(`node_modules/.pnpm/stream-shift@1.0.0`)), 'stream-shift is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/got@3.3.1/node_modules/duplexify`)), 'duplexify is linked into node_modules of got') t.ok(await exists(path.resolve(`node_modules/.pnpm/got@3.3.1/node_modules/duplexify`)), 'duplexify is linked into node_modules of got')
}) })
test('skip optional dependency that does not support the current OS, when doing install on a subset of workspace projects', async (t: tape.Test) => { test('skip optional dependency that does not support the current OS, when doing install on a subset of workspace projects', async (t: tape.Test) => {

View File

@@ -32,7 +32,7 @@ test('peer dependency is grouped with dependency when peer is resolved not from
const opts = await testDefaults() const opts = await testDefaults()
let manifest = await addDependenciesToPackage({}, ['using-ajv'], opts) let manifest = await addDependenciesToPackage({}, ['using-ajv'], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4') t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
// testing that peers are reinstalled correctly using info from the lockfile // testing that peers are reinstalled correctly using info from the lockfile
@@ -40,7 +40,7 @@ test('peer dependency is grouped with dependency when peer is resolved not from
await rimraf(path.resolve('..', '.store')) await rimraf(path.resolve('..', '.store'))
manifest = await install(manifest, await testDefaults()) manifest = await install(manifest, await testDefaults())
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4') t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
await addDependenciesToPackage(manifest, ['using-ajv'], await testDefaults({ update: true })) await addDependenciesToPackage(manifest, ['using-ajv'], await testDefaults({ update: true }))
@@ -62,8 +62,8 @@ test('nothing is needlessly removed from node_modules', async (t: tape.Test) =>
const opts = await testDefaults() const opts = await testDefaults()
const manifest = await addDependenciesToPackage({}, ['using-ajv', 'ajv-keywords@1.5.0'], opts) const manifest = await addDependenciesToPackage({}, ['using-ajv', 'ajv-keywords@1.5.0'], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is present') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is present')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4') t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
await mutateModules([ await mutateModules([
@@ -75,8 +75,8 @@ test('nothing is needlessly removed from node_modules', async (t: tape.Test) =>
}, },
], opts) ], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency link is not removed') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency link is not removed')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is removed') t.notOk(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is removed')
}) })
test('peer dependency is grouped with dependent when the peer is a top dependency', async (t: tape.Test) => { test('peer dependency is grouped with dependent when the peer is a top dependency', async (t: tape.Test) => {
@@ -90,7 +90,7 @@ test('peer dependency is grouped with dependent when the peer is a top dependenc
message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords/1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`, message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords/1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`,
}), 'no warning is logged about unresolved peer dep') }), 'no warning is logged about unresolved peer dep')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)), 'dependent is grouped with top peer dep') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)), 'dependent is grouped with top peer dep')
}) })
test('the right peer dependency is used in every workspace package', async (t: tape.Test) => { test('the right peer dependency is used in every workspace package', async (t: tape.Test) => {
@@ -256,8 +256,8 @@ test('top peer dependency is linked on subsequent install', async (t: tape.Test)
await addDependenciesToPackage(manifest, ['ajv-keywords@1.5.0'], await testDefaults()) await addDependenciesToPackage(manifest, ['ajv-keywords@1.5.0'], await testDefaults())
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'dependency without peer is prunned') t.notOk(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'dependency without peer is prunned')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked') t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
}) })
async function okFile (t: tape.Test, filename: string) { async function okFile (t: tape.Test, filename: string) {
@@ -273,7 +273,7 @@ test('peer dependencies are linked when running one named installation', async (
const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'abc-parent-with-ab', 'peer-c@2.0.0'], await testDefaults()) const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'abc-parent-with-ab', 'peer-c@2.0.0'], await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`) const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules') const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc')) await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -305,7 +305,7 @@ test('peer dependencies are linked when running two separate named installations
const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'peer-c@2.0.0'], await testDefaults()) const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'peer-c@2.0.0'], await testDefaults())
await addDependenciesToPackage(manifest, ['abc-parent-with-ab'], await testDefaults()) await addDependenciesToPackage(manifest, ['abc-parent-with-ab'], await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`) const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules') const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc')) await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -337,7 +337,7 @@ test.skip('peer dependencies are linked', async (t: tape.Test) => {
}, },
}, await testDefaults()) }, await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`) const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir, '165e1e08a3f7e7f77ddb572ad0e55660/node_modules') const pkgVariation1 = path.join(pkgVariationsDir, '165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc')) await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -363,7 +363,7 @@ test('scoped peer dependency is linked', async (t: tape.Test) => {
prepareEmpty(t) prepareEmpty(t)
await addDependenciesToPackage({}, ['for-testing-scoped-peers'], await testDefaults()) await addDependenciesToPackage({}, ['for-testing-scoped-peers'], await testDefaults())
const pkgVariation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/@having/scoped-peer@1.0.0_@scoped+peer@1.0.0/node_modules`) const pkgVariation = path.resolve(`node_modules/.pnpm/@having/scoped-peer@1.0.0_@scoped+peer@1.0.0/node_modules`)
await okFile(t, path.join(pkgVariation, '@having', 'scoped-peer')) await okFile(t, path.join(pkgVariation, '@having', 'scoped-peer'))
await okFile(t, path.join(pkgVariation, '@scoped', 'peer')) await okFile(t, path.join(pkgVariation, '@scoped', 'peer'))
}) })
@@ -373,7 +373,7 @@ test('peer bins are linked', async (t: tape.Test) => {
await addDependenciesToPackage({}, ['for-testing-peers-having-bins'], await testDefaults({ fastUnpack: false })) await addDependenciesToPackage({}, ['for-testing-peers-having-bins'], await testDefaults({ fastUnpack: false }))
const pkgVariation = path.join(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-peer-having-bin@1.0.0_peer-with-bin@1.0.0/node_modules`) const pkgVariation = path.join(`.pnpm/pkg-with-peer-having-bin@1.0.0_peer-with-bin@1.0.0/node_modules`)
await project.isExecutable(path.join(pkgVariation, 'pkg-with-peer-having-bin/node_modules/.bin', 'peer-with-bin')) await project.isExecutable(path.join(pkgVariation, 'pkg-with-peer-having-bin/node_modules/.bin', 'peer-with-bin'))
@@ -385,11 +385,11 @@ test('run pre/postinstall scripts of each variations of packages with peer depen
prepareEmpty(t) prepareEmpty(t)
await addDependenciesToPackage({}, ['parent-of-pkg-with-events-and-peers', 'pkg-with-events-and-peers', 'peer-c@2.0.0'], await testDefaults({ fastUnpack: false })) await addDependenciesToPackage({}, ['parent-of-pkg-with-events-and-peers', 'pkg-with-events-and-peers', 'peer-c@2.0.0'], await testDefaults({ fastUnpack: false }))
const pkgVariation1 = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-events-and-peers@1.0.0_peer-c@1.0.0/node_modules`) const pkgVariation1 = path.resolve(`node_modules/.pnpm/pkg-with-events-and-peers@1.0.0_peer-c@1.0.0/node_modules`)
await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-preinstall.js')) await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-preinstall.js'))
await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-postinstall.js')) await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-postinstall.js'))
const pkgVariation2 = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-events-and-peers@1.0.0_peer-c@2.0.0/node_modules`) const pkgVariation2 = path.resolve(`node_modules/.pnpm/pkg-with-events-and-peers@1.0.0_peer-c@2.0.0/node_modules`)
await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-preinstall.js')) await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-preinstall.js'))
await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-postinstall.js')) await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-postinstall.js'))
}) })
@@ -405,7 +405,7 @@ test('package that resolves its own peer dependency', async (t: tape.Test) => {
t.equal(deepRequireCwd(['pkg-with-resolved-peer', 'peer-c', './package.json']).version, '1.0.0') t.equal(deepRequireCwd(['pkg-with-resolved-peer', 'peer-c', './package.json']).version, '1.0.0')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-resolved-peer@1.0.0/node_modules/pkg-with-resolved-peer`))) t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-resolved-peer@1.0.0/node_modules/pkg-with-resolved-peer`)))
const lockfile = await project.readLockfile() const lockfile = await project.readLockfile()
@@ -443,7 +443,7 @@ test('peer dependency is grouped with dependent when the peer is a top dependenc
message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`, message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`,
}), 'no warning is logged about unresolved peer dep') }), 'no warning is logged about unresolved peer dep')
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`))) t.ok(await exists(path.join(`../node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)))
const lockfile = await readYamlFile<Lockfile>(path.join('..', WANTED_LOCKFILE)) const lockfile = await readYamlFile<Lockfile>(path.join('..', WANTED_LOCKFILE))
@@ -473,7 +473,7 @@ test('peer dependency is grouped correctly with peer installed via separate inst
}, await testDefaults({ reporter, lockfileDir })) }, await testDefaults({ reporter, lockfileDir }))
await addDependenciesToPackage(manifest, ['peer-c@2.0.0'], await testDefaults({ reporter, lockfileDir })) await addDependenciesToPackage(manifest, ['peer-c@2.0.0'], await testDefaults({ reporter, lockfileDir }))
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0_peer-c@2.0.0/node_modules/dep-of-pkg-with-1-dep`))) t.ok(await exists(path.join(`../node_modules/.pnpm/abc@1.0.0_peer-c@2.0.0/node_modules/dep-of-pkg-with-1-dep`)))
}) })
test('peer dependency is grouped with dependent when the peer is a top dependency and external node_modules is used', async (t: tape.Test) => { test('peer dependency is grouped with dependent when the peer is a top dependency and external node_modules is used', async (t: tape.Test) => {
@@ -619,7 +619,7 @@ test('external lockfile: peer dependency is grouped with dependent even after a
}) })
} }
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc-parent-with-ab@1.0.0_peer-c@2.0.0/node_modules/is-positive`))) t.ok(await exists(path.join(`../node_modules/.pnpm/abc-parent-with-ab@1.0.0_peer-c@2.0.0/node_modules/is-positive`)))
}) })
test('regular dependencies are not removed on update from transitive packages that have children with peers resolved from above', async (t: tape.Test) => { test('regular dependencies are not removed on update from transitive packages that have children with peers resolved from above', async (t: tape.Test) => {
@@ -635,7 +635,7 @@ test('regular dependencies are not removed on update from transitive packages th
await addDistTag({ package: 'peer-c', version: '1.0.1', distTag: 'latest' }) await addDistTag({ package: 'peer-c', version: '1.0.1', distTag: 'latest' })
await install(manifest, await testDefaults({ lockfileDir, update: true, depth: 2 })) await install(manifest, await testDefaults({ lockfileDir, update: true, depth: 2 }))
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc-parent-with-ab@1.0.1_peer-c@1.0.1/node_modules/is-positive`))) t.ok(await exists(path.join(`../node_modules/.pnpm/abc-parent-with-ab@1.0.1_peer-c@1.0.1/node_modules/is-positive`)))
}) })
test('peer dependency is resolved from parent package', async (t) => { test('peer dependency is resolved from parent package', async (t) => {

View File

@@ -210,11 +210,11 @@ test('node_modules is pruned after linking', async (t: tape.Test) => {
const manifest = await addDependenciesToPackage({}, ['is-positive@1.0.0'], await testDefaults()) const manifest = await addDependenciesToPackage({}, ['is-positive@1.0.0'], await testDefaults())
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive/package.json`)) t.ok(await exists(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive/package.json`))
await link(['../is-positive'], path.resolve('node_modules'), await testDefaults({ manifest, dir: process.cwd() })) await link(['../is-positive'], path.resolve('node_modules'), await testDefaults({ manifest, dir: process.cwd() }))
t.notOk(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive/package.json`), 'pruned') t.notOk(await exists(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive/package.json`), 'pruned')
}) })
test('relative link uses realpath when contained in a symlinked dir', async (t: tape.Test) => { test('relative link uses realpath when contained in a symlinked dir', async (t: tape.Test) => {

View File

@@ -161,7 +161,7 @@ test("lockfile doesn't lock subdependencies that don't satisfy the new specs", a
await addDependenciesToPackage(manifest, ['react-datetime@1.3.0'], await testDefaults({ save: true })) await addDependenciesToPackage(manifest, ['react-datetime@1.3.0'], await testDefaults({ save: true }))
t.equal( t.equal(
project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/react-datetime@1.3.0/node_modules/react-onclickoutside/package.json`).version, project.requireModule(`.pnpm/react-datetime@1.3.0/node_modules/react-onclickoutside/package.json`).version,
'0.3.4', '0.3.4',
'react-datetime@1.3.0 has react-onclickoutside@0.3.4 in its node_modules') 'react-datetime@1.3.0 has react-onclickoutside@0.3.4 in its node_modules')
@@ -335,8 +335,8 @@ test(`respects ${WANTED_LOCKFILE} for top dependencies`, async (t: tape.Test) =>
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/foo`))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/bar`))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
await Promise.all(pkgs.map((pkgName) => addDistTag(pkgName, '100.1.0', 'latest'))) await Promise.all(pkgs.map((pkgName) => addDistTag(pkgName, '100.1.0', 'latest')))
@@ -361,8 +361,8 @@ test(`respects ${WANTED_LOCKFILE} for top dependencies`, async (t: tape.Test) =>
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/foo`))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/bar`))).version, '100.0.0') t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
}) })
test(`subdeps are updated on repeat install if outer ${WANTED_LOCKFILE} does not match the inner one`, async (t: tape.Test) => { test(`subdeps are updated on repeat install if outer ${WANTED_LOCKFILE} does not match the inner one`, async (t: tape.Test) => {