feat!: reduce nesting in the virtual store

PR #2546
This commit is contained in:
Zoltan Kochan
2020-05-11 21:31:21 +03:00
committed by GitHub
parent 7179cc560b
commit 9fbb74ecb5
26 changed files with 233 additions and 236 deletions

View File

@@ -0,0 +1,14 @@
---
"@pnpm/headless": major
"@pnpm/hoist": major
"@pnpm/modules-cleaner": major
"@pnpm/plugin-commands-rebuild": major
"@pnpm/plugin-commands-store": major
"pnpm": major
"@pnpm/resolve-dependencies": major
"supi": minor
---
The structure of the virtual store directory has changed. No subdirectory is created with the registry name anymore.
So instead of storing packages inside `node_modules/.pnpm/<registry>/<pkg>`, packages are stored
inside `node_modules/.pnpm/<pkg>`.

View File

@@ -297,7 +297,7 @@ export default async (opts: HeadlessOptions) => {
.concat(
depNodes
.filter(({ requiresBuild }) => requiresBuild)
.map(({ relDepPath }) => relDepPath),
.map(({ depPath }) => depPath),
)
} else {
const directNodes = new Set<string>()
@@ -430,7 +430,6 @@ async function linkRootPackages (
})
return
}
const depPath = dp.refToAbsolute(allDeps[alias], alias, opts.registries)
const peripheralLocation = opts.rootDependencies[alias]
// Skipping linked packages
if (!peripheralLocation) {
@@ -442,12 +441,12 @@ async function linkRootPackages (
const isDev = projectSnapshot.devDependencies?.[alias]
const isOptional = projectSnapshot.optionalDependencies?.[alias]
const relDepPath = dp.refToRelative(allDeps[alias], alias)
if (relDepPath === null) return
const pkgSnapshot = lockfile.packages?.[relDepPath]
const depPath = dp.refToRelative(allDeps[alias], alias)
if (depPath === null) return
const pkgSnapshot = lockfile.packages?.[depPath]
if (!pkgSnapshot) return // this won't ever happen. Just making typescript happy
const pkgId = pkgSnapshot.id || depPath || undefined
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot)
const pkgId = pkgSnapshot.id || dp.refToAbsolute(allDeps[alias], alias, opts.registries) || undefined
const pkgInfo = nameVerFromPkgSnapshot(depPath, pkgSnapshot)
rootLogger.debug({
added: {
dependencyType: isDev && 'dev' || isOptional && 'optional' || 'prod',
@@ -488,13 +487,12 @@ async function lockfileToDepGraph (
if (lockfile.packages) {
const pkgSnapshotByLocation = {}
await Promise.all(
Object.keys(lockfile.packages).map(async (relDepPath) => {
const depPath = dp.resolve(opts.registries, relDepPath)
const pkgSnapshot = lockfile.packages![relDepPath]
Object.keys(lockfile.packages).map(async (depPath) => {
const pkgSnapshot = lockfile.packages![depPath]
// TODO: optimize. This info can be already returned by pkgSnapshotToResolution()
const pkgName = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot).name
const pkgName = nameVerFromPkgSnapshot(depPath, pkgSnapshot).name
const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules')
const packageId = packageIdFromSnapshot(relDepPath, pkgSnapshot, opts.registries)
const packageId = packageIdFromSnapshot(depPath, pkgSnapshot, opts.registries)
const pkgLocation = await opts.storeController.getPackageLocation(packageId, pkgName, {
lockfileDir: opts.lockfileDir,
targetEngine: opts.sideEffectsCacheRead && !opts.force && ENGINE_NAME || undefined,
@@ -505,8 +503,8 @@ async function lockfileToDepGraph (
? path.join(modules, pkgName)
: pkgLocation.dir
if (
currentPackages[relDepPath] && R.equals(currentPackages[relDepPath].dependencies, lockfile.packages![relDepPath].dependencies) &&
R.equals(currentPackages[relDepPath].optionalDependencies, lockfile.packages![relDepPath].optionalDependencies)
currentPackages[depPath] && R.equals(currentPackages[depPath].dependencies, lockfile.packages![depPath].dependencies) &&
R.equals(currentPackages[depPath].optionalDependencies, lockfile.packages![depPath].optionalDependencies)
) {
if (await fs.exists(peripheralLocation)) {
return
@@ -516,7 +514,7 @@ async function lockfileToDepGraph (
missing: peripheralLocation,
})
}
const resolution = pkgSnapshotToResolution(relDepPath, pkgSnapshot, opts.registries)
const resolution = pkgSnapshotToResolution(depPath, pkgSnapshot, opts.registries)
progressLogger.debug({
packageId,
requester: opts.lockfileDir,
@@ -543,6 +541,7 @@ async function lockfileToDepGraph (
})
graph[peripheralLocation] = {
children: {},
depPath,
fetchingFiles: fetchResponse.files,
finishing: fetchResponse.finishing,
hasBin: pkgSnapshot.hasBin === true,
@@ -556,7 +555,6 @@ async function lockfileToDepGraph (
packageId,
peripheralLocation,
prepare: pkgSnapshot.prepare === true,
relDepPath,
requiresBuild: pkgSnapshot.requiresBuild === true,
}
pkgSnapshotByLocation[peripheralLocation] = pkgSnapshot
@@ -567,7 +565,7 @@ async function lockfileToDepGraph (
graph,
independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir,
pkgSnapshotsByRelDepPaths: lockfile.packages,
pkgSnapshotsByDepPaths: lockfile.packages,
registries: opts.registries,
sideEffectsCacheRead: opts.sideEffectsCacheRead,
skipped: opts.skipped,
@@ -606,7 +604,7 @@ async function getChildrenPaths (
independentLeaves: boolean,
storeDir: string,
skipped: Set<string>,
pkgSnapshotsByRelDepPaths: {[relDepPath: string]: PackageSnapshot},
pkgSnapshotsByDepPaths: Record<string, PackageSnapshot>,
lockfileDir: string,
sideEffectsCacheRead: boolean,
storeController: StoreController,
@@ -621,9 +619,9 @@ async function getChildrenPaths (
continue
}
const childRelDepPath = dp.refToRelative(allDeps[alias], alias) as string
const childPkgSnapshot = ctx.pkgSnapshotsByRelDepPaths[childRelDepPath]
if (ctx.graph[childDepPath]) {
children[alias] = ctx.graph[childDepPath].peripheralLocation
const childPkgSnapshot = ctx.pkgSnapshotsByDepPaths[childRelDepPath]
if (ctx.graph[childRelDepPath]) {
children[alias] = ctx.graph[childRelDepPath].peripheralLocation
} else if (childPkgSnapshot) {
if (ctx.independentLeaves && packageIsIndependent(childPkgSnapshot)) {
const pkgId = childPkgSnapshot.id || childDepPath
@@ -635,7 +633,7 @@ async function getChildrenPaths (
children[alias] = pkgLocation.dir
} else {
const pkgName = nameVerFromPkgSnapshot(childRelDepPath, childPkgSnapshot).name
children[alias] = path.join(ctx.virtualStoreDir, pkgIdToFilename(childDepPath, ctx.lockfileDir), 'node_modules', pkgName)
children[alias] = path.join(ctx.virtualStoreDir, pkgIdToFilename(childRelDepPath, ctx.lockfileDir), 'node_modules', pkgName)
}
} else if (allDeps[alias].indexOf('file:') === 0) {
children[alias] = path.resolve(ctx.lockfileDir, allDeps[alias].substr(5))
@@ -659,7 +657,7 @@ export interface DependenciesGraphNode {
independent: boolean,
optionalDependencies: Set<string>,
optional: boolean,
relDepPath: string, // this option is only needed for saving pendingBuild when running with --ignore-scripts flag
depPath: string, // this option is only needed for saving pendingBuild when running with --ignore-scripts flag
packageId: string, // TODO: this option is currently only needed when running postinstall scripts but even there it should be not used
isBuilt: boolean,
requiresBuild: boolean,

View File

@@ -43,7 +43,7 @@ test('installing a simple project', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store
await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf')
@@ -274,10 +274,10 @@ test('installing with independent-leaves and hoistPattern=*', async (t) => {
await project.has('.pnpm/node_modules/path-is-absolute')
// wrappy is linked directly from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/wrappy@1.0.2`)
await project.hasNot(`.pnpm/wrappy@1.0.2`)
await project.storeHas('wrappy', '1.0.2')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1`)
await project.has(`.pnpm/rimraf@2.5.1`)
await project.isExecutable('.bin/rimraf')
@@ -556,8 +556,8 @@ test('independent-leaves: installing a simple project', async (t) => {
t.ok(project.requireModule('rimraf'), 'prod dep installed')
t.ok(project.requireModule('is-negative'), 'dev dep installed')
t.ok(project.requireModule('colors'), 'optional dep installed')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.7.1`) // rimraf is not symlinked from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is symlinked from the store
await project.has(`.pnpm/rimraf@2.7.1`) // rimraf is not symlinked from the store
await project.hasNot(`.pnpm/colors@1.2.0`) // colors is symlinked from the store
await project.isExecutable('.bin/rimraf')
@@ -605,7 +605,7 @@ test('installing with hoistPattern=*', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store
await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf')
await project.isExecutable('.pnpm/node_modules/.bin/hello-world-js-bin')
@@ -645,7 +645,7 @@ test('installing with hoistPattern=*', async (t) => {
const modules = await project.readModulesManifest()
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/balanced-match/1.0.0`], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.deepEqual(modules!.hoistedAliases['/balanced-match/1.0.0'], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.end()
})
@@ -665,7 +665,7 @@ test('installing with hoistPattern=* and shamefullyHoist=true', async (t) => {
t.ok(project.requireModule('colors'), 'optional dep installed')
// test that independent leaves is false by default
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/colors@1.2.0`) // colors is not symlinked from the store
await project.has(`.pnpm/colors@1.2.0`) // colors is not symlinked from the store
await project.isExecutable('.bin/rimraf')
await project.isExecutable('.bin/hello-world-js-bin')
@@ -705,7 +705,7 @@ test('installing with hoistPattern=* and shamefullyHoist=true', async (t) => {
const modules = await project.readModulesManifest()
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/balanced-match/1.0.0`], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.deepEqual(modules!.hoistedAliases['/balanced-match/1.0.0'], ['balanced-match'], 'hoisted field populated in .modules.yaml')
t.end()
})
@@ -853,7 +853,7 @@ test('independent-leaves: installing in a workspace', async (t) => {
const projectBar = assertProject(t, path.join(workspaceFixture, 'bar'))
await projectBar.has('foo')
t.ok(await exists(path.join(workspaceFixture, `node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/express@4.16.4/node_modules/array-flatten`)), 'independent package linked')
t.ok(await exists(path.join(workspaceFixture, `node_modules/.pnpm/express@4.16.4/node_modules/array-flatten`)), 'independent package linked')
t.end()
})

View File

@@ -33,13 +33,14 @@ export default async function hoistByLockfile (
)
const deps = [
{
absolutePath: '',
children: directDeps
.reduce((acc, dep) => {
if (acc[dep.alias]) return acc
acc[dep.alias] = dp.resolve(opts.registries, dep.relDepPath)
.reduce((acc, { alias, relDepPath }) => {
if (!acc[alias]) {
acc[alias] = relDepPath
}
return acc
}, {}),
depPath: '',
depth: -1,
location: '',
},
@@ -93,18 +94,18 @@ async function getDependencies (
for (const { pkgSnapshot, relDepPath, next } of step.dependencies) {
const absolutePath = dp.resolve(opts.registries, relDepPath)
const pkgName = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot).name
const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(absolutePath, opts.lockfileDir), 'node_modules')
const modules = path.join(opts.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const independent = opts.getIndependentPackageLocation && packageIsIndependent(pkgSnapshot)
const allDeps = {
...pkgSnapshot.dependencies,
...pkgSnapshot.optionalDependencies,
}
deps.push({
absolutePath,
children: Object.keys(allDeps).reduce((children, alias) => {
children[alias] = dp.refToAbsolute(allDeps[alias], alias, opts.registries)
children[alias] = dp.refToRelative(allDeps[alias], alias)
return children
}, {}),
depPath: relDepPath,
depth,
location: !independent
? path.join(modules, pkgName)
@@ -130,8 +131,8 @@ async function getDependencies (
export interface Dependency {
location: string,
children: {[alias: string]: string},
depPath: string,
depth: number,
absolutePath: string,
}
async function hoistGraph (
@@ -150,7 +151,7 @@ async function hoistGraph (
// sort by depth and then alphabetically
.sort((a, b) => {
const depthDiff = a.depth - b.depth
return depthDiff === 0 ? a.absolutePath.localeCompare(b.absolutePath) : depthDiff
return depthDiff === 0 ? a.depPath.localeCompare(b.depPath) : depthDiff
})
// build the alias map and the id map
.map((depNode) => {
@@ -170,7 +171,7 @@ async function hoistGraph (
return depNode
})
.map(async (depNode) => {
const pkgAliases = aliasesByDependencyPath[depNode.absolutePath]
const pkgAliases = aliasesByDependencyPath[depNode.depPath]
if (!pkgAliases) {
return
}

View File

@@ -165,11 +165,10 @@ function mergeDependencies (projectSnapshot: ProjectSnapshot): { [depName: strin
function getPkgsDepPaths (
registries: Registries,
packages: PackageSnapshots,
): {[depPath: string]: string} {
): {[relDepPath: string]: string} {
const pkgIdsByDepPath = {}
for (const relDepPath of Object.keys(packages)) {
const depPath = dp.resolve(registries, relDepPath)
pkgIdsByDepPath[depPath] = packageIdFromSnapshot(relDepPath, packages[relDepPath], registries)
pkgIdsByDepPath[relDepPath] = packageIdFromSnapshot(relDepPath, packages[relDepPath], registries)
}
return pkgIdsByDepPath
}

View File

@@ -270,7 +270,7 @@ async function _rebuild (
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot)
const independent = ctx.independentLeaves && packageIsIndependent(pkgSnapshot)
const pkgRoot = !independent
? path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules', pkgInfo.name)
? path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules', pkgInfo.name)
: await (
async () => {
const { dir } = await opts.storeController.getPackageLocation(pkgSnapshot.id || depPath, pkgInfo.name, {
@@ -282,7 +282,7 @@ async function _rebuild (
)()
try {
if (!independent) {
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules')
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const binPath = path.join(pkgRoot, 'node_modules', '.bin')
await linkBins(modules, binPath, { warn })
}
@@ -328,7 +328,7 @@ async function _rebuild (
const depPath = dp.resolve(opts.registries, relDepPath)
const pkgSnapshot = pkgSnapshots[relDepPath]
const pkgInfo = nameVerFromPkgSnapshot(relDepPath, pkgSnapshot)
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(depPath, opts.lockfileDir), 'node_modules')
const modules = path.join(ctx.virtualStoreDir, pkgIdToFilename(relDepPath, opts.lockfileDir), 'node_modules')
const binPath = path.join(modules, pkgInfo.name, 'node_modules', '.bin')
return linkBins(modules, binPath, { warn })
})),

View File

@@ -217,7 +217,7 @@ test('rebuild dependencies in correct order', async (t) => {
t.ok(modules)
t.doesNotEqual(modules!.pendingBuilds.length, 0)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot('with-postinstall-a/output.json')
await rebuild.handler({
@@ -231,7 +231,7 @@ test('rebuild dependencies in correct order', async (t) => {
t.ok(modules)
t.equal(modules!.pendingBuilds.length, 0)
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.end()
})
@@ -256,7 +256,7 @@ test('rebuild dependencies in correct order when node_modules uses independent-l
t.ok(modules)
t.doesNotEqual(modules!.pendingBuilds.length, 0)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)
await project.hasNot('with-postinstall-a/output.json')
await rebuild.handler({
@@ -271,7 +271,7 @@ test('rebuild dependencies in correct order when node_modules uses independent-l
t.ok(modules)
t.equal(modules!.pendingBuilds.length, 0)
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.end()
})

View File

@@ -37,17 +37,18 @@ export default async function (maybeOpts: StoreStatusOptions) {
return {
integrity: pkg.resolution['integrity'],
pkgPath: dp.resolve(registries, relDepPath),
relDepPath,
...nameVerFromPkgSnapshot(relDepPath, pkg),
}
})
const cafsDir = path.join(storeDir, 'files')
const modified = await pFilter(pkgs, async ({ integrity, pkgPath, name }) => {
const modified = await pFilter(pkgs, async ({ integrity, pkgPath, relDepPath, name }) => {
const pkgIndexFilePath = integrity
? getFilePathInCafs(cafsDir, integrity, 'index')
: path.join(storeDir, pkgPath, 'integrity.json')
const { files } = await loadJsonFile(pkgIndexFilePath)
return (await dint.check(path.join(virtualStoreDir, pkgIdToFilename(pkgPath, opts.dir), 'node_modules', name), files)) === false
return (await dint.check(path.join(virtualStoreDir, pkgIdToFilename(relDepPath, opts.dir), 'node_modules', name), files)) === false
})
if (reporter) {

View File

@@ -17,7 +17,7 @@ test('CLI fails when store status finds modified packages', async function (t) {
await execa('node', [pnpmBin, 'add', 'is-positive@3.1.0', '--store-dir', storeDir, '--registry', REGISTRY, '--verify-store-integrity'])
await rimraf(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@3.1.0/node_modules/is-positive/index.js`)
await rimraf(`node_modules/.pnpm/is-positive@3.1.0/node_modules/is-positive/index.js`)
let err!: PnpmError
try {

View File

@@ -448,7 +448,7 @@ test('using a custom virtual-store-dir location', async (t: tape.Test) => {
await execPnpm(['install', '--virtual-store-dir=.pnpm'])
t.ok(await exists(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists(`.pnpm/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists('.pnpm/lock.yaml'))
t.ok(await exists('.pnpm/node_modules/once/package.json'))
@@ -457,7 +457,7 @@ test('using a custom virtual-store-dir location', async (t: tape.Test) => {
await execPnpm(['install', '--virtual-store-dir=.pnpm', '--frozen-lockfile'])
t.ok(await exists(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists(`.pnpm/rimraf@2.5.1/node_modules/rimraf/package.json`))
t.ok(await exists('.pnpm/lock.yaml'))
t.ok(await exists('.pnpm/node_modules/once/package.json'))
})

View File

@@ -168,6 +168,7 @@ export interface ResolvedPackage {
hasBundledDependencies: boolean,
independent: boolean,
prepare: boolean,
depPath: string,
requiresBuild: boolean | undefined, // added to fix issue #1201
additionalInfo: {
deprecated?: string,
@@ -504,7 +505,7 @@ async function resolveDependency (
// we can safely assume that it doesn't exist in `node_modules`
currentLockfileContainsTheDep &&
options.relDepPath && options.dependencyLockfile &&
await exists(path.join(ctx.virtualStoreDir, `${pkgIdToFilename(options.depPath, ctx.prefix)}/node_modules/${nameVerFromPkgSnapshot(options.relDepPath, options.dependencyLockfile).name}/package.json`)) &&
await exists(path.join(ctx.virtualStoreDir, `${pkgIdToFilename(options.relDepPath, ctx.prefix)}/node_modules/${nameVerFromPkgSnapshot(options.relDepPath, options.dependencyLockfile).name}/package.json`)) &&
(options.currentDepth > 0 || wantedDependency.alias && await exists(path.join(ctx.modulesDir, wantedDependency.alias))))
if (!proceed && depIsLinked) {
@@ -695,6 +696,7 @@ async function resolveDependency (
ctx.resolvedPackagesByPackageId[pkgResponse.body.id] = getResolvedPackage({
dependencyLockfile: options.dependencyLockfile,
depPath: dp.relative(ctx.registries, pkg.name, pkgResponse.body.id),
force: ctx.force,
hasBin,
pkg,
@@ -735,6 +737,7 @@ async function resolveDependency (
function getResolvedPackage (
options: {
dependencyLockfile?: PackageSnapshot,
depPath: string,
force: boolean,
hasBin: boolean,
pkg: PackageManifest,
@@ -756,6 +759,7 @@ function getResolvedPackage (
peerDependencies,
peerDependenciesMeta: options.pkg.peerDependenciesMeta,
},
depPath: options.depPath,
dev: options.wantedDependency.dev,
engineCache: !options.force && options.pkgResponse.body.cacheByEngine?.[ENGINE_NAME],
fetchingBundledManifest: options.pkgResponse.bundledManifest,

View File

@@ -43,7 +43,6 @@ import {
Registries,
} from '@pnpm/types'
import rimraf = require('@zkochan/rimraf')
import * as dp from 'dependency-path'
import isInnerLink = require('is-inner-link')
import isSubdir = require('is-subdir')
import pFilter = require('p-filter')
@@ -72,7 +71,7 @@ import linkPackages, {
DependenciesGraphNode,
Project as ProjectToLink,
} from './link'
import { absolutePathToRef } from './lockfile'
import { depPathToRef } from './lockfile'
export type DependenciesMutation = (
{
@@ -742,15 +741,14 @@ async function installInContext (
)
ctx.pendingBuilds = ctx.pendingBuilds
.filter((relDepPath) => !result.removedDepPaths.has(dp.resolve(ctx.registries, relDepPath)))
.filter((relDepPath) => !result.removedDepPaths.has(relDepPath))
if (opts.ignoreScripts) {
// we can use concat here because we always only append new packages, which are guaranteed to not be there by definition
ctx.pendingBuilds = ctx.pendingBuilds
.concat(
result.newDepPaths
.filter((depPath) => result.depGraph[depPath].requiresBuild)
.map((depPath) => dp.relative(ctx.registries, result.depGraph[depPath].name, depPath)),
.filter((depPath) => result.depGraph[depPath].requiresBuild),
)
}
@@ -962,7 +960,7 @@ function addDirectDependenciesToLockfile (
for (const alias of allDeps) {
if (directDependenciesByAlias[alias]) {
const dep = directDependenciesByAlias[alias]
const ref = absolutePathToRef(dep.id, {
const ref = depPathToRef(dep.id, {
alias: dep.alias,
realName: dep.name,
registries,

View File

@@ -21,12 +21,11 @@ import { DependenciesTree, LinkedDependency } from '@pnpm/resolve-dependencies'
import { StoreController } from '@pnpm/store-controller-types'
import symlinkDependency, { symlinkDirectRootDependency } from '@pnpm/symlink-dependency'
import { ProjectManifest, Registries } from '@pnpm/types'
import * as dp from 'dependency-path'
import fs = require('mz/fs')
import pLimit from 'p-limit'
import path = require('path')
import R = require('ramda')
import { absolutePathToRef } from './lockfile'
import { depPathToRef } from './lockfile'
import resolvePeers, {
DependenciesGraph,
DependenciesGraphNode,
@@ -93,7 +92,7 @@ export default async function linkPackages (
// The `Creating dependency graph` is not good to report in all cases as
// sometimes node_modules is alread up-to-date
// logger.info(`Creating dependency graph`)
const { depGraph, projectsDirectAbsolutePathsByAlias } = resolvePeers({
const { depGraph, projectsDirectPathsByAlias } = resolvePeers({
dependenciesTree,
independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir,
@@ -102,12 +101,12 @@ export default async function linkPackages (
virtualStoreDir: opts.virtualStoreDir,
})
for (const { id } of projects) {
for (const [alias, depPath] of R.toPairs(projectsDirectAbsolutePathsByAlias[id])) {
for (const [alias, depPath] of R.toPairs(projectsDirectPathsByAlias[id])) {
const depNode = depGraph[depPath]
if (depNode.isPure) continue
const projectSnapshot = opts.wantedLockfile.importers[id]
const ref = absolutePathToRef(depPath, {
const ref = depPathToRef(depPath, {
alias,
realName: depNode.name,
registries: opts.registries,
@@ -127,17 +126,16 @@ export default async function linkPackages (
? opts.afterAllResolvedHook(newLockfile)
: newLockfile
let depNodes = R.values(depGraph).filter(({ absolutePath, name, packageId }) => {
const relDepPath = dp.relative(opts.registries, name, absolutePath)
if (newWantedLockfile.packages?.[relDepPath] && !newWantedLockfile.packages[relDepPath].optional) {
opts.skipped.delete(relDepPath)
let depNodes = R.values(depGraph).filter(({ depPath, packageId }) => {
if (newWantedLockfile.packages?.[depPath] && !newWantedLockfile.packages[depPath].optional) {
opts.skipped.delete(depPath)
return true
}
if (opts.wantedToBeSkippedPackageIds.has(packageId)) {
opts.skipped.add(relDepPath)
opts.skipped.add(depPath)
return false
}
opts.skipped.delete(relDepPath)
opts.skipped.delete(depPath)
return true
})
if (!opts.include.dependencies) {
@@ -205,15 +203,15 @@ export default async function linkPackages (
const rootDepsByDepPath = depNodes
.filter(({ depth }) => depth === 0)
.reduce((acc, depNode) => {
acc[depNode.absolutePath] = depNode
acc[depNode.depPath] = depNode
return acc
}, {}) as {[absolutePath: string]: DependenciesGraphNode}
}, {})
await Promise.all(projects.map(({ id, manifest, modulesDir, rootDir }) => {
const directAbsolutePathsByAlias = projectsDirectAbsolutePathsByAlias[id]
const directPathsByAlias = projectsDirectPathsByAlias[id]
return Promise.all(
Object.keys(directAbsolutePathsByAlias)
.map((rootAlias) => ({ rootAlias, depGraphNode: rootDepsByDepPath[directAbsolutePathsByAlias[rootAlias]] }))
Object.keys(directPathsByAlias)
.map((rootAlias) => ({ rootAlias, depGraphNode: rootDepsByDepPath[directPathsByAlias[rootAlias]] }))
.filter(({ depGraphNode }) => depGraphNode)
.map(async ({ rootAlias, depGraphNode }) => {
if (
@@ -242,11 +240,11 @@ export default async function linkPackages (
newWantedLockfile.lockfileVersion = LOCKFILE_VERSION
}
await Promise.all(pendingRequiresBuilds.map(async ({ absoluteDepPath, relativeDepPath }) => {
const depNode = depGraph[absoluteDepPath]
await Promise.all(pendingRequiresBuilds.map(async (depPath) => {
const depNode = depGraph[depPath]
if (!depNode.fetchingBundledManifest) {
// This should never ever happen
throw new Error(`Cannot create ${WANTED_LOCKFILE} because raw manifest (aka package.json) wasn't fetched for "${absoluteDepPath}"`)
throw new Error(`Cannot create ${WANTED_LOCKFILE} because raw manifest (aka package.json) wasn't fetched for "${depPath}"`)
}
const filesResponse = await depNode.fetchingFiles()
// The npm team suggests to always read the package.json for deciding whether the package has lifecycle scripts
@@ -259,8 +257,8 @@ export default async function linkPackages (
// TODO: try to cover with unit test the case when entry is no longer available in lockfile
// It is an edge that probably happens if the entry is removed during lockfile prune
if (depNode.requiresBuild && newWantedLockfile.packages![relativeDepPath]) {
newWantedLockfile.packages![relativeDepPath].requiresBuild = true
if (depNode.requiresBuild && newWantedLockfile.packages![depPath]) {
newWantedLockfile.packages![depPath].requiresBuild = true
}
}))
@@ -272,10 +270,9 @@ export default async function linkPackages (
) {
const packages = opts.currentLockfile.packages || {}
if (newWantedLockfile.packages) {
for (const relDepPath in newWantedLockfile.packages) { // tslint:disable-line:forin
const depPath = dp.resolve(opts.registries, relDepPath)
for (const depPath in newWantedLockfile.packages) { // tslint:disable-line:forin
if (depGraph[depPath]) {
packages[relDepPath] = newWantedLockfile.packages[relDepPath]
packages[depPath] = newWantedLockfile.packages[depPath]
}
}
}
@@ -378,7 +375,6 @@ async function linkNewPackages (
if (opts.force) {
newDepPathsSet = new Set(
wantedRelDepPaths
.map((relDepPath) => dp.resolve(opts.registries, relDepPath))
// when installing a new package, not all the nodes are analyzed
// just skip the ones that are in the lockfile but were not analyzed
.filter((depPath) => depGraph[depPath]),
@@ -396,11 +392,10 @@ async function linkNewPackages (
if (!opts.force && currentLockfile.packages && wantedLockfile.packages) {
// add subdependencies that have been updated
// TODO: no need to relink everything. Can be relinked only what was changed
for (const relDepPath of wantedRelDepPaths) {
if (currentLockfile.packages[relDepPath] &&
(!R.equals(currentLockfile.packages[relDepPath].dependencies, wantedLockfile.packages[relDepPath].dependencies) ||
!R.equals(currentLockfile.packages[relDepPath].optionalDependencies, wantedLockfile.packages[relDepPath].optionalDependencies))) {
const depPath = dp.resolve(opts.registries, relDepPath)
for (const depPath of wantedRelDepPaths) {
if (currentLockfile.packages[depPath] &&
(!R.equals(currentLockfile.packages[depPath].dependencies, wantedLockfile.packages[depPath].dependencies) ||
!R.equals(currentLockfile.packages[depPath].optionalDependencies, wantedLockfile.packages[depPath].optionalDependencies))) {
// TODO: come up with a test that triggers the usecase of depGraph[depPath] undefined
// see related issue: https://github.com/pnpm/pnpm/issues/870
@@ -446,11 +441,10 @@ async function selectNewFromWantedDeps (
const prevRelDepPaths = new Set(R.keys(currentLockfile.packages))
await Promise.all(
wantedRelDepPaths.map(
async (wantedRelDepPath: string) => {
const depPath = dp.resolve(opts.registries, wantedRelDepPath)
async (depPath: string) => {
const depNode = depGraph[depPath]
if (!depNode) return
if (prevRelDepPaths.has(wantedRelDepPath)) {
if (prevRelDepPaths.has(depPath)) {
if (await fs.exists(depNode.peripheralLocation)) {
return
}

View File

@@ -3,8 +3,8 @@ import { Registries } from '@pnpm/types'
import { getRegistryByPackageName } from 'dependency-path'
import encodeRegistry = require('encode-registry')
export function absolutePathToRef (
absolutePath: string,
export function depPathToRef (
depPath: string,
opts: {
alias: string,
realName: string,
@@ -12,15 +12,15 @@ export function absolutePathToRef (
resolution: Resolution,
},
) {
if (opts.resolution.type) return absolutePath
if (opts.resolution.type) return depPath
const registryName = encodeRegistry(getRegistryByPackageName(opts.registries, opts.realName))
if (absolutePath.startsWith(`${registryName}/`) && !absolutePath.includes('/-/')) {
if (opts.alias === opts.realName) {
const ref = absolutePath.replace(`${registryName}/${opts.realName}/`, '')
if (!ref.includes('/')) return ref
}
return absolutePath.replace(`${registryName}/`, '/')
if (depPath.startsWith(`${registryName}/`) && !depPath.includes('/-/')) {
depPath = depPath.replace(`${registryName}/`, '/')
}
return absolutePath
if (opts.alias === opts.realName) {
const ref = depPath.replace(`/${opts.realName}/`, '')
if (!ref.includes('/')) return ref
}
return depPath
}

View File

@@ -34,7 +34,7 @@ export interface DependenciesGraphNode {
independent: boolean,
optionalDependencies: Set<string>,
depth: number,
absolutePath: string,
depPath: string,
prod: boolean,
dev: boolean,
optional: boolean,
@@ -80,10 +80,10 @@ export default function (
},
): {
depGraph: DependenciesGraph,
projectsDirectAbsolutePathsByAlias: {[id: string]: {[alias: string]: string}},
projectsDirectPathsByAlias: {[id: string]: {[alias: string]: string}},
} {
const depGraph: DependenciesGraph = {}
const absolutePathsByNodeId = {}
const pathsByNodeId = {}
for (const { directNodeIdsByAlias, topParents, rootDir } of opts.projects) {
const pkgsByName = Object.assign(
@@ -108,11 +108,11 @@ export default function (
)
resolvePeersOfChildren(directNodeIdsByAlias, pkgsByName, {
absolutePathsByNodeId,
dependenciesTree: opts.dependenciesTree,
depGraph,
independentLeaves: opts.independentLeaves,
lockfileDir: opts.lockfileDir,
pathsByNodeId,
purePkgs: new Set(),
rootDir,
strictPeerDependencies: opts.strictPeerDependencies,
@@ -122,21 +122,21 @@ export default function (
R.values(depGraph).forEach((node) => {
node.children = R.keys(node.children).reduce((acc, alias) => {
acc[alias] = absolutePathsByNodeId[node.children[alias]]
acc[alias] = pathsByNodeId[node.children[alias]]
return acc
}, {})
})
const projectsDirectAbsolutePathsByAlias: {[id: string]: {[alias: string]: string}} = {}
const projectsDirectPathsByAlias: {[id: string]: {[alias: string]: string}} = {}
for (const { directNodeIdsByAlias, id } of opts.projects) {
projectsDirectAbsolutePathsByAlias[id] = R.keys(directNodeIdsByAlias).reduce((rootAbsolutePathsByAlias, alias) => {
rootAbsolutePathsByAlias[alias] = absolutePathsByNodeId[directNodeIdsByAlias[alias]]
return rootAbsolutePathsByAlias
projectsDirectPathsByAlias[id] = R.keys(directNodeIdsByAlias).reduce((rootPathsByAlias, alias) => {
rootPathsByAlias[alias] = pathsByNodeId[directNodeIdsByAlias[alias]]
return rootPathsByAlias
}, {})
}
return {
depGraph,
projectsDirectAbsolutePathsByAlias,
projectsDirectPathsByAlias,
}
}
@@ -145,7 +145,7 @@ function resolvePeersOfNode (
parentParentPkgs: ParentRefs,
ctx: {
dependenciesTree: DependenciesTree,
absolutePathsByNodeId: {[nodeId: string]: string},
pathsByNodeId: {[nodeId: string]: string},
depGraph: DependenciesGraph,
independentLeaves: boolean,
virtualStoreDir: string,
@@ -156,8 +156,8 @@ function resolvePeersOfNode (
},
): {[alias: string]: string} {
const node = ctx.dependenciesTree[nodeId]
if (ctx.purePkgs.has(node.resolvedPackage.id) && ctx.depGraph[node.resolvedPackage.id].depth <= node.depth) {
ctx.absolutePathsByNodeId[nodeId] = node.resolvedPackage.id
if (ctx.purePkgs.has(node.resolvedPackage.depPath) && ctx.depGraph[node.resolvedPackage.depPath].depth <= node.depth) {
ctx.pathsByNodeId[nodeId] = node.resolvedPackage.depPath
return {}
}
@@ -184,14 +184,14 @@ function resolvePeersOfNode (
const allResolvedPeers = Object.assign(unknownResolvedPeersOfChildren, resolvedPeers)
let modules: string
let absolutePath: string
const localLocation = path.join(ctx.virtualStoreDir, pkgIdToFilename(node.resolvedPackage.id, ctx.lockfileDir))
let depPath: string
const localLocation = path.join(ctx.virtualStoreDir, pkgIdToFilename(node.resolvedPackage.depPath, ctx.lockfileDir))
const isPure = R.isEmpty(allResolvedPeers)
if (isPure) {
modules = path.join(localLocation, 'node_modules')
absolutePath = node.resolvedPackage.id
depPath = node.resolvedPackage.depPath
if (R.isEmpty(node.resolvedPackage.peerDependencies)) {
ctx.purePkgs.add(node.resolvedPackage.id)
ctx.purePkgs.add(node.resolvedPackage.depPath)
}
} else {
const peersFolderSuffix = createPeersFolderSuffix(
@@ -200,11 +200,11 @@ function resolvePeersOfNode (
version: ctx.dependenciesTree[allResolvedPeers[alias]].resolvedPackage.version,
})))
modules = path.join(`${localLocation}${peersFolderSuffix}`, 'node_modules')
absolutePath = `${node.resolvedPackage.id}${peersFolderSuffix}`
depPath = `${node.resolvedPackage.depPath}${peersFolderSuffix}`
}
ctx.absolutePathsByNodeId[nodeId] = absolutePath
if (!ctx.depGraph[absolutePath] || ctx.depGraph[absolutePath].depth > node.depth) {
ctx.pathsByNodeId[nodeId] = depPath
if (!ctx.depGraph[depPath] || ctx.depGraph[depPath].depth > node.depth) {
const independent = ctx.independentLeaves && node.resolvedPackage.independent
const centralLocation = node.resolvedPackage.engineCache || path.join(node.resolvedPackage.path, 'node_modules', node.resolvedPackage.name)
const peripheralLocation = !independent
@@ -222,10 +222,10 @@ function resolvePeersOfNode (
}
}
}
ctx.depGraph[absolutePath] = {
absolutePath,
ctx.depGraph[depPath] = {
additionalInfo: node.resolvedPackage.additionalInfo,
children: Object.assign(children, resolvedPeers),
depPath,
depth: node.depth,
dev: node.resolvedPackage.dev,
fetchingBundledManifest: node.resolvedPackage.fetchingBundledManifest,
@@ -258,7 +258,7 @@ function resolvePeersOfChildren (
},
parentPkgs: ParentRefs,
ctx: {
absolutePathsByNodeId: {[nodeId: string]: string},
pathsByNodeId: {[nodeId: string]: string},
independentLeaves: boolean,
virtualStoreDir: string,
purePkgs: Set<string>,

View File

@@ -15,7 +15,7 @@ import {
import * as dp from 'dependency-path'
import getNpmTarballUrl from 'get-npm-tarball-url'
import R = require('ramda')
import { absolutePathToRef } from './lockfile'
import { depPathToRef } from './lockfile'
import { DependenciesGraph } from './resolvePeers'
export default function (
@@ -25,26 +25,24 @@ export default function (
registries: Registries,
): {
newLockfile: Lockfile,
pendingRequiresBuilds: PendingRequiresBuild[],
pendingRequiresBuilds: string[],
} {
lockfile.packages = lockfile.packages || {}
const pendingRequiresBuilds = [] as PendingRequiresBuild[]
const pendingRequiresBuilds = [] as string[]
for (const depPath of Object.keys(depGraph)) {
const depNode = depGraph[depPath]
const relDepPath = dp.relative(registries, depNode.name, depPath)
const result = R.partition(
const [updatedOptionalDeps, updatedDeps] = R.partition(
(child) => depNode.optionalDependencies.has(depGraph[child.depPath].name),
Object.keys(depNode.children).map((alias) => ({ alias, depPath: depNode.children[alias] })),
)
lockfile.packages[relDepPath] = toLockfileDependency(pendingRequiresBuilds, depNode.additionalInfo, {
lockfile.packages[depPath] = toLockfileDependency(pendingRequiresBuilds, depNode.additionalInfo, {
depGraph,
depPath,
prevSnapshot: lockfile.packages[relDepPath],
prevSnapshot: lockfile.packages[depPath],
registries,
registry: dp.getRegistryByPackageName(registries, depNode.name),
relDepPath,
updatedDeps: result[1],
updatedOptionalDeps: result[0],
updatedDeps,
updatedOptionalDeps,
})
}
const warn = (message: string) => logger.warn({ message, prefix })
@@ -54,13 +52,8 @@ export default function (
}
}
export interface PendingRequiresBuild {
relativeDepPath: string,
absoluteDepPath: string,
}
function toLockfileDependency (
pendingRequiresBuilds: PendingRequiresBuild[],
pendingRequiresBuilds: string[],
pkg: {
deprecated?: string,
peerDependencies?: Dependencies,
@@ -76,7 +69,6 @@ function toLockfileDependency (
},
opts: {
depPath: string,
relDepPath: string,
registry: string,
registries: Registries,
updatedDeps: Array<{alias: string, depPath: string}>,
@@ -88,7 +80,7 @@ function toLockfileDependency (
const depNode = opts.depGraph[opts.depPath]
const lockfileResolution = toLockfileResolution(
{ name: depNode.name, version: depNode.version },
opts.relDepPath,
opts.depPath,
depNode.resolution,
opts.registry,
)
@@ -108,7 +100,7 @@ function toLockfileDependency (
resolution: lockfileResolution,
}
// tslint:disable:no-string-literal
if (dp.isAbsolute(opts.relDepPath)) {
if (dp.isAbsolute(opts.depPath)) {
result['name'] = depNode.name
// There is no guarantee that a non-npmjs.org-hosted package
@@ -131,7 +123,7 @@ function toLockfileDependency (
if (depNode.optional) {
result['optional'] = true
}
if (opts.relDepPath[0] !== '/' && opts.depPath !== depNode.packageId) {
if (opts.depPath[0] !== '/' && !depNode.packageId.endsWith(opts.depPath)) {
result['id'] = depNode.packageId
}
if (pkg.peerDependencies) {
@@ -185,10 +177,7 @@ function toLockfileDependency (
result['requiresBuild'] = true
}
} else {
pendingRequiresBuilds.push({
absoluteDepPath: opts.depPath,
relativeDepPath: opts.relDepPath,
})
pendingRequiresBuilds.push(opts.depPath)
}
depNode.requiresBuild = result['requiresBuild']
// tslint:enable:no-string-literal
@@ -210,7 +199,7 @@ function updateResolvedDeps (
const depNode = depGraph[depPath]
return [
alias,
absolutePathToRef(depNode.absolutePath, {
depPathToRef(depNode.depPath, {
alias,
realName: depNode.name,
registries,
@@ -230,12 +219,12 @@ function toLockfileResolution (
name: string,
version: string,
},
relDepPath: string,
depPath: string,
resolution: Resolution,
registry: string,
): LockfileResolution {
// tslint:disable:no-string-literal
if (dp.isAbsolute(relDepPath) || resolution.type !== undefined || !resolution['integrity']) {
if (dp.isAbsolute(depPath) || resolution.type !== undefined || !resolution['integrity']) {
return resolution as LockfileResolution
}
const base = registry !== resolution['registry'] ? { registry: resolution['registry'] } : {}

View File

@@ -119,7 +119,7 @@ test('a subdependency is from a github repo with different name', async (t: tape
await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/hi')
await project.isExecutable('has-aliased-git-dependency/node_modules/.bin/szia')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/has-say-hi-peer@1.0.0_say-hi@1.0.0/node_modules/has-say-hi-peer`)),
t.ok(await exists(path.resolve(`node_modules/.pnpm/has-say-hi-peer@1.0.0_say-hi@1.0.0/node_modules/has-say-hi-peer`)),
'aliased name used to resolve a peer dependency')
})

View File

@@ -1,6 +1,5 @@
import assertProject from '@pnpm/assert-project'
import { prepareEmpty, preparePackages } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import rimraf = require('@zkochan/rimraf')
import fs = require('fs')
import path = require('path')
@@ -114,7 +113,7 @@ test('should rehoist when uninstalling a package', async (t: tape.Test) => {
const modules = await project.readModulesManifest()
t.ok(modules)
t.deepEqual(modules!.hoistedAliases[`localhost+${REGISTRY_MOCK_PORT}/debug/2.6.9`], ['debug'], 'new hoisted debug added to .modules.yaml')
t.deepEqual(modules!.hoistedAliases[`/debug/2.6.9`], ['debug'], 'new hoisted debug added to .modules.yaml')
})
test('should rehoist after running a general install', async (t) => {
@@ -211,7 +210,7 @@ test('hoist by alias', async (t: tape.Test) => {
const modules = await project.readModulesManifest()
t.ok(modules)
t.deepEqual(modules!.hoistedAliases, { [`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.1.0`]: [ 'dep' ] }, '.modules.yaml updated correctly')
t.deepEqual(modules!.hoistedAliases, { [`/dep-of-pkg-with-1-dep/100.1.0`]: [ 'dep' ] }, '.modules.yaml updated correctly')
})
test('should remove aliased hoisted dependencies', async (t) => {
@@ -434,8 +433,8 @@ test('hoist when updating in one of the workspace projects', async (t) => {
{
const modulesManifest = await rootModules.readModulesManifest()
t.deepEqual(modulesManifest?.hoistedAliases, {
[`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
[`localhost+${REGISTRY_MOCK_PORT}/foo/100.0.0`]: ['foo'],
[`/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
[`/foo/100.0.0`]: ['foo'],
})
}
@@ -462,7 +461,7 @@ test('hoist when updating in one of the workspace projects', async (t) => {
{
const modulesManifest = await rootModules.readModulesManifest()
t.deepEqual(modulesManifest?.hoistedAliases, {
[`localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
[`/dep-of-pkg-with-1-dep/100.0.0`]: ['dep-of-pkg-with-1-dep'],
})
}
})

View File

@@ -77,8 +77,8 @@ test('installing with independent-leaves and hoistPattern', async (t) => {
await project.has('.pnpm/node_modules/dep-of-pkg-with-1-dep')
// wrappy is linked directly from the store
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.0.0`)
await project.hasNot(`.pnpm/dep-of-pkg-with-1-dep@100.0.0`)
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0`)
await project.has(`.pnpm/pkg-with-1-dep@100.0.0`)
})

View File

@@ -178,14 +178,14 @@ test("reports child's output", async (t: tape.Test) => {
await addDependenciesToPackage({}, ['count-to-10'], await testDefaults({ fastUnpack: false, reporter }))
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`,
depPath: '/count-to-10/1.0.0',
level: 'debug',
name: 'pnpm:lifecycle',
script: 'node postinstall',
stage: 'postinstall',
} as LifecycleLog))
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`,
depPath: '/count-to-10/1.0.0',
level: 'debug',
line: '1',
name: 'pnpm:lifecycle',
@@ -193,7 +193,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stdout',
} as LifecycleLog))
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`,
depPath: '/count-to-10/1.0.0',
level: 'debug',
line: '2',
name: 'pnpm:lifecycle',
@@ -201,7 +201,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stdout',
} as LifecycleLog))
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`,
depPath: '/count-to-10/1.0.0',
level: 'debug',
line: '6',
name: 'pnpm:lifecycle',
@@ -209,7 +209,7 @@ test("reports child's output", async (t: tape.Test) => {
stdio: 'stderr',
} as LifecycleLog))
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/count-to-10/1.0.0`,
depPath: '/count-to-10/1.0.0',
exitCode: 0,
level: 'debug',
name: 'pnpm:lifecycle',
@@ -227,7 +227,7 @@ test("reports child's close event", async (t: tape.Test) => {
t.fail()
} catch (err) {
t.ok(reporter.calledWithMatch({
depPath: `localhost+${REGISTRY_MOCK_PORT}/failing-postinstall/1.0.0`,
depPath: '/failing-postinstall/1.0.0',
exitCode: 1,
level: 'debug',
name: 'pnpm:lifecycle',
@@ -262,7 +262,7 @@ test('run lifecycle scripts of dependent packages after running scripts of their
await addDependenciesToPackage({}, ['with-postinstall-a'], await testDefaults({ fastUnpack: false }))
t.ok(+project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
t.ok(+project.requireModule(`.pnpm/with-postinstall-b@1.0.0/node_modules/with-postinstall-b/output.json`)[0] < +project.requireModule('with-postinstall-a/output.json')[0])
})
test('run prepare script for git-hosted dependencies', async (t: tape.Test) => {

View File

@@ -457,10 +457,10 @@ test('concurrent circular deps', async (t: tape.Test) => {
const m = project.requireModule('es6-iterator')
t.ok(m, 'es6-iterator is installed')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es6-iterator@2.0.0/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es6-iterator@2.0.1/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es5-ext@0.10.31/node_modules/es6-iterator`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/es5-ext@0.10.31/node_modules/es6-symbol`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/es6-iterator@2.0.0/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/es6-iterator@2.0.1/node_modules/es5-ext`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/es5-ext@0.10.31/node_modules/es6-iterator`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/es5-ext@0.10.31/node_modules/es6-symbol`)))
})
test('concurrent installation of the same packages', async (t) => {
@@ -737,7 +737,7 @@ test('lockfile locks npm dependencies', async (t: tape.Test) => {
status: 'found_in_store',
} as ProgressLog), 'logged that package was found in store')
const m = project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)
const m = project.requireModule(`.pnpm/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)
t.equal(m.version, '100.0.0', `dependency specified in ${WANTED_LOCKFILE} is installed`)
})
@@ -837,7 +837,7 @@ test("don't fail on case insensitive filesystems when package has 2 files with s
test('reinstalls missing packages to node_modules', async (t) => {
prepareEmpty(t)
const reporter = sinon.spy()
const depLocation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`)
const depLocation = path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)
const missingDepLog = {
level: 'debug',
missing: depLocation,
@@ -873,7 +873,7 @@ test('reinstalls missing packages to node_modules', async (t) => {
test('reinstalls missing packages to node_modules during headless install', async (t) => {
prepareEmpty(t)
const reporter = sinon.spy()
const depLocation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`)
const depLocation = path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)
const missingDepLog = {
level: 'debug',
missing: depLocation,
@@ -1002,7 +1002,7 @@ test('all the subdeps of dependencies are linked when a node_modules is partiall
], await testDefaults({ preferFrozenLockfile: false }))
t.deepEqual(
await fs.readdir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobarqar@1.0.1/node_modules`)),
await fs.readdir(path.resolve(`node_modules/.pnpm/foobarqar@1.0.1/node_modules`)),
[
'bar',
'foo',
@@ -1089,7 +1089,7 @@ test('subdep symlinks are updated if the lockfile has new subdep versions specif
},
], await testDefaults({ preferFrozenLockfile: false }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/dep-of-pkg-with-1-dep/package.json`)))
})
test('fail if none of the available resolvers support a version spec', async (t: tape.Test) => {

View File

@@ -66,8 +66,8 @@ test('install only the dependencies of the specified importer', async (t) => {
await projects['project-2'].hasNot('is-negative')
const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`)
await rootModules.has(`.pnpm/is-positive@1.0.0`)
await rootModules.hasNot(`.pnpm/is-negative@1.0.0`)
})
test('install only the dependencies of the specified importer. The current lockfile has importers that do not exist anymore', async (t) => {
@@ -194,9 +194,9 @@ test('dependencies of other importers are not pruned when installing for a subse
await projects['project-2'].has('is-negative')
const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`)
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`)
await rootModules.has(`.pnpm/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/is-positive@1.0.0`)
await rootModules.has(`.pnpm/is-negative@1.0.0`)
const lockfile = await rootModules.readCurrentLockfile()
t.deepEqual(Object.keys(lockfile.importers), ['project-1', 'project-2'])
@@ -259,9 +259,9 @@ test('dependencies of other importers are not pruned when (headless) installing
await projects['project-2'].has('is-negative')
const rootModules = assertProject(t, process.cwd())
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`)
await rootModules.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`)
await rootModules.has(`.pnpm/is-positive@2.0.0`)
await rootModules.hasNot(`.pnpm/is-positive@1.0.0`)
await rootModules.has(`.pnpm/is-negative@1.0.0`)
})
test('adding a new dev dependency to project that uses a shared lockfile', async (t) => {
@@ -532,9 +532,9 @@ test('partial installation in a monorepo does not remove dependencies of other w
},
], await testDefaults())
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@2.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/is-positive@2.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
})
test('partial installation in a monorepo does not remove dependencies of other workspace projects when lockfile is frozen', async (t: tape.Test) => {
@@ -625,9 +625,9 @@ test('partial installation in a monorepo does not remove dependencies of other w
},
], await testDefaults({ frozenLockfile: true }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-1-dep@100.0.0/node_modules/pkg-with-1-dep`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/dep-of-pkg-with-1-dep@100.1.0/node_modules/dep-of-pkg-with-1-dep`)))
})
test('adding a new dependency with the workspace: protocol', async (t) => {
@@ -829,8 +829,8 @@ test('remove dependencies of a project that was removed from the workspace (duri
t.deepEqual(Object.keys(currentLockfile.importers), ['project-1', 'project-2'])
t.deepEqual(Object.keys(currentLockfile.packages), ['/is-negative/1.0.0', '/is-positive/1.0.0'])
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`)
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`)
await project.has(`.pnpm/is-positive@1.0.0`)
await project.has(`.pnpm/is-negative@1.0.0`)
}
await mutateModules(importers.slice(0, 1), await testDefaults({ preferFrozenLockfile: false }))
@@ -839,7 +839,7 @@ test('remove dependencies of a project that was removed from the workspace (duri
t.deepEqual(Object.keys(currentLockfile.importers), ['project-1'])
t.deepEqual(Object.keys(currentLockfile.packages), ['/is-positive/1.0.0'])
await project.has(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0`)
await project.hasNot(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-negative@1.0.0`)
await project.has(`.pnpm/is-positive@1.0.0`)
await project.hasNot(`.pnpm/is-negative@1.0.0`)
}
})

View File

@@ -70,7 +70,7 @@ test('skip optional dependency that does not support the current OS', async (t:
await project.hasNot('not-compatible-with-any-os')
await project.storeHas('not-compatible-with-any-os', '1.0.0')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/dep-of-optional-pkg@1.0.0`)), "isn't linked into node_modules")
t.notOk(await exists(path.resolve(`node_modules/.pnpm/dep-of-optional-pkg@1.0.0`)), "isn't linked into node_modules")
const lockfile = await project.readLockfile()
t.ok(lockfile.packages['/not-compatible-with-any-os/1.0.0'], 'lockfile contains optional dependency')
@@ -213,8 +213,8 @@ test('optional subdependency is skipped', async (t: tape.Test) => {
t.deepEqual(modulesInfo.skipped, ['/not-compatible-with-any-os/1.0.0'], 'optional subdep skipped')
}
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-optional@1.0.0`), 'regular dependency linked')
t.notOk(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/not-compatible-with-any-os@1.0.0`), 'optional dependency not linked')
t.ok(await exists(`node_modules/.pnpm/pkg-with-optional@1.0.0`), 'regular dependency linked')
t.notOk(await exists(`node_modules/.pnpm/not-compatible-with-any-os@1.0.0`), 'optional dependency not linked')
const logMatcher = sinon.match({
package: {
@@ -240,7 +240,7 @@ test('optional subdependency is skipped', async (t: tape.Test) => {
await testDefaults({ force: true, frozenLockfile: true }),
)
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/not-compatible-with-any-os@1.0.0`), 'optional dependency linked after forced headless install')
t.ok(await exists(`node_modules/.pnpm/not-compatible-with-any-os@1.0.0`), 'optional dependency linked after forced headless install')
{
const modulesInfo = await readYamlFile<{ skipped: string[] }>(path.join('node_modules', '.modules.yaml'))
@@ -354,10 +354,10 @@ test('only skip optional dependencies', async (t: tape.Test) => {
},
}, await testDefaults({ fastUnpack: false, preferredVersions }))
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/duplexify@3.6.0`)), 'duplexify is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/stream-shift@1.0.0`)), 'stream-shift is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/duplexify@3.6.0`)), 'duplexify is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/stream-shift@1.0.0`)), 'stream-shift is linked into node_modules')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/got@3.3.1/node_modules/duplexify`)), 'duplexify is linked into node_modules of got')
t.ok(await exists(path.resolve(`node_modules/.pnpm/got@3.3.1/node_modules/duplexify`)), 'duplexify is linked into node_modules of got')
})
test('skip optional dependency that does not support the current OS, when doing install on a subset of workspace projects', async (t: tape.Test) => {

View File

@@ -32,7 +32,7 @@ test('peer dependency is grouped with dependency when peer is resolved not from
const opts = await testDefaults()
let manifest = await addDependenciesToPackage({}, ['using-ajv'], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
// testing that peers are reinstalled correctly using info from the lockfile
@@ -40,7 +40,7 @@ test('peer dependency is grouped with dependency when peer is resolved not from
await rimraf(path.resolve('..', '.store'))
manifest = await install(manifest, await testDefaults())
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
await addDependenciesToPackage(manifest, ['using-ajv'], await testDefaults({ update: true }))
@@ -62,8 +62,8 @@ test('nothing is needlessly removed from node_modules', async (t: tape.Test) =>
const opts = await testDefaults()
const manifest = await addDependenciesToPackage({}, ['using-ajv', 'ajv-keywords@1.5.0'], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is present')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is present')
t.equal(deepRequireCwd(['using-ajv', 'ajv-keywords', 'ajv', './package.json']).version, '4.10.4')
await mutateModules([
@@ -75,8 +75,8 @@ test('nothing is needlessly removed from node_modules', async (t: tape.Test) =>
},
], opts)
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency link is not removed')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is removed')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency link is not removed')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'root dependency resolution is removed')
})
test('peer dependency is grouped with dependent when the peer is a top dependency', async (t: tape.Test) => {
@@ -90,7 +90,7 @@ test('peer dependency is grouped with dependent when the peer is a top dependenc
message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords/1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`,
}), 'no warning is logged about unresolved peer dep')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)), 'dependent is grouped with top peer dep')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)), 'dependent is grouped with top peer dep')
})
test('the right peer dependency is used in every workspace package', async (t: tape.Test) => {
@@ -256,8 +256,8 @@ test('top peer dependency is linked on subsequent install', async (t: tape.Test)
await addDependenciesToPackage(manifest, ['ajv-keywords@1.5.0'], await testDefaults())
t.notOk(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'dependency without peer is prunned')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
t.notOk(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0/node_modules/ajv-keywords`)), 'dependency without peer is prunned')
t.ok(await exists(path.resolve(`node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv`)), 'peer dependency is linked')
})
async function okFile (t: tape.Test, filename: string) {
@@ -273,7 +273,7 @@ test('peer dependencies are linked when running one named installation', async (
const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'abc-parent-with-ab', 'peer-c@2.0.0'], await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`)
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -305,7 +305,7 @@ test('peer dependencies are linked when running two separate named installations
const manifest = await addDependenciesToPackage({}, ['abc-grand-parent-with-c', 'peer-c@2.0.0'], await testDefaults())
await addDependenciesToPackage(manifest, ['abc-parent-with-ab'], await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`)
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir + '_165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -337,7 +337,7 @@ test.skip('peer dependencies are linked', async (t: tape.Test) => {
},
}, await testDefaults())
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0`)
const pkgVariationsDir = path.resolve(`node_modules/.pnpm/abc@1.0.0`)
const pkgVariation1 = path.join(pkgVariationsDir, '165e1e08a3f7e7f77ddb572ad0e55660/node_modules')
await okFile(t, path.join(pkgVariation1, 'abc'))
@@ -363,7 +363,7 @@ test('scoped peer dependency is linked', async (t: tape.Test) => {
prepareEmpty(t)
await addDependenciesToPackage({}, ['for-testing-scoped-peers'], await testDefaults())
const pkgVariation = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/@having/scoped-peer@1.0.0_@scoped+peer@1.0.0/node_modules`)
const pkgVariation = path.resolve(`node_modules/.pnpm/@having/scoped-peer@1.0.0_@scoped+peer@1.0.0/node_modules`)
await okFile(t, path.join(pkgVariation, '@having', 'scoped-peer'))
await okFile(t, path.join(pkgVariation, '@scoped', 'peer'))
})
@@ -373,7 +373,7 @@ test('peer bins are linked', async (t: tape.Test) => {
await addDependenciesToPackage({}, ['for-testing-peers-having-bins'], await testDefaults({ fastUnpack: false }))
const pkgVariation = path.join(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-peer-having-bin@1.0.0_peer-with-bin@1.0.0/node_modules`)
const pkgVariation = path.join(`.pnpm/pkg-with-peer-having-bin@1.0.0_peer-with-bin@1.0.0/node_modules`)
await project.isExecutable(path.join(pkgVariation, 'pkg-with-peer-having-bin/node_modules/.bin', 'peer-with-bin'))
@@ -385,11 +385,11 @@ test('run pre/postinstall scripts of each variations of packages with peer depen
prepareEmpty(t)
await addDependenciesToPackage({}, ['parent-of-pkg-with-events-and-peers', 'pkg-with-events-and-peers', 'peer-c@2.0.0'], await testDefaults({ fastUnpack: false }))
const pkgVariation1 = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-events-and-peers@1.0.0_peer-c@1.0.0/node_modules`)
const pkgVariation1 = path.resolve(`node_modules/.pnpm/pkg-with-events-and-peers@1.0.0_peer-c@1.0.0/node_modules`)
await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-preinstall.js'))
await okFile(t, path.join(pkgVariation1, 'pkg-with-events-and-peers', 'generated-by-postinstall.js'))
const pkgVariation2 = path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-events-and-peers@1.0.0_peer-c@2.0.0/node_modules`)
const pkgVariation2 = path.resolve(`node_modules/.pnpm/pkg-with-events-and-peers@1.0.0_peer-c@2.0.0/node_modules`)
await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-preinstall.js'))
await okFile(t, path.join(pkgVariation2, 'pkg-with-events-and-peers', 'generated-by-postinstall.js'))
})
@@ -405,7 +405,7 @@ test('package that resolves its own peer dependency', async (t: tape.Test) => {
t.equal(deepRequireCwd(['pkg-with-resolved-peer', 'peer-c', './package.json']).version, '1.0.0')
t.ok(await exists(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/pkg-with-resolved-peer@1.0.0/node_modules/pkg-with-resolved-peer`)))
t.ok(await exists(path.resolve(`node_modules/.pnpm/pkg-with-resolved-peer@1.0.0/node_modules/pkg-with-resolved-peer`)))
const lockfile = await project.readLockfile()
@@ -443,7 +443,7 @@ test('peer dependency is grouped with dependent when the peer is a top dependenc
message: `localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0 requires a peer of ajv@>=4.10.0 but none was installed.`,
}), 'no warning is logged about unresolved peer dep')
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)))
t.ok(await exists(path.join(`../node_modules/.pnpm/ajv-keywords@1.5.0_ajv@4.10.4/node_modules/ajv-keywords`)))
const lockfile = await readYamlFile<Lockfile>(path.join('..', WANTED_LOCKFILE))
@@ -473,7 +473,7 @@ test('peer dependency is grouped correctly with peer installed via separate inst
}, await testDefaults({ reporter, lockfileDir }))
await addDependenciesToPackage(manifest, ['peer-c@2.0.0'], await testDefaults({ reporter, lockfileDir }))
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc@1.0.0_peer-c@2.0.0/node_modules/dep-of-pkg-with-1-dep`)))
t.ok(await exists(path.join(`../node_modules/.pnpm/abc@1.0.0_peer-c@2.0.0/node_modules/dep-of-pkg-with-1-dep`)))
})
test('peer dependency is grouped with dependent when the peer is a top dependency and external node_modules is used', async (t: tape.Test) => {
@@ -619,7 +619,7 @@ test('external lockfile: peer dependency is grouped with dependent even after a
})
}
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc-parent-with-ab@1.0.0_peer-c@2.0.0/node_modules/is-positive`)))
t.ok(await exists(path.join(`../node_modules/.pnpm/abc-parent-with-ab@1.0.0_peer-c@2.0.0/node_modules/is-positive`)))
})
test('regular dependencies are not removed on update from transitive packages that have children with peers resolved from above', async (t: tape.Test) => {
@@ -635,7 +635,7 @@ test('regular dependencies are not removed on update from transitive packages th
await addDistTag({ package: 'peer-c', version: '1.0.1', distTag: 'latest' })
await install(manifest, await testDefaults({ lockfileDir, update: true, depth: 2 }))
t.ok(await exists(path.join(`../node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/abc-parent-with-ab@1.0.1_peer-c@1.0.1/node_modules/is-positive`)))
t.ok(await exists(path.join(`../node_modules/.pnpm/abc-parent-with-ab@1.0.1_peer-c@1.0.1/node_modules/is-positive`)))
})
test('peer dependency is resolved from parent package', async (t) => {

View File

@@ -210,11 +210,11 @@ test('node_modules is pruned after linking', async (t: tape.Test) => {
const manifest = await addDependenciesToPackage({}, ['is-positive@1.0.0'], await testDefaults())
t.ok(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive/package.json`))
t.ok(await exists(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive/package.json`))
await link(['../is-positive'], path.resolve('node_modules'), await testDefaults({ manifest, dir: process.cwd() }))
t.notOk(await exists(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/is-positive@1.0.0/node_modules/is-positive/package.json`), 'pruned')
t.notOk(await exists(`node_modules/.pnpm/is-positive@1.0.0/node_modules/is-positive/package.json`), 'pruned')
})
test('relative link uses realpath when contained in a symlinked dir', async (t: tape.Test) => {

View File

@@ -161,7 +161,7 @@ test("lockfile doesn't lock subdependencies that don't satisfy the new specs", a
await addDependenciesToPackage(manifest, ['react-datetime@1.3.0'], await testDefaults({ save: true }))
t.equal(
project.requireModule(`.pnpm/localhost+${REGISTRY_MOCK_PORT}/react-datetime@1.3.0/node_modules/react-onclickoutside/package.json`).version,
project.requireModule(`.pnpm/react-datetime@1.3.0/node_modules/react-onclickoutside/package.json`).version,
'0.3.4',
'react-datetime@1.3.0 has react-onclickoutside@0.3.4 in its node_modules')
@@ -335,8 +335,8 @@ test(`respects ${WANTED_LOCKFILE} for top dependencies`, async (t: tape.Test) =>
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
await Promise.all(pkgs.map((pkgName) => addDistTag(pkgName, '100.1.0', 'latest')))
@@ -361,8 +361,8 @@ test(`respects ${WANTED_LOCKFILE} for top dependencies`, async (t: tape.Test) =>
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'foo'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'bar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve('node_modules', 'qar'))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/localhost+${REGISTRY_MOCK_PORT}/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/foo`))).version, '100.0.0')
t.equal((await readPackageJsonFromDir(path.resolve(`node_modules/.pnpm/foobar@100.0.0/node_modules/bar`))).version, '100.0.0')
})
test(`subdeps are updated on repeat install if outer ${WANTED_LOCKFILE} does not match the inner one`, async (t: tape.Test) => {