perf: don't use await inside loops (#6617)

Zoltan Kochan
2023-06-05 12:12:47 +03:00
committed by GitHub
parent f870fa2af4
commit 4b97f1f07a
41 changed files with 326 additions and 264 deletions

View File

@@ -0,0 +1,17 @@
---
"@pnpm/plugin-commands-publishing": patch
"@pnpm/plugin-commands-patching": patch
"@pnpm/plugin-commands-listing": patch
"@pnpm/resolve-dependencies": patch
"@pnpm/exportable-manifest": patch
"@pnpm/license-scanner": patch
"@pnpm/get-context": patch
"@pnpm/headless": patch
"@pnpm/read-modules-dir": patch
"@pnpm/package-store": patch
"@pnpm/core": patch
"@pnpm/audit": patch
"@pnpm/list": patch
---
Don't use await in loops.

View File

@@ -0,0 +1,5 @@
---
"pnpm": patch
---
Some minor performance improvements by removing await from loops [#6617](https://github.com/pnpm/pnpm/pull/6617).
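
For context, the core refactor in this commit follows one pattern: instead of awaiting each item inside a `for` loop, which serializes independent work, the items are mapped to promises and awaited together. A minimal sketch of the two shapes, with illustrative helper names (`readManifest` stands in for whatever per-item async work a package does):

```ts
// Before: each iteration waits for the previous one to finish.
async function readAllSequentially (dirs: string[], readManifest: (dir: string) => Promise<unknown>) {
  const manifests: unknown[] = []
  for (const dir of dirs) {
    manifests.push(await readManifest(dir))
  }
  return manifests
}

// After: all reads are started immediately and settle together.
async function readAllConcurrently (dirs: string[], readManifest: (dir: string) => Promise<unknown>) {
  return Promise.all(dirs.map(async (dir) => readManifest(dir)))
}
```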

View File

@@ -34,6 +34,7 @@
"@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/no-explicit-any": "error",
"no-return-await": "error",
"no-await-in-loop": "error",
"@typescript-eslint/return-await": "off",
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/no-unused-expressions": "error",

View File

@@ -244,6 +244,7 @@ test.each([
// shouldn't affect its arg parsing. Test both scenarios for good measure.
const input = [...(testWithCommandFallback ? [] : ['run']), ...testInput.split(' ')]
// eslint-disable-next-line no-await-in-loop
const { options, cmd, params, fallbackCommandUsed } = await parseCliArgs({
...DEFAULT_OPTS,
fallbackCommand: 'run',

View File

@@ -119,6 +119,7 @@ test('it re-attempts failed downloads', async () => {
try {
const attempts = 2
for (let i = 0; i < attempts; i++) {
// eslint-disable-next-line no-await-in-loop
await expect(
env.handler({
bin: process.cwd(),

View File

@@ -52,7 +52,7 @@ export async function runLifecycleHooksConcurrently (
let isBuilt = false
for (const stage of (importerStages ?? stages)) {
if ((manifest.scripts == null) || !manifest.scripts[stage]) continue
await runLifecycleHook(stage, manifest, runLifecycleHookOpts)
await runLifecycleHook(stage, manifest, runLifecycleHookOpts) // eslint-disable-line no-await-in-loop
isBuilt = true
}
if (targetDirs == null || targetDirs.length === 0 || !isBuilt) return

View File

@@ -111,6 +111,7 @@ export async function recursiveRebuild (
}
const limitRebuild = pLimit(opts.workspaceConcurrency ?? 4)
for (const chunk of chunks) {
// eslint-disable-next-line no-await-in-loop
await Promise.all(chunk.map(async (rootDir: string) =>
limitRebuild(async () => {
try {
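
In the recursive commands, the `chunks` appear to be topologically sorted batches of workspace projects: batches must run one after another, while the projects inside a batch run concurrently under a concurrency limit. That is why the `await` on `Promise.all` stays inside the loop and only the lint rule is disabled. A rough sketch of the shape, with an illustrative `buildOne` worker:

```ts
import pLimit from 'p-limit'

async function runChunks (chunks: string[][], buildOne: (dir: string) => Promise<void>, concurrency = 4) {
  const limit = pLimit(concurrency)
  for (const chunk of chunks) {
    // Each batch must finish before the next one starts, so this await is intentional.
    // eslint-disable-next-line no-await-in-loop
    await Promise.all(chunk.map(async (dir) => limit(() => buildOne(dir))))
  }
}
```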

View File

@@ -180,6 +180,7 @@ export async function handler (
let exitCode = 0
for (const chunk of chunks) {
// eslint-disable-next-line no-await-in-loop
await Promise.all(chunk.map(async (prefix: string) =>
limitRun(async () => {
result[prefix].status = 'running'

View File

@@ -90,6 +90,7 @@ export async function runRecursive (
return specifiedScripts.map(script => ({ prefix, scriptName: script }))
}).flat()
// eslint-disable-next-line no-await-in-loop
await Promise.all(selectedScripts.map(async ({ prefix, scriptName }) =>
limitRun(async () => {
const pkg = opts.selectedProjectsGraph[prefix]

View File

@@ -41,6 +41,7 @@ export async function preparePackage (opts: PreparePackageOptions, pkgDir: strin
await runLifecycleHook(installScriptName, manifest, execOpts)
for (const scriptName of PREPUBLISH_SCRIPTS) {
if (manifest.scripts[scriptName] == null || manifest.scripts[scriptName] === '') continue
// eslint-disable-next-line no-await-in-loop
await runLifecycleHook(scriptName, manifest, execOpts)
}
} catch (err: any) { // eslint-disable-line

View File

@@ -17,21 +17,18 @@ async function _readModulesDir (
   modulesDir: string,
   scope?: string
 ) {
-  let pkgNames: string[] = []
+  const pkgNames: string[] = []
   const parentDir = scope ? path.join(modulesDir, scope) : modulesDir
-  for (const dir of await readdir(parentDir, { withFileTypes: true })) {
-    if (dir.isFile() || dir.name[0] === '.') continue
+  await Promise.all((await readdir(parentDir, { withFileTypes: true })).map(async (dir) => {
+    if (dir.isFile() || dir.name[0] === '.') return
     if (!scope && dir.name[0] === '@') {
-      pkgNames = [
-        ...pkgNames,
-        ...await _readModulesDir(modulesDir, dir.name),
-      ]
-      continue
+      pkgNames.push(...await _readModulesDir(modulesDir, dir.name))
+      return
     }
     const pkgName = scope ? `${scope}/${dir.name as string}` : dir.name
     pkgNames.push(pkgName)
-  }
+  }))
   return pkgNames
 }

View File

@@ -83,11 +83,12 @@ async function extendWithDependencyPaths (auditReport: AuditReport, opts: {
include: opts.include,
}
const _searchPackagePaths = searchPackagePaths.bind(null, searchOpts, projectDirs)
for (const { findings, module_name: moduleName } of Object.values(advisories)) {
for (const finding of findings) {
finding.paths = await _searchPackagePaths(`${moduleName}@${finding.version}`)
}
}
// eslint-disable-next-line @typescript-eslint/naming-convention
await Promise.all(Object.values(advisories).map(async ({ findings, module_name }) => {
await Promise.all(findings.map(async (finding) => {
finding.paths = await _searchPackagePaths(`${module_name}@${finding.version}`)
}))
}))
return auditReport
}

View File

@@ -178,6 +178,7 @@ async function _readWantedLockfile (
}
}
let result: { lockfile: Lockfile | null, hadConflicts: boolean } = { lockfile: null, hadConflicts: false }
/* eslint-disable no-await-in-loop */
for (const lockfileName of lockfileNames) {
result = await _read(path.join(pkgPath, lockfileName), pkgPath, { ...opts, autofixMergeConflicts: true })
if (result.lockfile) {
@@ -187,6 +188,7 @@ async function _readWantedLockfile (
break
}
}
/* eslint-enable no-await-in-loop */
return result
}

View File

@@ -48,6 +48,7 @@ export function createFetchFromRegistry (
let redirects = 0
let urlObject = new URL(url)
const originalHost = urlObject.host
/* eslint-disable no-await-in-loop */
while (true) {
const agentOptions = {
...defaultOpts,
@@ -77,6 +78,7 @@ export function createFetchFromRegistry (
if (!headers['authorization'] || originalHost === urlObject.host) continue
delete headers.authorization
}
/* eslint-enable no-await-in-loop */
}
}

View File

@@ -57,13 +57,13 @@ export async function handler (opts: PatchRemoveCommandOptions, params: string[]
throw new PnpmError('NO_PATCHES_TO_REMOVE', 'There are no patches that need to be removed')
}
for (const patch of patchesToRemove) {
await Promise.all(patchesToRemove.map(async (patch) => {
if (Object.prototype.hasOwnProperty.call(patchedDependencies, patch)) {
const patchFile = path.join(lockfileDir, patchedDependencies[patch])
await fs.rm(patchFile, { force: true })
delete rootProjectManifest.pnpm!.patchedDependencies![patch]
}
}
}))
await writeProjectManifest(rootProjectManifest)

View File

@@ -74,44 +74,50 @@ async function linkedPackagesAreUpToDate (
snapshot: ProjectSnapshot
}
) {
for (const depField of DEPENDENCIES_FIELDS) {
const lockfileDeps = project.snapshot[depField]
const manifestDeps = project.manifest[depField]
if ((lockfileDeps == null) || (manifestDeps == null)) continue
const depNames = Object.keys(lockfileDeps)
for (const depName of depNames) {
const currentSpec = manifestDeps[depName]
if (!currentSpec) continue
const lockfileRef = lockfileDeps[depName]
const isLinked = lockfileRef.startsWith('link:')
if (
isLinked &&
(
currentSpec.startsWith('link:') ||
currentSpec.startsWith('file:') ||
currentSpec.startsWith('workspace:.')
)
) {
continue
}
const linkedDir = isLinked
? path.join(project.dir, lockfileRef.slice(5))
: workspacePackages?.[depName]?.[lockfileRef]?.dir
if (!linkedDir) continue
if (!linkWorkspacePackages && !currentSpec.startsWith('workspace:')) {
// we found a linked dir, but we don't want to use it, because it's not specified as a
// workspace:x.x.x dependency
continue
}
const linkedPkg = manifestsByDir[linkedDir] ?? await safeReadPackageJsonFromDir(linkedDir)
const availableRange = getVersionRange(currentSpec)
// This should pass the same options to semver as @pnpm/npm-resolver
const localPackageSatisfiesRange = availableRange === '*' || availableRange === '^' || availableRange === '~' ||
linkedPkg && semver.satisfies(linkedPkg.version, availableRange, { loose: true })
if (isLinked !== localPackageSatisfiesRange) return false
return pEvery(
DEPENDENCIES_FIELDS,
(depField) => {
const lockfileDeps = project.snapshot[depField]
const manifestDeps = project.manifest[depField]
if ((lockfileDeps == null) || (manifestDeps == null)) return true
const depNames = Object.keys(lockfileDeps)
return pEvery(
depNames,
async (depName) => {
const currentSpec = manifestDeps[depName]
if (!currentSpec) return true
const lockfileRef = lockfileDeps[depName]
const isLinked = lockfileRef.startsWith('link:')
if (
isLinked &&
(
currentSpec.startsWith('link:') ||
currentSpec.startsWith('file:') ||
currentSpec.startsWith('workspace:.')
)
) {
return true
}
const linkedDir = isLinked
? path.join(project.dir, lockfileRef.slice(5))
: workspacePackages?.[depName]?.[lockfileRef]?.dir
if (!linkedDir) return true
if (!linkWorkspacePackages && !currentSpec.startsWith('workspace:')) {
// we found a linked dir, but we don't want to use it, because it's not specified as a
// workspace:x.x.x dependency
return true
}
const linkedPkg = manifestsByDir[linkedDir] ?? await safeReadPackageJsonFromDir(linkedDir)
const availableRange = getVersionRange(currentSpec)
// This should pass the same options to semver as @pnpm/npm-resolver
const localPackageSatisfiesRange = availableRange === '*' || availableRange === '^' || availableRange === '~' ||
linkedPkg && semver.satisfies(linkedPkg.version, availableRange, { loose: true })
if (isLinked !== localPackageSatisfiesRange) return false
return true
}
)
}
}
return true
)
}
function getVersionRange (spec: string) {
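
The rewrite above trades the nested `for` loops for nested `pEvery` calls (apparently the `p-every` package, judging by the call shape), so the per-dependency checks can run concurrently while keeping `Array#every`-style semantics: the overall result is `false` if any check fails. A small usage sketch under that assumption:

```ts
import pEvery from 'p-every'

// Returns true only if every directory's manifest has a name; the async
// predicate is evaluated for the items concurrently.
async function allManifestsHaveName (dirs: string[], readName: (dir: string) => Promise<string | undefined>) {
  return pEvery(dirs, async (dir) => {
    const name = await readName(dir)
    return name !== undefined
  })
}
```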

View File

@@ -513,6 +513,7 @@ Note that in CI environments, this setting is enabled by default.`,
let preferredSpecs: Record<string, string> | null = null
// TODO: make it concurrent
/* eslint-disable no-await-in-loop */
for (const project of projects) {
const projectOpts = {
...project,
@@ -608,6 +609,7 @@ Note that in CI environments, this setting is enabled by default.`,
}
}
}
/* eslint-enable no-await-in-loop */
async function installCase (project: any) { // eslint-disable-line
const wantedDependencies = getWantedDependencies(project.manifest, {

View File

@@ -58,48 +58,49 @@ export async function link (
   }, true)
   const importerId = getLockfileImporterId(ctx.lockfileDir, opts.dir)
-  const linkedPkgs: Array<{ path: string, manifest: DependencyManifest, alias: string }> = []
   const specsToUpsert = [] as PackageSpecObject[]
-  for (const linkFrom of linkFromPkgs) {
-    let linkFromPath: string
-    let linkFromAlias: string | undefined
-    if (typeof linkFrom === 'string') {
-      linkFromPath = linkFrom
-    } else {
-      linkFromPath = linkFrom.path
-      linkFromAlias = linkFrom.alias
-    }
-    const { manifest } = await readProjectManifest(linkFromPath) as { manifest: DependencyManifest }
-    if (typeof linkFrom === 'string' && manifest.name === undefined) {
-      throw new PnpmError('INVALID_PACKAGE_NAME', `Package in ${linkFromPath} must have a name field to be linked`)
-    }
-    const targetDependencyType = getDependencyTypeFromManifest(opts.manifest, manifest.name) ?? opts.targetDependenciesField
-    specsToUpsert.push({
-      alias: manifest.name,
-      pref: getPref(manifest.name, manifest.name, manifest.version, {
-        pinnedVersion: opts.pinnedVersion,
-      }),
-      saveType: (targetDependencyType ?? (ctx.manifest && guessDependencyType(manifest.name, ctx.manifest))) as DependenciesField,
-    })
-    const packagePath = normalize(path.relative(opts.dir, linkFromPath))
-    const addLinkOpts = {
-      linkedPkgName: linkFromAlias ?? manifest.name,
-      manifest: ctx.manifest,
-      packagePath,
-    }
-    addLinkToLockfile(ctx.currentLockfile.importers[importerId], addLinkOpts)
-    addLinkToLockfile(ctx.wantedLockfile.importers[importerId], addLinkOpts)
-    linkedPkgs.push({
-      alias: linkFromAlias ?? manifest.name,
-      manifest,
-      path: linkFromPath,
-    })
-  }
+  const linkedPkgs = await Promise.all(
+    linkFromPkgs.map(async (linkFrom) => {
+      let linkFromPath: string
+      let linkFromAlias: string | undefined
+      if (typeof linkFrom === 'string') {
+        linkFromPath = linkFrom
+      } else {
+        linkFromPath = linkFrom.path
+        linkFromAlias = linkFrom.alias
+      }
+      const { manifest } = await readProjectManifest(linkFromPath) as { manifest: DependencyManifest }
+      if (typeof linkFrom === 'string' && manifest.name === undefined) {
+        throw new PnpmError('INVALID_PACKAGE_NAME', `Package in ${linkFromPath} must have a name field to be linked`)
+      }
+      const targetDependencyType = getDependencyTypeFromManifest(opts.manifest, manifest.name) ?? opts.targetDependenciesField
+      specsToUpsert.push({
+        alias: manifest.name,
+        pref: getPref(manifest.name, manifest.name, manifest.version, {
+          pinnedVersion: opts.pinnedVersion,
+        }),
+        saveType: (targetDependencyType ?? (ctx.manifest && guessDependencyType(manifest.name, ctx.manifest))) as DependenciesField,
+      })
+      const packagePath = normalize(path.relative(opts.dir, linkFromPath))
+      const addLinkOpts = {
+        linkedPkgName: linkFromAlias ?? manifest.name,
+        manifest: ctx.manifest,
+        packagePath,
+      }
+      addLinkToLockfile(ctx.currentLockfile.importers[importerId], addLinkOpts)
+      addLinkToLockfile(ctx.wantedLockfile.importers[importerId], addLinkOpts)
+      return {
+        alias: linkFromAlias ?? manifest.name,
+        manifest,
+        path: linkFromPath,
+      }
+    })
+  )
   const updatedCurrentLockfile = pruneSharedLockfile(ctx.currentLockfile)
@@ -110,7 +111,7 @@ export async function link (
   // Linking should happen after removing orphans
   // Otherwise would've been removed
-  for (const { alias, manifest, path } of linkedPkgs) {
+  await Promise.all(linkedPkgs.map(async ({ alias, manifest, path }) => {
     // TODO: cover with test that linking reports with correct dependency types
     const stu = specsToUpsert.find((s) => s.alias === manifest.name)
     const targetDependencyType = getDependencyTypeFromManifest(opts.manifest, manifest.name) ?? opts.targetDependenciesField
@@ -119,7 +120,7 @@ export async function link (
       linkedPackage: manifest,
       prefix: opts.dir,
     })
-  }
+  }))
   const linkToBin = maybeOpts?.linkToBin ?? path.join(destModules, '.bin')
   await linkBinsOfPackages(linkedPkgs.map((p) => ({ manifest: p.manifest, location: p.path })), linkToBin, {

View File

@@ -146,10 +146,10 @@ export async function getContext (
})
})
if (opts.readPackageHook != null) {
for (const project of importersContext.projects) {
await Promise.all(importersContext.projects.map(async (project) => {
project.originalManifest = project.manifest
project.manifest = await opts.readPackageHook(clone(project.manifest), project.rootDir)
}
project.manifest = await opts.readPackageHook!(clone(project.manifest), project.rootDir)
}))
}
const extraBinPaths = [
@@ -338,7 +338,7 @@ async function purgeModulesDirsOfImporters (
throw new PnpmError('ABORTED_REMOVE_MODULES_DIR', 'Aborted removal of modules directory')
}
}
for (const importer of importers) {
await Promise.all(importers.map(async (importer) => {
logger.info({
message: `Recreating ${importer.modulesDir}`,
prefix: importer.rootDir,
@@ -351,12 +351,12 @@ async function purgeModulesDirsOfImporters (
} catch (err: any) { // eslint-disable-line
if (err.code !== 'ENOENT') throw err
}
}
}))
}
async function removeContentsOfDir (dir: string, virtualStoreDir: string) {
const items = await fs.readdir(dir)
for (const item of items) {
await Promise.all(items.map(async (item) => {
// The non-pnpm related hidden files are kept
if (
item.startsWith('.') &&
@@ -364,11 +364,10 @@ async function removeContentsOfDir (dir: string, virtualStoreDir: string) {
item !== '.modules.yaml' &&
!dirsAreEqual(path.join(dir, item), virtualStoreDir)
) {
continue
return
}
await rimraf(path.join(dir, item))
}
}))
}
function dirsAreEqual (dir1: string, dir2: string) {

View File

@@ -85,21 +85,23 @@ async function _lockfileToHoistedDepGraph (
     '.': directDepsMap(Object.keys(hierarchy[opts.lockfileDir]), graph),
   }
   const symlinkedDirectDependenciesByImporterId: DirectDependenciesByImporterId = { '.': {} }
-  for (const rootDep of Array.from(tree.dependencies)) {
-    const reference = Array.from(rootDep.references)[0]
-    if (reference.startsWith('workspace:')) {
-      const importerId = reference.replace('workspace:', '')
-      const projectDir = path.join(opts.lockfileDir, importerId)
-      const modulesDir = path.join(projectDir, 'node_modules')
-      const nextHierarchy = (await fetchDeps(fetchDepsOpts, modulesDir, rootDep.dependencies))
-      hierarchy[projectDir] = nextHierarchy
-      const importer = lockfile.importers[importerId]
-      const importerDir = path.join(opts.lockfileDir, importerId)
-      symlinkedDirectDependenciesByImporterId[importerId] = pickLinkedDirectDeps(importer, importerDir, opts.include)
-      directDependenciesByImporterId[importerId] = directDepsMap(Object.keys(nextHierarchy), graph)
-    }
-  }
+  await Promise.all(
+    Array.from(tree.dependencies).map(async (rootDep) => {
+      const reference = Array.from(rootDep.references)[0]
+      if (reference.startsWith('workspace:')) {
+        const importerId = reference.replace('workspace:', '')
+        const projectDir = path.join(opts.lockfileDir, importerId)
+        const modulesDir = path.join(projectDir, 'node_modules')
+        const nextHierarchy = (await fetchDeps(fetchDepsOpts, modulesDir, rootDep.dependencies))
+        hierarchy[projectDir] = nextHierarchy
+        const importer = lockfile.importers[importerId]
+        const importerDir = path.join(opts.lockfileDir, importerId)
+        symlinkedDirectDependenciesByImporterId[importerId] = pickLinkedDirectDeps(importer, importerDir, opts.include)
+        directDependenciesByImporterId[importerId] = directDepsMap(Object.keys(nextHierarchy), graph)
+      }
+    })
+  )
   return {
     directDependenciesByImporterId,
     graph,

View File

@@ -339,7 +339,7 @@ test('linkBins() fix window shebang line', async () => {
const lfBinLoc = path.join(binTarget, 'lf')
const crlfBinLoc = path.join(binTarget, 'crlf')
for (const binLocation of [lfBinLoc, crlfBinLoc]) {
expect(await exists(binLocation)).toBe(true)
expect(await exists(binLocation)).toBe(true) // eslint-disable-line no-await-in-loop
}
if (EXECUTABLE_SHEBANG_SUPPORTED) {
@@ -347,7 +347,7 @@ test('linkBins() fix window shebang line', async () => {
const crlfFilePath = path.join(windowShebangFixture, 'node_modules', 'crlf/bin/crlf.js')
for (const filePath of [lfFilePath, crlfFilePath]) {
const content = await fs.readFile(filePath, 'utf8')
const content = await fs.readFile(filePath, 'utf8') // eslint-disable-line no-await-in-loop
expect(content.startsWith('#!/usr/bin/env node\n')).toBeTruthy()
}

View File

@@ -302,6 +302,7 @@ export async function resolveRootDependencies (
if (!Object.keys(importerResolutionResult.missingPeers).length) break
const wantedDependencies = getNonDevWantedDependencies({ dependencies: importerResolutionResult.missingPeers })
// eslint-disable-next-line no-await-in-loop
const resolveDependenciesResult = await resolveDependencies(ctx, preferredVersions, wantedDependencies, {
...options,
parentPkgAliases,
@@ -309,6 +310,7 @@ export async function resolveRootDependencies (
})
importerResolutionResult = {
pkgAddresses: resolveDependenciesResult.pkgAddresses,
// eslint-disable-next-line no-await-in-loop
...filterMissingPeers(await resolveDependenciesResult.resolvingPeers, parentPkgAliases),
}
pkgAddresses.push(...importerResolutionResult.pkgAddresses)

View File

@@ -82,14 +82,14 @@ async function partitionLinkedPackages (
): Promise<WantedDependency[]> {
const nonLinkedDependencies: WantedDependency[] = []
const linkedAliases = new Set<string>()
for (const dependency of dependencies) {
await Promise.all(dependencies.map(async (dependency) => {
if (
!dependency.alias ||
opts.workspacePackages?.[dependency.alias] != null ||
dependency.pref.startsWith('workspace:')
) {
nonLinkedDependencies.push(dependency)
continue
return
}
const isInnerLink = await safeIsInnerLink(opts.modulesDir, dependency.alias, {
hideAlienModules: !opts.lockfileOnly,
@@ -98,7 +98,7 @@ async function partitionLinkedPackages (
})
if (isInnerLink === true) {
nonLinkedDependencies.push(dependency)
continue
return
}
if (!dependency.pref.startsWith('link:')) {
// This info-log might be better to be moved to the reporter
@@ -108,7 +108,7 @@ async function partitionLinkedPackages (
})
}
linkedAliases.add(dependency.alias)
}
}))
return nonLinkedDependencies
}

View File

@@ -29,12 +29,12 @@ export async function createExportableManifest (
if (originalManifest.scripts != null) {
publishManifest.scripts = omit(PREPUBLISH_SCRIPTS, originalManifest.scripts)
}
for (const depsField of ['dependencies', 'devDependencies', 'optionalDependencies', 'peerDependencies'] as const) {
await Promise.all((['dependencies', 'devDependencies', 'optionalDependencies', 'peerDependencies'] as const).map(async (depsField) => {
const deps = await makePublishDependencies(dir, originalManifest[depsField], opts?.modulesDir)
if (deps != null) {
publishManifest[depsField] = deps
}
}
}))
overridePublishConfig(publishManifest)

View File

@@ -153,17 +153,17 @@ async function packPkg (opts: {
]
const mtime = new Date('1985-10-26T08:15:00.000Z')
const pack = tar.pack()
for (const [name, source] of Object.entries(filesMap)) {
await Promise.all(Object.entries(filesMap).map(async ([name, source]) => {
const isExecutable = bins.some((bin) => path.relative(bin, source) === '')
const mode = isExecutable ? 0o755 : 0o644
if (/^package\/package\.(json|json5|yaml)/.test(name)) {
const readmeFile = embedReadme ? await readReadmeFile(filesMap) : undefined
const publishManifest = await createExportableManifest(projectDir, manifest, { readmeFile, modulesDir: opts.modulesDir })
pack.entry({ mode, mtime, name: 'package/package.json' }, JSON.stringify(publishManifest, null, 2))
continue
return
}
pack.entry({ mode, mtime, name }, fs.readFileSync(source))
}
}))
const tarball = fs.createWriteStream(destFile)
pack.pipe(createGzip({ level: opts.packGzipLevel })).pipe(tarball)
pack.finalize()

View File

@@ -276,6 +276,6 @@ export async function runScriptsIfPresent (
) {
for (const scriptName of scriptNames) {
if (!manifest.scripts?.[scriptName]) continue
await runLifecycleHook(scriptName, manifest, opts)
await runLifecycleHook(scriptName, manifest, opts) // eslint-disable-line no-await-in-loop
}
}

View File

@@ -81,14 +81,14 @@ export async function recursivePublish (
}, pkg.manifest.name, pkg.manifest.version))
})
const publishedPkgDirs = new Set(pkgsToPublish.map(({ dir }) => dir))
const publishedPackages = []
const publishedPackages: Array<{ name?: string, version?: string }> = []
if (publishedPkgDirs.size === 0) {
logger.info({
message: 'There are no new packages that should be published',
prefix: opts.dir,
})
} else {
const appendedArgs = []
const appendedArgs: string[] = []
if (opts.cliOptions['access']) {
appendedArgs.push(`--access=${opts.cliOptions['access'] as string}`)
}
@@ -101,8 +101,9 @@ export async function recursivePublish (
const chunks = sortPackages(opts.selectedProjectsGraph)
const tag = opts.tag ?? 'latest'
for (const chunk of chunks) {
for (const pkgDir of chunk) {
if (!publishedPkgDirs.has(pkgDir)) continue
// eslint-disable-next-line no-await-in-loop
const publishResults = await Promise.all(chunk.map(async (pkgDir) => {
if (!publishedPkgDirs.has(pkgDir)) return null
const pkg = opts.selectedProjectsGraph[pkgDir].package
const publishResult = await publish({
...opts,
@@ -122,9 +123,12 @@ export async function recursivePublish (
}, [pkg.dir])
if (publishResult?.manifest != null) {
publishedPackages.push(pick(['name', 'version'], publishResult.manifest))
} else if (publishResult?.exitCode) {
return { exitCode: publishResult.exitCode }
}
return publishResult
}))
const failedPublish = publishResults.find((result) => result?.exitCode)
if (failedPublish) {
return { exitCode: failedPublish.exitCode! }
}
}
}

View File

@@ -24,7 +24,7 @@ test('resolveFromGit() with no commit', async () => {
// The issue occurred because .hosted field (which is class from the 'hosted-git-info' package)
// was mutated. A 'committish' field was added to it.
for (let i = 0; i < 2; i++) {
const resolveResult = await resolveFromGit({ pref: 'zkochan/is-negative' })
const resolveResult = await resolveFromGit({ pref: 'zkochan/is-negative' }) // eslint-disable-line no-await-in-loop
expect(resolveResult).toStrictEqual({
id: 'github.com/zkochan/is-negative/1d7e288222b53a0cab90a331f1865220ec29560c',
normalizedPref: 'github:zkochan/is-negative',

View File

@@ -37,6 +37,7 @@ const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
async function retryLoadJsonFile<T> (filePath: string) {
let retry = 0
/* eslint-disable no-await-in-loop */
while (true) {
await delay(500)
try {
@@ -46,6 +47,7 @@ async function retryLoadJsonFile<T> (filePath: string) {
retry++
}
}
/* eslint-enable no-await-in-loop */
}
afterEach(() => {

View File

@@ -117,13 +117,9 @@ async function parseLicense (
// check if we discovered a license, if not attempt to parse the LICENSE file
if (!license || /see license/i.test(license)) {
const { files: pkgFileIndex } = pkg.files
for (const filename of LICENSE_FILES) {
// check if the a file with the expected name exists in the file index
if (!(filename in pkgFileIndex)) {
continue
}
const licensePackageFileInfo = pkgFileIndex[filename]
const licenseFile = LICENSE_FILES.find((licenseFile) => licenseFile in pkgFileIndex)
if (licenseFile) {
const licensePackageFileInfo = pkgFileIndex[licenseFile]
let licenseContents: Buffer | undefined
if (pkg.files.local) {
licenseContents = await readFile(licensePackageFileInfo as string)

View File

@@ -42,68 +42,69 @@ export async function lockfileToLicenseNode (
step: LockfileWalkerStep,
options: LicenseExtractOptions
) {
const dependencies: Record<string, LicenseNode> = {}
for (const dependency of step.dependencies) {
const { depPath, pkgSnapshot, next } = dependency
const { name, version } = nameVerFromPkgSnapshot(depPath, pkgSnapshot)
const dependencies: Record<string, LicenseNode> = Object.fromEntries(
(await Promise.all(step.dependencies.map(async (dependency): Promise<[string, LicenseNode] | null> => {
const { depPath, pkgSnapshot, next } = dependency
const { name, version } = nameVerFromPkgSnapshot(depPath, pkgSnapshot)
const packageInstallable = packageIsInstallable(pkgSnapshot.id ?? depPath, {
name,
version,
cpu: pkgSnapshot.cpu,
os: pkgSnapshot.os,
libc: pkgSnapshot.libc,
}, {
optional: pkgSnapshot.optional ?? false,
lockfileDir: options.dir,
})
// If the package is not installable on the given platform, we ignore the
// package, typically the case for platform prebuild packages
if (!packageInstallable) {
continue
}
const packageInfo = await getPkgInfo(
{
const packageInstallable = packageIsInstallable(pkgSnapshot.id ?? depPath, {
name,
version,
depPath,
snapshot: pkgSnapshot,
registries: options.registries,
},
{
storeDir: options.storeDir,
virtualStoreDir: options.virtualStoreDir,
dir: options.dir,
modulesDir: options.modulesDir ?? 'node_modules',
cpu: pkgSnapshot.cpu,
os: pkgSnapshot.os,
libc: pkgSnapshot.libc,
}, {
optional: pkgSnapshot.optional ?? false,
lockfileDir: options.dir,
})
// If the package is not installable on the given platform, we ignore the
// package, typically the case for platform prebuild packages
if (!packageInstallable) {
return null
}
)
const subdeps = await lockfileToLicenseNode(next(), options)
const packageInfo = await getPkgInfo(
{
name,
version,
depPath,
snapshot: pkgSnapshot,
registries: options.registries,
},
{
storeDir: options.storeDir,
virtualStoreDir: options.virtualStoreDir,
dir: options.dir,
modulesDir: options.modulesDir ?? 'node_modules',
}
)
const dep: LicenseNode = {
name,
dev: pkgSnapshot.dev === true,
integrity: (pkgSnapshot.resolution as TarballResolution).integrity,
version,
license: packageInfo.license,
licenseContents: packageInfo.licenseContents,
author: packageInfo.author,
homepage: packageInfo.homepage,
description: packageInfo.description,
repository: packageInfo.repository,
dir: packageInfo.path as string,
}
const subdeps = await lockfileToLicenseNode(next(), options)
if (Object.keys(subdeps).length > 0) {
dep.dependencies = subdeps
dep.requires = toRequires(subdeps)
}
const dep: LicenseNode = {
name,
dev: pkgSnapshot.dev === true,
integrity: (pkgSnapshot.resolution as TarballResolution).integrity,
version,
license: packageInfo.license,
licenseContents: packageInfo.licenseContents,
author: packageInfo.author,
homepage: packageInfo.homepage,
description: packageInfo.description,
repository: packageInfo.repository,
dir: packageInfo.path as string,
}
// If the package details could be fetched, we consider it part of the tree
dependencies[name] = dep
}
if (Object.keys(subdeps).length > 0) {
dep.dependencies = subdeps
dep.requires = toRequires(subdeps)
}
// If the package details could be fetched, we consider it part of the tree
return [name, dep]
}))).filter(Boolean) as Array<[string, LicenseNode]>
)
return dependencies
}
@@ -126,25 +127,25 @@ export async function lockfileToLicenseNodeTree (
Object.keys(lockfile.importers),
{ include: opts?.include }
)
const dependencies: any = {} // eslint-disable-line @typescript-eslint/no-explicit-any
for (const importerWalker of importerWalkers) {
const importerDeps = await lockfileToLicenseNode(importerWalker.step, {
storeDir: opts.storeDir,
virtualStoreDir: opts.virtualStoreDir,
modulesDir: opts.modulesDir,
dir: opts.dir,
registries: opts.registries,
})
const depName = importerWalker.importerId
dependencies[depName] = {
dependencies: importerDeps,
requires: toRequires(importerDeps),
version: '0.0.0',
license: undefined,
}
}
const dependencies = Object.fromEntries(
await Promise.all(
importerWalkers.map(async (importerWalker) => {
const importerDeps = await lockfileToLicenseNode(importerWalker.step, {
storeDir: opts.storeDir,
virtualStoreDir: opts.virtualStoreDir,
modulesDir: opts.modulesDir,
dir: opts.dir,
registries: opts.registries,
})
return [importerWalker.importerId, {
dependencies: importerDeps,
requires: toRequires(importerDeps),
version: '0.0.0',
license: undefined,
}]
})
)
)
const licenseNodeTree: LicenseNodeTree = {
name: undefined,
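
The license-scanner changes above repeat a pattern used throughout this commit: run the async work for all entries with `Promise.all`, return `null` for entries that should be skipped, then assemble the result object with `Object.fromEntries` after filtering. A generic, hedged sketch of that shape:

```ts
async function buildRecord<T> (
  keys: string[],
  compute: (key: string) => Promise<T | null>
): Promise<Record<string, T>> {
  const entries = await Promise.all(keys.map(async (key): Promise<[string, T] | null> => {
    const value = await compute(key)
    // Entries that resolve to null are dropped below, mirroring the old `continue`.
    return value == null ? null : [key, value]
  }))
  return Object.fromEntries(entries.filter((entry): entry is [string, T] => entry !== null))
}
```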

View File

@@ -32,11 +32,19 @@ export async function renderJson (
path: pkg.path,
private: !!pkg.private,
}
for (const dependenciesField of [...DEPENDENCIES_FIELDS.sort(), 'unsavedDependencies'] as const) {
if (pkg[dependenciesField]?.length) {
jsonObj[dependenciesField] = await toJsonResult(pkg[dependenciesField]!, { long: opts.long })
}
}
Object.assign(jsonObj,
Object.fromEntries(
await Promise.all(
([...DEPENDENCIES_FIELDS.sort(), 'unsavedDependencies'] as const)
.filter((dependenciesField) => pkg[dependenciesField]?.length)
.map(async (dependenciesField) => [
dependenciesField,
await toJsonResult(pkg[dependenciesField]!, { long: opts.long }),
]
)
)
)
)
return jsonObj
}))

View File

@@ -64,7 +64,6 @@ async function renderTreeForPackage (
if (pkg.private) {
label += ' (PRIVATE)'
}
let output = `${chalk.bold.underline(label)}\n`
const useColumns = opts.depth === 0 && !opts.long && !opts.search
const dependenciesFields: Array<DependenciesField | 'unsavedDependencies'> = [
...DEPENDENCIES_FIELDS.sort(),
@@ -72,30 +71,33 @@ async function renderTreeForPackage (
if (opts.showExtraneous) {
dependenciesFields.push('unsavedDependencies')
}
for (const dependenciesField of dependenciesFields) {
if (pkg[dependenciesField]?.length) {
const depsLabel = chalk.cyanBright(
dependenciesField !== 'unsavedDependencies'
? `${dependenciesField}:`
: 'not saved (you should add these dependencies to package.json if you need them):'
)
output += `\n${depsLabel}\n`
const gPkgColor = dependenciesField === 'unsavedDependencies' ? () => NOT_SAVED_DEP_CLR : getPkgColor
if (useColumns && pkg[dependenciesField]!.length > 10) {
output += cliColumns(pkg[dependenciesField]!.map(printLabel.bind(printLabel, gPkgColor))) + '\n'
continue
const output = (await Promise.all(
dependenciesFields.map(async (dependenciesField) => {
if (pkg[dependenciesField]?.length) {
const depsLabel = chalk.cyanBright(
dependenciesField !== 'unsavedDependencies'
? `${dependenciesField}:`
: 'not saved (you should add these dependencies to package.json if you need them):'
)
let output = `${depsLabel}\n`
const gPkgColor = dependenciesField === 'unsavedDependencies' ? () => NOT_SAVED_DEP_CLR : getPkgColor
if (useColumns && pkg[dependenciesField]!.length > 10) {
output += cliColumns(pkg[dependenciesField]!.map(printLabel.bind(printLabel, gPkgColor))) + '\n'
return output
}
const data = await toArchyTree(gPkgColor, pkg[dependenciesField]!, {
long: opts.long,
modules: path.join(pkg.path, 'node_modules'),
})
for (const d of data) {
output += archy(d)
}
return output
}
const data = await toArchyTree(gPkgColor, pkg[dependenciesField]!, {
long: opts.long,
modules: path.join(pkg.path, 'node_modules'),
})
for (const d of data) {
output += archy(d)
}
}
}
return null
}))).filter(Boolean).join('\n')
return output.replace(/\n$/, '')
return `${chalk.bold.underline(label)}\n\n${output}`.replace(/(\n)+$/, '')
}
type GetPkgColor = (node: PackageNode) => (s: string) => string

View File

@@ -13,9 +13,6 @@
},
"scripts": {
"lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\"",
"registry-mock": "registry-mock",
"test:jest": "jest",
"test:e2e": "registry-mock prepare && run-p -r registry-mock test:jest",
"_test": "jest",
"test": "pnpm run compile && pnpm run _test",
"prepublishOnly": "pnpm run compile",

View File

@@ -39,7 +39,7 @@ function renderDetails (licensePackage: LicensePackage) {
outputs.push(licensePackage.description)
}
if (licensePackage.homepage) {
outputs.push(chalk.underline(licensePackage.homepage))
outputs.push(licensePackage.homepage)
}
return outputs.join('\n')
}

View File

@@ -55,7 +55,7 @@ exports[`pnpm licenses: should correctly read LICENSE file with executable file
│ │ │ https://github.com/feross/safe-buffer │
├──────────────────────────────┼─────────┼────────────────────────────────────────────────────────────────────────────────────────────────────────┤
│ string_decoder │ MIT │ The string_decoder module from Node core │
│ │ │ https://github.com/nodejs/string_decoder │
│ │ │ https://github.com/rvagg/string_decoder
├──────────────────────────────┼─────────┼────────────────────────────────────────────────────────────────────────────────────────────────────────┤
│ string.fromcodepoint │ MIT │ Mathias Bynens │
│ │ │ A robust & optimized \`String.fromCodePoint\` polyfill, based on the ECMAScript 6 specification. │

View File

@@ -22,22 +22,19 @@ export async function listRecursive (
lockfileDir: opts.lockfileDir,
})
}
const outputs = []
for (const { dir } of pkgs) {
const outputs = (await Promise.all(pkgs.map(async ({ dir }) => {
try {
const output = await render([dir], params, {
return await render([dir], params, {
...opts,
alwaysPrintRootPackage: depth === -1,
lockfileDir: opts.lockfileDir ?? dir,
})
if (!output) continue
outputs.push(output)
} catch (err: any) { // eslint-disable-line
logger.info(err)
err['prefix'] = dir
throw err
}
}
}))).filter(Boolean)
if (outputs.length === 0) return ''
const joiner = typeof depth === 'number' && depth > -1 ? '\n\n' : '\n'

View File

@@ -28,35 +28,35 @@ export async function prune ({ cacheDir, storeDir }: PruneOptions) {
.filter(entry => entry.isDirectory())
.map(dir => dir.name)
let fileCounter = 0
for (const dir of dirs) {
await Promise.all(dirs.map(async (dir) => {
const subdir = path.join(cafsDir, dir)
for (const fileName of await fs.readdir(subdir)) {
await Promise.all((await fs.readdir(subdir)).map(async (fileName) => {
const filePath = path.join(subdir, fileName)
if (fileName.endsWith('-index.json')) {
pkgIndexFiles.push(filePath)
continue
return
}
const stat = await fs.stat(filePath)
if (stat.isDirectory()) {
globalWarn(`An alien directory is present in the store: ${filePath}`)
continue
return
}
if (stat.nlink === 1 || stat.nlink === BIG_ONE) {
await fs.unlink(filePath)
fileCounter++
removedHashes.add(ssri.fromHex(`${dir}${fileName}`, 'sha512').toString())
}
}
}
}))
}))
globalInfo(`Removed ${fileCounter} file${fileCounter === 1 ? '' : 's'}`)
let pkgCounter = 0
for (const pkgIndexFilePath of pkgIndexFiles) {
await Promise.all(pkgIndexFiles.map(async (pkgIndexFilePath) => {
const { files: pkgFilesIndex } = await loadJsonFile<PackageFilesIndex>(pkgIndexFilePath)
if (removedHashes.has(pkgFilesIndex['package.json'].integrity)) {
await fs.unlink(pkgIndexFilePath)
pkgCounter++
}
}
}))
globalInfo(`Removed ${pkgCounter} package${pkgCounter === 1 ? '' : 's'}`)
}

View File

@@ -293,12 +293,14 @@ test('server should only allow POST', async () => {
// Try various methods (not including POST)
const methods = ['GET', 'PUT', 'PATCH', 'DELETE', 'OPTIONS']
/* eslint-disable no-await-in-loop */
for (const method of methods) {
// Ensure 405 error is received
const response = await fetch(`${remotePrefix}/a-random-endpoint`, { method })
expect(response.status).toBe(405)
expect((await response.json() as any).error).toBeTruthy() // eslint-disable-line
}
/* eslint-enable no-await-in-loop */
await server.close()
await storeCtrlForServer.close()

View File

@@ -102,6 +102,7 @@ export async function tryLoadServerJson (
}> {
let beforeFirstAttempt = true
const startHRTime = process.hrtime()
/* eslint-disable no-await-in-loop */
while (true) {
if (!beforeFirstAttempt) {
const elapsedHRTime = process.hrtime(startHRTime)
@@ -150,4 +151,5 @@ export async function tryLoadServerJson (
}
return serverJson
}
/* eslint-enable no-await-in-loop */
}

View File

@@ -209,8 +209,16 @@ async function _filterGraph<T> (
let entryPackages: string[] | null = null
if (selector.diff) {
let ignoreDependentForPkgs: string[] = []
;[entryPackages, ignoreDependentForPkgs] = await getChangedPackages(Object.keys(pkgGraph),
selector.diff, { workspaceDir: selector.parentDir ?? opts.workspaceDir, testPattern: opts.testPattern, changedFilesIgnorePattern: opts.changedFilesIgnorePattern })
// eslint-disable-next-line no-await-in-loop
;[entryPackages, ignoreDependentForPkgs] = await getChangedPackages(
Object.keys(pkgGraph),
selector.diff,
{
changedFilesIgnorePattern: opts.changedFilesIgnorePattern,
testPattern: opts.testPattern,
workspaceDir: selector.parentDir ?? opts.workspaceDir,
}
)
selectEntries({
...selector,
includeDependents: false,