feat: reverse pnpm why tree and improve list/why output (#10615)

- **`pnpm why` now shows a reverse dependency tree.** The searched package appears at the root with its dependents as branches, walking back to workspace roots. This replaces the previous forward-tree output, which was noisy and hard to read for deeply nested dependencies.
- **Replaced `archy` with a new `@pnpm/text.tree-renderer` package** that renders trees using box-drawing characters (├──, └──, │) and supports grouped sections, dim connectors, and deduplication markers.
- **Show peer dependency hash suffixes** in `pnpm list` and `pnpm why` output to distinguish between different peer-dep variants of the same package.
- **Improved `pnpm list` visual output:** bold importer nodes, dimmed workspace paths, dependency grouping, package count summary, and deterministic sort order.
- **Added `--long` support to `pnpm why`** and the ability to read package manifests from the CAS store.
- **Deduplicated shared code** between `list` and `why` commands into a common module, and reused `getPkgInfo` in the why tree builder.
This commit is contained in:
Zoltan Kochan
2026-02-15 14:38:43 +01:00
committed by GitHub
parent 5ff0e16864
commit 7d5ada0701
48 changed files with 2458 additions and 566 deletions

View File

@@ -0,0 +1,5 @@
---
"@pnpm/text.tree-renderer": minor
---
Added `@pnpm/text.tree-renderer`, a new package that renders tree structures using box-drawing characters (├──, └──, │). It supports grouped sections and deduplication markers.

View File

@@ -0,0 +1,6 @@
---
"@pnpm/dedupe.issues-renderer": patch
"@pnpm/render-peer-issues": patch
---
Replaced `archy` with `@pnpm/text.tree-renderer` for tree output.

View File

@@ -0,0 +1,8 @@
---
"@pnpm/reviewing.dependencies-hierarchy": minor
"@pnpm/list": minor
"@pnpm/plugin-commands-listing": minor
"pnpm": minor
---
`pnpm why` now shows a reverse dependency tree. The searched package appears at the root with its dependents as branches, walking back to workspace roots. This replaces the previous forward-tree output which was noisy and hard to read for deeply nested dependencies.

View File

@@ -242,6 +242,7 @@
"rehoist",
"reka",
"relinks",
"renderable",
"reqheaders",
"rmgr",
"rpmdevtools",

View File

@@ -32,12 +32,11 @@
},
"dependencies": {
"@pnpm/dedupe.types": "workspace:*",
"archy": "catalog:",
"@pnpm/text.tree-renderer": "workspace:*",
"chalk": "catalog:"
},
"devDependencies": {
"@pnpm/dedupe.issues-renderer": "workspace:*",
"@types/archy": "catalog:"
"@pnpm/dedupe.issues-renderer": "workspace:*"
},
"engines": {
"node": ">=22.12"

View File

@@ -4,7 +4,7 @@ import {
type ResolutionChangesByAlias,
type SnapshotsChanges,
} from '@pnpm/dedupe.types'
import archy from 'archy'
import { renderTree, type TreeNode } from '@pnpm/text.tree-renderer'
import chalk from 'chalk'
export function renderDedupeCheckIssues (dedupeCheckIssues: DedupeCheckIssues): string {
@@ -31,20 +31,20 @@ export function renderDedupeCheckIssues (dedupeCheckIssues: DedupeCheckIssues):
*/
function report (snapshotChanges: SnapshotsChanges): string {
return [
...Object.entries(snapshotChanges.updated).map(([alias, updates]) => archy(toArchy(alias, updates))),
...Object.entries(snapshotChanges.updated).map(([alias, updates]) => renderTree(toArchy(alias, updates))),
...snapshotChanges.added.map((id) => `${chalk.green('+')} ${id}`),
...snapshotChanges.removed.map((id) => `${chalk.red('-')} ${id}`),
].join('\n')
}
function toArchy (name: string, issue: ResolutionChangesByAlias): archy.Data {
function toArchy (name: string, issue: ResolutionChangesByAlias): TreeNode {
return {
label: name,
nodes: Object.entries(issue).map(([alias, change]) => toArchyResolution(alias, change)),
}
}
function toArchyResolution (alias: string, change: ResolutionChange): archy.Data {
function toArchyResolution (alias: string, change: ResolutionChange): TreeNode {
switch (change.type) {
case 'added':
return { label: `${chalk.green('+')} ${alias} ${chalk.gray(change.next)}` }

View File

@@ -9,6 +9,9 @@
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../text/tree-renderer"
},
{
"path": "../types"
}

View File

@@ -32,14 +32,13 @@
},
"dependencies": {
"@pnpm/error": "workspace:*",
"@pnpm/text.tree-renderer": "workspace:*",
"@pnpm/types": "workspace:*",
"archy": "catalog:",
"chalk": "catalog:",
"cli-columns": "catalog:"
},
"devDependencies": {
"@pnpm/render-peer-issues": "workspace:*",
"@types/archy": "catalog:"
"@pnpm/render-peer-issues": "workspace:*"
},
"engines": {
"node": ">=22.12"

View File

@@ -2,7 +2,7 @@ import {
type BadPeerDependencyIssue,
type PeerDependencyIssuesByProjects,
} from '@pnpm/types'
import archy from 'archy'
import { renderTree, type TreeNode } from '@pnpm/text.tree-renderer'
import chalk from 'chalk'
import cliColumns from 'cli-columns'
@@ -57,7 +57,7 @@ export function renderPeerIssues (
}
const title = chalk.reset(projectKey)
const summariesConcatenated = summaries.join('\n')
return `${archy(toArchyData(title, project))}${summariesConcatenated}`.trimEnd()
return `${renderTree(toArchyData(title, project))}${summariesConcatenated}`.trimEnd()
}).join('\n\n')
}
@@ -103,8 +103,8 @@ function createTree (pkgNode: PkgNode, pkgs: Array<{ name: string, version: stri
createTree(pkgNode.dependencies[label], rest, issueText)
}
function toArchyData (depName: string, pkgNode: PkgNode): archy.Data {
const result: Required<archy.Data> = {
function toArchyData (depName: string, pkgNode: PkgNode): TreeNode {
const result: Required<TreeNode> = {
label: depName,
nodes: [],
}

View File

@@ -9,6 +9,9 @@
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../text/tree-renderer"
},
{
"path": "../error"
},

49
pnpm-lock.yaml generated
View File

@@ -2119,9 +2119,9 @@ importers:
'@pnpm/dedupe.types':
specifier: workspace:*
version: link:../types
archy:
specifier: 'catalog:'
version: 1.0.0
'@pnpm/text.tree-renderer':
specifier: workspace:*
version: link:../../text/tree-renderer
chalk:
specifier: 'catalog:'
version: 5.6.2
@@ -2129,9 +2129,6 @@ importers:
'@pnpm/dedupe.issues-renderer':
specifier: workspace:*
version: 'link:'
'@types/archy':
specifier: 'catalog:'
version: 0.0.36
dedupe/types:
devDependencies:
@@ -4696,12 +4693,12 @@ importers:
'@pnpm/error':
specifier: workspace:*
version: link:../error
'@pnpm/text.tree-renderer':
specifier: workspace:*
version: link:../../text/tree-renderer
'@pnpm/types':
specifier: workspace:*
version: link:../types
archy:
specifier: 'catalog:'
version: 1.0.0
chalk:
specifier: 'catalog:'
version: 5.6.2
@@ -4712,9 +4709,6 @@ importers:
'@pnpm/render-peer-issues':
specifier: workspace:*
version: 'link:'
'@types/archy':
specifier: 'catalog:'
version: 0.0.36
packages/types:
devDependencies:
@@ -7674,6 +7668,9 @@ importers:
'@pnpm/dependency-path':
specifier: workspace:*
version: link:../../packages/dependency-path
'@pnpm/fs.msgpack-file':
specifier: workspace:*
version: link:../../fs/msgpack-file
'@pnpm/lockfile.detect-dep-types':
specifier: workspace:*
version: link:../../lockfile/detect-dep-types
@@ -7701,9 +7698,15 @@ importers:
'@pnpm/read-package-json':
specifier: workspace:*
version: link:../../pkg-manifest/read-package-json
'@pnpm/store.cafs':
specifier: workspace:*
version: link:../../store/cafs
'@pnpm/types':
specifier: workspace:*
version: link:../../packages/types
'@pnpm/util.lex-comparator':
specifier: 'catalog:'
version: 3.0.2
normalize-path:
specifier: 'catalog:'
version: 3.0.0
@@ -7808,6 +7811,9 @@ importers:
reviewing/list:
dependencies:
'@pnpm/lockfile.fs':
specifier: workspace:*
version: link:../../lockfile/fs
'@pnpm/read-package-json':
specifier: workspace:*
version: link:../../pkg-manifest/read-package-json
@@ -7817,18 +7823,18 @@ importers:
'@pnpm/reviewing.dependencies-hierarchy':
specifier: workspace:*
version: link:../dependencies-hierarchy
'@pnpm/text.tree-renderer':
specifier: workspace:*
version: link:../../text/tree-renderer
'@pnpm/types':
specifier: workspace:*
version: link:../../packages/types
archy:
'@pnpm/util.lex-comparator':
specifier: 'catalog:'
version: 1.0.0
version: 3.0.2
chalk:
specifier: 'catalog:'
version: 5.6.2
cli-columns:
specifier: 'catalog:'
version: 4.0.0
p-limit:
specifier: 'catalog:'
version: 7.3.0
@@ -7842,9 +7848,6 @@ importers:
'@pnpm/test-fixtures':
specifier: workspace:*
version: link:../../__utils__/test-fixtures
'@types/archy':
specifier: 'catalog:'
version: 0.0.36
'@types/ramda':
specifier: 'catalog:'
version: 0.29.12
@@ -8656,6 +8659,12 @@ importers:
specifier: workspace:*
version: 'link:'
text/tree-renderer:
devDependencies:
'@pnpm/text.tree-renderer':
specifier: workspace:*
version: 'link:'
tools/path:
devDependencies:
'@pnpm/tools.path':

View File

@@ -39,9 +39,8 @@ function hasPeerA (context) {
fs.writeFileSync('.pnpmfile.cjs', pnpmfile, 'utf8')
await execPnpm(['add', 'is-positive@1.0.0', '@pnpm.e2e/abc@1.0.0'])
const result = execPnpmSync(['list', '--find-by=hasPeerA'])
expect(result.stdout.toString()).toMatch(`dependencies:
@pnpm.e2e/abc 1.0.0
@pnpm.e2e/peer-a@^1.0.0`)
expect(result.stdout.toString()).toMatch('@pnpm.e2e/abc@1.0.0')
expect(result.stdout.toString()).toMatch('@pnpm.e2e/peer-a@^1.0.0')
})
test('pnpm list returns correct paths with global virtual store', async () => {

View File

@@ -35,6 +35,7 @@
},
"dependencies": {
"@pnpm/dependency-path": "workspace:*",
"@pnpm/fs.msgpack-file": "workspace:*",
"@pnpm/lockfile.detect-dep-types": "workspace:*",
"@pnpm/lockfile.fs": "workspace:*",
"@pnpm/lockfile.utils": "workspace:*",
@@ -44,7 +45,9 @@
"@pnpm/npm-package-arg": "catalog:",
"@pnpm/read-modules-dir": "workspace:*",
"@pnpm/read-package-json": "workspace:*",
"@pnpm/store.cafs": "workspace:*",
"@pnpm/types": "workspace:*",
"@pnpm/util.lex-comparator": "catalog:",
"normalize-path": "catalog:",
"realpath-missing": "catalog:",
"resolve-link-target": "catalog:",

View File

@@ -1,4 +1,4 @@
export interface PackageNode {
export interface DependencyNode {
alias: string
circular?: true
deduped?: true
@@ -7,7 +7,13 @@ export interface PackageNode {
* elided because this subtree was already expanded elsewhere in the tree.
*/
dedupedDependenciesCount?: number
dependencies?: PackageNode[]
/**
* Short hash of the peer dependency suffix in the depPath, used to
* distinguish deduped instances of the same package with different
* peer dependency resolutions.
*/
peersSuffixHash?: string
dependencies?: DependencyNode[]
dev?: boolean
isPeer: boolean
isSkipped: boolean

View File

@@ -16,19 +16,19 @@ import { type DependenciesField, type Finder, DEPENDENCIES_FIELDS, type Registri
import normalizePath from 'normalize-path'
import realpathMissing from 'realpath-missing'
import resolveLinkTarget from 'resolve-link-target'
import { type PackageNode } from './PackageNode.js'
import { type DependencyNode } from './DependencyNode.js'
import { buildDependencyGraph } from './buildDependencyGraph.js'
import { getTree, type BaseTreeOpts, type MaterializationCache } from './getTree.js'
import { type TreeNodeId } from './TreeNodeId.js'
export interface DependenciesHierarchy {
dependencies?: PackageNode[]
devDependencies?: PackageNode[]
optionalDependencies?: PackageNode[]
unsavedDependencies?: PackageNode[]
export interface DependenciesTree {
dependencies?: DependencyNode[]
devDependencies?: DependencyNode[]
optionalDependencies?: DependencyNode[]
unsavedDependencies?: DependencyNode[]
}
export async function buildDependenciesHierarchy (
export async function buildDependenciesTree (
projectPaths: string[] | undefined,
maybeOpts: {
depth: number
@@ -43,7 +43,7 @@ export async function buildDependenciesHierarchy (
modulesDir?: string
virtualStoreDirMaxLength: number
}
): Promise<{ [projectDir: string]: DependenciesHierarchy }> {
): Promise<{ [projectDir: string]: DependenciesTree }> {
if (!maybeOpts?.lockfileDir) {
throw new TypeError('opts.lockfileDir is required')
}
@@ -61,7 +61,7 @@ export async function buildDependenciesHierarchy (
.map((id) => path.join(maybeOpts.lockfileDir, id))
}
const result = {} as { [projectDir: string]: DependenciesHierarchy }
const result = {} as { [projectDir: string]: DependenciesTree }
const lockfileToUse = maybeOpts.checkWantedLockfileOnly ? wantedLockfile : currentLockfile
@@ -87,6 +87,7 @@ export async function buildDependenciesHierarchy (
search: maybeOpts.search,
showDedupedSearchMatches: maybeOpts.showDedupedSearchMatches ?? (maybeOpts.search != null),
skipped: new Set(modules?.skipped ?? []),
storeDir: modules?.storeDir,
modulesDir,
virtualStoreDir: modules?.virtualStoreDir,
virtualStoreDirMaxLength: modules?.virtualStoreDirMaxLength ?? maybeOpts.virtualStoreDirMaxLength,
@@ -124,7 +125,7 @@ export async function buildDependenciesHierarchy (
return [
projectPath,
await getHierarchy(projectPath),
] as [string, DependenciesHierarchy]
] as [string, DependenciesTree]
}))
for (const [projectPath, dependenciesHierarchy] of pairs) {
result[projectPath] = dependenciesHierarchy
@@ -142,7 +143,7 @@ interface HierarchyContext extends BaseTreeOpts {
async function dependenciesHierarchyForPackage (
opts: HierarchyContext,
projectPath: string
): Promise<DependenciesHierarchy> {
): Promise<DependenciesTree> {
const { currentLockfile, wantedLockfile } = opts
const importerId = getLockfileImporterId(opts.lockfileDir, projectPath)
@@ -156,7 +157,7 @@ async function dependenciesHierarchyForPackage (
const wantedPackages = wantedLockfile?.packages ?? {}
// Build a map from alias → dependency field for post-categorization.
const result: DependenciesHierarchy = {}
const result: DependenciesTree = {}
const fieldMap = new Map<string, DependenciesField>()
for (const field of DEPENDENCIES_FIELDS.sort().filter(f => opts.include[f])) {
result[field] = []
@@ -208,7 +209,7 @@ async function dependenciesHierarchyForPackage (
const pkg = await safeReadPackageJsonFromDir(pkgPath)
version = pkg?.version ?? 'undefined'
}
const pkg: PackageNode = {
const pkg: DependencyNode = {
alias: unsavedDep,
isMissing: false,
isPeer: false,

View File

@@ -0,0 +1,380 @@
import path from 'path'
import {
getLockfileImporterId,
type LockfileObject,
type ProjectSnapshot,
type PackageSnapshots,
} from '@pnpm/lockfile.fs'
import { nameVerFromPkgSnapshot } from '@pnpm/lockfile.utils'
import { readModulesManifest } from '@pnpm/modules-yaml'
import { normalizeRegistries } from '@pnpm/normalize-registries'
import { type DependenciesField, type DependencyManifest, type Finder, type Registries } from '@pnpm/types'
import { lexCompare } from '@pnpm/util.lex-comparator'
import semver from 'semver'
import realpathMissing from 'realpath-missing'
import { buildDependencyGraph, type DependencyGraph } from './buildDependencyGraph.js'
import { createPackagesSearcher } from './createPackagesSearcher.js'
import { peersSuffixHashFromDepPath } from './peersSuffixHash.js'
import { type TreeNodeId } from './TreeNodeId.js'
import { getPkgInfo } from './getPkgInfo.js'
/**
 * An edge of the inverted dependency graph: records which node depends on a
 * given package and under what alias.
 */
interface ReverseEdge {
  /** Serialized TreeNodeId of the dependent (parent) node. */
  parentSerialized: string
  /** Structured id of the dependent node (importer or package). */
  parentNodeId: TreeNodeId
  /** The alias under which the dependent refers to this package. */
  alias: string
}
/**
 * A node in the reverse (dependents) tree: a package or workspace project
 * that depends — directly or transitively — on the searched package.
 */
export interface DependentNode {
  name: string
  version: string
  /** The next level of dependents, walking towards workspace roots. */
  dependents?: DependentNode[]
  /** Set when this node is already on the current walk path (dependency cycle). */
  circular?: true
  /** Set when this node was already fully expanded elsewhere in the tree. */
  deduped?: true
  /** Short hash distinguishing peer-dep variants of the same name@version */
  peersSuffixHash?: string
  /** For importer leaf nodes: which dep field */
  depField?: DependenciesField
}
/**
 * The root of one reverse dependency tree: a package that matched the search
 * query, together with the chain of packages that depend on it.
 */
export interface DependentsTree {
  name: string
  version: string
  /** Resolved filesystem path to this package */
  path?: string
  /** Short hash distinguishing peer-dep variants of the same name@version */
  peersSuffixHash?: string
  /** Message returned by the finder function, if any */
  searchMessage?: string
  /** Direct dependents of the matched package. */
  dependents: DependentNode[]
}
/** Display name and version of a workspace project (importer). */
export interface ImporterInfo {
  name: string
  version: string
}
/** Shared state threaded through the reverse walk of the dependency graph. */
interface WalkContext {
  /** Child id → incoming edges (the inverted dependency graph). */
  reverseMap: Map<string, ReverseEdge[]>
  /** The forward dependency graph the reverse map was derived from. */
  graph: DependencyGraph
  /** Importer snapshots from the lockfile, keyed by importer id. */
  importers: Record<string, ProjectSnapshot>
  /** Package snapshots from the lockfile, keyed by depPath. */
  currentPackages: PackageSnapshots
  /** Importer id → display name/version for workspace projects. */
  importerInfoMap: Map<string, ImporterInfo>
  /** Tracks nodes on the current path for cycle detection. Mutated during walk. */
  visited: Set<string>
  /** Tracks nodes already fully expanded, for deduplication across branches. */
  expanded: Set<string>
}
/**
 * Builds one reverse dependency tree per package that matches the search
 * query: the matched package is the root, and its dependents are walked back
 * towards the selected workspace importers.
 *
 * @param packages - Package specifiers to search for.
 * @param projectPaths - Selected workspace project directories (respects filtering).
 * @param opts - Lockfile contents, importer metadata, and resolution settings.
 * @returns Matched trees, sorted by name, then version, then peers-suffix hash.
 */
export async function buildDependentsTree (
  packages: string[],
  projectPaths: string[],
  opts: {
    lockfileDir: string
    include?: { [field in DependenciesField]?: boolean }
    modulesDir?: string
    registries?: Registries
    finders?: Finder[]
    importerInfoMap: Map<string, ImporterInfo>
    lockfile: LockfileObject
  }
): Promise<DependentsTree[]> {
  const modulesDir = await realpathMissing(path.join(opts.lockfileDir, opts.modulesDir ?? 'node_modules'))
  const modules = await readModulesManifest(modulesDir)
  // Registries from the modules manifest take precedence over the caller's.
  const registries = normalizeRegistries({
    ...opts.registries,
    ...modules?.registries,
  })
  const storeDir = modules?.storeDir
  const virtualStoreDir = modules?.virtualStoreDir ?? path.join(modulesDir, '.pnpm')
  const virtualStoreDirMaxLength = modules?.virtualStoreDirMaxLength ?? 120
  // By default consider all three dependency fields.
  const include = opts.include ?? {
    dependencies: true,
    devDependencies: true,
    optionalDependencies: true,
  }
  // Build root IDs from the selected project paths (respects --filter / --recursive)
  const allRootIds: TreeNodeId[] = []
  for (const projectPath of projectPaths) {
    const importerId = getLockfileImporterId(opts.lockfileDir, projectPath)
    if (opts.lockfile.importers[importerId]) {
      allRootIds.push({ type: 'importer', importerId })
    }
  }
  const graph = buildDependencyGraph(allRootIds, {
    currentPackages: opts.lockfile.packages ?? {},
    importers: opts.lockfile.importers,
    include,
    lockfileDir: opts.lockfileDir,
  })
  const reverseMap = invertGraph(graph)
  const search = createPackagesSearcher(packages, opts.finders)
  const currentPackages = opts.lockfile.packages ?? {}
  // Pre-compute resolved filesystem paths for all package nodes by walking the
  // graph top-down from importers. This is needed for global virtual store
  // where symlinks must be resolved through each parent's node_modules.
  const resolvedPackageNodes = resolvePackageNodes(graph, currentPackages, {
    virtualStoreDir,
    virtualStoreDirMaxLength,
    modulesDir,
    registries,
    wantedPackages: currentPackages,
    storeDir,
  })
  // Scan all package nodes for matches.
  // A package matches if any of the aliases used to refer to it (from incoming
  // edges in the graph) or its canonical name match the search query.
  // Each distinct depPath (i.e. different peer dep resolutions) is kept as a
  // separate result so that peer variants are visible in the output.
  const trees: DependentsTree[] = []
  const ctx: WalkContext = {
    reverseMap,
    graph,
    importers: opts.lockfile.importers,
    currentPackages,
    importerInfoMap: opts.importerInfoMap,
    visited: new Set(),
    expanded: new Set(),
  }
  for (const [serialized, node] of graph.nodes) {
    if (node.nodeId.type !== 'package') continue
    const depPath = node.nodeId.depPath
    const snapshot = currentPackages[depPath]
    if (snapshot == null) continue
    const { name, version } = nameVerFromPkgSnapshot(depPath, snapshot)
    const pkgNode = resolvedPackageNodes.get(serialized)
    if (!pkgNode) continue
    const readManifest = pkgNode.readManifest
    // Check canonical name first
    let matched = search({ alias: name, name, version, readManifest })
    // Also check aliases from incoming edges (handles npm: protocol aliases)
    if (!matched) {
      const incomingEdges = reverseMap.get(serialized)
      if (incomingEdges) {
        for (const edge of incomingEdges) {
          if (edge.alias !== name) {
            matched = search({ alias: edge.alias, name, version, readManifest })
            if (matched) break
          }
        }
      }
    }
    if (!matched) continue
    // Reset walk state for each match: the matched node itself starts the
    // path, and dedup markers must not leak between result trees.
    ctx.visited = new Set([serialized])
    ctx.expanded = new Set()
    const dependents = walkReverse(serialized, ctx)
    const peersSuffixHash = peersSuffixHashFromDepPath(depPath)
    const tree: DependentsTree = {
      name,
      version,
      path: pkgNode.path,
      peersSuffixHash,
      dependents,
    }
    if (typeof matched === 'string') {
      tree.searchMessage = matched
    }
    trees.push(tree)
  }
  // Deterministic output order: name, then version (semver-aware when both
  // versions are valid semver), then peers-suffix hash.
  trees.sort((a, b) => {
    const nameCmp = lexCompare(a.name, b.name)
    if (nameCmp !== 0) return nameCmp
    const versionCmp = semver.valid(a.version) && semver.valid(b.version)
      ? semver.compare(a.version, b.version)
      : lexCompare(a.version, b.version)
    if (versionCmp !== 0) return versionCmp
    return lexCompare(a.peersSuffixHash ?? '', b.peersSuffixHash ?? '')
  })
  return trees
}
/**
 * Inverts the dependency graph: for every parent→child edge, a ReverseEdge
 * is recorded under the child's serialized id, so the tree of dependents can
 * be walked bottom-up.
 */
function invertGraph (graph: DependencyGraph): Map<string, ReverseEdge[]> {
  const incoming = new Map<string, ReverseEdge[]>()
  for (const [fromId, graphNode] of graph.nodes) {
    for (const { target, alias } of graphNode.edges) {
      // Edges without a resolved target (e.g. missing packages) are skipped.
      if (target == null) continue
      const entry: ReverseEdge = {
        parentSerialized: fromId,
        parentNodeId: graphNode.nodeId,
        alias,
      }
      const existing = incoming.get(target.id)
      if (existing != null) {
        existing.push(entry)
      } else {
        incoming.set(target.id, [entry])
      }
    }
  }
  return incoming
}
/**
 * Walks the dependency graph top-down from importer nodes and resolves the
 * filesystem path for every package node. This is necessary for global virtual
 * store where the correct path can only be obtained by following symlinks
 * through each parent's node_modules directory.
 */
function resolvePackageNodes (
  graph: DependencyGraph,
  currentPackages: PackageSnapshots,
  opts: {
    virtualStoreDir: string
    virtualStoreDirMaxLength: number
    modulesDir: string
    registries: Registries
    wantedPackages: PackageSnapshots
    storeDir?: string
  }
): Map<string, { path: string, readManifest: () => DependencyManifest }> {
  // Keyed by serialized node id; first resolution wins, so each package node
  // is resolved at most once even if reachable through multiple parents.
  const resolved = new Map<string, { path: string, readManifest: () => DependencyManifest }>()
  function walk (serialized: string, parentDir: string | undefined): void {
    const node = graph.nodes.get(serialized)
    if (!node) return
    for (const edge of node.edges) {
      if (edge.target == null) continue
      const childSerialized = edge.target.id
      if (resolved.has(childSerialized)) continue
      if (edge.target.nodeId.type !== 'package') continue
      const { pkgInfo, readManifest } = getPkgInfo({
        ...opts,
        alias: edge.alias,
        currentPackages,
        depTypes: {},
        linkedPathBaseDir: opts.modulesDir, // This might need adjustment for linked deps?
        parentDir,
        ref: edge.target.nodeId.depPath,
        skipped: new Set(),
      })
      resolved.set(childSerialized, { path: pkgInfo.path, readManifest })
      // Recurse with the child's resolved path as the next parentDir so
      // grandchildren resolve through this child's node_modules.
      walk(childSerialized, pkgInfo.path)
    }
  }
  // Start the walk only from importer nodes; package nodes are reached
  // through their incoming edges.
  for (const [serialized, node] of graph.nodes) {
    if (node.nodeId.type === 'importer') {
      walk(serialized, undefined)
    }
  }
  return resolved
}
/**
 * Recursively collects the dependents of `nodeId` by following the inverted
 * graph towards workspace importers. Cycles are emitted as `circular` leaves;
 * packages already expanded in another branch are emitted as `deduped` leaves.
 *
 * Mutates `ctx.visited` (current path) and `ctx.expanded` (all branches)
 * while walking.
 */
function walkReverse (
  nodeId: string,
  ctx: WalkContext
): DependentNode[] {
  const reverseEdges = ctx.reverseMap.get(nodeId)
  if (reverseEdges == null || reverseEdges.length === 0) return []
  // Sort edges by parent name (with serialized ID as tiebreaker) so that
  // deduplication is deterministic: the first parent always gets fully expanded.
  const sortedEdges = [...reverseEdges].sort((a, b) => {
    const cmp = lexCompare(resolveParentName(a, ctx), resolveParentName(b, ctx))
    if (cmp !== 0) return cmp
    return lexCompare(a.parentSerialized, b.parentSerialized)
  })
  const dependents: DependentNode[] = []
  for (const edge of sortedEdges) {
    // Cycle detection: this node is already on our current path
    if (ctx.visited.has(edge.parentSerialized)) {
      const parentNode = ctx.graph.nodes.get(edge.parentSerialized)
      if (parentNode?.nodeId.type === 'importer') {
        const info = ctx.importerInfoMap.get(parentNode.nodeId.importerId)
        if (info) {
          dependents.push({
            name: info.name,
            version: info.version,
            circular: true,
          })
        }
      } else if (parentNode?.nodeId.type === 'package') {
        const snapshot = ctx.currentPackages[parentNode.nodeId.depPath]
        if (snapshot) {
          const { name, version } = nameVerFromPkgSnapshot(parentNode.nodeId.depPath, snapshot)
          dependents.push({ name, version, circular: true })
        }
      }
      continue
    }
    const parentGraphNode = ctx.graph.nodes.get(edge.parentSerialized)
    if (parentGraphNode == null) continue
    if (parentGraphNode.nodeId.type === 'importer') {
      // Importers are leaves: the walk stops at workspace roots. Fall back to
      // the importer id as the display name when no manifest info is known.
      const importerId = parentGraphNode.nodeId.importerId
      const info = ctx.importerInfoMap.get(importerId) ?? { name: importerId, version: '' }
      const depField = getDepFieldForAlias(edge.alias, ctx.importers[importerId])
      dependents.push({
        name: info.name,
        version: info.version,
        depField,
      })
    } else if (parentGraphNode.nodeId.type === 'package') {
      const snapshot = ctx.currentPackages[parentGraphNode.nodeId.depPath]
      if (snapshot == null) continue
      const { name, version } = nameVerFromPkgSnapshot(parentGraphNode.nodeId.depPath, snapshot)
      const peersSuffixHash = peersSuffixHashFromDepPath(parentGraphNode.nodeId.depPath)
      // Deduplication: if this package was already expanded elsewhere in the
      // tree, show it as a leaf to keep the output bounded.
      if (ctx.expanded.has(edge.parentSerialized)) {
        dependents.push({ name, version, peersSuffixHash, deduped: true })
        continue
      }
      // Recurse with this parent on the path; remove it from `visited` (but
      // not `expanded`) afterwards so only true cycles are reported.
      ctx.visited.add(edge.parentSerialized)
      ctx.expanded.add(edge.parentSerialized)
      const childDependents = walkReverse(edge.parentSerialized, ctx)
      ctx.visited.delete(edge.parentSerialized)
      dependents.push({
        name,
        version,
        peersSuffixHash,
        dependents: childDependents.length > 0 ? childDependents : undefined,
      })
    }
  }
  return dependents
}
/**
 * Returns a display name for the parent side of a reverse edge: the
 * importer's known name (falling back to its importer id), or the package
 * name derived from its lockfile snapshot. Returns '' for unknown nodes.
 */
function resolveParentName (edge: ReverseEdge, ctx: WalkContext): string {
  const node = ctx.graph.nodes.get(edge.parentSerialized)
  if (node == null) return ''
  if (node.nodeId.type === 'importer') {
    const importerName = ctx.importerInfoMap.get(node.nodeId.importerId)?.name
    return importerName ?? node.nodeId.importerId
  }
  const pkgSnapshot = ctx.currentPackages[node.nodeId.depPath]
  return pkgSnapshot == null
    ? ''
    : nameVerFromPkgSnapshot(node.nodeId.depPath, pkgSnapshot).name
}
/**
 * Determines which dependency field of a workspace project declares the given
 * alias. Checks devDependencies first, then optionalDependencies, then
 * dependencies (same precedence as the original lookup order); returns
 * undefined when the alias is not declared at all.
 */
function getDepFieldForAlias (
  alias: string,
  importerSnapshot: ProjectSnapshot
): DependenciesField | undefined {
  const lookupOrder = ['devDependencies', 'optionalDependencies', 'dependencies'] as const
  for (const field of lookupOrder) {
    if (importerSnapshot[field]?.[alias] != null) {
      return field
    }
  }
  return undefined
}

View File

@@ -1,5 +1,4 @@
import path from 'path'
import fs from 'fs'
import {
type PackageSnapshot,
type PackageSnapshots,
@@ -11,9 +10,11 @@ import {
} from '@pnpm/lockfile.utils'
import { type DepTypes, DepType } from '@pnpm/lockfile.detect-dep-types'
import { type DependencyManifest, type Registries } from '@pnpm/types'
import { depPathToFilename, refToRelative } from '@pnpm/dependency-path'
import { refToRelative } from '@pnpm/dependency-path'
import { readPackageJsonFromDirSync } from '@pnpm/read-package-json'
import normalizePath from 'normalize-path'
import { readManifestFromCafs } from './readManifestFromCafs.js'
import { resolvePackagePath } from './resolvePackagePath.js'
export interface GetPkgInfoOpts {
readonly alias: string
@@ -22,6 +23,7 @@ export interface GetPkgInfoOpts {
readonly peers?: Set<string>
readonly registries: Registries
readonly skipped: Set<string>
readonly storeDir?: string
readonly wantedPackages: PackageSnapshots
readonly virtualStoreDir?: string
readonly virtualStoreDirMaxLength: number
@@ -63,6 +65,7 @@ export function getPkgInfo (opts: GetPkgInfoOpts): { pkgInfo: PackageInfo, readM
let optional: true | undefined
let isSkipped: boolean = false
let isMissing: boolean = false
let integrity: string | undefined
const depPath = refToRelative(opts.ref, opts.alias)
if (depPath) {
let pkgSnapshot!: PackageSnapshot
@@ -87,6 +90,9 @@ export function getPkgInfo (opts: GetPkgInfoOpts): { pkgInfo: PackageInfo, readM
resolved = (pkgSnapshotToResolution(depPath, pkgSnapshot, opts.registries) as TarballResolution).tarball
depType = opts.depTypes[depPath]
optional = pkgSnapshot.optional
if ('integrity' in pkgSnapshot.resolution) {
integrity = pkgSnapshot.resolution.integrity as string
}
} else {
name = opts.alias
version = opts.ref
@@ -94,41 +100,18 @@ export function getPkgInfo (opts: GetPkgInfoOpts): { pkgInfo: PackageInfo, readM
if (!version) {
version = opts.ref
}
let fullPackagePath = depPath
? path.join(opts.virtualStoreDir ?? '.pnpm', depPathToFilename(depPath, opts.virtualStoreDirMaxLength), 'node_modules', name)
const fullPackagePath = depPath
? resolvePackagePath({
depPath,
name,
alias: opts.alias,
virtualStoreDir: opts.virtualStoreDir ?? '.pnpm',
virtualStoreDirMaxLength: opts.virtualStoreDirMaxLength,
modulesDir: opts.modulesDir,
parentDir: opts.parentDir,
})
: path.join(opts.linkedPathBaseDir, opts.ref.slice(5))
// Resolve symlink for global virtual store.
// Global virtual store is detected when virtualStoreDir is outside the project's node_modules.
// We use path.resolve() to normalize paths for reliable comparison.
const resolvedVirtualStoreDir = opts.virtualStoreDir ? path.resolve(opts.virtualStoreDir) : undefined
const resolvedModulesDir = opts.modulesDir ? path.resolve(opts.modulesDir) : undefined
const isGlobalVirtualStore = resolvedVirtualStoreDir && resolvedModulesDir &&
!resolvedVirtualStoreDir.startsWith(resolvedModulesDir + path.sep) &&
resolvedVirtualStoreDir !== resolvedModulesDir
// For global virtual store, resolve symlinks to get the actual path with hash
if (depPath && isGlobalVirtualStore) {
try {
let nodeModulesDir: string
if (opts.parentDir) {
// parentDir example: /store/.../node_modules/express
// /store/.../node_modules/@scope/pkg
// We need the node_modules directory to find sibling packages
nodeModulesDir = path.dirname(opts.parentDir)
// For scoped packages (@org/pkg), go up one more level
if (path.basename(nodeModulesDir).startsWith('@')) {
nodeModulesDir = path.dirname(nodeModulesDir)
}
} else {
nodeModulesDir = opts.modulesDir!
}
fullPackagePath = fs.realpathSync(path.join(nodeModulesDir, opts.alias))
} catch {
// Fallback to constructed path if symlink doesn't exist
}
}
if (version.startsWith('link:') && opts.rewriteLinkVersionDir) {
version = `link:${normalizePath(path.relative(opts.rewriteLinkVersionDir, fullPackagePath))}`
}
@@ -155,7 +138,13 @@ export function getPkgInfo (opts: GetPkgInfoOpts): { pkgInfo: PackageInfo, readM
}
return {
pkgInfo: packageInfo,
readManifest: () => readPackageJsonFromDirSync(fullPackagePath),
readManifest: () => {
if (integrity && opts.storeDir) {
const manifest = readManifestFromCafs(opts.storeDir, { integrity, name, version })
if (manifest) return manifest
}
return readPackageJsonFromDirSync(fullPackagePath)
},
}
}

View File

@@ -2,9 +2,11 @@ import path from 'path'
import { type PackageSnapshots, type ProjectSnapshot } from '@pnpm/lockfile.fs'
import { type DepTypes } from '@pnpm/lockfile.detect-dep-types'
import { type Finder, type Registries } from '@pnpm/types'
import { lexCompare } from '@pnpm/util.lex-comparator'
import { type DependencyGraph } from './buildDependencyGraph.js'
import { type PackageNode } from './PackageNode.js'
import { type DependencyNode } from './DependencyNode.js'
import { getPkgInfo } from './getPkgInfo.js'
import { peersSuffixHashFromDepPath } from './peersSuffixHash.js'
import { serializeTreeNodeId, type TreeNodeId } from './TreeNodeId.js'
export interface BaseTreeOpts {
@@ -20,6 +22,7 @@ export interface BaseTreeOpts {
skipped: Set<string>
registries: Registries
depTypes: DepTypes
storeDir?: string
virtualStoreDir?: string
virtualStoreDirMaxLength: number
modulesDir?: string
@@ -49,7 +52,7 @@ type MaterializationContext =
// ---------------------------------------------------------------------------
interface CachedSubtree {
/** Total number of PackageNode objects in the subtree (recursive). */
/** Total number of DependencyNode objects in the subtree (recursive). */
count: number
/** Whether any node in this subtree matched the search. */
hasSearchMatch: boolean
@@ -67,7 +70,7 @@ export type MaterializationCache = Map<string, CachedSubtree>
export function getTree (
opts: GetTreeOpts,
parentId: TreeNodeId
): PackageNode[] {
): DependencyNode[] {
const ancestors = new Set<string>()
ancestors.add(serializeTreeNodeId(parentId))
@@ -94,7 +97,7 @@ export function getTree (
}
// ---------------------------------------------------------------------------
// Materialize PackageNode[] tree from the graph
// Materialize DependencyNode[] tree from the graph
// ---------------------------------------------------------------------------
function materializeCacheKey (nodeId: string, depth: number): string {
@@ -103,8 +106,8 @@ function materializeCacheKey (nodeId: string, depth: number): string {
}
interface MaterializationResult {
nodes: PackageNode[]
/** Total number of PackageNode objects in `nodes` (recursive). */
nodes: DependencyNode[]
/** Total number of DependencyNode objects in `nodes` (recursive). */
count: number
/** Whether any node in this subtree matched the search. */
hasSearchMatch: boolean
@@ -114,10 +117,10 @@ interface MaterializationResult {
/**
* Core materialization function. Walks the pre-built dependency graph to
* produce the `PackageNode[]` tree that downstream renderers expect.
* produce the `DependencyNode[]` tree that downstream renderers expect.
*
* The cache is keyed by `(nodeId, remainingDepth)` and stores the
* `PackageNode[]` children of a given node. It is populated
* `DependencyNode[]` children of a given node. It is populated
* unconditionally, including results where recursion was truncated at a
* cycle boundary. Cycle detection uses a mutable `ancestors` Set to
* stop recursion but does NOT set the `circular` flag — that is handled
@@ -144,12 +147,16 @@ function materializeChildren (
? path.join(ctx.lockfileDir, parentId.importerId)
: ctx.lockfileDir
const resultDependencies: PackageNode[] = []
const resultDependencies: DependencyNode[] = []
let resultCount = 0
let resultHasSearchMatch = false
const resultSearchMessages = ctx.showDedupedSearchMatches ? [] as string[] : undefined
for (const edge of graphNode.edges) {
// Sort edges by alias so that deduplication is deterministic:
// the alphabetically-first dependency always gets fully expanded.
const sortedEdges = [...graphNode.edges].sort((a, b) => lexCompare(a.alias, b.alias))
for (const edge of sortedEdges) {
if (ctx.onlyProjects && edge.target?.nodeId.type !== 'importer') {
continue
}
@@ -170,7 +177,7 @@ function materializeChildren (
readManifest,
})
let newEntry: PackageNode | null = null
let newEntry: DependencyNode | null = null
let childCount = 0
let dedupedHasSearchMatch = false
let dedupedSearchMessages: string[] = []
@@ -183,7 +190,7 @@ function materializeChildren (
continue
}
} else {
let dependencies: PackageNode[]
let dependencies: DependencyNode[]
let childHasSearchMatch = false
let childSearchMessages: string[] = []
let dedupedCount: number | undefined
@@ -244,6 +251,12 @@ function materializeChildren (
newEntry.deduped = true
newEntry.dedupedDependenciesCount = dedupedCount
}
if (edge.target.nodeId.type === 'package') {
const peerHash = peersSuffixHashFromDepPath(edge.target.nodeId.depPath)
if (peerHash != null) {
newEntry.peersSuffixHash = peerHash
}
}
}
if (searchMatch) {
@@ -274,16 +287,16 @@ function materializeChildren (
}
/**
* Walks the materialized PackageNode[] tree and marks circular back-edges.
* Walks the materialized DependencyNode[] tree and marks circular back-edges.
* A node whose `path` matches an ancestor is a cycle — it gets
* `circular: true` and its dependencies (if any) are stripped.
*
* With deduplication in place (deduped nodes are leaves), the walk is O(N).
*/
function fixCircularRefs (
nodes: PackageNode[],
nodes: DependencyNode[],
ancestors: Set<string>
): PackageNode[] {
): DependencyNode[] {
let changed = false
const result = nodes.map(node => {
// A node whose path matches an ancestor is a circular back-edge.

View File

@@ -1,3 +1,4 @@
export { buildDependenciesHierarchy, type DependenciesHierarchy } from './buildDependenciesHierarchy.js'
export { type PackageNode } from './PackageNode.js'
export { buildDependenciesTree, type DependenciesTree } from './buildDependenciesTree.js'
export { type DependencyNode } from './DependencyNode.js'
export { createPackagesSearcher } from './createPackagesSearcher.js'
export { buildDependentsTree, type DependentsTree, type DependentNode, type ImporterInfo } from './buildDependentsTree.js'

View File

@@ -0,0 +1,8 @@
import crypto from 'crypto'
import { parseDepPath } from '@pnpm/dependency-path'
/**
 * Derives a short display hash from the peer-dependency suffix of a dep path.
 * Returns `undefined` when the dep path carries no peer-dep graph hash.
 */
export function peersSuffixHashFromDepPath (depPath: string): string | undefined {
  const { peerDepGraphHash } = parseDepPath(depPath)
  if (peerDepGraphHash) {
    // 4 hex chars (16 bits) are enough to tell peer-dep variants apart in CLI output.
    const digest = crypto.createHash('sha256').update(peerDepGraphHash).digest('hex')
    return digest.substring(0, 4)
  }
  return undefined
}

View File

@@ -0,0 +1,24 @@
import { readMsgpackFileSync } from '@pnpm/fs.msgpack-file'
import { getIndexFilePathInCafs, readManifestFromStore, type PackageFilesIndex } from '@pnpm/store.cafs'
import { type DependencyManifest } from '@pnpm/types'
/**
 * Attempts to read a package manifest from the content-addressable store (CAFS)
 * using its integrity hash. Returns `undefined` if the manifest cannot be read
 * (missing index file, unreadable msgpack, or no manifest stored).
 */
export function readManifestFromCafs (storeDir: string, pkg: {
  integrity: string
  name: string
  version: string
}): DependencyManifest | undefined {
  try {
    // The files index is located via the package's integrity hash.
    const indexPath = getIndexFilePathInCafs(storeDir, pkg.integrity, `${pkg.name}@${pkg.version}`)
    const filesIndex = readMsgpackFileSync<PackageFilesIndex>(indexPath)
    const manifest = readManifestFromStore(storeDir, filesIndex)
    return manifest ? manifest as DependencyManifest : undefined
  } catch {
    // Any read failure is treated as "manifest unavailable".
    return undefined
  }
}

View File

@@ -0,0 +1,60 @@
import fs from 'fs'
import path from 'path'
import { depPathToFilename } from '@pnpm/dependency-path'
/**
 * Resolves the filesystem path for a package identified by its depPath.
 *
 * For local virtual stores the path is constructed directly from the
 * virtual store directory. For global virtual stores (where virtualStoreDir
 * lies outside modulesDir), the symlink in the relevant node_modules
 * directory is resolved to find the actual store location; if the symlink
 * cannot be resolved, the constructed path is returned as a fallback.
 */
export function resolvePackagePath (opts: {
  depPath: string
  name: string
  alias: string
  virtualStoreDir: string
  virtualStoreDirMaxLength: number
  modulesDir?: string
  parentDir?: string
}): string {
  const constructedPath = path.join(
    opts.virtualStoreDir,
    depPathToFilename(opts.depPath, opts.virtualStoreDirMaxLength),
    'node_modules',
    opts.name
  )
  // A global virtual store is detected when virtualStoreDir is outside the
  // project's node_modules directory.
  const virtualStoreDirAbs = path.resolve(opts.virtualStoreDir)
  const modulesDirAbs = opts.modulesDir ? path.resolve(opts.modulesDir) : undefined
  const insideModulesDir = modulesDirAbs != null && (
    virtualStoreDirAbs === modulesDirAbs ||
    virtualStoreDirAbs.startsWith(modulesDirAbs + path.sep)
  )
  if (modulesDirAbs == null || insideModulesDir) {
    return constructedPath
  }
  // Global virtual store: locate the node_modules directory that holds the
  // symlink for this package's alias.
  let nodeModulesDir: string
  if (opts.parentDir) {
    // parentDir example: /store/.../node_modules/express
    //                    /store/.../node_modules/@scope/pkg
    // We need the node_modules directory to find sibling packages.
    nodeModulesDir = path.dirname(opts.parentDir)
    // For scoped packages (@org/pkg), go up one more level.
    if (path.basename(nodeModulesDir).startsWith('@')) {
      nodeModulesDir = path.dirname(nodeModulesDir)
    }
  } else {
    nodeModulesDir = opts.modulesDir!
  }
  try {
    return fs.realpathSync(path.join(nodeModulesDir, opts.alias))
  } catch {
    // Fallback to constructed path if the symlink doesn't exist.
    return constructedPath
  }
}

View File

@@ -1,6 +1,6 @@
import { refToRelative } from '@pnpm/dependency-path'
import { type PackageSnapshots } from '@pnpm/lockfile.fs'
import { type PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DepPath, type Finder } from '@pnpm/types'
import { buildDependencyGraph } from '../lib/buildDependencyGraph.js'
import { getTree, type MaterializationCache } from '../lib/getTree.js'
@@ -55,7 +55,7 @@ function refToRelativeOrThrow (reference: string, pkgName: string): DepPath {
}
/**
* If {@see PackageNode} has no dependencies, the `dependencies` field is not
* If {@see DependencyNode} has no dependencies, the `dependencies` field is not
* set at all.
*
* This is usually desirable. However, Jest structural matchers currently have
@@ -67,10 +67,10 @@ function refToRelativeOrThrow (reference: string, pkgName: string): DepPath {
* expect(node).toMatchObject({ dependencies: undefined })
* ```
*/
function normalizePackageNodeForTesting (nodes: readonly PackageNode[]): PackageNode[] {
function normalizeDependencyNodeForTesting (nodes: readonly DependencyNode[]): DependencyNode[] {
return nodes.map(node => ({
...node,
dependencies: node.dependencies != null ? normalizePackageNodeForTesting(node.dependencies) : undefined,
dependencies: node.dependencies != null ? normalizeDependencyNodeForTesting(node.dependencies) : undefined,
}))
}
@@ -110,7 +110,7 @@ describe('getTree', () => {
}
test('full test case to print when max depth is large', () => {
const result = normalizePackageNodeForTesting(getTreeWithGraph({ ...getTreeArgs, maxDepth: 9999, virtualStoreDirMaxLength: 120 }, rootNodeId))
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({ ...getTreeArgs, maxDepth: 9999, virtualStoreDirMaxLength: 120 }, rootNodeId))
expect(result).toEqual([
expect.objectContaining({
@@ -137,7 +137,7 @@ describe('getTree', () => {
test('max depth of 1 to print flat dependencies', () => {
const result = getTreeWithGraph({ ...getTreeArgs, maxDepth: 1, virtualStoreDirMaxLength: 120 }, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({ alias: 'b1', dependencies: undefined }),
expect.objectContaining({ alias: 'b2', dependencies: undefined }),
expect.objectContaining({ alias: 'b3', dependencies: undefined }),
@@ -147,7 +147,7 @@ describe('getTree', () => {
test('max depth of 2 to print a1 -> b1 -> c1, but not d1', () => {
const result = getTreeWithGraph({ ...getTreeArgs, maxDepth: 2, virtualStoreDirMaxLength: 120 }, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'b1',
dependencies: [
@@ -211,7 +211,7 @@ describe('getTree', () => {
virtualStoreDirMaxLength: 120,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
// depth 0
expect.objectContaining({
alias: 'glob',
@@ -270,7 +270,7 @@ describe('getTree', () => {
virtualStoreDirMaxLength: 120,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -347,7 +347,7 @@ describe('getTree', () => {
wantedPackages: currentPackages,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -409,7 +409,7 @@ describe('getTree', () => {
],
})
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expectedA,
expect.objectContaining({
alias: 'c',
@@ -450,7 +450,7 @@ describe('getTree', () => {
wantedPackages: currentPackages,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -511,7 +511,7 @@ describe('getTree', () => {
wantedPackages: currentPackages,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -592,7 +592,7 @@ describe('getTree', () => {
wantedPackages: currentPackages,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -632,7 +632,7 @@ describe('getTree', () => {
wantedPackages: currentPackages,
}, rootNodeId)
expect(normalizePackageNodeForTesting(result)).toEqual([
expect(normalizeDependencyNodeForTesting(result)).toEqual([
expect.objectContaining({
alias: 'a',
dependencies: [
@@ -690,7 +690,7 @@ describe('getTree', () => {
}
const rootNodeId: TreeNodeId = { type: 'importer', importerId: '.' }
const result = normalizePackageNodeForTesting(getTreeWithGraph({
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({
...commonMockGetTreeArgs,
maxDepth: Infinity,
currentPackages,
@@ -734,7 +734,7 @@ describe('getTree', () => {
}
const rootNodeId: TreeNodeId = { type: 'importer', importerId: '.' }
const result = normalizePackageNodeForTesting(getTreeWithGraph({
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({
...commonMockGetTreeArgs,
maxDepth: Infinity,
currentPackages,
@@ -787,7 +787,7 @@ describe('getTree', () => {
const search: Finder = ({ name }) => name === 'target'
const result = normalizePackageNodeForTesting(getTreeWithGraph({
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({
...commonMockGetTreeArgs,
maxDepth: Infinity,
currentPackages,
@@ -840,7 +840,7 @@ describe('getTree', () => {
const search: Finder = ({ name }) => name === 'target' ? 'depends on target' : false
const result = normalizePackageNodeForTesting(getTreeWithGraph({
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({
...commonMockGetTreeArgs,
maxDepth: Infinity,
currentPackages,
@@ -896,7 +896,7 @@ describe('getTree', () => {
const search: Finder = ({ name }) => name === 'target'
const result = normalizePackageNodeForTesting(getTreeWithGraph({
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({
...commonMockGetTreeArgs,
maxDepth: Infinity,
currentPackages,
@@ -1246,7 +1246,7 @@ describe('getTree', () => {
currentPackages,
wantedPackages: currentPackages,
}
const result = normalizePackageNodeForTesting(getTreeWithGraph({ ...getTreeArgs, maxDepth: 9999, virtualStoreDirMaxLength: 120 }, rootNodeId))
const result = normalizeDependencyNodeForTesting(getTreeWithGraph({ ...getTreeArgs, maxDepth: 9999, virtualStoreDirMaxLength: 120 }, rootNodeId))
expect(result).toEqual([
expect.objectContaining({

View File

@@ -3,7 +3,7 @@ import fs from 'fs'
import path from 'path'
import { WANTED_LOCKFILE } from '@pnpm/constants'
import { fixtures } from '@pnpm/test-fixtures'
import { buildDependenciesHierarchy, type PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { buildDependenciesTree, type DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { depPathToFilename } from '@pnpm/dependency-path'
const virtualStoreDirMaxLength = process.platform === 'win32' ? 60 : 120
@@ -21,7 +21,7 @@ const workspaceWithNestedWorkspaceDeps = f.find('workspace-with-nested-workspace
const customModulesDirFixture = f.find('custom-modules-dir')
test('one package depth 0', async () => {
const tree = await buildDependenciesHierarchy([generalFixture], { depth: 0, lockfileDir: generalFixture, virtualStoreDirMaxLength })
const tree = await buildDependenciesTree([generalFixture], { depth: 0, lockfileDir: generalFixture, virtualStoreDirMaxLength })
const modulesDir = path.join(generalFixture, 'node_modules')
expect(tree).toStrictEqual({
@@ -82,7 +82,7 @@ test('one package depth 0', async () => {
})
test('one package depth 1', async () => {
const tree = await buildDependenciesHierarchy([generalFixture], { depth: 1, lockfileDir: generalFixture, virtualStoreDirMaxLength })
const tree = await buildDependenciesTree([generalFixture], { depth: 1, lockfileDir: generalFixture, virtualStoreDirMaxLength })
const modulesDir = path.join(generalFixture, 'node_modules')
expect(tree).toStrictEqual({
@@ -171,7 +171,7 @@ test('one package depth 1', async () => {
})
test('only prod depth 0', async () => {
const tree = await buildDependenciesHierarchy(
const tree = await buildDependenciesTree(
[generalFixture],
{
depth: 0,
@@ -217,7 +217,7 @@ test('only prod depth 0', async () => {
})
test('only dev depth 0', async () => {
const tree = await buildDependenciesHierarchy(
const tree = await buildDependenciesTree(
[generalFixture],
{
depth: 0,
@@ -252,7 +252,7 @@ test('only dev depth 0', async () => {
})
test('hierarchy for no packages', async () => {
const tree = await buildDependenciesHierarchy([generalFixture], {
const tree = await buildDependenciesTree([generalFixture], {
depth: 100,
lockfileDir: generalFixture,
search: () => false,
@@ -269,7 +269,7 @@ test('hierarchy for no packages', async () => {
})
test('filter 1 package with depth 0', async () => {
const tree = await buildDependenciesHierarchy(
const tree = await buildDependenciesTree(
[generalFixture],
{
depth: 0,
@@ -303,7 +303,7 @@ test('filter 1 package with depth 0', async () => {
})
test('circular dependency', async () => {
const tree = await buildDependenciesHierarchy([circularFixture], {
const tree = await buildDependenciesTree([circularFixture], {
depth: 1000,
lockfileDir: circularFixture,
virtualStoreDirMaxLength,
@@ -314,14 +314,14 @@ test('circular dependency', async () => {
[circularFixture]: {
dependencies: JSON.parse(fs.readFileSync(path.join(import.meta.dirname, 'circularTree.json'), 'utf8'))
.dependencies
.map((dep: PackageNode) => resolvePaths(modulesDir, dep)),
.map((dep: DependencyNode) => resolvePaths(modulesDir, dep)),
devDependencies: [],
optionalDependencies: [],
},
})
})
function resolvePaths (modulesDir: string, node: PackageNode): PackageNode {
function resolvePaths (modulesDir: string, node: DependencyNode): DependencyNode {
const p = path.resolve(modulesDir, '.pnpm', node.path, 'node_modules', node.name)
if (node.dependencies == null) {
return {
@@ -339,7 +339,7 @@ function resolvePaths (modulesDir: string, node: PackageNode): PackageNode {
}
test('local package depth 0', async () => {
const tree = await buildDependenciesHierarchy([withFileDepFixture], {
const tree = await buildDependenciesTree([withFileDepFixture], {
depth: 1,
lockfileDir: withFileDepFixture,
virtualStoreDirMaxLength,
@@ -377,7 +377,7 @@ test('local package depth 0', async () => {
})
test('on a package that has only links', async () => {
const tree = await buildDependenciesHierarchy([withLinksOnlyFixture], {
const tree = await buildDependenciesTree([withLinksOnlyFixture], {
depth: 1000,
lockfileDir: withLinksOnlyFixture,
virtualStoreDirMaxLength,
@@ -404,7 +404,7 @@ test('on a package that has only links', async () => {
// Test for feature request at https://github.com/pnpm/pnpm/issues/4154
test('on a package with nested workspace links', async () => {
const tree = await buildDependenciesHierarchy(
const tree = await buildDependenciesTree(
[workspaceWithNestedWorkspaceDeps],
{
depth: 1000,
@@ -448,7 +448,7 @@ test('on a package with nested workspace links', async () => {
test('unsaved dependencies are listed', async () => {
const modulesDir = path.join(withUnsavedDepsFixture, 'node_modules')
expect(await buildDependenciesHierarchy([withUnsavedDepsFixture], {
expect(await buildDependenciesTree([withUnsavedDepsFixture], {
depth: 0,
lockfileDir: withUnsavedDepsFixture,
virtualStoreDirMaxLength,
@@ -488,7 +488,7 @@ test('unsaved dependencies are listed', async () => {
test('unsaved dependencies are listed and filtered', async () => {
const modulesDir = path.join(withUnsavedDepsFixture, 'node_modules')
expect(
await buildDependenciesHierarchy(
await buildDependenciesTree(
[withUnsavedDepsFixture],
{
depth: 0,
@@ -521,7 +521,7 @@ test('unsaved dependencies are listed and filtered', async () => {
// Covers https://github.com/pnpm/pnpm/issues/1549
test(`do not fail on importers that are not in current ${WANTED_LOCKFILE}`, async () => {
expect(await buildDependenciesHierarchy([fixtureMonorepo], {
expect(await buildDependenciesTree([fixtureMonorepo], {
depth: 0,
lockfileDir: fixtureMonorepo,
virtualStoreDirMaxLength,
@@ -531,7 +531,7 @@ test(`do not fail on importers that are not in current ${WANTED_LOCKFILE}`, asyn
test('dependency with an alias', async () => {
const modulesDir = path.join(withAliasedDepFixture, 'node_modules')
expect(
await buildDependenciesHierarchy([withAliasedDepFixture], {
await buildDependenciesTree([withAliasedDepFixture], {
depth: 0,
lockfileDir: withAliasedDepFixture,
virtualStoreDirMaxLength,
@@ -558,7 +558,7 @@ test('dependency with an alias', async () => {
})
test('peer dependencies', async () => {
const hierarchy = await buildDependenciesHierarchy([withPeerFixture], {
const hierarchy = await buildDependenciesTree([withPeerFixture], {
depth: 1,
lockfileDir: withPeerFixture,
virtualStoreDirMaxLength,
@@ -572,7 +572,7 @@ test('dependency without a package.json', async () => {
const org = 'denolib'
const pkg = 'camelcase'
const commit = 'aeb6b15f9c9957c8fa56f9731e914c4d8a6d2f2b'
const tree = await buildDependenciesHierarchy([withNonPackageDepFixture], {
const tree = await buildDependenciesTree([withNonPackageDepFixture], {
depth: 0,
lockfileDir: withNonPackageDepFixture,
virtualStoreDirMaxLength,
@@ -609,7 +609,7 @@ test('dependency without a package.json', async () => {
})
test('on custom modules-dir workspaces', async () => {
const tree = await buildDependenciesHierarchy(
const tree = await buildDependenciesTree(
[customModulesDirFixture, path.join(customModulesDirFixture, './packages/foo'), path.join(customModulesDirFixture, './packages/bar')],
{
depth: 1000,

View File

@@ -18,6 +18,9 @@
{
"path": "../../config/normalize-registries"
},
{
"path": "../../fs/msgpack-file"
},
{
"path": "../../fs/read-modules-dir"
},
@@ -44,6 +47,9 @@
},
{
"path": "../../pkg-manifest/read-package-json"
},
{
"path": "../../store/cafs"
}
]
}

View File

@@ -36,20 +36,20 @@
"compile": "tsgo --build && pnpm run lint --fix"
},
"dependencies": {
"@pnpm/lockfile.fs": "workspace:*",
"@pnpm/read-package-json": "workspace:*",
"@pnpm/read-project-manifest": "workspace:*",
"@pnpm/reviewing.dependencies-hierarchy": "workspace:*",
"@pnpm/text.tree-renderer": "workspace:*",
"@pnpm/types": "workspace:*",
"archy": "catalog:",
"@pnpm/util.lex-comparator": "catalog:",
"chalk": "catalog:",
"cli-columns": "catalog:",
"p-limit": "catalog:",
"ramda": "catalog:"
},
"devDependencies": {
"@pnpm/list": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@types/archy": "catalog:",
"@types/ramda": "catalog:"
},
"engines": {

View File

@@ -1,13 +1,15 @@
import path from 'path'
import { readCurrentLockfile, readWantedLockfile } from '@pnpm/lockfile.fs'
import { safeReadProjectManifestOnly } from '@pnpm/read-project-manifest'
import { type DependencyNode, buildDependenciesTree, type DependenciesTree, createPackagesSearcher, buildDependentsTree, type ImporterInfo } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependenciesField, type Registries, type Finder } from '@pnpm/types'
import { type PackageNode, buildDependenciesHierarchy, type DependenciesHierarchy, createPackagesSearcher } from '@pnpm/reviewing.dependencies-hierarchy'
import { renderJson } from './renderJson.js'
import { renderParseable } from './renderParseable.js'
import { renderTree } from './renderTree.js'
import { renderDependentsTree, renderDependentsJson, renderDependentsParseable } from './renderDependentsTree.js'
import { type PackageDependencyHierarchy } from './types.js'
export type { PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
export type { DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
export { renderJson, renderParseable, renderTree, type PackageDependencyHierarchy }
const DEFAULTS = {
@@ -38,7 +40,7 @@ export function flattenSearchedPackages (pkgs: PackageDependencyHierarchy[], opt
return flattedPkgs
function _walker (packages: PackageNode[], depPath: string): void {
function _walker (packages: DependencyNode[], depPath: string): void {
for (const pkg of packages) {
const nextDepPath = `${depPath} > ${pkg.name}@${pkg.version}`
if (pkg.dependencies?.length) {
@@ -72,7 +74,7 @@ export async function searchForPackages (
const search = createPackagesSearcher(packages, opts.finders)
return Promise.all(
Object.entries(await buildDependenciesHierarchy(projectPaths, {
Object.entries(await buildDependenciesTree(projectPaths, {
depth: opts.depth,
excludePeerDependencies: opts.excludePeerDependencies,
include: opts.include,
@@ -85,7 +87,7 @@ export async function searchForPackages (
modulesDir: opts.modulesDir,
virtualStoreDirMaxLength: opts.virtualStoreDirMaxLength,
}))
.map(async ([projectPath, buildDependenciesHierarchy]) => {
.map(async ([projectPath, buildDependenciesTree]) => {
const entryPkg = await safeReadProjectManifestOnly(projectPath) ?? {}
return {
name: entryPkg.name,
@@ -93,7 +95,7 @@ export async function searchForPackages (
private: entryPkg.private,
path: projectPath,
...buildDependenciesHierarchy,
...buildDependenciesTree,
} as PackageDependencyHierarchy
})
)
@@ -115,6 +117,7 @@ export async function listForPackages (
modulesDir?: string
virtualStoreDirMaxLength: number
finders?: Finder[]
showSummary?: boolean
}
): Promise<string> {
const opts = { ...DEFAULTS, ...maybeOpts }
@@ -128,6 +131,7 @@ export async function listForPackages (
long: opts.long,
search: Boolean(packages.length),
showExtraneous: opts.showExtraneous,
showSummary: opts.showSummary,
})
}
@@ -148,6 +152,7 @@ export async function list (
modulesDir?: string
virtualStoreDirMaxLength: number
finders?: Finder[]
showSummary?: boolean
}
): Promise<string> {
const opts = { ...DEFAULTS, ...maybeOpts }
@@ -158,8 +163,8 @@ export async function list (
? projectPaths.reduce((acc, projectPath) => {
acc[projectPath] = {}
return acc
}, {} as Record<string, DependenciesHierarchy>)
: await buildDependenciesHierarchy(projectPaths, {
}, {} as Record<string, DependenciesTree>)
: await buildDependenciesTree(projectPaths, {
depth: opts.depth,
excludePeerDependencies: maybeOpts?.excludePeerDependencies,
include: maybeOpts?.include,
@@ -191,6 +196,7 @@ export async function list (
long: opts.long,
search: false,
showExtraneous: opts.showExtraneous,
showSummary: opts.showSummary,
})
}
@@ -200,6 +206,7 @@ type Printer = (packages: PackageDependencyHierarchy[], opts: {
long: boolean
search: boolean
showExtraneous: boolean
showSummary?: boolean
}) => Promise<string>
function getPrinter (reportAs: 'parseable' | 'tree' | 'json'): Printer {
@@ -209,3 +216,58 @@ function getPrinter (reportAs: 'parseable' | 'tree' | 'json'): Printer {
case 'tree': return renderTree
}
}
export async function whyForPackages (
packages: string[],
projectPaths: string[],
opts: {
lockfileDir: string
checkWantedLockfileOnly?: boolean
include?: { [dependenciesField in DependenciesField]: boolean }
long?: boolean
registries?: Registries
reportAs?: 'parseable' | 'tree' | 'json'
modulesDir?: string
finders?: Finder[]
}
): Promise<string> {
const reportAs = opts.reportAs ?? 'tree'
const long = opts.long ?? false
const importerInfoMap = new Map<string, ImporterInfo>()
const modulesDir = opts.modulesDir ?? 'node_modules'
const lockfile = opts.checkWantedLockfileOnly
? await readWantedLockfile(opts.lockfileDir, { ignoreIncompatible: false })
: await readCurrentLockfile(path.join(opts.lockfileDir, modulesDir, '.pnpm'), { ignoreIncompatible: false })
?? await readWantedLockfile(opts.lockfileDir, { ignoreIncompatible: false })
if (!lockfile) return ''
const importerIds = Object.keys(lockfile.importers)
const manifests = await Promise.all(
importerIds.map((importerId) => safeReadProjectManifestOnly(path.join(opts.lockfileDir, importerId)))
)
for (let i = 0; i < importerIds.length; i++) {
const importerId = importerIds[i]
const manifest = manifests[i]
importerInfoMap.set(importerId, {
name: manifest?.name ?? (importerId === '.' ? 'the root project' : importerId),
version: manifest?.version ?? '',
})
}
const trees = await buildDependentsTree(packages, projectPaths, {
lockfileDir: opts.lockfileDir,
include: opts.include,
modulesDir: opts.modulesDir,
registries: opts.registries,
finders: opts.finders,
importerInfoMap,
lockfile,
})
switch (reportAs) {
case 'json': return renderDependentsJson(trees, { long })
case 'parseable': return renderDependentsParseable(trees, { long })
case 'tree': return renderDependentsTree(trees, { long })
}
}

View File

@@ -0,0 +1,50 @@
import chalk from 'chalk'
/**
 * Formats `name@version` for display. The name can be styled via the optional
 * `colorName` callback; the `@version` part (when present) is always gray.
 */
export function nameAtVersion (name: string, version: string, colorName?: (s: string) => string): string {
  const styled = colorName != null ? colorName(name) : name
  if (!version) return styled
  return styled + chalk.gray(`@${version}`)
}
/**
 * Returns the red ` peer#<hash> (<n> variations)` suffix for a package that
 * exists in several peer-dependency variants, or an empty string otherwise.
 */
export function peerHashSuffix (pkg: {
  name: string
  version: string
  peersSuffixHash?: string | undefined
}, multiPeerPkgs: Map<string, number>): string {
  // No hash on the package itself means no suffix.
  if (!pkg.peersSuffixHash) return ''
  // Only packages registered as having multiple variants get a suffix.
  const variantCount = multiPeerPkgs.get(`${pkg.name}@${pkg.version}`)
  if (variantCount == null) return ''
  const plural = variantCount === 1 ? '' : 's'
  return chalk.red(` peer#${pkg.peersSuffixHash} (${variantCount} variation${plural})`)
}

/** Dim marker appended to nodes whose subtree was already printed elsewhere. */
export const DEDUPED_LABEL = chalk.dim(' [deduped]')
/**
 * Records the package's peer hash into `hashesPerPkg`, keyed by
 * `name@version`. Packages without a peer hash are ignored.
 */
export function collectHashes (hashesPerPkg: Map<string, Set<string>>, pkg: {
  name: string
  version: string
  peersSuffixHash?: string | undefined
}): void {
  if (!pkg.peersSuffixHash) return
  const key = `${pkg.name}@${pkg.version}`
  const existing = hashesPerPkg.get(key)
  if (existing != null) {
    existing.add(pkg.peersSuffixHash)
  } else {
    hashesPerPkg.set(key, new Set([pkg.peersSuffixHash]))
  }
}
/**
 * Given a map of `name@version` → Set of distinct peer hashes,
 * returns only those entries with more than one variant, mapped to
 * their variant count.
 */
export function filterMultiPeerEntries (hashesPerPkg: Map<string, Set<string>>): Map<string, number> {
  const multi = new Map<string, number>()
  for (const [pkgKey, hashes] of hashesPerPkg.entries()) {
    if (hashes.size <= 1) continue
    multi.set(pkgKey, hashes.size)
  }
  return multi
}

View File

@@ -0,0 +1,156 @@
import { type DependentsTree, type DependentNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { renderTree as renderArchyTree, type TreeNode } from '@pnpm/text.tree-renderer'
import chalk from 'chalk'
import { collectHashes, DEDUPED_LABEL, filterMultiPeerEntries, nameAtVersion, peerHashSuffix } from './peerVariants.js'
import { getPkgInfo } from './getPkgInfo.js'
/**
 * Renders the reverse-dependency trees produced by `pnpm why` as text.
 * One tree is rendered per found package instance; trees are separated by a
 * blank line and followed by a dim summary of how many versions/instances
 * were found.
 */
export async function renderDependentsTree (trees: DependentsTree[], opts: { long: boolean }): Promise<string> {
  if (trees.length === 0) return ''
  // `name@version` -> number of distinct peer-dep variants; decides which
  // nodes get a peer-hash suffix appended to their label.
  const multiPeerPkgs = findMultiPeerPackages(trees)
  const output = (
    await Promise.all(trees.map(async (result) => {
      // Root label: bold `name@version`, plus a peer-hash suffix when the
      // package exists in several peer-dep variants.
      const rootLabelParts = [chalk.bold(nameAtVersion(result.name, result.version)) +
        peerHashSuffix(result, multiPeerPkgs)]
      if (result.searchMessage) {
        rootLabelParts.push(result.searchMessage)
      }
      // With --long, append manifest details (description, repository,
      // homepage) and the on-disk path, when the path is known.
      if (opts.long && result.path) {
        const pkg = await getPkgInfo({ name: result.name, version: result.version, path: result.path, alias: undefined })
        if (pkg.description) {
          rootLabelParts.push(pkg.description)
        }
        if (pkg.repository) {
          rootLabelParts.push(pkg.repository)
        }
        if (pkg.homepage) {
          rootLabelParts.push(pkg.homepage)
        }
        rootLabelParts.push(pkg.path)
      }
      const rootLabel = rootLabelParts.join('\n')
      // A package with no dependents renders as just its label.
      if (result.dependents.length === 0) {
        return rootLabel
      }
      const childNodes = dependentsToTreeNodes(result.dependents, multiPeerPkgs)
      const tree: TreeNode = { label: rootLabel, nodes: childNodes }
      // Dim the box-drawing connectors; strip trailing newlines so the
      // join below produces exactly one blank line between trees.
      return renderArchyTree(tree, { treeChars: chalk.dim }).replace(/\n+$/, '')
    }))
  ).join('\n\n')
  const summary = whySummary(trees)
  return summary ? `${output}\n\n${summary}` : output
}
/**
 * Builds the dim `Found N versions[, M instances] of <pkg>` summary lines,
 * one per distinct package name found across the trees.
 */
function whySummary (trees: DependentsTree[]): string {
  if (trees.length === 0) return ''
  // Per package name: distinct versions plus total tree count (several trees
  // of the same version mean several distinct instances).
  const byName = new Map<string, { versions: Set<string>, count: number }>()
  for (const tree of trees) {
    const entry = byName.get(tree.name) ?? { versions: new Set<string>(), count: 0 }
    entry.versions.add(tree.version)
    entry.count += 1
    byName.set(tree.name, entry)
  }
  const lines = [...byName.entries()].map(([name, info]) => {
    const versionCount = info.versions.size
    let stats = `${versionCount} version${versionCount === 1 ? '' : 's'}`
    // Only mention instances when they exceed the version count.
    if (info.count > versionCount) {
      stats += `, ${info.count} instances`
    }
    return `Found ${stats} of ${name}`
  })
  return chalk.dim(lines.join('\n'))
}
/**
 * Walks every tree (roots and all nested dependents) collecting peer hashes,
 * and returns the `name@version` entries that have more than one distinct
 * peer-dep variant, mapped to their variant count.
 */
function findMultiPeerPackages (trees: DependentsTree[]): Map<string, number> {
  const hashesPerPkg = new Map<string, Set<string>>()
  // Depth-first walk over the dependents of a node.
  const walk = (nodes: DependentNode[]): void => {
    for (const node of nodes) {
      collectHashes(hashesPerPkg, node)
      if (node.dependents) {
        walk(node.dependents)
      }
    }
  }
  for (const tree of trees) {
    collectHashes(hashesPerPkg, tree)
    walk(tree.dependents)
  }
  return filterMultiPeerEntries(hashesPerPkg)
}
/**
 * Converts dependant nodes into renderer tree nodes, recursively.
 *
 * Importers (nodes with a `depField`) are leaf workspace projects: rendered
 * bold with the dependency field in dim parentheses. Regular packages get a
 * peer-hash suffix when needed; `[circular]` and dedupe markers are appended
 * for either kind.
 */
function dependentsToTreeNodes (dependents: DependentNode[], multiPeerPkgs: Map<string, number>): TreeNode[] {
  return dependents.map((dep) => {
    const isImporter = dep.depField != null
    let label = isImporter
      ? `${chalk.bold(nameAtVersion(dep.name, dep.version))} ${chalk.dim(`(${dep.depField})`)}`
      : nameAtVersion(dep.name, dep.version) + peerHashSuffix(dep, multiPeerPkgs)
    if (dep.circular) {
      label += chalk.dim(' [circular]')
    }
    if (dep.deduped) {
      label += DEDUPED_LABEL
    }
    return {
      label,
      nodes: dep.dependents ? dependentsToTreeNodes(dep.dependents, multiPeerPkgs) : [],
    }
  })
}
/**
 * Serializes the dependents trees to pretty-printed JSON.
 *
 * With `opts.long`, each tree that has an on-disk path is enriched with the
 * description/repository/homepage read from its manifest. Fields that come
 * back `undefined` are dropped by `JSON.stringify`, so short and long output
 * share the same base shape.
 */
export async function renderDependentsJson (trees: DependentsTree[], opts: { long: boolean }): Promise<string> {
  const payload = opts.long
    ? await Promise.all(trees.map(async (result) => {
      if (!result.path) return result
      const pkg = await getPkgInfo({ name: result.name, version: result.version, path: result.path, alias: undefined })
      return {
        ...result,
        description: pkg.description,
        repository: pkg.repository,
        homepage: pkg.homepage,
      }
    }))
    : trees
  return JSON.stringify(payload, null, 2)
}
/**
 * Renders the dependents trees in the parseable (`--parseable`) format: one
 * line per root-to-importer chain, segments joined by ` > ` with the importer
 * first. With `opts.long`, the searched package's path is prefixed to its
 * segment as `path:name@version`.
 */
export function renderDependentsParseable (trees: DependentsTree[], opts: { long: boolean }): string {
  const lines: string[] = []
  for (const tree of trees) {
    let rootSegment = plainNameAtVersion(tree.name, tree.version)
    if (opts.long && tree.path) {
      rootSegment = `${tree.path}:${rootSegment}`
    }
    collectPaths(tree.dependents, [rootSegment], lines)
  }
  return lines.join('\n')
}
/**
 * Depth-first walk that accumulates root-to-leaf dependant chains.
 *
 * @param dependents - Nodes to walk at the current level.
 * @param currentPath - Segments accumulated from the searched package down.
 * @param lines - Output accumulator; one entry is pushed per leaf reached.
 */
function collectPaths (dependents: DependentNode[], currentPath: string[], lines: string[]): void {
  for (const dep of dependents) {
    const extendedPath = currentPath.concat(plainNameAtVersion(dep.name, dep.version))
    if (dep.dependents && dep.dependents.length > 0) {
      collectPaths(dep.dependents, extendedPath, lines)
    } else {
      // Leaf node (importer) — reverse so the importer comes first.
      lines.push(extendedPath.slice().reverse().join(' > '))
    }
  }
}
/** Formats `name@version`, or just `name` when the version is empty. */
function plainNameAtVersion (name: string, version: string): string {
  if (!version) {
    return name
  }
  return `${name}@${version}`
}

View File

@@ -1,10 +1,10 @@
import { DEPENDENCIES_FIELDS } from '@pnpm/types'
import { type PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { sortBy, path, type Ord } from 'ramda'
import { getPkgInfo, type PkgInfo } from './getPkgInfo.js'
import { type PackageDependencyHierarchy } from './types.js'
const sortPackages = sortBy(path(['pkg', 'alias']) as (pkg: PackageNode) => Ord)
const sortPackages = sortBy(path(['pkg', 'alias']) as (pkg: DependencyNode) => Ord)
type RenderJsonResultItem = Pick<PackageDependencyHierarchy, 'name' | 'version' | 'path'> &
Required<Pick<PackageDependencyHierarchy, 'private'>> &
@@ -51,7 +51,7 @@ export async function renderJson (
}
export async function toJsonResult (
entryNodes: PackageNode[],
entryNodes: DependencyNode[],
opts: {
long: boolean
}

View File

@@ -1,4 +1,4 @@
import { type PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { sortBy, prop } from 'ramda'
import { type PackageDependencyHierarchy } from './types.js'
@@ -56,7 +56,7 @@ function renderParseableForPackage (
return [
firstLine,
...pkgs.map((pkgNode) => {
const node = pkgNode as PackageNode
const node = pkgNode as DependencyNode
if (node.alias !== node.name) {
// Only add npm: prefix if version doesn't already contain @ (to avoid file:, link:, etc.)
if (!node.version.includes('@')) {
@@ -87,7 +87,7 @@ interface PackageInfo {
function flatten (
depPaths: Set<string>,
nodes: PackageNode[]
nodes: DependencyNode[]
): PackageInfo[] {
let packages: PackageInfo[] = []
for (const node of nodes) {

View File

@@ -1,16 +1,13 @@
import path from 'path'
import { type PackageNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependencyNode } from '@pnpm/reviewing.dependencies-hierarchy'
import { renderTree as renderArchyTree, type TreeNode, type TreeNodeGroup } from '@pnpm/text.tree-renderer'
import { DEPENDENCIES_FIELDS, type DependenciesField } from '@pnpm/types'
import archy from 'archy'
import { lexCompare } from '@pnpm/util.lex-comparator'
import chalk from 'chalk'
import cliColumns from 'cli-columns'
import { sortBy, path as ramdaPath } from 'ramda'
import { type Ord } from 'ramda'
import { getPkgInfo } from './getPkgInfo.js'
import { collectHashes, DEDUPED_LABEL, filterMultiPeerEntries, nameAtVersion, peerHashSuffix } from './peerVariants.js'
import { type PackageDependencyHierarchy } from './types.js'
const sortPackages = sortBy(ramdaPath(['name']) as (pkg: PackageNode) => Ord)
const DEV_DEP_ONLY_CLR = chalk.yellow
const PROD_DEP_CLR = (s: string) => s // just use the default color
const OPTIONAL_DEP_CLR = chalk.blue
@@ -24,23 +21,28 @@ export interface RenderTreeOptions {
long: boolean
search: boolean
showExtraneous: boolean
showSummary?: boolean
}
export async function renderTree (
packages: PackageDependencyHierarchy[],
opts: RenderTreeOptions
): Promise<string> {
const multiPeerPkgs = findMultiPeerPackages(packages)
const output = (
await Promise.all(packages.map(async (pkg) => renderTreeForPackage(pkg, opts)))
await Promise.all(packages.map(async (pkg) => renderTreeForPackage(pkg, opts, multiPeerPkgs)))
)
.filter(Boolean)
.join('\n\n')
return `${(opts.depth > -1 && output ? LEGEND : '')}${output}`
const legend = opts.depth > -1 && output ? LEGEND : ''
const summary = opts.showSummary && opts.depth > -1 && output ? `\n\n${listSummary(packages)}` : ''
return `${legend}${output}${summary}`
}
async function renderTreeForPackage (
pkg: PackageDependencyHierarchy,
opts: RenderTreeOptions
opts: RenderTreeOptions,
multiPeerPkgs: Map<string, number>
): Promise<string> {
if (
!opts.alwaysPrintRootPackage &&
@@ -52,69 +54,65 @@ async function renderTreeForPackage (
let label = ''
if (pkg.name) {
label += pkg.name
if (pkg.version) {
label += `@${pkg.version}`
}
label += nameAtVersion(pkg.name, pkg.version ?? '')
label += ' '
}
label += pkg.path
label += chalk.dim(pkg.path)
if (pkg.private) {
label += ' (PRIVATE)'
label += chalk.dim(' (PRIVATE)')
}
const useColumns = opts.depth === 0 && !opts.long && !opts.search
const dependenciesFields: Array<DependenciesField | 'unsavedDependencies'> = [
...DEPENDENCIES_FIELDS.sort(),
]
if (opts.showExtraneous) {
dependenciesFields.push('unsavedDependencies')
}
const output = (await Promise.all(
const childNodes: TreeNodeGroup[] = (await Promise.all(
dependenciesFields.map(async (dependenciesField) => {
if (pkg[dependenciesField]?.length) {
const depsLabel = chalk.cyanBright(
dependenciesField !== 'unsavedDependencies'
? `${dependenciesField}:`
: 'not saved (you should add these dependencies to package.json if you need them):'
)
let output = `${depsLabel}\n`
const gPkgColor = dependenciesField === 'unsavedDependencies' ? () => NOT_SAVED_DEP_CLR : getPkgColor
if (useColumns && pkg[dependenciesField]!.length > 10) {
output += cliColumns(pkg[dependenciesField]!.map(printLabel.bind(printLabel, gPkgColor))) + '\n'
return output
}
const data = await toArchyTree(gPkgColor, pkg[dependenciesField]!, {
long: opts.long,
modules: path.join(pkg.path, 'node_modules'),
})
for (const d of data) {
output += archy(d)
}
return output
}
return null
}))).filter(Boolean).join('\n')
if (!pkg[dependenciesField]?.length) return null
const depsLabel = chalk.cyanBright(
dependenciesField !== 'unsavedDependencies'
? `${dependenciesField}:`
: 'not saved (you should add these dependencies to package.json if you need them):'
)
const gPkgColor = dependenciesField === 'unsavedDependencies' ? () => NOT_SAVED_DEP_CLR : getPkgColor
const depNodes = await toArchyTree(gPkgColor, pkg[dependenciesField]!, {
long: opts.long,
modules: path.join(pkg.path, 'node_modules'),
multiPeerPkgs,
})
return { group: depsLabel, nodes: depNodes } as TreeNodeGroup
})
)).filter((n): n is TreeNodeGroup => n != null)
// eslint-disable-next-line regexp/no-unused-capturing-group
return `${chalk.bold.underline(label)}\n\n${output}`.replace(/(\n)+$/, '')
const rootLabel = chalk.bold(label)
if (childNodes.length === 0) {
return rootLabel
}
const tree: TreeNode = { label: rootLabel, nodes: childNodes }
return renderArchyTree(tree, { treeChars: chalk.dim }).trimEnd()
}
type GetPkgColor = (node: PackageNode) => (s: string) => string
type GetPkgColor = (node: DependencyNode) => (s: string) => string
export async function toArchyTree (
getPkgColor: GetPkgColor,
entryNodes: PackageNode[],
entryNodes: DependencyNode[],
opts: {
long: boolean
modules: string
multiPeerPkgs?: Map<string, number>
}
): Promise<archy.Data[]> {
): Promise<TreeNode[]> {
const sorted = [...entryNodes].sort((a, b) => lexCompare(a.name, b.name))
return Promise.all(
sortPackages(entryNodes).map(async (node) => {
const nodes = await toArchyTree(getPkgColor, node.dependencies ?? [], opts)
sorted.map(async (node) => {
const nodes: TreeNode[] = node.deduped
? []
: await toArchyTree(getPkgColor, node.dependencies ?? [], opts)
const labelLines: string[] = [
printLabel(getPkgColor, node),
printLabel(getPkgColor, opts.multiPeerPkgs, node),
]
if (node.searchMessage) {
labelLines.push(node.searchMessage)
@@ -142,19 +140,19 @@ export async function toArchyTree (
)
}
function printLabel (getPkgColor: GetPkgColor, node: PackageNode): string {
function printLabel (getPkgColor: GetPkgColor, multiPeerPkgs: Map<string, number> | undefined, node: DependencyNode): string {
const color = getPkgColor(node)
let txt: string
if (node.alias !== node.name) {
// When using npm: protocol alias, display as "alias npm:name@version"
// When using npm: protocol alias, display as "alias@npm:name@version"
// Only add npm: prefix if version doesn't already contain @ (to avoid file:, link:, etc.)
if (!node.version.includes('@')) {
txt = `${color(node.alias)} ${chalk.gray(`npm:${node.name}@${node.version}`)}`
txt = `${color(node.alias)}${chalk.gray(`@npm:${node.name}@${node.version}`)}`
} else {
txt = `${color(node.alias)} ${chalk.gray(node.version)}`
txt = `${color(node.alias)}${chalk.gray(`@${node.version}`)}`
}
} else {
txt = `${color(node.name)} ${chalk.gray(node.version)}`
txt = nameAtVersion(node.name, node.version, color)
}
if (node.isPeer) {
txt += ' peer'
@@ -162,17 +160,71 @@ function printLabel (getPkgColor: GetPkgColor, node: PackageNode): string {
if (node.isSkipped) {
txt += ' skipped'
}
if (multiPeerPkgs) {
txt += peerHashSuffix(node, multiPeerPkgs)
}
if (node.deduped) {
txt += chalk.dim(' deduped')
if (node.dedupedDependenciesCount) {
txt += chalk.dim(` (${node.dedupedDependenciesCount} dep${node.dedupedDependenciesCount === 1 ? '' : 's'} hidden)`)
}
txt += DEDUPED_LABEL
}
return node.searched ? chalk.bold(txt) : txt
}
function getPkgColor (node: PackageNode): (text: string) => string {
function getPkgColor (node: DependencyNode): (text: string) => string {
if (node.dev === true) return DEV_DEP_ONLY_CLR
if (node.optional) return OPTIONAL_DEP_CLR
return PROD_DEP_CLR
}
/**
* Walks all package trees and returns the set of `name@version` strings
* that appear with more than one distinct `peersSuffixHash`.
*/
function findMultiPeerPackages (packages: PackageDependencyHierarchy[]): Map<string, number> {
const hashesPerPkg = new Map<string, Set<string>>()
function walk (nodes: DependencyNode[]): void {
for (const node of nodes) {
collectHashes(hashesPerPkg, node)
if (node.dependencies) {
walk(node.dependencies)
}
}
}
for (const pkg of packages) {
for (const field of DEPENDENCIES_FIELDS) {
if (pkg[field]) {
walk(pkg[field])
}
}
}
return filterMultiPeerEntries(hashesPerPkg)
}
function listSummary (packages: PackageDependencyHierarchy[]): string {
let total = 0
function walk (nodes: DependencyNode[]): void {
for (const node of nodes) {
total++
if (node.dependencies) {
walk(node.dependencies)
}
}
}
for (const pkg of packages) {
for (const field of DEPENDENCIES_FIELDS) {
if (pkg[field]) {
walk(pkg[field])
}
}
}
const parts: string[] = [`${total} package${total === 1 ? '' : 's'}`]
if (packages.length > 1) {
parts.push(`${packages.length} projects`)
}
return chalk.dim(parts.join(' in '))
}

View File

@@ -1,6 +1,6 @@
import { type DependenciesHierarchy } from '@pnpm/reviewing.dependencies-hierarchy'
import { type DependenciesTree } from '@pnpm/reviewing.dependencies-hierarchy'
export interface PackageDependencyHierarchy extends DependenciesHierarchy {
export interface PackageDependencyHierarchy extends DependenciesTree {
name?: string
version?: string
path: string

View File

@@ -3,7 +3,6 @@ import path from 'path'
import { list, listForPackages } from '@pnpm/list'
import { fixtures } from '@pnpm/test-fixtures'
import chalk from 'chalk'
import cliColumns from 'cli-columns'
import { renderTree } from '../lib/renderTree.js'
import { renderParseable } from '../lib/renderParseable.js'
@@ -40,9 +39,9 @@ test('list all deps of a package that has an external lockfile', async () => {
})).toBe(`${LEGEND}
${boldHighlighted(`pkg@1.0.0 ${fixtureWithExternalLockfile}`)}
${DEPENDENCIES}
is-positive ${VERSION_CLR('1.0.0')}`)
${DEPENDENCIES}
└── is-positive${VERSION_CLR('@1.0.0')}`)
})
test('print legend only once', async () => {
@@ -55,14 +54,14 @@ test('print legend only once', async () => {
})).toBe(`${LEGEND}
${boldHighlighted(`bar@0.0.0 ${path.join(workspaceWith2Pkgs, 'packages/bar')}`)}
${DEPENDENCIES}
is-positive ${VERSION_CLR('1.0.0')}
${DEPENDENCIES}
└── is-positive${VERSION_CLR('@1.0.0')}
${boldHighlighted(`foo@0.0.0 ${path.join(workspaceWith2Pkgs, 'packages/foo')}`)}
${DEPENDENCIES}
is-positive ${VERSION_CLR('1.0.0')}`)
${DEPENDENCIES}
└── is-positive${VERSION_CLR('@1.0.0')}`)
})
test('list in workspace with private package', async () => {
@@ -75,59 +74,59 @@ test('list in workspace with private package', async () => {
})).toBe(`${LEGEND}
${boldHighlighted(`private@1.0.0 ${path.join(workspaceWithPrivatePkgs, 'packages/private')} (PRIVATE)`)}
${DEPENDENCIES}
is-positive ${VERSION_CLR('1.0.0')}
${DEPENDENCIES}
└── is-positive${VERSION_CLR('@1.0.0')}
${boldHighlighted(`public@1.0.0 ${path.join(workspaceWithPrivatePkgs, 'packages/public')}`)}
${DEPENDENCIES}
is-positive ${VERSION_CLR('1.0.0')}`)
${DEPENDENCIES}
└── is-positive${VERSION_CLR('@1.0.0')}`)
})
test('list with default parameters', async () => {
expect(await list([fixture], { lockfileDir: fixture, virtualStoreDirMaxLength: 120 })).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}
${OPTIONAL_DEPENDENCIES}
${OPTIONAL_DEP_CLR('is-negative')} ${VERSION_CLR('2.1.0')}`)
${DEPENDENCIES}
├── write-json-file${VERSION_CLR('@2.3.0')}
${DEV_DEPENDENCIES}
├── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}
${OPTIONAL_DEPENDENCIES}
└── ${OPTIONAL_DEP_CLR('is-negative')}${VERSION_CLR('@2.1.0')}`)
})
test('list with default parameters in pkg that has no name and version', async () => {
expect(await list([fixtureWithNoPkgNameAndNoVersion], { lockfileDir: fixtureWithNoPkgNameAndNoVersion, virtualStoreDirMaxLength: 120 })).toBe(`${LEGEND}
${boldHighlighted(fixtureWithNoPkgNameAndNoVersion)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}
${OPTIONAL_DEPENDENCIES}
${OPTIONAL_DEP_CLR('is-negative')} ${VERSION_CLR('2.1.0')}`)
${DEPENDENCIES}
├── write-json-file${VERSION_CLR('@2.3.0')}
${DEV_DEPENDENCIES}
├── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}
${OPTIONAL_DEPENDENCIES}
└── ${OPTIONAL_DEP_CLR('is-negative')}${VERSION_CLR('@2.1.0')}`)
})
test('list with default parameters in pkg that has no version', async () => {
expect(await list([fixtureWithNoPkgVersion], { lockfileDir: fixtureWithNoPkgVersion, virtualStoreDirMaxLength: 120 })).toBe(`${LEGEND}
${boldHighlighted(`fixture ${fixtureWithNoPkgVersion}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}
${OPTIONAL_DEPENDENCIES}
${OPTIONAL_DEP_CLR('is-negative')} ${VERSION_CLR('2.1.0')}`)
${DEPENDENCIES}
├── write-json-file${VERSION_CLR('@2.3.0')}
${DEV_DEPENDENCIES}
├── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}
${OPTIONAL_DEPENDENCIES}
└── ${OPTIONAL_DEP_CLR('is-negative')}${VERSION_CLR('@2.1.0')}`)
})
test('list dev only', async () => {
@@ -140,9 +139,9 @@ test('list dev only', async () => {
).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}`
${DEV_DEPENDENCIES}
└── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}`
)
})
@@ -156,9 +155,9 @@ test('list prod only', async () => {
).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}`
${DEPENDENCIES}
└── write-json-file${VERSION_CLR('@2.3.0')}`
)
})
@@ -173,20 +172,20 @@ test('list prod only with depth 2', async () => {
).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
├── detect-indent ${VERSION_CLR('5.0.0')}
├── graceful-fs ${VERSION_CLR('4.2.2')}
├─┬ make-dir ${VERSION_CLR('1.3.0')}
│ └── pify ${VERSION_CLR('3.0.0')}
├── pify ${VERSION_CLR('3.0.0')}
├─┬ sort-keys ${VERSION_CLR('2.0.0')}
│ └── is-plain-obj ${VERSION_CLR('1.1.0')}
└─┬ write-file-atomic ${VERSION_CLR('2.4.3')}
├── graceful-fs ${VERSION_CLR('4.2.2')}
├── imurmurhash ${VERSION_CLR('0.1.4')}
└── signal-exit ${VERSION_CLR('3.0.2')}`
${DEPENDENCIES}
└─┬ write-json-file${VERSION_CLR('@2.3.0')}
├── detect-indent${VERSION_CLR('@5.0.0')}
├── graceful-fs${VERSION_CLR('@4.2.2')}
├─┬ make-dir${VERSION_CLR('@1.3.0')}
│ └── pify${VERSION_CLR('@3.0.0')}
├── pify${VERSION_CLR('@3.0.0')}
├─┬ sort-keys${VERSION_CLR('@2.0.0')}
│ └── is-plain-obj${VERSION_CLR('@1.1.0')}
└─┬ write-file-atomic${VERSION_CLR('@2.4.3')}
├── graceful-fs${VERSION_CLR('@4.2.2')}
├── imurmurhash${VERSION_CLR('@0.1.4')}
└── signal-exit${VERSION_CLR('@3.0.2')}`
)
})
@@ -194,21 +193,21 @@ test('list with depth 1', async () => {
expect(await list([fixture], { depth: 1, lockfileDir: fixture, virtualStoreDirMaxLength: 120 })).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
├── detect-indent ${VERSION_CLR('5.0.0')}
├── graceful-fs ${VERSION_CLR('4.2.2')}
├── make-dir ${VERSION_CLR('1.3.0')}
├── pify ${VERSION_CLR('3.0.0')}
├── sort-keys ${VERSION_CLR('2.0.0')}
└── write-file-atomic ${VERSION_CLR('2.4.3')}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}
${OPTIONAL_DEPENDENCIES}
${OPTIONAL_DEP_CLR('is-negative')} ${VERSION_CLR('2.1.0')}`)
${DEPENDENCIES}
├─┬ write-json-file${VERSION_CLR('@2.3.0')}
├── detect-indent${VERSION_CLR('@5.0.0')}
├── graceful-fs${VERSION_CLR('@4.2.2')}
├── make-dir${VERSION_CLR('@1.3.0')}
├── pify${VERSION_CLR('@3.0.0')}
├── sort-keys${VERSION_CLR('@2.0.0')}
└── write-file-atomic${VERSION_CLR('@2.4.3')}
${DEV_DEPENDENCIES}
├── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}
${OPTIONAL_DEPENDENCIES}
└── ${OPTIONAL_DEP_CLR('is-negative')}${VERSION_CLR('@2.1.0')}`)
})
test('list with depth -1', async () => {
@@ -221,14 +220,14 @@ test('list with depth 1 and selected packages', async () => {
).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
├── ${highlighted('make-dir ' + VERSION_CLR('1.3.0'))}
└── ${highlighted('sort-keys ' + VERSION_CLR('2.0.0'))}
${OPTIONAL_DEPENDENCIES}
${highlighted(OPTIONAL_DEP_CLR('is-negative') + ' ' + VERSION_CLR('2.1.0'))}`
${DEPENDENCIES}
├─┬ write-json-file${VERSION_CLR('@2.3.0')}
├── ${highlighted('make-dir' + VERSION_CLR('@1.3.0'))}
└── ${highlighted('sort-keys' + VERSION_CLR('@2.0.0'))}
${OPTIONAL_DEPENDENCIES}
└── ${highlighted(OPTIONAL_DEP_CLR('is-negative') + VERSION_CLR('@2.1.0'))}`
)
})
@@ -236,27 +235,27 @@ test('list in long format', async () => {
expect(await list([fixture], { long: true, lockfileDir: fixture, virtualStoreDirMaxLength: 0 })).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
write-json-file ${VERSION_CLR('2.3.0')}
Stringify and write JSON to a file atomically
git+https://github.com/sindresorhus/write-json-file.git
https://github.com/sindresorhus/write-json-file#readme
${path.join(fixture, 'node_modules/.pnpm/write-json-file@2.3.0/node_modules/write-json-file')}
${DEV_DEPENDENCIES}
${DEV_DEP_ONLY_CLR('is-positive')} ${VERSION_CLR('3.1.0')}
Check if something is a positive number
git+https://github.com/kevva/is-positive.git
https://github.com/kevva/is-positive#readme
${path.join(fixture, 'node_modules/.pnpm/is-positive@3.1.0/node_modules/is-positive')}
${OPTIONAL_DEPENDENCIES}
${OPTIONAL_DEP_CLR('is-negative')} ${VERSION_CLR('2.1.0')}
Check if something is a negative number
git+https://github.com/kevva/is-negative.git
https://github.com/kevva/is-negative#readme
${path.join(fixture, 'node_modules/.pnpm/is-negative@2.1.0/node_modules/is-negative')}`)
${DEPENDENCIES}
├── write-json-file${VERSION_CLR('@2.3.0')}
Stringify and write JSON to a file atomically
git+https://github.com/sindresorhus/write-json-file.git
https://github.com/sindresorhus/write-json-file#readme
${path.join(fixture, 'node_modules/.pnpm/write-json-file@2.3.0/node_modules/write-json-file')}
${DEV_DEPENDENCIES}
├── ${DEV_DEP_ONLY_CLR('is-positive')}${VERSION_CLR('@3.1.0')}
Check if something is a positive number
git+https://github.com/kevva/is-positive.git
https://github.com/kevva/is-positive#readme
${path.join(fixture, 'node_modules/.pnpm/is-positive@3.1.0/node_modules/is-positive')}
${OPTIONAL_DEPENDENCIES}
└── ${OPTIONAL_DEP_CLR('is-negative')}${VERSION_CLR('@2.1.0')}
Check if something is a negative number
git+https://github.com/kevva/is-negative.git
https://github.com/kevva/is-negative#readme
${path.join(fixture, 'node_modules/.pnpm/is-negative@2.1.0/node_modules/is-negative')}`)
})
test('parseable list in workspace with private package', async () => {
@@ -591,12 +590,12 @@ test('unsaved dependencies are marked', async () => {
)).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${UNSAVED_DEPENDENCIES}
${NOT_SAVED_DEP_CLR('foo')} ${VERSION_CLR('1.0.0')}`)
${UNSAVED_DEPENDENCIES}
└── ${NOT_SAVED_DEP_CLR('foo')}${VERSION_CLR('@1.0.0')}`)
})
test('write long lists in columns', async () => {
test('list with many dependencies', async () => {
expect(await renderTree(
[
{
@@ -717,21 +716,19 @@ test('write long lists in columns', async () => {
)).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}` + '\n' +
cliColumns([
`a ${VERSION_CLR('1.0.0')}`,
`b ${VERSION_CLR('1.0.0')}`,
`c ${VERSION_CLR('1.0.0')}`,
`d ${VERSION_CLR('1.0.0')}`,
`e ${VERSION_CLR('1.0.0')}`,
`f ${VERSION_CLR('1.0.0')}`,
`g ${VERSION_CLR('1.0.0')}`,
`h ${VERSION_CLR('1.0.0')}`,
`i ${VERSION_CLR('1.0.0')}`,
`k ${VERSION_CLR('1.0.0')}`,
`l ${VERSION_CLR('1.0.0')}`,
]))
${DEPENDENCIES}
├── a${VERSION_CLR('@1.0.0')}
├── b${VERSION_CLR('@1.0.0')}
├── c${VERSION_CLR('@1.0.0')}
├── d${VERSION_CLR('@1.0.0')}
├── e${VERSION_CLR('@1.0.0')}
├── f${VERSION_CLR('@1.0.0')}
├── g${VERSION_CLR('@1.0.0')}
├── h${VERSION_CLR('@1.0.0')}
├── i${VERSION_CLR('@1.0.0')}
├── k${VERSION_CLR('@1.0.0')}
└── l${VERSION_CLR('@1.0.0')}`)
})
test('sort list items', async () => {
@@ -786,11 +783,11 @@ test('sort list items', async () => {
)).toBe(`${LEGEND}
${boldHighlighted(`fixture@1.0.0 ${fixture}`)}
${DEPENDENCIES}
foo ${VERSION_CLR('1.0.0')}
├── bar ${VERSION_CLR('1.0.0')}
└── qar ${VERSION_CLR('1.0.0')}`)
${DEPENDENCIES}
└─┬ foo${VERSION_CLR('@1.0.0')}
├── bar${VERSION_CLR('@1.0.0')}
└── qar${VERSION_CLR('@1.0.0')}`)
})
test('peer dependencies are marked', async () => {
@@ -799,15 +796,15 @@ test('peer dependencies are marked', async () => {
expect(output).toBe(`${LEGEND}
${boldHighlighted(`with-peer@1.0.0 ${fixture}`)}
${DEPENDENCIES}
ajv ${VERSION_CLR('6.10.2')}
├── fast-deep-equal ${VERSION_CLR('2.0.1')}
├── fast-json-stable-stringify ${VERSION_CLR('2.0.0')}
├── json-schema-traverse ${VERSION_CLR('0.4.1')}
└── uri-js ${VERSION_CLR('4.2.2')}
ajv-keywords ${VERSION_CLR('3.4.1')}
└── ajv ${VERSION_CLR('6.10.2')} peer`)
${DEPENDENCIES}
├─┬ ajv${VERSION_CLR('@6.10.2')}
├── fast-deep-equal${VERSION_CLR('@2.0.1')}
├── fast-json-stable-stringify${VERSION_CLR('@2.0.0')}
├── json-schema-traverse${VERSION_CLR('@0.4.1')}
└── uri-js${VERSION_CLR('@4.2.2')}
└─┬ ajv-keywords${VERSION_CLR('@3.4.1')}
└── ajv${VERSION_CLR('@6.10.2')} peer`)
})
test('peer dependencies are marked when searching', async () => {
@@ -816,11 +813,11 @@ test('peer dependencies are marked when searching', async () => {
expect(output).toBe(`${LEGEND}
${boldHighlighted(`with-peer@1.0.0 ${fixture}`)}
${DEPENDENCIES}
${highlighted(`ajv ${VERSION_CLR('6.10.2')}`)}
ajv-keywords ${VERSION_CLR('3.4.1')}
└── ${highlighted(`ajv ${VERSION_CLR('6.10.2')} peer`)}`)
${DEPENDENCIES}
├── ${highlighted(`ajv${VERSION_CLR('@6.10.2')}`)}
└─┬ ajv-keywords${VERSION_CLR('@3.4.1')}
└── ${highlighted(`ajv${VERSION_CLR('@6.10.2')} peer`)}`)
})
test('--only-projects shows only projects', async () => {
@@ -833,11 +830,11 @@ test('--only-projects shows only projects', async () => {
expect(output).toBe(`${LEGEND}
${boldHighlighted(`root@1.0.0 ${fixture}`)}
${DEPENDENCIES}
@scope/a ${VERSION_CLR('link:packages/a')}
└─┬ @scope/b ${VERSION_CLR('link:packages/b')}
└── @scope/c ${VERSION_CLR('link:packages/c')}`)
${DEPENDENCIES}
└─┬ @scope/a${VERSION_CLR('@link:packages/a')}
└─┬ @scope/b${VERSION_CLR('@link:packages/b')}
└── @scope/c${VERSION_CLR('@link:packages/c')}`)
})
test('renderTree displays npm: protocol for aliased packages', async () => {

View File

@@ -0,0 +1,199 @@
import { stripVTControlCharacters as stripAnsi } from 'util'
import { renderDependentsTree, renderDependentsJson, renderDependentsParseable } from '../lib/renderDependentsTree.js'
import { type DependentsTree } from '@pnpm/reviewing.dependencies-hierarchy'
describe('renderDependentsTree', () => {
test('renders searchMessage below the root label', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
searchMessage: 'Matched by custom finder',
dependents: [
{ name: 'my-project', version: '0.0.0', depField: 'dependencies' },
],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
const lines = output.split('\n')
// Root label should be the package name@version
expect(lines[0]).toContain('foo@1.0.0')
// Search message should appear on a subsequent line
expect(lines.some(l => l.includes('Matched by custom finder'))).toBe(true)
// Dependent should still be rendered
expect(lines.some(l => l.includes('my-project@0.0.0'))).toBe(true)
})
test('does not render extra line when searchMessage is undefined', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
dependents: [
{ name: 'my-project', version: '0.0.0', depField: 'dependencies' },
],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
const lines = output.split('\n')
expect(lines[0]).toBe('foo@1.0.0')
// Second line should be part of the tree, not a message
expect(lines[1]).not.toBe('')
expect(lines[1]).toContain('my-project')
})
test('renders package with no dependents and a searchMessage', async () => {
const results: DependentsTree[] = [
{
name: 'bar',
version: '2.0.0',
searchMessage: 'Found via license check',
dependents: [],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
const lines = output.split('\n')
expect(lines[0]).toBe('bar@2.0.0')
expect(lines[1]).toBe('Found via license check')
})
})
describe('whySummary', () => {
test('single package, single version', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
expect(output).toContain('Found 1 version of foo')
expect(output).not.toContain('instances')
})
test('single package, multiple versions', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
{
name: 'foo',
version: '2.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
expect(output).toContain('Found 2 versions of foo')
expect(output).not.toContain('instances')
})
test('single package, same version with multiple peer variants shows instance count', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
peersSuffixHash: 'aaaa',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
{
name: 'foo',
version: '1.0.0',
peersSuffixHash: 'bbbb',
dependents: [{ name: 'other', version: '0.0.0', depField: 'dependencies' }],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
expect(output).toContain('Found 1 version, 2 instances of foo')
})
test('multiple different packages each get their own summary line', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
{
name: 'bar',
version: '2.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
{
name: 'bar',
version: '3.0.0',
dependents: [{ name: 'my-project', version: '0.0.0', depField: 'dependencies' }],
},
]
const output = stripAnsi(await renderDependentsTree(results, { long: false }))
expect(output).toContain('Found 1 version of foo')
expect(output).toContain('Found 2 versions of bar')
})
test('empty results produce no summary', async () => {
const output = await renderDependentsTree([], { long: false })
expect(output).toBe('')
})
})
describe('renderDependentsJson', () => {
test('includes searchMessage in JSON output', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
searchMessage: 'Matched by custom finder',
dependents: [
{ name: 'my-project', version: '0.0.0', depField: 'dependencies' },
],
},
]
const parsed = JSON.parse(await renderDependentsJson(results, { long: false }))
expect(parsed).toHaveLength(1)
expect(parsed[0].searchMessage).toBe('Matched by custom finder')
})
test('does not include searchMessage when undefined', async () => {
const results: DependentsTree[] = [
{
name: 'foo',
version: '1.0.0',
dependents: [],
},
]
const parsed = JSON.parse(await renderDependentsJson(results, { long: false }))
expect(parsed[0].searchMessage).toBeUndefined()
})
})
describe('renderDependentsParseable', () => {
test('renders parseable output with searchMessage result', () => {
const results: DependentsTree[] = [
{
name: 'dep-a',
version: '1.0.0',
searchMessage: 'Found via custom check',
dependents: [
{ name: 'my-project', version: '0.0.0', depField: 'dependencies' },
],
},
]
const output = renderDependentsParseable(results, { long: false })
const lines = output.split('\n')
// Parseable output should still contain the path
expect(lines).toHaveLength(1)
expect(lines[0]).toContain('dep-a@1.0.0')
expect(lines[0]).toContain('my-project@0.0.0')
})
})

View File

@@ -12,6 +12,9 @@
{
"path": "../../__utils__/test-fixtures"
},
{
"path": "../../lockfile/fs"
},
{
"path": "../../packages/types"
},
@@ -21,6 +24,9 @@
{
"path": "../../pkg-manifest/read-project-manifest"
},
{
"path": "../../text/tree-renderer"
},
{
"path": "../dependencies-hierarchy"
}

View File

@@ -0,0 +1,87 @@
import { PnpmError } from '@pnpm/error'
import { type Finder, type IncludedDependencies } from '@pnpm/types'
export type ReportAs = 'parseable' | 'json' | 'tree'
/**
 * Translates the `--production` / `--dev` / `--optional` CLI flags into the
 * `IncludedDependencies` structure consumed by the list/why implementations.
 *
 * Each dependency field stays included unless its flag is explicitly `false`
 * (pnpm's config layer normalizes `--prod` / `--no-optional` into these flags).
 */
export function computeInclude (opts: { production?: boolean; dev?: boolean; optional?: boolean }): IncludedDependencies {
  const dependencies = opts.production !== false
  const devDependencies = opts.dev !== false
  const optionalDependencies = opts.optional !== false
  return { dependencies, devDependencies, optionalDependencies }
}
/**
 * Resolves the finder names passed via `--find-by` against the configured
 * finder functions.
 *
 * @throws {PnpmError} FINDER_NOT_FOUND when a requested name has no
 *   corresponding entry in `opts.finders`.
 */
export function resolveFinders (opts: { findBy?: string[]; finders?: Record<string, Finder> }): Finder[] {
  if (!opts.findBy) return []
  return opts.findBy.map((finderName) => {
    const finder = opts.finders?.[finderName]
    if (finder == null) {
      throw new PnpmError('FINDER_NOT_FOUND', `No finder with name ${finderName} is found`)
    }
    return finder
  })
}
/**
 * Picks the output format from the CLI flags.
 * `--parseable` takes precedence over `--json`; the default is the tree view.
 */
export function determineReportAs (opts: { parseable?: boolean; json?: boolean }): ReportAs {
  if (opts.parseable) return 'parseable'
  if (opts.json) return 'json'
  return 'tree'
}
// Shorthand CLI flags shared by `pnpm list` and `pnpm why`:
// `-D` expands to `--dev`, `-P` expands to `--production`.
export const shorthands: Record<string, string> = {
  D: '--dev',
  P: '--production',
}
// RC option keys accepted by both `pnpm list` and `pnpm why`.
// Command modules `pick` these out of the full pnpm config types and may add
// command-specific keys on top (e.g. `depth` and `lockfile-only` for `list`).
export const BASE_RC_OPTION_KEYS = [
  'dev',
  'global-dir',
  'global',
  'json',
  'long',
  'only',
  'optional',
  'parseable',
  'production',
] as const
// Help-screen entries common to `pnpm list` and `pnpm why`.
// Command modules spread these into their own `render-help` option lists.
export const SHARED_CLI_HELP_OPTIONS = [
  {
    description: 'Perform command on every package in subdirectories \
or on every workspace package, when executed inside a workspace. \
For options that may be used with `-r`, see "pnpm help recursive"',
    name: '--recursive',
    shortAlias: '-r',
  },
  {
    description: 'Show extended information',
    name: '--long',
  },
  {
    description: 'Show parseable output instead of tree view',
    name: '--parseable',
  },
  {
    description: 'Show information in JSON format',
    name: '--json',
  },
  {
    description: 'List packages in the global install prefix instead of in the current project',
    name: '--global',
    shortAlias: '-g',
  },
  {
    description: 'Display only the dependency graph for packages in `dependencies` and `optionalDependencies`',
    name: '--prod',
    shortAlias: '-P',
  },
  {
    description: 'Display only the dependency graph for packages in `devDependencies`',
    name: '--dev',
    shortAlias: '-D',
  },
  {
    description: "Don't display packages from `optionalDependencies`",
    name: '--no-optional',
  },
]

View File

@@ -1,4 +1,3 @@
import { PnpmError } from '@pnpm/error'
import { docsUrl } from '@pnpm/cli-utils'
import { FILTERING, OPTIONS, UNIVERSAL_OPTIONS } from '@pnpm/common-cli-options-help'
import { type Config, types as allTypes } from '@pnpm/config'
@@ -6,6 +5,7 @@ import { list, listForPackages } from '@pnpm/list'
import { type Finder, type IncludedDependencies } from '@pnpm/types'
import { pick } from 'ramda'
import renderHelp from 'render-help'
import { computeInclude, resolveFinders, determineReportAs, SHARED_CLI_HELP_OPTIONS, BASE_RC_OPTION_KEYS } from './common.js'
import { listRecursive } from './recursive.js'
export const EXCLUDE_PEERS_HELP = {
@@ -15,17 +15,9 @@ export const EXCLUDE_PEERS_HELP = {
export function rcOptionsTypes (): Record<string, unknown> {
return pick([
...BASE_RC_OPTION_KEYS,
'depth',
'dev',
'global-dir',
'global',
'json',
'lockfile-only',
'long',
'only',
'optional',
'parseable',
'production',
], allTypes)
}
@@ -37,10 +29,7 @@ export const cliOptionsTypes = (): Record<string, unknown> => ({
'find-by': [String, Array],
})
export const shorthands: Record<string, string> = {
D: '--dev',
P: '--production',
}
export { shorthands } from './common.js'
export const commandNames = ['list', 'ls']
@@ -55,30 +44,7 @@ For example: pnpm ls babel-* eslint-*',
title: 'Options',
list: [
{
description: 'Perform command on every package in subdirectories \
or on every workspace package, when executed inside a workspace. \
For options that may be used with `-r`, see "pnpm help recursive"',
name: '--recursive',
shortAlias: '-r',
},
{
description: 'Show extended information',
name: '--long',
},
{
description: 'Show parseable output instead of tree view',
name: '--parseable',
},
{
description: 'Show information in JSON format',
name: '--json',
},
{
description: 'List packages in the global install prefix instead of in the current project',
name: '--global',
shortAlias: '-g',
},
...SHARED_CLI_HELP_OPTIONS,
{
description: 'Max display depth of the dependency tree',
name: '--depth <number>',
@@ -91,24 +57,10 @@ For options that may be used with `-r`, see "pnpm help recursive"',
description: 'Display only projects. Useful in a monorepo. `pnpm ls -r --depth -1` lists all projects in a monorepo',
name: '--depth -1',
},
{
description: 'Display only the dependency graph for packages in `dependencies` and `optionalDependencies`',
name: '--prod',
shortAlias: '-P',
},
{
description: 'Display only the dependency graph for packages in `devDependencies`',
name: '--dev',
shortAlias: '-D',
},
{
description: 'Display only dependencies that are also projects within the workspace',
name: '--only-projects',
},
{
description: "Don't display packages from `optionalDependencies`",
name: '--no-optional',
},
{
description: 'List packages from the lockfile only, without checking node_modules.',
name: '--lockfile-only',
@@ -155,11 +107,7 @@ export async function handler (
opts: ListCommandOptions,
params: string[]
): Promise<string> {
const include = {
dependencies: opts.production !== false,
devDependencies: opts.dev !== false,
optionalDependencies: opts.optional !== false,
}
const include = computeInclude(opts)
const depth = opts.cliOptions?.['depth'] ?? 0
if (opts.recursive && (opts.selectedProjectsGraph != null)) {
const pkgs = Object.values(opts.selectedProjectsGraph).map((wsPkg) => wsPkg.package)
@@ -194,15 +142,7 @@ export async function render (
findBy?: string[]
}
): Promise<string> {
const finders: Finder[] = []
if (opts.findBy) {
for (const finderName of opts.findBy) {
if (opts.finders?.[finderName] == null) {
throw new PnpmError('FINDER_NOT_FOUND', `No finder with name ${finderName} is found`)
}
finders.push(opts.finders[finderName])
}
}
const finders = resolveFinders(opts)
const listOpts = {
alwaysPrintRootPackage: opts.alwaysPrintRootPackage,
depth: opts.depth ?? 0,
@@ -212,8 +152,9 @@ export async function render (
checkWantedLockfileOnly: opts.checkWantedLockfileOnly,
long: opts.long,
onlyProjects: opts.onlyProjects,
reportAs: (opts.parseable ? 'parseable' : (opts.json ? 'json' : 'tree')) as ('parseable' | 'json' | 'tree'),
reportAs: determineReportAs(opts),
showExtraneous: false,
showSummary: true,
modulesDir: opts.modulesDir,
virtualStoreDirMaxLength: opts.virtualStoreDirMaxLength,
finders,

View File

@@ -2,23 +2,14 @@ import { docsUrl } from '@pnpm/cli-utils'
import { FILTERING, OPTIONS, UNIVERSAL_OPTIONS } from '@pnpm/common-cli-options-help'
import { types as allTypes } from '@pnpm/config'
import { PnpmError } from '@pnpm/error'
import { whyForPackages } from '@pnpm/list'
import { pick } from 'ramda'
import renderHelp from 'render-help'
import { handler as list, type ListCommandOptions, EXCLUDE_PEERS_HELP } from './list.js'
import { computeInclude, resolveFinders, determineReportAs, SHARED_CLI_HELP_OPTIONS, BASE_RC_OPTION_KEYS } from './common.js'
import { type ListCommandOptions, EXCLUDE_PEERS_HELP } from './list.js'
export function rcOptionsTypes (): Record<string, unknown> {
return pick([
'depth',
'dev',
'global-dir',
'global',
'json',
'long',
'only',
'optional',
'parseable',
'production',
], allTypes)
return pick([...BASE_RC_OPTION_KEYS], allTypes)
}
export const cliOptionsTypes = (): Record<string, unknown> => ({
@@ -28,10 +19,7 @@ export const cliOptionsTypes = (): Record<string, unknown> => ({
'find-by': [String, Array],
})
export const shorthands: Record<string, string> = {
D: '--dev',
P: '--production',
}
export { shorthands } from './common.js'
export const commandNames = ['why']
@@ -44,48 +32,7 @@ For example: pnpm why babel-* eslint-*`,
title: 'Options',
list: [
{
description: 'Perform command on every package in subdirectories \
or on every workspace package, when executed inside a workspace. \
For options that may be used with `-r`, see "pnpm help recursive"',
name: '--recursive',
shortAlias: '-r',
},
{
description: 'Show extended information',
name: '--long',
},
{
description: 'Show parseable output instead of tree view',
name: '--parseable',
},
{
description: 'Show information in JSON format',
name: '--json',
},
{
description: 'List packages in the global install prefix instead of in the current project',
name: '--global',
shortAlias: '-g',
},
{
description: 'Display only the dependency graph for packages in `dependencies` and `optionalDependencies`',
name: '--prod',
shortAlias: '-P',
},
{
description: 'Display only the dependency graph for packages in `devDependencies`',
name: '--dev',
shortAlias: '-D',
},
{
description: "Don't display packages from `optionalDependencies`",
name: '--no-optional',
},
{
name: '--depth <number>',
description: 'Max display depth of the dependency graph',
},
...SHARED_CLI_HELP_OPTIONS,
EXCLUDE_PEERS_HELP,
OPTIONS.globalDir,
...UNIVERSAL_OPTIONS,
@@ -107,11 +54,23 @@ export async function handler (
if (params.length === 0 && opts.findBy == null) {
throw new PnpmError('MISSING_PACKAGE_NAME', '`pnpm why` requires the package name or --find-by=<finder-name>')
}
return list({
...opts,
cliOptions: {
...(opts.cliOptions ?? {}),
depth: opts.depth ?? Infinity,
},
}, params)
const include = computeInclude(opts)
const finders = resolveFinders(opts)
const lockfileDir = opts.lockfileDir ?? opts.dir
const reportAs = determineReportAs(opts)
const projectPaths = opts.recursive && opts.selectedProjectsGraph
? Object.keys(opts.selectedProjectsGraph)
: [opts.dir]
return whyForPackages(params, projectPaths, {
include,
long: opts.long,
lockfileDir,
reportAs,
modulesDir: opts.modulesDir,
checkWantedLockfileOnly: opts.lockfileOnly,
finders,
})
}

View File

@@ -33,9 +33,11 @@ test('listing packages', async () => {
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ dependencies:
└── is-positive@1.0.0
dependencies:
is-positive 1.0.0`)
1 package`)
}
{
@@ -49,9 +51,11 @@ is-positive 1.0.0`)
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ devDependencies:
└── is-negative@1.0.0
devDependencies:
is-negative 1.0.0`)
1 package`)
}
{
@@ -63,12 +67,14 @@ is-negative 1.0.0`)
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ dependencies:
├── is-positive@1.0.0
│ devDependencies:
└── is-negative@1.0.0
dependencies:
is-positive 1.0.0
devDependencies:
is-negative 1.0.0`)
2 packages`)
}
})
@@ -102,9 +108,11 @@ test(`listing packages of a project that has an external ${WANTED_LOCKFILE}`, as
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
pkg@1.0.0 ${process.cwd()}
│ dependencies:
└── is-positive@1.0.0
dependencies:
is-positive 1.0.0`)
1 package`)
})
// Use a preinstalled fixture
@@ -193,9 +201,11 @@ test('listing packages should not fail on package that has local file directory
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
pkg@1.0.0 ${pkgDir}
│ dependencies:
└── dep@file:../dep
dependencies:
dep file:../dep`)
1 package`)
})
test('listing packages with --lockfile-only', async () => {
@@ -222,9 +232,11 @@ test('listing packages with --lockfile-only', async () => {
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ dependencies:
└── is-positive@1.0.0
dependencies:
is-positive 1.0.0`)
1 package`)
}
{
@@ -239,9 +251,11 @@ is-positive 1.0.0`)
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ devDependencies:
└── is-negative@1.0.0
devDependencies:
is-negative 1.0.0`)
1 package`)
}
{
@@ -254,12 +268,14 @@ is-negative 1.0.0`)
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ dependencies:
├── is-positive@1.0.0
│ devDependencies:
└── is-negative@1.0.0
dependencies:
is-positive 1.0.0
devDependencies:
is-negative 1.0.0`)
2 packages`)
}
})
@@ -305,7 +321,9 @@ test('listing specific package with --lockfile-only', async () => {
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
│ dependencies:
└── is-positive@1.0.0
dependencies:
is-positive 1.0.0`)
1 package`)
})

View File

@@ -55,16 +55,20 @@ test('recursive list', async () => {
expect(stripAnsi(output as unknown as string)).toBe(`Legend: production dependency, optional only, dev only
project-1@1.0.0 ${path.resolve('project-1')}
│ dependencies:
└── is-positive@1.0.0
dependencies:
is-positive 1.0.0
1 package
Legend: production dependency, optional only, dev only
project-2@1.0.0 ${path.resolve('project-2')}
│ dependencies:
└── is-negative@1.0.0
dependencies:
is-negative 1.0.0`)
1 package`)
})
test('recursive list with sharedWorkspaceLockfile', async () => {
@@ -120,17 +124,21 @@ test('recursive list with sharedWorkspaceLockfile', async () => {
expect(stripAnsi(output as unknown as string)).toBe(`Legend: production dependency, optional only, dev only
project-1@1.0.0 ${path.resolve('project-1')}
│ dependencies:
└─┬ @pnpm.e2e/pkg-with-1-dep@100.0.0
└── @pnpm.e2e/dep-of-pkg-with-1-dep@100.1.0
dependencies:
@pnpm.e2e/pkg-with-1-dep 100.0.0
└── @pnpm.e2e/dep-of-pkg-with-1-dep 100.1.0
2 packages
Legend: production dependency, optional only, dev only
project-2@1.0.0 ${path.resolve('project-2')}
│ dependencies:
└── is-negative@1.0.0
dependencies:
is-negative 1.0.0`)
1 package`)
})
test('recursive list --filter', async () => {
@@ -184,17 +192,21 @@ test('recursive list --filter', async () => {
expect(stripAnsi(output as unknown as string)).toBe(`Legend: production dependency, optional only, dev only
project-1@1.0.0 ${path.resolve('project-1')}
│ dependencies:
├── is-positive@1.0.0
└── project-2@link:../project-2
dependencies:
is-positive 1.0.0
project-2 link:../project-2
2 packages
Legend: production dependency, optional only, dev only
project-2@1.0.0 ${path.resolve('project-2')}
│ dependencies:
└── is-negative@1.0.0
dependencies:
is-negative 1.0.0`)
1 package`)
})
test('recursive list --filter link-workspace-packages=false', async () => {
@@ -239,10 +251,12 @@ test('recursive list --filter link-workspace-packages=false', async () => {
expect(stripAnsi(output as unknown as string)).toBe(`Legend: production dependency, optional only, dev only
project-1@1.0.0 ${path.resolve('project-1')}
│ dependencies:
├── is-positive@1.0.0
└── project-2@link:../project-2
dependencies:
is-positive 1.0.0
project-2 link:../project-2`)
2 packages`)
})
test('`pnpm recursive why` should fail if no package name was provided', async () => {

View File

@@ -26,7 +26,7 @@ test('`pnpm why` should fail if no package name was provided', async () => {
expect(err.message).toMatch(/`pnpm why` requires the package name/)
})
test('"why" should find non-direct dependency', async () => {
test('"why" should show reverse dependency tree for a non-direct dependency', async () => {
prepare({
dependencies: {
'@pnpm.e2e/dep-of-pkg-with-1-dep': '100.0.0',
@@ -43,14 +43,13 @@ test('"why" should find non-direct dependency', async () => {
virtualStoreDirMaxLength: process.platform === 'win32' ? 60 : 120,
}, ['@pnpm.e2e/dep-of-pkg-with-1-dep'])
expect(stripAnsi(output)).toBe(`Legend: production dependency, optional only, dev only
project@0.0.0 ${process.cwd()}
dependencies:
@pnpm.e2e/dep-of-pkg-with-1-dep 100.0.0
@pnpm.e2e/pkg-with-1-dep 100.0.0
└── @pnpm.e2e/dep-of-pkg-with-1-dep 100.0.0`)
const lines = stripAnsi(output).split('\n')
// Root is the searched package
expect(lines[0]).toBe('@pnpm.e2e/dep-of-pkg-with-1-dep@100.0.0')
// It should show project@0.0.0 as a direct dependent
expect(lines.some(l => l.includes('project@0.0.0'))).toBe(true)
// It should show @pnpm.e2e/pkg-with-1-dep as a dependent (transitive path)
expect(lines.some(l => l.includes('@pnpm.e2e/pkg-with-1-dep@100.0.0'))).toBe(true)
})
test('"why" should find packages by alias name when using npm: protocol', async () => {
@@ -70,7 +69,9 @@ test('"why" should find packages by alias name when using npm: protocol', async
}, ['foo'])
const lines = stripAnsi(output).split('\n')
expect(lines).toContain('foo npm:@pnpm.e2e/pkg-with-1-dep@100.0.0')
// Root should show the canonical package name
expect(lines[0]).toBe('@pnpm.e2e/pkg-with-1-dep@100.0.0')
expect(lines.some(l => l.includes('project@0.0.0'))).toBe(true)
})
test('"why" should find packages by actual package name when using npm: protocol', async () => {
@@ -90,13 +91,15 @@ test('"why" should find packages by actual package name when using npm: protocol
}, ['@pnpm.e2e/pkg-with-1-dep'])
const lines = stripAnsi(output).split('\n')
expect(lines).toContain('foo npm:@pnpm.e2e/pkg-with-1-dep@100.0.0')
expect(lines[0]).toBe('@pnpm.e2e/pkg-with-1-dep@100.0.0')
expect(lines.some(l => l.includes('project@0.0.0'))).toBe(true)
})
test('"why" should display npm: protocol in parseable format', async () => {
test('"why" should display parseable output', async () => {
prepare({
dependencies: {
foo: 'npm:@pnpm.e2e/pkg-with-1-dep@100.0.0',
'@pnpm.e2e/dep-of-pkg-with-1-dep': '100.0.0',
'@pnpm.e2e/pkg-with-1-dep': '100.0.0',
},
})
@@ -106,16 +109,108 @@ test('"why" should display npm: protocol in parseable format', async () => {
dev: false,
dir: process.cwd(),
optional: false,
long: true,
parseable: true,
virtualStoreDirMaxLength: process.platform === 'win32' ? 60 : 120,
}, ['foo'])
}, ['@pnpm.e2e/dep-of-pkg-with-1-dep'])
const lines = output.split('\n')
expect(lines.some(line => line.includes('foo npm:@pnpm.e2e/pkg-with-1-dep@100.0.0'))).toBe(true)
// Parseable output should have paths from importer to target
expect(lines.some(line => line.includes('project@0.0.0'))).toBe(true)
expect(lines.some(line => line.includes('@pnpm.e2e/dep-of-pkg-with-1-dep@100.0.0'))).toBe(true)
})
test('"why" should display file: protocol correctly for aliased packages', async () => {
test('"why" should display finder message in tree output', async () => {
prepare({
dependencies: {
'@pnpm.e2e/pkg-with-1-dep': '100.0.0',
},
})
await execa('node', [pnpmBin, 'install', '--registry', `http://localhost:${REGISTRY_MOCK_PORT}`])
const output = await why.handler({
dir: process.cwd(),
virtualStoreDirMaxLength: process.platform === 'win32' ? 60 : 120,
findBy: ['test-finder'],
finders: {
'test-finder': (ctx) => {
if (ctx.name === '@pnpm.e2e/pkg-with-1-dep') {
return 'Found: has 1 dep'
}
return false
},
},
}, [])
const lines = stripAnsi(output).split('\n')
expect(lines[0]).toBe('@pnpm.e2e/pkg-with-1-dep@100.0.0')
expect(lines[1]).toBe('│ Found: has 1 dep')
})
test('"why" should display finder message in JSON output', async () => {
prepare({
dependencies: {
'@pnpm.e2e/pkg-with-1-dep': '100.0.0',
},
})
await execa('node', [pnpmBin, 'install', '--registry', `http://localhost:${REGISTRY_MOCK_PORT}`])
const output = await why.handler({
dir: process.cwd(),
json: true,
virtualStoreDirMaxLength: process.platform === 'win32' ? 60 : 120,
findBy: ['test-finder'],
finders: {
'test-finder': (ctx) => {
if (ctx.name === '@pnpm.e2e/pkg-with-1-dep') {
return 'custom message'
}
return false
},
},
}, [])
const parsed = JSON.parse(output)
const match = parsed.find((r: any) => r.name === '@pnpm.e2e/pkg-with-1-dep') // eslint-disable-line
expect(match).toBeDefined()
expect(match.searchMessage).toBe('custom message')
})
test('"why" finder can read manifest from store', async () => {
prepare({
dependencies: {
'@pnpm.e2e/pkg-with-1-dep': '100.0.0',
},
})
await execa('node', [pnpmBin, 'install', '--registry', `http://localhost:${REGISTRY_MOCK_PORT}`])
const output = await why.handler({
dir: process.cwd(),
json: true,
virtualStoreDirMaxLength: process.platform === 'win32' ? 60 : 120,
findBy: ['manifest-reader'],
finders: {
'manifest-reader': (ctx) => {
const manifest = ctx.readManifest()
// The manifest should contain the actual package name
if (manifest.name === '@pnpm.e2e/pkg-with-1-dep') {
return `description: ${manifest.description ?? 'none'}`
}
return false
},
},
}, [])
const parsed = JSON.parse(output)
const match = parsed.find((r: any) => r.name === '@pnpm.e2e/pkg-with-1-dep') // eslint-disable-line
expect(match).toBeDefined()
// The finder should have been able to read the manifest and produce a message
expect(match.searchMessage).toMatch(/^description: /)
})
test('"why" should find file: protocol local packages', async () => {
prepare({
dependencies: {
'my-alias': 'file:./local-pkg',
@@ -140,5 +235,7 @@ test('"why" should display file: protocol correctly for aliased packages', async
}, ['my-local-pkg'])
const lines = stripAnsi(output).split('\n')
expect(lines).toContain('my-alias my-local-pkg@file:local-pkg')
// Should find the local package and show reverse tree
expect(lines[0]).toContain('my-local-pkg')
expect(lines.some(l => l.includes('project@0.0.0'))).toBe(true)
})

View File

@@ -0,0 +1,43 @@
{
"name": "@pnpm/text.tree-renderer",
"version": "1000.0.0-0",
"description": "Renders a tree structure with box-drawing characters",
"keywords": [
"pnpm",
"pnpm11"
],
"license": "MIT",
"funding": "https://opencollective.com/pnpm",
"repository": "https://github.com/pnpm/pnpm/tree/main/text/tree-renderer",
"homepage": "https://github.com/pnpm/pnpm/tree/main/text/tree-renderer#readme",
"bugs": {
"url": "https://github.com/pnpm/pnpm/issues"
},
"type": "module",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"exports": {
".": "./lib/index.js"
},
"files": [
"lib",
"!*.map"
],
"scripts": {
"start": "tsgo --watch",
"_test": "cross-env NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest",
"test": "pnpm run compile && pnpm run _test",
"lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\"",
"prepublishOnly": "pnpm run compile",
"compile": "tsgo --build && pnpm run lint --fix"
},
"devDependencies": {
"@pnpm/text.tree-renderer": "workspace:*"
},
"engines": {
"node": ">=22.12"
},
"jest": {
"preset": "@pnpm/jest-config"
}
}

View File

@@ -0,0 +1,139 @@
/**
 * A labelled section of sibling nodes (e.g. `dependencies:`).
 * Members are rendered as direct siblings of the parent's other children,
 * preceded by a header line showing `group`.
 */
export interface TreeNodeGroup {
  group: string
  nodes: Array<TreeNode | string>
}

/** A tree node; a bare string anywhere a node is expected means `{ label }`. */
export interface TreeNode {
  label: string
  nodes?: Array<TreeNode | string | TreeNodeGroup>
}
/** Options accepted by `renderTree`. */
export interface TreeRendererOptions {
  /**
   * Formatter applied to tree-drawing character sequences (e.g. `├─┬ `, `│ `).
   * Useful for dimming tree lines so labels stand out: `{ treeChars: chalk.dim }`.
   */
  treeChars?: (chars: string) => string
  /**
   * When false, use ASCII characters (+, `, |, -) instead of
   * unicode box-drawing characters. Defaults to true (unicode).
   */
  unicode?: boolean
}
/**
 * Renders a tree (or a bare string, treated as a single label) into a
 * multi-line string using box-drawing characters. The returned string ends
 * with a trailing newline.
 */
export function renderTree (node: TreeNode | string, opts?: TreeRendererOptions): string {
  const options = opts ?? {}
  return render(options, { node, connector: '', prefix: '' })
}
/** Per-call state threaded through the recursive `render` walk. */
interface RenderContext {
  node: TreeNode | string
  /**
   * The formatted connector string for this node's first line
   * (e.g. `├─┬ `). Empty string for the root node.
   */
  connector: string
  /**
   * The raw prefix for subsequent lines and children of this node.
   * Built from unformatted characters so it can be extended for deeper levels.
   */
  prefix: string
}
/**
 * Recursively renders one node and its subtree.
 *
 * Emits the node's (possibly multiline) label first, then walks its children
 * in order, inserting a group header whenever consecutive children belong to
 * a different `TreeNodeGroup`. Group members render as flat siblings of the
 * parent's other children, so last/not-last connectors span across groups.
 */
function render (
  opts: TreeRendererOptions,
  ctx: RenderContext
): string {
  const { connector, prefix } = ctx
  let { node } = ctx
  // Bare strings are shorthand for a leaf node with that label.
  if (typeof node === 'string') node = { label: node }
  const fmt = opts.treeChars ?? identity
  const chr = opts.unicode === false ? asciiChar : unicodeChar
  const nodes = node.nodes ?? []
  const lines = (node.label || '').split('\n')
  // First line: connector + label
  let result = (connector ? fmt(connector) : '') + lines[0] + '\n'
  // Flatten groups into items with group annotations
  const items: Array<{ node: TreeNode, group?: string }> = []
  for (const child of nodes) {
    if (isGroup(child)) {
      for (const gn of child.nodes) {
        items.push({ node: typeof gn === 'string' ? { label: gn } : gn, group: child.group })
      }
    } else {
      items.push({ node: typeof child === 'string' ? { label: child } : child })
    }
  }
  // Continuation lines for multiline labels
  // (draw a │ gutter only when children follow; otherwise plain spacing).
  const continuationChars = items.length ? chr('│') + ' ' : ' '
  for (let l = 1; l < lines.length; l++) {
    result += fmt(prefix + continuationChars) + lines[l] + '\n'
  }
  // Render items, emitting group headers when the group changes
  let currentGroup: string | undefined
  for (let i = 0; i < items.length; i++) {
    const item = items[i]
    const last = i === items.length - 1
    if (item.group !== currentGroup) {
      currentGroup = item.group
      // A plain (ungrouped) item resets currentGroup without printing a
      // header, because `undefined` fails the null check below.
      if (currentGroup != null) {
        result += fmt(prefix + chr('│')) + '\n'
        result += fmt(prefix + chr('│') + ' ') + currentGroup + '\n'
      }
    }
    // ─┬ when the child has renderable descendants, ── for a leaf.
    // Empty groups do not count as renderable children.
    const more = hasRenderableChildren(item.node.nodes)
    const childConnector = prefix +
      (last ? chr('└') : chr('├')) + chr('─') +
      (more ? chr('┬') : chr('─')) + ' '
    const childPrefix = prefix + (last ? ' ' : chr('│') + ' ')
    result += render(opts, {
      node: item.node,
      connector: childConnector,
      prefix: childPrefix,
    })
  }
  return result
}
/**
 * Reports whether `nodes` contains anything that will actually be drawn.
 * Plain nodes and strings always render; a `TreeNodeGroup` renders only
 * when it has at least one member. Used to pick `─┬` vs `──` connectors.
 */
function hasRenderableChildren (nodes: Array<TreeNode | string | TreeNodeGroup> | undefined): boolean {
  if (nodes == null) return false
  return nodes.some((child) => {
    if (typeof child !== 'string' && 'group' in child) {
      return child.nodes.length > 0
    }
    return true
  })
}
/** Type guard distinguishing a `TreeNodeGroup` from a node or bare string. */
function isGroup (node: TreeNode | string | TreeNodeGroup): node is TreeNodeGroup {
  if (typeof node === 'string') return false
  return 'group' in node
}
// Default `treeChars` formatter: leaves drawing characters untouched.
function identity (s: string): string {
  return s
}
// Unicode character set: box-drawing characters pass through as-is.
function unicodeChar (s: string): string {
  return s
}
/**
 * ASCII fallback character set (used when `unicode: false`): maps each
 * box-drawing character to its classic ASCII equivalent; any other input
 * is returned unchanged. Note '┬' and '─' both map to '-', so a branch
 * connector collapses to `+--` / `` `-- `` in ASCII mode.
 */
function asciiChar (s: string): string {
  switch (s) {
    case '│': return '|'
    case '└': return '`'
    case '├': return '+'
    case '─': return '-'
    case '┬': return '-'
    default: return s
  }
}

View File

@@ -0,0 +1,511 @@
import { renderTree, type TreeNode, type TreeNodeGroup } from '@pnpm/text.tree-renderer'
test('single root with no children', () => {
expect(renderTree({ label: 'root' })).toBe('root\n')
})
test('single root with empty nodes array', () => {
expect(renderTree({ label: 'root', nodes: [] })).toBe('root\n')
})
test('single child (leaf)', () => {
expect(renderTree({
label: 'root',
nodes: [{ label: 'child' }],
})).toBe(
'root\n' +
'└── child\n'
)
})
test('multiple children', () => {
expect(renderTree({
label: 'root',
nodes: [
{ label: 'a' },
{ label: 'b' },
{ label: 'c' },
],
})).toBe(
'root\n' +
'├── a\n' +
'├── b\n' +
'└── c\n'
)
})
test('nested children with correct prefix propagation', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [
{ label: 'a1' },
{ label: 'a2' },
],
},
{ label: 'b' },
],
})).toBe(
'root\n' +
'├─┬ a\n' +
'│ ├── a1\n' +
'│ └── a2\n' +
'└── b\n'
)
})
test('last child with children uses └─┬', () => {
expect(renderTree({
label: 'root',
nodes: [
{ label: 'a' },
{
label: 'b',
nodes: [
{ label: 'b1' },
],
},
],
})).toBe(
'root\n' +
'├── a\n' +
'└─┬ b\n' +
' └── b1\n'
)
})
test('deep nesting (3+ levels)', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [
{
label: 'b',
nodes: [
{ label: 'c' },
{ label: 'd' },
],
},
],
},
],
})).toBe(
'root\n' +
'└─┬ a\n' +
' └─┬ b\n' +
' ├── c\n' +
' └── d\n'
)
})
test('sibling trees with deep nesting', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [
{
label: 'a1',
nodes: [{ label: 'a1x' }],
},
],
},
{
label: 'b',
nodes: [
{ label: 'b1' },
],
},
],
})).toBe(
'root\n' +
'├─┬ a\n' +
'│ └─┬ a1\n' +
'│ └── a1x\n' +
'└─┬ b\n' +
' └── b1\n'
)
})
test('multiline labels on node with children', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'pkg@1.0.0\nA description\nhttps://example.com',
nodes: [{ label: 'child' }],
},
{
label: 'leaf@2.0.0\nAnother description',
},
],
})).toBe(
'root\n' +
'├─┬ pkg@1.0.0\n' +
'│ │ A description\n' +
'│ │ https://example.com\n' +
'│ └── child\n' +
'└── leaf@2.0.0\n' +
' Another description\n'
)
})
test('multiline label on leaf node', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'pkg@1.0.0\nA description',
},
{
label: 'last@2.0.0\nAnother description',
},
],
})).toBe(
'root\n' +
'├── pkg@1.0.0\n' +
'│ A description\n' +
'└── last@2.0.0\n' +
' Another description\n'
)
})
test('multiline label on root', () => {
expect(renderTree({
label: 'root\nsecond line',
nodes: [{ label: 'child' }],
})).toBe(
'root\n' +
'│ second line\n' +
'└── child\n'
)
})
test('string nodes in array', () => {
expect(renderTree({
label: 'root',
nodes: [
'string-child-1',
{ label: 'object-child' },
'string-child-2',
],
})).toBe(
'root\n' +
'├── string-child-1\n' +
'├── object-child\n' +
'└── string-child-2\n'
)
})
test('treeChars formatter option', () => {
const wrapped = (s: string) => `[${s}]`
expect(renderTree({
label: 'root',
nodes: [
{ label: 'a' },
{ label: 'b' },
],
}, { treeChars: wrapped })).toBe(
'root\n' +
'[├── ]a\n' +
'[└── ]b\n'
)
})
test('treeChars formatter with nested children', () => {
const wrapped = (s: string) => `[${s}]`
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [{ label: 'a1' }],
},
{ label: 'b' },
],
}, { treeChars: wrapped })).toBe(
'root\n' +
'[├─┬ ]a\n' +
'[│ └── ]a1\n' +
'[└── ]b\n'
)
})
test('treeChars formatter with multiline labels', () => {
const wrapped = (s: string) => `[${s}]`
expect(renderTree({
label: 'root',
nodes: [
{
label: 'pkg\ndescription',
},
{ label: 'b' },
],
}, { treeChars: wrapped })).toBe(
'root\n' +
'[├── ]pkg\n' +
'[│ ]description\n' +
'[└── ]b\n'
)
})
test('unicode: false uses ASCII characters', () => {
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [
{ label: 'a1' },
{ label: 'a2' },
],
},
{ label: 'b' },
],
}, { unicode: false })).toBe(
'root\n' +
'+-- a\n' +
'| +-- a1\n' +
'| `-- a2\n' +
'`-- b\n'
)
})
test('string input is treated as label', () => {
expect(renderTree('just a string')).toBe('just a string\n')
})
test('matches archy output for pnpm list-like structure', () => {
// Simulate the pnpm list structure: root → group headers + deps as flat siblings
const tree: TreeNode = {
label: 'fixture@1.0.0 /path',
nodes: [
{ label: 'dependencies:', nodes: [] },
{
label: 'write-json-file@2.3.0',
nodes: [
{ label: 'detect-indent@5.0.0' },
{ label: 'graceful-fs@4.2.2' },
],
},
{ label: 'devDependencies:', nodes: [] },
{ label: 'is-positive@3.1.0' },
],
}
expect(renderTree(tree)).toBe(
'fixture@1.0.0 /path\n' +
'├── dependencies:\n' +
'├─┬ write-json-file@2.3.0\n' +
'│ ├── detect-indent@5.0.0\n' +
'│ └── graceful-fs@4.2.2\n' +
'├── devDependencies:\n' +
'└── is-positive@3.1.0\n'
)
})
test('single group', () => {
expect(renderTree({
label: 'root',
nodes: [
{ group: 'dependencies:', nodes: [{ label: 'a' }, { label: 'b' }] },
],
})).toBe(
'root\n' +
'│\n' +
'│ dependencies:\n' +
'├── a\n' +
'└── b\n'
)
})
test('multiple groups with last/not-last spanning across all', () => {
expect(renderTree({
label: 'root',
nodes: [
{ group: 'dependencies:', nodes: [{ label: 'a' }] },
{ group: 'devDependencies:', nodes: [{ label: 'b' }] },
],
})).toBe(
'root\n' +
'│\n' +
'│ dependencies:\n' +
'├── a\n' +
'│\n' +
'│ devDependencies:\n' +
'└── b\n'
)
})
test('groups with nested children', () => {
expect(renderTree({
label: 'root',
nodes: [
{
group: 'dependencies:',
nodes: [
{
label: 'write-json-file@2.3.0',
nodes: [
{ label: 'detect-indent@5.0.0' },
{ label: 'graceful-fs@4.2.2' },
],
},
],
},
{ group: 'devDependencies:', nodes: [{ label: 'is-positive@3.1.0' }] },
],
})).toBe(
'root\n' +
'│\n' +
'│ dependencies:\n' +
'├─┬ write-json-file@2.3.0\n' +
'│ ├── detect-indent@5.0.0\n' +
'│ └── graceful-fs@4.2.2\n' +
'│\n' +
'│ devDependencies:\n' +
'└── is-positive@3.1.0\n'
)
})
test('groups with treeChars formatter', () => {
const wrapped = (s: string) => `[${s}]`
expect(renderTree({
label: 'root',
nodes: [
{ group: 'deps:', nodes: [{ label: 'a' }] },
{ group: 'dev:', nodes: [{ label: 'b' }] },
],
}, { treeChars: wrapped })).toBe(
'root\n' +
'[│]\n' +
'[│ ]deps:\n' +
'[├── ]a\n' +
'[│]\n' +
'[│ ]dev:\n' +
'[└── ]b\n'
)
})
test('mixed groups and plain nodes', () => {
expect(renderTree({
label: 'root',
nodes: [
{ label: 'plain-first' },
{ group: 'dependencies:', nodes: [{ label: 'a' }] },
],
})).toBe(
'root\n' +
'├── plain-first\n' +
'│\n' +
'│ dependencies:\n' +
'└── a\n'
)
})
test('empty group is skipped', () => {
expect(renderTree({
label: 'root',
nodes: [
{ group: 'empty:', nodes: [] } as TreeNodeGroup,
{ group: 'deps:', nodes: [{ label: 'a' }] },
],
})).toBe(
'root\n' +
'│\n' +
'│ deps:\n' +
'└── a\n'
)
})
test('multiline label with only empty groups uses plain continuation', () => {
  // When all groups are empty, items.length is 0, so the multiline
  // continuation prefix should be plain spaces, not │.
  expect(renderTree({
    label: 'root',
    nodes: [
      {
        // Two-line label; its node has no renderable children because the
        // only group in `nodes` is empty — TODO(review) confirm renderer
        // treats this the same as an absent `nodes` array.
        label: 'pkg@1.0.0\nA description',
        nodes: [
          { group: 'dependencies:', nodes: [] } as TreeNodeGroup,
        ],
      },
      { label: 'other' },
    ],
  })).toBe(
    // The continuation line keeps the parent-level │ (the node is not the
    // last sibling) but fills the would-be child slot with spaces.
    'root\n' +
    '├── pkg@1.0.0\n' +
    '│   A description\n' +
    '└── other\n'
  )
})
test('child with only empty groups uses ── connector, not ─┬', () => {
// A child whose nodes array contains only empty groups has no renderable
// children, so its connector should be ── (leaf) not ─┬ (branch).
expect(renderTree({
label: 'root',
nodes: [
{
label: 'a',
nodes: [
{ group: 'deps:', nodes: [] } as TreeNodeGroup,
{ group: 'dev:', nodes: [] } as TreeNodeGroup,
],
},
],
})).toBe(
'root\n' +
'└── a\n'
)
})
test('group matching pnpm list output', () => {
const tree: TreeNode = {
label: 'my-pkg@1.0.0 /path',
nodes: [
{
group: 'dependencies:',
nodes: [
{
label: 'write-json-file@2.3.0',
nodes: [
{ label: 'detect-indent@5.0.0' },
{ label: 'graceful-fs@4.2.2' },
],
},
],
},
{
group: 'devDependencies:',
nodes: [
{ label: 'is-positive@3.1.0' },
],
},
{
group: 'optionalDependencies:',
nodes: [
{ label: 'is-negative@2.1.0' },
],
},
],
}
expect(renderTree(tree)).toBe(
'my-pkg@1.0.0 /path\n' +
'│\n' +
'│ dependencies:\n' +
'├─┬ write-json-file@2.3.0\n' +
'│ ├── detect-indent@5.0.0\n' +
'│ └── graceful-fs@4.2.2\n' +
'│\n' +
'│ devDependencies:\n' +
'├── is-positive@3.1.0\n' +
'│\n' +
'│ optionalDependencies:\n' +
'└── is-negative@2.1.0\n'
)
})

View File

@@ -0,0 +1,18 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"noEmit": false,
"outDir": "../node_modules/.test.lib",
"rootDir": "..",
"isolatedModules": true
},
"include": [
"**/*.ts",
"../../../__typings__/**/*.d.ts"
],
"references": [
{
"path": ".."
}
]
}

View File

@@ -0,0 +1,12 @@
{
"extends": "@pnpm/tsconfig",
"compilerOptions": {
"outDir": "lib",
"rootDir": "src"
},
"include": [
"src/**/*.ts",
"../../__typings__/**/*.d.ts"
],
"references": []
}

View File

@@ -0,0 +1,8 @@
{
"extends": "./tsconfig.json",
"include": [
"src/**/*.ts",
"test/**/*.ts",
"../../__typings__/**/*.d.ts"
]
}