feat: update injected packages after run (#9100)

* feat: update injected packages after run (wip)

close #9081

* refactor: rename field

* feat: injectedPackages (wip)

* feat: findInjectedPackages (wip)

* feat: complete implementation

* test: findInjectedPackages

* docs: changeset

* refactor: be lazy

* chore: set `version` to `1000.0.0-0`

* feat: use hardlinks for injected packages

* refactor: just use `.modules.yaml`

* feat: debug logger

* refactor: `modulesDir` is unnecessary

* test: shouldUpdateInjectedFilesAfterRun

* fix(test): remove the test command

* test: updateInjectedPackagesAfterRun

* fix: eslint

* feat: rename config

* perf: diff to reduce fs operations

* perf: load source map only once

* chore(deps): remove unused dependencies

* fix: eslint

* refactor: use `symlink-dir`

* refactor: move type expr to an alias

* refactor: simplify types

* feat: reuse stats from the directory fetcher

* test: directories and symlinks

* feat: sort alphabetic

* test: diffDir

* test: rename a test

* test: remove nesting

* refactor: rename

* feat: remove buggy symlink support

* test: applyPatch

* docs: correct

* docs: fix

* test: extendFilesMap

* docs: remove outdated comment

* docs: remove unneeded comment

* test: fix

* test: more assertions

* test: DirPatcher

* test: more assertions

* test: more assertions

* test: just use `createDir`

* test: multiple patchers

* test: reuse stat results

* docs: consistent grammar

* test: workaround

* test: fix windows

* refactor: remove single-use `makeParent`

* refactor: remove nonsense test

How could I even misunderstand my own code?!

`Patcher.apply()` will never call stat on the files because they have all
been loaded to calculate `Patcher.patch`.

This test is therefore nonsense.

* feat: rename

* feat: rename again

* feat: remove `boolean`

* fix: broken lockfile

* test: use a fixture for testing sync injected deps

* test: refactor syne injected deps test

* test: refactor sync injected deps test

* test: refactor sync injected deps test

* refactor: rename injected deps to syncer

* refactor: change injected deps logger

* docs: update changeset

---------

Co-authored-by: Zoltan Kochan <z@kochan.io>
This commit is contained in:
Khải
2025-02-24 08:09:45 +07:00
committed by GitHub
parent 5df8de7180
commit e32b1a29e9
30 changed files with 1389 additions and 10 deletions

View File

@@ -0,0 +1,13 @@
---
"@pnpm/workspace.injected-deps-syncer": major
"@pnpm/fs.indexed-pkg-importer": minor
"@pnpm/config": minor
"@pnpm/plugin-commands-script-runners": minor
"pnpm": minor
---
Added support for automatically syncing files of injected workspace packages after `pnpm run` [#9081](https://github.com/pnpm/pnpm/issues/9081). Use the `sync-injected-deps-after-scripts` setting to specify which scripts build the workspace package. This tells pnpm when syncing is needed. The setting should be defined in a `.npmrc` file at the root of the workspace. Example:
```ini
sync-injected-deps-after-scripts[]=compile
```

View File

@@ -0,0 +1,5 @@
---
"@pnpm/workspace.injected-deps-syncer": major
---
Initial Release.

View File

@@ -0,0 +1,5 @@
---
"@pnpm/directory-fetcher": minor
---
Optionally return file stats from `fetchFromDir`.

View File

@@ -215,6 +215,7 @@ export interface Config extends OptionsFromRootManifest {
strictStorePkgContentCheck: boolean
managePackageManagerVersions: boolean
strictDepBuilds: boolean
syncInjectedDepsAfterScripts?: string[]
initPackageManager: boolean
}

View File

@@ -120,4 +120,5 @@ export const types = Object.assign({
'update-notifier': Boolean,
'registry-supports-time-field': Boolean,
'fail-if-no-match': Boolean,
'sync-injected-deps-after-scripts': Array,
}, npmTypes.types)

View File

@@ -250,6 +250,7 @@
"subpkg",
"supercede",
"syml",
"syncer",
"szia",
"tabtab",
"taffydb",

View File

@@ -68,6 +68,7 @@
"@pnpm/sort-packages": "workspace:*",
"@pnpm/store-path": "workspace:*",
"@pnpm/types": "workspace:*",
"@pnpm/workspace.injected-deps-syncer": "workspace:*",
"@zkochan/rimraf": "catalog:",
"didyoumean2": "catalog:",
"enquirer": "catalog:",

View File

@@ -16,6 +16,7 @@ import {
makeNodeRequireOption,
type RunLifecycleHookOptions,
} from '@pnpm/lifecycle'
import { syncInjectedDeps } from '@pnpm/workspace.injected-deps-syncer'
import { type PackageScripts, type ProjectManifest } from '@pnpm/types'
import pick from 'ramda/src/pick'
import realpathMissing from 'realpath-missing'
@@ -172,6 +173,7 @@ export type RunOpts =
| 'scriptShell'
| 'scriptsPrependNodePath'
| 'shellEmulator'
| 'syncInjectedDepsAfterScripts'
| 'userAgent'
>
& (
@@ -290,7 +292,12 @@ so you may run "pnpm -w run ${scriptName}"`,
try {
const limitRun = pLimit(concurrency)
const _runScript = runScript.bind(null, { manifest, lifecycleOpts, runScriptOptions: { enablePrePostScripts: opts.enablePrePostScripts ?? false }, passedThruArgs })
const runScriptOptions: RunScriptOptions = {
enablePrePostScripts: opts.enablePrePostScripts ?? false,
syncInjectedDepsAfterScripts: opts.syncInjectedDepsAfterScripts,
workspaceDir: opts.workspaceDir,
}
const _runScript = runScript.bind(null, { manifest, lifecycleOpts, runScriptOptions, passedThruArgs })
await Promise.all(specifiedScripts.map(script => limitRun(() => _runScript(script))))
} catch (err: unknown) {
@@ -377,6 +384,8 @@ ${renderCommands(rootScripts)}`
export interface RunScriptOptions {
enablePrePostScripts: boolean
syncInjectedDepsAfterScripts: string[] | undefined
workspaceDir: string | undefined
}
export async function runScript (opts: {
@@ -400,6 +409,13 @@ export async function runScript (opts: {
) {
await runLifecycleHook(`post${scriptName}`, opts.manifest, opts.lifecycleOpts)
}
if (opts.runScriptOptions.syncInjectedDepsAfterScripts?.includes(scriptName)) {
await syncInjectedDeps({
pkgName: opts.manifest.name,
pkgRootDir: opts.lifecycleOpts.pkgRoot,
workspaceDir: opts.runScriptOptions.workspaceDir,
})
}
}
function renderCommands (commands: string[][]): string {

View File

@@ -16,7 +16,7 @@ import pLimit from 'p-limit'
import realpathMissing from 'realpath-missing'
import { existsInDir } from './existsInDir'
import { createEmptyRecursiveSummary, getExecutionDuration, getResumedPackageChunks, writeRecursiveSummary } from './exec'
import { runScript } from './run'
import { type RunScriptOptions, runScript } from './run'
import { tryBuildRegExpFromCommand } from './regexpCommand'
import { type PackageScripts, type ProjectRootDir } from '@pnpm/types'
@@ -31,6 +31,8 @@ export type RecursiveRunOpts = Pick<Config,
| 'scriptShell'
| 'shellEmulator'
| 'stream'
| 'syncInjectedDepsAfterScripts'
| 'workspaceDir'
> & Required<Pick<Config, 'allProjects' | 'selectedProjectsGraph' | 'workspaceDir' | 'dir'>> &
Partial<Pick<Config, 'extraBinPaths' | 'extraEnv' | 'bail' | 'reporter' | 'reverse' | 'sort' | 'workspaceConcurrency'>> &
{
@@ -137,7 +139,12 @@ export async function runRecursive (
}
}
const _runScript = runScript.bind(null, { manifest: pkg.package.manifest, lifecycleOpts, runScriptOptions: { enablePrePostScripts: opts.enablePrePostScripts ?? false }, passedThruArgs })
const runScriptOptions: RunScriptOptions = {
enablePrePostScripts: opts.enablePrePostScripts ?? false,
syncInjectedDepsAfterScripts: opts.syncInjectedDepsAfterScripts,
workspaceDir: opts.workspaceDir,
}
const _runScript = runScript.bind(null, { manifest: pkg.package.manifest, lifecycleOpts, runScriptOptions, passedThruArgs })
const groupEnd = (opts.workspaceConcurrency ?? 4) > 1
? undefined
: groupStart(formatSectionName({

View File

@@ -81,6 +81,9 @@
{
"path": "../../workspace/filter-workspace-packages"
},
{
"path": "../../workspace/injected-deps-syncer"
},
{
"path": "../../workspace/sort-packages"
},

View File

@@ -31,11 +31,12 @@ export function createDirectoryFetcher (
}
}
type FetchFromDirOptions = Omit<DirectoryFetcherOptions, 'lockfileDir'> & CreateDirectoryFetcherOptions
export type FetchFromDirOptions = Omit<DirectoryFetcherOptions, 'lockfileDir'> & CreateDirectoryFetcherOptions
interface FetchResult {
export interface FetchResult {
local: true
filesIndex: Record<string, string>
filesStats?: Record<string, Stats | null>
packageImportMethod: 'hardlink'
manifest: DependencyManifest
requiresBuild: boolean
@@ -53,7 +54,7 @@ async function fetchAllFilesFromDir (
readFileStat: ReadFileStat,
dir: string
): Promise<FetchResult> {
const filesIndex = await _fetchAllFilesFromDir(readFileStat, dir)
const { filesIndex, filesStats } = await _fetchAllFilesFromDir(readFileStat, dir)
// In a regular pnpm workspace it will probably never happen that a dependency has no package.json file.
// Safe read was added to support the Bit workspace in which the components have no package.json files.
// Related PR in Bit: https://github.com/teambit/bit/pull/5251
@@ -62,6 +63,7 @@ async function fetchAllFilesFromDir (
return {
local: true,
filesIndex,
filesStats,
packageImportMethod: 'hardlink',
manifest,
requiresBuild,
@@ -72,8 +74,9 @@ async function _fetchAllFilesFromDir (
readFileStat: ReadFileStat,
dir: string,
relativeDir = ''
): Promise<Record<string, string>> {
): Promise<Pick<FetchResult, 'filesIndex' | 'filesStats'>> {
const filesIndex: Record<string, string> = {}
const filesStats: Record<string, Stats | null> = {}
const files = await fs.readdir(dir)
await Promise.all(files
.filter((file) => file !== 'node_modules')
@@ -83,14 +86,16 @@ async function _fetchAllFilesFromDir (
const { filePath, stat } = fileStatResult
const relativeSubdir = `${relativeDir}${relativeDir ? '/' : ''}${file}`
if (stat.isDirectory()) {
const subFilesIndex = await _fetchAllFilesFromDir(readFileStat, filePath, relativeSubdir)
Object.assign(filesIndex, subFilesIndex)
const subFetchResult = await _fetchAllFilesFromDir(readFileStat, filePath, relativeSubdir)
Object.assign(filesIndex, subFetchResult.filesIndex)
Object.assign(filesStats, subFetchResult.filesStats)
} else {
filesIndex[relativeSubdir] = filePath
filesStats[relativeSubdir] = fileStatResult.stat
}
})
)
return filesIndex
return { filesIndex, filesStats }
}
interface FileStatResult {

View File

@@ -8,6 +8,8 @@ import { packageImportMethodLogger } from '@pnpm/core-loggers'
import { type FilesMap, type ImportOptions, type ImportIndexedPackage } from '@pnpm/store-controller-types'
import { importIndexedDir, type ImportFile } from './importIndexedDir'
export { type FilesMap, type ImportOptions, type ImportIndexedPackage }
export type PackageImportMethod = 'auto' | 'hardlink' | 'copy' | 'clone' | 'clone-or-copy'
export function createIndexedPkgImporter (packageImportMethod?: PackageImportMethod): ImportIndexedPackage {

31
pnpm-lock.yaml generated
View File

@@ -2494,6 +2494,9 @@ importers:
'@pnpm/types':
specifier: workspace:*
version: link:../../packages/types
'@pnpm/workspace.injected-deps-syncer':
specifier: workspace:*
version: link:../../workspace/injected-deps-syncer
'@zkochan/rimraf':
specifier: 'catalog:'
version: 3.0.2
@@ -8009,6 +8012,34 @@ importers:
specifier: workspace:*
version: 'link:'
workspace/injected-deps-syncer:
dependencies:
'@pnpm/directory-fetcher':
specifier: workspace:*
version: link:../../fetching/directory-fetcher
'@pnpm/error':
specifier: workspace:*
version: link:../../packages/error
'@pnpm/logger':
specifier: workspace:*
version: link:../../packages/logger
'@pnpm/modules-yaml':
specifier: workspace:*
version: link:../../pkg-manager/modules-yaml
'@types/normalize-path':
specifier: 'catalog:'
version: 3.0.2
normalize-path:
specifier: 'catalog:'
version: 3.0.0
devDependencies:
'@pnpm/prepare':
specifier: workspace:*
version: link:../../__utils__/prepare
'@pnpm/workspace.injected-deps-syncer':
specifier: workspace:*
version: 'link:'
workspace/manifest-writer:
dependencies:
'@pnpm/workspace.read-manifest':

View File

@@ -0,0 +1,4 @@
// Build-script fixture: exercises delete / modify / add of files in the
// package root so the injected-deps syncer has all three change kinds to sync.
const { rmSync, writeFileSync } = require('fs')

// Remove a pre-existing file (force: no error if it is already gone).
rmSync('should-be-deleted-by-build1.txt', { force: true })
// Overwrite an existing file with new content.
writeFileSync('should-be-modified-by-build1.txt', 'After modification')
// Create a brand-new file whose content is this script's absolute path.
writeFileSync('should-be-added-by-build1.txt', __filename)

View File

@@ -0,0 +1,2 @@
// Build-script fixture: creates a single new file (containing this script's
// absolute path) so tests can verify the injected copies pick it up.
const { writeFileSync } = require('fs')
writeFileSync('created-by-build2.txt', __filename)

View File

@@ -0,0 +1,14 @@
// Build-script fixture: produces empty directory trees, real files, and
// symlinks so the syncer's handling of each inode kind can be tested.
const fs = require('fs')

const mkdir = (dir) => fs.mkdirSync(dir, { recursive: true })

console.log('Creating a tree of empty directories...')
for (const branch of ['a/a/', 'a/b/', 'b/a/', 'b/b/']) {
  mkdir(`empty-dirs/${branch}`)
}

console.log('Creating a tree of real files...')
mkdir('files/foo/foo/')
fs.writeFileSync('files/foo/foo/foo.txt', '')
fs.writeFileSync('files/foo/bar.txt', '')
fs.writeFileSync('files/foo_bar.txt', 'This is foo_bar')

console.log('Creating symlinks...')
fs.symlinkSync('files/foo_bar.txt', 'link-to-a-file', 'file')
fs.symlinkSync('files/foo', 'link-to-a-dir', 'dir')

View File

@@ -0,0 +1 @@
Before modification

View File

@@ -0,0 +1,257 @@
import fs from 'fs'
import path from 'path'
import { preparePackages } from '@pnpm/prepare'
import { fixtures } from '@pnpm/test-fixtures'
import { sync as writeYamlFile } from 'write-yaml-file'
import { execPnpm } from './utils'
const f = fixtures(__dirname)

// Expected file listing of each workspace package right after install: the
// files copied from the `injected-dep-files` fixture plus the generated
// package.json. Sorted so it can be compared with sorted readdir output.
const PKG_FILES = [
  ...fs.readdirSync(f.find('injected-dep-files')),
  'package.json',
].sort()
/**
 * Scaffold a 3-package workspace (baz -> bar -> foo, linked via `workspace:*`
 * so the dependencies are injected) in which every package carries the
 * build1/build2/build3 fixture scripts.
 *
 * @param syncInjectedDepsAfterScripts - script names to write into the
 *   `sync-injected-deps-after-scripts[]` setting of the root `.npmrc`.
 */
function prepareInjectedDepsWorkspace (syncInjectedDepsAfterScripts: string[]) {
  const scripts = {
    build1: 'node ./build1.cjs',
    build2: 'node ./build2.cjs',
    build3: 'node ./build3.cjs',
  }

  // foo has only an external dep; bar injects foo; baz injects bar.
  const manifests = [
    { name: 'foo', deps: { 'is-positive': '1.0.0' } },
    { name: 'bar', deps: { foo: 'workspace:*' } },
    { name: 'baz', deps: { bar: 'workspace:*' } },
  ]
  preparePackages(manifests.map(({ name, deps }) => ({
    name,
    version: '0.0.0',
    dependencies: deps,
    scripts,
  })))

  // Drop the fixture build scripts and marker files into every package.
  for (const pkgName of ['foo', 'bar', 'baz']) {
    f.copy('injected-dep-files', pkgName)
  }

  writeYamlFile('pnpm-workspace.yaml', {
    packages: ['*'],
  })

  const npmrcLines = [
    'reporter=append-only',
    'inject-workspace-packages=true',
    'dedupe-injected-deps=false',
  ]
  for (const scriptName of syncInjectedDepsAfterScripts) {
    npmrcLines.push(`sync-injected-deps-after-scripts[]=${scriptName}`)
  }
  fs.writeFileSync('.npmrc', npmrcLines.join('\n'))
}
// End-to-end: when every build script is listed in the setting, running any
// of them updates the injected copies under node_modules/.pnpm.
test('with sync-injected-deps-after-scripts', async () => {
  prepareInjectedDepsWorkspace(['build1', 'build2', 'build3'])
  await execPnpm(['install'])
  // Sanity check: foo and bar were injected (file+ protocol directories).
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('foo@file+foo')
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('bar@file+bar')
  expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')
  expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')

  // build1 should update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build1'])
    // injected foo
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('should-be-deleted-by-build1.txt')
    expect(
      fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-added-by-build1.txt', 'utf-8')
    ).toBe(path.resolve('foo/build1.cjs'))
    expect(
      fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-modified-by-build1.txt', 'utf-8')
    ).toBe('After modification')
    // injected bar
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).not.toContain('should-be-deleted-by-build1.txt')
    expect(
      fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-added-by-build1.txt', 'utf-8')
    ).toBe(path.resolve('bar/build1.cjs'))
    expect(
      fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-modified-by-build1.txt', 'utf-8')
    ).toBe('After modification')
  }

  // build2 should update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build2'])
    // injected foo
    expect(
      fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/created-by-build2.txt', 'utf-8')
    ).toBe(path.resolve('foo/build2.cjs'))
    // injected bar
    expect(
      fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/created-by-build2.txt', 'utf-8')
    ).toBe(path.resolve('bar/build2.cjs'))
  }
})

// End-to-end: with an empty setting, running build scripts must leave the
// injected copies untouched.
test('without sync-injected-deps-after-scripts', async () => {
  prepareInjectedDepsWorkspace([])
  await execPnpm(['install'])
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('foo@file+foo')
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('bar@file+bar')
  expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')
  expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')

  // build1 should not update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build1'])
    // injected foo
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).toContain('should-be-deleted-by-build1.txt')
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('should-be-added-by-build1.txt')
    // injected bar
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).toContain('should-be-deleted-by-build1.txt')
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).not.toContain('should-be-added-by-build1.txt')
  }

  // build2 should not update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build2'])
    // injected foo
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('created-by-build2.txt')
    // injected bar
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).not.toContain('created-by-build2.txt')
  }
})

// End-to-end: only the scripts named in the setting trigger syncing
// (build1 is listed, build2 is not).
test('filter scripts', async () => {
  prepareInjectedDepsWorkspace(['build1'])
  await execPnpm(['install'])
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('foo@file+foo')
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('bar@file+bar')
  expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')
  expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar').sort()).toStrictEqual(PKG_FILES)
  expect(
    fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-modified-by-build1.txt', 'utf-8')
  ).toBe('Before modification\n')

  // build1 should update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build1'])
    // injected foo
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('should-be-deleted-by-build1.txt')
    expect(
      fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-added-by-build1.txt', 'utf-8')
    ).toBe(path.resolve('foo/build1.cjs'))
    expect(
      fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/should-be-modified-by-build1.txt', 'utf-8')
    ).toBe('After modification')
    // injected bar
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).not.toContain('should-be-deleted-by-build1.txt')
    expect(
      fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-added-by-build1.txt', 'utf-8')
    ).toBe(path.resolve('bar/build1.cjs'))
    expect(
      fs.readFileSync('node_modules/.pnpm/bar@file+bar/node_modules/bar/should-be-modified-by-build1.txt', 'utf-8')
    ).toBe('After modification')
  }

  // build2 should not update the injected files
  {
    await execPnpm(['--recursive', 'run', 'build2'])
    // injected foo
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('created-by-build2.txt')
    // injected bar
    expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar')).not.toContain('created-by-build2.txt')
  }
})

// End-to-end: verifies how empty directories, nested directory trees, and
// symlinks created by a build script are mirrored to the injected copy.
test('directories and symlinks', async () => {
  prepareInjectedDepsWorkspace(['build1', 'build2', 'build3'])
  await execPnpm(['install'])
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('foo@file+foo')
  expect(fs.readdirSync('node_modules/.pnpm')).toContain('bar@file+bar')
  expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo').sort()).toStrictEqual(PKG_FILES)
  expect(fs.readdirSync('node_modules/.pnpm/bar@file+bar/node_modules/bar').sort()).toStrictEqual(PKG_FILES)

  // build3 should update the injected files
  {
    await execPnpm(['--filter=foo', 'run', 'build3'])
    // should create empty-dirs at source
    expect(fs.readdirSync('foo/empty-dirs/a/a')).toStrictEqual([])
    expect(fs.readdirSync('foo/empty-dirs/a/b')).toStrictEqual([])
    expect(fs.readdirSync('foo/empty-dirs/b/a')).toStrictEqual([])
    expect(fs.readdirSync('foo/empty-dirs/b/b')).toStrictEqual([])
    // should not create empty-dirs at the injected location
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).not.toContain('empty-dirs')
    // should recreate a directories tree at the injected location
    expect(fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo')).toContain('files')
    expect(
      fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/files')
        .sort()
    ).toStrictEqual(['foo', 'foo_bar.txt'])
    expect(
      fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/files/foo')
        .sort()
    ).toStrictEqual(['bar.txt', 'foo'])
    expect(
      fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/files/foo/foo')
        .sort()
    ).toStrictEqual(['foo.txt'])
    // should recreate the structure of the symlinks at the injected location
    // NOTE: The current implementation of @pnpm/directory-fetcher would treat a symlink
    // to a directory as a real directory because it uses fs.stat instead of fs.lstat,
    // so testing with fs.realpathSync wouldn't work.
    expect(fs.readFileSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/link-to-a-file', 'utf-8')).toBe('This is foo_bar')
    expect(
      fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/link-to-a-dir')
        .sort()
    ).toStrictEqual(
      fs.readdirSync('node_modules/.pnpm/foo@file+foo/node_modules/foo/files/foo')
        .sort()
    )
  }
})

View File

@@ -0,0 +1,15 @@
# @pnpm/workspace.injected-deps-syncer
> Update all injected replicas of a workspace package
[![npm version](https://img.shields.io/npm/v/@pnpm/workspace.injected-deps-syncer.svg)](https://www.npmjs.com/package/@pnpm/workspace.injected-deps-syncer)
## Installation
```sh
pnpm add @pnpm/workspace.injected-deps-syncer
```
## License
MIT

View File

@@ -0,0 +1,50 @@
{
"name": "@pnpm/workspace.injected-deps-syncer",
"version": "1000.0.0-0",
"description": "Update all injected replica of a workspace package",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"files": [
"lib",
"!*.map"
],
"engines": {
"node": ">=18.12"
},
"scripts": {
"lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\"",
"test": "pnpm run compile && pnpm run _test",
"prepublishOnly": "pnpm run compile",
"compile": "tsc --build && pnpm run lint --fix",
"_test": "jest"
},
"repository": "https://github.com/pnpm/pnpm/blob/main/workspace/injected-deps-syncer",
"keywords": [
"pnpm10",
"pnpm"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/pnpm/pnpm/issues"
},
"homepage": "https://github.com/pnpm/pnpm/blob/main/workspace/injected-deps-syncer#readme",
"funding": "https://opencollective.com/pnpm",
"dependencies": {
"@pnpm/directory-fetcher": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/logger": "workspace:*",
"@pnpm/modules-yaml": "workspace:*",
"@types/normalize-path": "catalog:",
"normalize-path": "catalog:"
},
"devDependencies": {
"@pnpm/prepare": "workspace:*",
"@pnpm/workspace.injected-deps-syncer": "workspace:*"
},
"exports": {
".": "./lib/index.js"
},
"jest": {
"preset": "@pnpm/jest-config"
}
}

View File

@@ -0,0 +1,208 @@
import fs from 'fs'
import path from 'path'
import util from 'util'
import { type FetchFromDirOptions, fetchFromDir } from '@pnpm/directory-fetcher'
import { PnpmError } from '@pnpm/error'
/** Marker value meaning "this path is a directory". */
export const DIR: unique symbol = Symbol('Path is a directory')

// Symbols and numbers are used instead of a discriminated union because
// it's faster and simpler to compare primitives than to deep-compare objects.

/** A file entry, represented by the file's inode, which is sufficient for hardlinks. */
export type File = number
export type Dir = typeof DIR
export type Value = File | Dir

/** Maps the relative path of every file/directory to the kind of inode found there. */
export type InodeMap = Record<string, Value>

export interface DiffItemBase {
  path: string
  oldValue?: Value
  newValue?: Value
}

export interface AddedItem extends DiffItemBase {
  path: string
  oldValue?: undefined
  newValue: Value
}

export interface RemovedItem extends DiffItemBase {
  path: string
  oldValue: Value
  newValue?: undefined
}

export interface ModifiedItem extends DiffItemBase {
  path: string
  oldValue: Value
  newValue: Value
}

export interface DirDiff {
  added: AddedItem[]
  removed: RemovedItem[]
  modified: ModifiedItem[]
}

// Length comparison should place every directory before the files it contains because
// a directory path is always shorter than any file path it contains.
const comparePaths = (a: string, b: string): number => (a.split(/\\|\//).length - b.split(/\\|\//).length) || a.localeCompare(b)

// `path in index` would also match inherited members of Object.prototype
// (e.g. a file literally named "toString"), silently corrupting the diff.
// Always check own properties only.
const hasPath = (index: InodeMap, path: string): boolean => Object.prototype.hasOwnProperty.call(index, path)

/**
 * Get the difference between 2 files tree.
 *
 * The arrays in the resulting object are sorted in such a way that every directory paths are placed before
 * the files it contains. This way, it would allow optimization for operations upon this diff.
 * Note that when performing removal of removed files according to this diff, the `removed` array should be reversed first.
 */
export function diffDir (oldIndex: InodeMap, newIndex: InodeMap): DirDiff {
  const oldPaths = Object.keys(oldIndex).sort(comparePaths)
  const newPaths = Object.keys(newIndex).sort(comparePaths)
  // Present before, absent now.
  const removed: RemovedItem[] = oldPaths
    .filter(path => !hasPath(newIndex, path))
    .map(path => ({ path, oldValue: oldIndex[path] }))
  // Absent before, present now.
  const added: AddedItem[] = newPaths
    .filter(path => !hasPath(oldIndex, path))
    .map(path => ({ path, newValue: newIndex[path] }))
  // Present in both but with a different inode (or a file <-> dir change).
  const modified: ModifiedItem[] = oldPaths
    .filter(path => hasPath(newIndex, path) && oldIndex[path] !== newIndex[path])
    .map(path => ({ path, oldValue: oldIndex[path], newValue: newIndex[path] }))
  return { added, removed, modified }
}
/**
 * Apply a patch on a directory.
 *
 * Additions are hardlinks from {@link sourceDir}, so file content is shared
 * rather than copied.
 *
 * The {@link optimizedDirPatch} is assumed to be already optimized (i.e. `removed` is already reversed).
 */
export async function applyPatch (optimizedDirPatch: DirDiff, sourceDir: string, targetDir: string): Promise<void> {
  // Create a directory, or hardlink a file (creating its parents first).
  async function addRecursive (sourcePath: string, targetPath: string, value: Value): Promise<void> {
    if (value === DIR) {
      await fs.promises.mkdir(targetPath, { recursive: true })
    } else if (typeof value === 'number') {
      // Async mkdir instead of mkdirSync: the original sync call blocked the
      // event loop while the adds/removes below run concurrently.
      await fs.promises.mkdir(path.dirname(targetPath), { recursive: true })
      await fs.promises.link(sourcePath, targetPath)
    } else {
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      const _: never = value // static type guard
    }
  }

  // Remove a file or directory tree, tolerating it being already gone.
  async function removeRecursive (targetPath: string): Promise<void> {
    try {
      await fs.promises.rm(targetPath, { recursive: true, force: true })
    } catch (error) {
      // `force: true` should already suppress ENOENT, but keep the guard in
      // case a concurrent removal of a parent surfaces it anyway.
      if (!util.types.isNativeError(error) || !('code' in error) || (error.code !== 'ENOENT')) {
        throw error
      }
    }
  }

  // The three phases run concurrently; they operate on disjoint path sets
  // (added/removed/modified are disjoint by construction of the diff).
  const adding = Promise.all(optimizedDirPatch.added.map(async item => {
    const sourcePath = path.join(sourceDir, item.path)
    const targetPath = path.join(targetDir, item.path)
    await addRecursive(sourcePath, targetPath, item.newValue)
  }))
  const removing = Promise.all(optimizedDirPatch.removed.map(async item => {
    const targetPath = path.join(targetDir, item.path)
    await removeRecursive(targetPath)
  }))
  const modifying = Promise.all(optimizedDirPatch.modified.map(async item => {
    const sourcePath = path.join(sourceDir, item.path)
    const targetPath = path.join(targetDir, item.path)
    if (item.oldValue === item.newValue) return
    // Replace wholesale: handles file <-> directory changes and re-links
    // files whose inode changed.
    await removeRecursive(targetPath)
    await addRecursive(sourcePath, targetPath, item.newValue)
  }))
  await Promise.all([adding, removing, modifying])
}
export type ExtendFilesMapStats = Pick<fs.Stats, 'ino' | 'isFile' | 'isDirectory'>

export interface ExtendFilesMapOptions {
  /** Map relative path of each file to their real path */
  filesIndex: Record<string, string>
  /** Map relative path of each file to their stats */
  filesStats?: Record<string, ExtendFilesMapStats | null>
}

/**
 * Convert a pair of a files index map, which is a map from relative path of each file to their real paths,
 * and an optional file stats map, which is a map from relative path of each file to their stats,
 * into an inodes map, which is a map from relative path of every file and directory to their inode type.
 */
export async function extendFilesMap ({ filesIndex, filesStats }: ExtendFilesMapOptions): Promise<InodeMap> {
  const result: InodeMap = {
    '.': DIR,
  }

  function addInodeAndAncestors (relativePath: string, value: Value): void {
    // Check own properties explicitly: the previous truthiness test on
    // `result[relativePath]` was fooled by inherited Object.prototype members
    // (e.g. a file named "toString") and would mishandle an inode number of 0.
    if (relativePath && relativePath !== '.' && !Object.prototype.hasOwnProperty.call(result, relativePath)) {
      result[relativePath] = value
      // Every ancestor of a recorded path is, by definition, a directory.
      addInodeAndAncestors(path.dirname(relativePath), DIR)
    }
  }

  await Promise.all(Object.entries(filesIndex).map(async ([relativePath, realPath]) => {
    // Prefer stats reused from the directory fetcher; fall back to stat(2).
    const stats = filesStats?.[relativePath] ?? await fs.promises.stat(realPath)
    if (stats.isFile()) {
      addInodeAndAncestors(relativePath, stats.ino)
    } else if (stats.isDirectory()) {
      addInodeAndAncestors(relativePath, DIR)
    } else {
      throw new PnpmError('UNSUPPORTED_INODE_TYPE', `Filesystem inode at ${realPath} is neither a file, a directory, nor a symbolic link`)
    }
  }))

  return result
}
/**
 * Computes, at construction time, the diff needed to bring one injected copy
 * (`targetDir`) in sync with the workspace package (`sourceDir`), and applies
 * that diff on demand.
 */
export class DirPatcher {
  private readonly sourceDir: string
  private readonly targetDir: string
  private readonly patch: DirDiff

  private constructor (patch: DirDiff, sourceDir: string, targetDir: string) {
    this.patch = patch
    this.sourceDir = sourceDir
    this.targetDir = targetDir
  }

  /**
   * Build one patcher per target directory. The source directory is scanned
   * only once, and all directory scans run concurrently.
   */
  static async fromMultipleTargets (sourceDir: string, targetDirs: string[]): Promise<DirPatcher[]> {
    const fetchOptions: FetchFromDirOptions = {
      resolveSymlinks: false,
    }

    const readInodeMap = async (dir: string): Promise<InodeMap> =>
      extendFilesMap(await fetchFromDir(dir, fetchOptions))

    const [sourceMap, ...targetMaps] = await Promise.all(
      [sourceDir, ...targetDirs].map(readInodeMap)
    )

    return targetMaps.map((targetMap, index) => {
      const diff = diffDir(targetMap, sourceMap)
      // Before reversal, every directory in `diff.removed` is placed before its files.
      // After reversal, every file is placed before its ancestors,
      // leading to children being deleted before parents, optimizing performance.
      diff.removed.reverse()
      return new this(diff, sourceDir, targetDirs[index])
    })
  }

  /** Apply the precomputed patch, syncing the target with the source. */
  async apply (): Promise<void> {
    await applyPatch(this.patch, this.sourceDir, this.targetDir)
  }
}

View File

@@ -0,0 +1,60 @@
import path from 'path'
import { PnpmError } from '@pnpm/error'
import { logger as createLogger } from '@pnpm/logger'
import { readModulesManifest } from '@pnpm/modules-yaml'
import normalizePath from 'normalize-path'
import { DirPatcher } from './DirPatcher'
/** Shape of the debug log entry emitted whenever a sync request is a no-op. */
interface SkipSyncInjectedDepsMessage {
  message: string
  reason: 'no-name' | 'no-injected-deps'
  opts: SyncInjectedDepsOptions
}

// Debug-level logger; entries explain why syncing was skipped.
const logger = createLogger<SkipSyncInjectedDepsMessage>('skip-sync-injected-deps')

export interface SyncInjectedDepsOptions {
  /** Name of the workspace package whose script just ran; without it the package cannot be a dependency. */
  pkgName: string | undefined
  /** Root directory of that package; resolved against `workspaceDir` if relative. */
  pkgRootDir: string
  /** Workspace root; required to locate `node_modules/.modules.yaml`. */
  workspaceDir: string | undefined
}
/**
 * Copy the current files of a workspace package to every location where it
 * was injected, according to `injectedDeps` in `node_modules/.modules.yaml`.
 *
 * Silently no-ops (with a debug log entry) when the package has no name or
 * nothing was injected from it.
 *
 * @throws {PnpmError} NO_WORKSPACE_DIR when `opts.workspaceDir` is undefined.
 */
export async function syncInjectedDeps (opts: SyncInjectedDepsOptions): Promise<void> {
  if (!opts.pkgName) {
    logger.debug({
      reason: 'no-name',
      message: `Skipping sync of ${opts.pkgRootDir} as an injected dependency because, without a name, it cannot be a dependency`,
      opts,
    })
    return
  }
  if (!opts.workspaceDir) {
    throw new PnpmError('NO_WORKSPACE_DIR', 'Cannot update injected dependencies without workspace dir')
  }
  // Capture after the guard so the narrowing to `string` survives into the
  // closure below (removes the need for a non-null assertion).
  const workspaceDir = opts.workspaceDir

  const pkgRootDir = path.resolve(workspaceDir, opts.pkgRootDir)
  const modulesDir = path.resolve(workspaceDir, 'node_modules')
  const modules = await readModulesManifest(modulesDir)
  if (!modules?.injectedDeps) {
    logger.debug({
      reason: 'no-injected-deps',
      message: 'Skipping sync of injected dependencies because none were detected',
      opts,
    })
    return
  }

  // `injectedDeps` is keyed by workspace-relative POSIX-style paths.
  const injectedDepKey = normalizePath(path.relative(workspaceDir, pkgRootDir), true)
  const targetDirs: string[] | undefined = modules.injectedDeps[injectedDepKey]
  if (!targetDirs || targetDirs.length === 0) {
    logger.debug({
      reason: 'no-injected-deps',
      message: `There are no injected dependencies from ${opts.pkgRootDir}`,
      opts,
    })
    return
  }

  const patchers = await DirPatcher.fromMultipleTargets(
    pkgRootDir,
    targetDirs.map(targetDir => path.resolve(workspaceDir, targetDir))
  )
  await Promise.all(patchers.map(patcher => patcher.apply()))
}

View File

@@ -0,0 +1,217 @@
import fs from 'fs'
import path from 'path'
import { fetchFromDir } from '@pnpm/directory-fetcher'
import { prepareEmpty } from '@pnpm/prepare'
import { DirPatcher } from '../src/DirPatcher'
// References to the real fs.promises methods, kept so they can be restored
// after each test replaces them with jest mocks.
const originalRm = fs.promises.rm
const originalMkdir = fs.promises.mkdir
const originalLink = fs.promises.link
/** Replace fs.promises rm/mkdir/link with jest mocks that delegate to the real implementations. */
function mockFsPromises (): Record<'rm' | 'mkdir' | 'link', jest.Mock> {
  const mocks = {
    rm: jest.fn(fs.promises.rm),
    mkdir: jest.fn(fs.promises.mkdir),
    link: jest.fn(fs.promises.link),
  }
  fs.promises.rm = mocks.rm as typeof fs.promises.rm
  fs.promises.mkdir = mocks.mkdir as typeof fs.promises.mkdir
  fs.promises.link = mocks.link as typeof fs.promises.link
  return mocks
}
/** Reset jest mock state and reattach the original fs.promises methods. */
function restoreAllMocks (): void {
  jest.resetAllMocks()
  Object.assign(fs.promises, {
    rm: originalRm,
    mkdir: originalMkdir,
    link: originalLink,
  })
}

afterEach(restoreAllMocks)
/** Recursively create a directory (no-op if it already exists). */
const createDir = (dirPath: string): void => {
  fs.mkdirSync(dirPath, { recursive: true })
}
/** Write `content` to `filePath`, creating any missing parent directories first. */
function createFile (filePath: string, content: string = ''): void {
  fs.mkdirSync(path.dirname(filePath), { recursive: true })
  fs.writeFileSync(filePath, content)
}
/** Create a hardlink at `newPath` pointing at `existingPath`, creating parent directories as needed. */
function createHardlink (existingPath: string, newPath: string): void {
  fs.mkdirSync(path.dirname(newPath), { recursive: true })
  fs.linkSync(existingPath, newPath)
}
/** Inode number of the entry at `filePath`; hardlinked files report the same inode. */
function inodeNumber (filePath: string): number {
  return fs.lstatSync(filePath).ino
}
// End-to-end check of DirPatcher: the computed patch must be minimal —
// unchanged hardlinks are left alone, stale files removed, new files
// hardlinked in, and changed files replaced by fresh hardlinks.
test('optimally synchronizes source and target', async () => {
  prepareEmpty()
  createDir('source')
  createDir('target')
  /** Same files that exist in both source and target */
  const filesToKeep = [
    'files-to-keep/a/a.txt',
    'files-to-keep/a/b.txt',
    'files-to-keep/b.txt',
    'single-file-to-keep.txt',
  ] as const
  for (const suffix of filesToKeep) {
    const source = `source/${suffix}`
    const target = `target/${suffix}`
    createFile(source, '')
    createHardlink(source, target)
  }
  /** Files that no longer exist in source but still exist in target */
  const filesToRemove = [
    'files-to-remove/a/a.txt',
    'files-to-remove/a/b.txt',
    'files-to-remove/b.txt',
    'single-file-to-remove.txt',
  ] as const
  for (const suffix of filesToRemove) {
    createFile(`target/${suffix}`)
  }
  /** Files that exist in source but not yet in target */
  const filesToAdd = [
    'files-to-add/a/a.txt',
    'files-to-add/a/b.txt',
    'files-to-add/b.txt',
    'single-file-to-add.txt',
  ] as const
  for (const suffix of filesToAdd) {
    createFile(`source/${suffix}`)
  }
  /** Unequal files that exist in both source and target */
  const filesToModify = [
    'files-to-modify/a/a.txt',
    'files-to-modify/a/b.txt',
    'files-to-modify/b.txt',
    'single-file-to-modify.txt',
  ] as const
  for (const suffix of filesToModify) {
    createFile(`source/${suffix}`, 'new content')
    createFile(`target/${suffix}`, 'old content')
  }
  const sourceDir = path.resolve('source')
  const targetDir = path.resolve('target')
  const sourceFetchResult = await fetchFromDir(sourceDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // Sanity check: source and target must differ before patching.
  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  // Modified files must not yet share inodes with their source counterparts.
  expect(
    filesToModify
      .map(suffix => path.resolve(targetDir, suffix))
      .map(inodeNumber)
  ).not.toStrictEqual(
    filesToModify
      .map(suffix => path.resolve(sourceDir, suffix))
      .map(inodeNumber)
  )
  let fsMethods = mockFsPromises()
  const patchers = await DirPatcher.fromMultipleTargets(sourceDir, [targetDir])
  expect(patchers).toMatchObject([{ sourceDir, targetDir }])
  // Constructing the patchers must not mutate the file system.
  expect(fsMethods.rm).not.toHaveBeenCalled()
  expect(fsMethods.mkdir).not.toHaveBeenCalled()
  expect(fsMethods.link).not.toHaveBeenCalled()
  // Re-mock so the apply() phase starts from clean call counts.
  restoreAllMocks()
  fsMethods = mockFsPromises()
  await patchers[0].apply()
  const targetFetchResultAfter = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // After patching, target's file listing matches source exactly.
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
  // Modified files are now hardlinks of their source counterparts.
  expect(
    filesToModify
      .map(suffix => path.resolve(targetDir, suffix))
      .map(inodeNumber)
  ).toStrictEqual(
    filesToModify
      .map(suffix => path.resolve(sourceDir, suffix))
      .map(inodeNumber)
  )
  // does not touch filesToKeep
  for (const suffix of filesToKeep) {
    const sourceFile = path.resolve(sourceDir, suffix)
    const targetFile = path.resolve(targetDir, suffix)
    expect(fsMethods.rm).not.toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(sourceFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(expect.anything(), targetFile)
  }
  // removes filesToRemove without replacement
  for (const suffix of filesToRemove) {
    const sourceFile = path.resolve(sourceDir, suffix)
    const targetFile = path.resolve(targetDir, suffix)
    expect(fsMethods.rm).toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(sourceFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(expect.anything(), targetFile)
  }
  // adds filesToAdd without removing old files
  for (const suffix of filesToAdd) {
    const sourceFile = path.resolve(sourceDir, suffix)
    const targetFile = path.resolve(targetDir, suffix)
    expect(fsMethods.rm).not.toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).toHaveBeenCalledWith(sourceFile, targetFile)
  }
  // replaces filesToModify by removing old files and add new hardlinks
  for (const suffix of filesToModify) {
    const sourceFile = path.resolve(sourceDir, suffix)
    const targetFile = path.resolve(targetDir, suffix)
    expect(fsMethods.rm).toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).toHaveBeenCalledWith(sourceFile, targetFile)
  }
  // New directories were created for the added files.
  expect(fsMethods.mkdir).toHaveBeenCalledWith(path.resolve(targetDir, 'files-to-add'), expect.anything())
  expect(fsMethods.mkdir).toHaveBeenCalledWith(path.resolve(targetDir, 'files-to-add/a'), expect.anything())
})
// fromMultipleTargets should produce one patcher per target (in input order),
// and applying all of them should make every initially-empty target mirror
// the source.
test('multiple patchers', async () => {
  prepareEmpty()
  createDir('target1')
  createDir('target2')
  createDir('target3')
  createFile('source/dir/file1.txt')
  createFile('source/dir/file2.txt')
  createFile('source/file3.txt')
  const patchers = await DirPatcher.fromMultipleTargets('source', ['target1', 'target2', 'target3'])
  expect(patchers).toMatchObject([
    { sourceDir: 'source', targetDir: 'target1' },
    { sourceDir: 'source', targetDir: 'target2' },
    { sourceDir: 'source', targetDir: 'target3' },
  ])
  const sourceFetchResult = await fetchFromDir('source', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // Targets start out empty, so none of them matches the source yet.
  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).toStrictEqual([])
  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).toStrictEqual([])
  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).toStrictEqual([])
  await Promise.all(patchers.map(patcher => patcher.apply()))
  const targetFetchResultAfter1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultAfter2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultAfter3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // After applying every patcher, each target's listing equals the source's.
  expect(Object.keys(targetFetchResultAfter1.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter2.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter3.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
})

View File

@@ -0,0 +1,198 @@
import fs from 'fs'
import path from 'path'
import { fetchFromDir } from '@pnpm/directory-fetcher'
import { prepareEmpty } from '@pnpm/prepare'
import { type DirDiff, DIR, applyPatch } from '../src/DirPatcher'
// Saved references to the real fs.promises methods; restored after each test.
const originalRm = fs.promises.rm
const originalMkdir = fs.promises.mkdir
const originalLink = fs.promises.link
/** Swap fs.promises rm/mkdir/link for jest mocks that still call the real implementations. */
function mockFsPromises (): Record<'rm' | 'mkdir' | 'link', jest.Mock> {
  const rmMock = jest.fn(fs.promises.rm)
  const mkdirMock = jest.fn(fs.promises.mkdir)
  const linkMock = jest.fn(fs.promises.link)
  fs.promises.rm = rmMock as typeof fs.promises.rm
  fs.promises.mkdir = mkdirMock as typeof fs.promises.mkdir
  fs.promises.link = linkMock as typeof fs.promises.link
  return { rm: rmMock, mkdir: mkdirMock, link: linkMock }
}
/** Reset jest mock state and put the original fs.promises methods back. */
function restoreAllMocks (): void {
  jest.resetAllMocks()
  // The three restores are independent; order does not matter.
  fs.promises.link = originalLink
  fs.promises.mkdir = originalMkdir
  fs.promises.rm = originalRm
}

afterEach(restoreAllMocks)
/** Ensure `dirPath` exists as a directory, creating missing ancestors. */
function createDir (dirPath: string): void {
  const mkdirOptions = { recursive: true } as const
  fs.mkdirSync(dirPath, mkdirOptions)
}
/** Write `content` (default empty) to `filePath`, creating parent directories as needed. */
function createFile (filePath: string, content: string = ''): void {
  const parentDir = path.dirname(filePath)
  fs.mkdirSync(parentDir, { recursive: true })
  fs.writeFileSync(filePath, content)
}
/** Hardlink `existingPath` to `newPath`, creating `newPath`'s parent directories first. */
function createHardlink (existingPath: string, newPath: string): void {
  const parentDir = path.dirname(newPath)
  fs.mkdirSync(parentDir, { recursive: true })
  fs.linkSync(existingPath, newPath)
}
/** Inode number of `filePath`; two hardlinks to the same file share an inode. */
function inodeNumber (filePath: string): number {
  const { ino } = fs.lstatSync(filePath)
  return ino
}
// Feeds applyPatch a hand-built DirDiff and verifies it removes stale entries,
// creates new directories, hardlinks new/changed files, and never touches
// entries that are identical in source and target.
test('applies a patch on a directory', async () => {
  prepareEmpty()
  fs.mkdirSync('source')
  fs.mkdirSync('target')
  /** Same files that exist in both source and target */
  const filesToKeep = [
    'files-to-keep/a/a.txt',
    'files-to-keep/a/b.txt',
    'files-to-keep/b.txt',
    'single-file-to-keep.txt',
  ] as const
  for (const suffix of filesToKeep) {
    const source = `source/${suffix}`
    const target = `target/${suffix}`
    createFile(source, '')
    createHardlink(source, target)
  }
  /** Files that no longer exist in source but still exist in target */
  const filesToRemove = [
    'files-to-remove/a/a.txt',
    'files-to-remove/a/b.txt',
    'files-to-remove/b.txt',
    'single-file-to-remove.txt',
  ] as const
  for (const suffix of filesToRemove) {
    createFile(`target/${suffix}`)
  }
  /** Files that exist in source but not yet in target */
  const filesToAdd = [
    'files-to-add/a/a.txt',
    'files-to-add/a/b.txt',
    'files-to-add/b.txt',
    'single-file-to-add.txt',
  ] as const
  for (const suffix of filesToAdd) {
    createFile(`source/${suffix}`)
  }
  /** Unequal files that exist in both source and target */
  const filesToModify = [
    'files-to-modify/a/a.txt',
    'files-to-modify/a/b.txt',
    'files-to-modify/b.txt',
    'single-file-to-modify.txt',
  ] as const
  for (const suffix of filesToModify) {
    createFile(`source/${suffix}`, 'new content')
    createFile(`target/${suffix}`, 'old content')
  }
  // NOTE(review): despite the name, this is the DirDiff to apply, not a path.
  // `removed` is reversed so children come before their parent directories,
  // matching the order applyPatch expects for deletion.
  const optimizedDirPath: DirDiff = {
    added: [
      {
        path: 'files-to-add',
        newValue: DIR,
      },
      {
        path: 'files-to-add/a',
        newValue: DIR,
      },
      ...filesToAdd.map(path => ({ path, newValue: inodeNumber(`source/${path}`) })),
    ],
    removed: [
      {
        path: 'files-to-remove',
        oldValue: DIR,
      } as const,
      {
        path: 'files-to-remove/a',
        oldValue: DIR,
      } as const,
      ...filesToRemove.map(path => ({ path, oldValue: inodeNumber(`target/${path}`) })),
    ].reverse(),
    modified: [
      ...filesToModify.map(path => ({
        path,
        oldValue: inodeNumber(`target/${path}`),
        newValue: inodeNumber(`source/${path}`),
      })),
    ],
  }
  const sourceFetchResult = await fetchFromDir('source', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // Sanity check: source and target must differ before the patch is applied.
  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  // Modified files must not yet share inodes with their source counterparts.
  expect(
    filesToModify
      .map(suffix => `target/${suffix}`)
      .map(inodeNumber)
  ).not.toStrictEqual(
    filesToModify
      .map(suffix => `source/${suffix}`)
      .map(inodeNumber)
  )
  const fsMethods = mockFsPromises()
  await applyPatch(optimizedDirPath, path.resolve('source'), path.resolve('target'))
  const targetFetchResultAfter = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  // After the patch, target's file listing matches source exactly.
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
  // Modified files are now hardlinks of their source counterparts.
  expect(
    filesToModify
      .map(suffix => `target/${suffix}`)
      .map(inodeNumber)
  ).toStrictEqual(
    filesToModify
      .map(suffix => `source/${suffix}`)
      .map(inodeNumber)
  )
  // does not touch filesToKeep
  for (const suffix of filesToKeep) {
    const sourceFile = path.resolve('source', suffix)
    const targetFile = path.resolve('target', suffix)
    expect(fsMethods.rm).not.toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(sourceFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(expect.anything(), targetFile)
  }
  // remove filesToRemove without replacement
  for (const suffix of filesToRemove) {
    const sourceFile = path.resolve('source', suffix)
    const targetFile = path.resolve('target', suffix)
    expect(fsMethods.rm).toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(sourceFile, expect.anything())
    expect(fsMethods.link).not.toHaveBeenCalledWith(expect.anything(), targetFile)
  }
  // add filesToAdd without removing old files
  for (const suffix of filesToAdd) {
    const sourceFile = path.resolve('source', suffix)
    const targetFile = path.resolve('target', suffix)
    expect(fsMethods.rm).not.toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).toHaveBeenCalledWith(sourceFile, targetFile)
  }
  // replace filesToModify by removing old files and add new hardlinks
  for (const suffix of filesToModify) {
    const sourceFile = path.resolve('source', suffix)
    const targetFile = path.resolve('target', suffix)
    expect(fsMethods.rm).toHaveBeenCalledWith(targetFile, expect.anything())
    expect(fsMethods.link).toHaveBeenCalledWith(sourceFile, targetFile)
  }
  // New directories were created for the added files.
  expect(fsMethods.mkdir).toHaveBeenCalledWith(path.resolve('target', 'files-to-add'), expect.anything())
  expect(fsMethods.mkdir).toHaveBeenCalledWith(path.resolve('target', 'files-to-add/a'), expect.anything())
})

View File

@@ -0,0 +1,110 @@
import { type DirDiff, type InodeMap, DIR, diffDir } from '../src/DirPatcher'
// diffDir must report only entries that were added, removed, or whose inode
// changed; entries identical in both indexes must not appear in the diff.
test('produces a diff', () => {
  /** Entries present and identical in both indexes — must not appear in the diff */
  const unchangedParts = {
    'not-changed': DIR,
    'not-changed/foo': DIR,
    'not-changed/foo/foo.txt': 123,
    'not-changed/foo/bar.txt': 456,
    'not-changed/bar': DIR,
    'some-files-changed/not-changed.txt': 623,
    'some-parts-deleted/file-not-deleted.txt': 624,
    'some-parts-added/file-not-added.txt': 145,
  } satisfies InodeMap
  /** Entries whose inode differs between old and new — expected in `modified` */
  const oldModifiedParts = {
    'some-files-changed': DIR,
    'some-files-changed/changed-file.txt': 887,
  } satisfies InodeMap
  const newModifiedParts: typeof oldModifiedParts = {
    'some-files-changed': DIR,
    'some-files-changed/changed-file.txt': 553,
  }
  /** Entries only in the old index — expected in `removed` */
  const oldOnlyParts = {
    'some-parts-deleted': DIR,
    'some-parts-deleted/file-deleted.txt': 654,
    'some-parts-deleted/dir-deleted': DIR,
    'some-parts-deleted/dir-deleted/foo.txt': 325,
    'some-parts-deleted/dir-deleted/bar.txt': 231,
  } satisfies InodeMap
  /** Entries only in the new index — expected in `added` */
  const newOnlyParts = {
    'some-parts-added': DIR,
    'some-parts-added/file-added.txt': 362,
    'some-parts-added/dir-added': DIR,
    'some-parts-added/dir-added/foo.txt': 472,
    'some-parts-added/dir-added/bar.txt': 241,
  } satisfies InodeMap
  const oldIndex: InodeMap = {
    ...unchangedParts,
    ...oldModifiedParts,
    ...oldOnlyParts,
  }
  const newIndex: InodeMap = {
    ...unchangedParts,
    ...newModifiedParts,
    ...newOnlyParts,
  }
  // In both `added` and `removed`, directories are listed before the files
  // they contain (DirPatcher reverses `removed` later for child-first deletion).
  const expectedDiff: DirDiff = {
    added: [
      {
        path: 'some-parts-added',
        newValue: DIR,
      },
      {
        path: 'some-parts-added/dir-added',
        newValue: DIR,
      },
      {
        path: 'some-parts-added/file-added.txt',
        newValue: newOnlyParts['some-parts-added/file-added.txt'],
      },
      {
        path: 'some-parts-added/dir-added/bar.txt',
        newValue: newOnlyParts['some-parts-added/dir-added/bar.txt'],
      },
      {
        path: 'some-parts-added/dir-added/foo.txt',
        newValue: newOnlyParts['some-parts-added/dir-added/foo.txt'],
      },
    ],
    modified: [
      {
        path: 'some-files-changed/changed-file.txt',
        oldValue: oldModifiedParts['some-files-changed/changed-file.txt'],
        newValue: newModifiedParts['some-files-changed/changed-file.txt'],
      },
    ],
    removed: [
      {
        path: 'some-parts-deleted',
        oldValue: DIR,
      },
      {
        path: 'some-parts-deleted/dir-deleted',
        oldValue: DIR,
      },
      {
        path: 'some-parts-deleted/file-deleted.txt',
        oldValue: oldOnlyParts['some-parts-deleted/file-deleted.txt'],
      },
      {
        path: 'some-parts-deleted/dir-deleted/bar.txt',
        oldValue: oldOnlyParts['some-parts-deleted/dir-deleted/bar.txt'],
      },
      {
        path: 'some-parts-deleted/dir-deleted/foo.txt',
        oldValue: oldOnlyParts['some-parts-deleted/dir-deleted/foo.txt'],
      },
    ],
  }
  const receivedDiff = diffDir(oldIndex, newIndex)
  expect(receivedDiff).toStrictEqual(expectedDiff)
})

View File

@@ -0,0 +1,99 @@
import fs from 'fs'
import path from 'path'
import { prepareEmpty } from '@pnpm/prepare'
import { type InodeMap, type ExtendFilesMapStats, DIR, extendFilesMap } from '../src/DirPatcher'
// Reference to the real fs.promises.stat, restored after each test.
const originalStat = fs.promises.stat

/** Replace fs.promises.stat with a jest mock that delegates to the real implementation. */
function mockFsPromiseStat (): jest.Mock {
  const mockedMethod = jest.fn(fs.promises.stat)
  fs.promises.stat = mockedMethod as typeof fs.promises.stat
  return mockedMethod
}

// NOTE(review): this file uses jest.restoreAllMocks() while the DirPatcher
// tests use jest.resetAllMocks() — presumably intentional, but worth unifying.
afterEach(() => {
  jest.restoreAllMocks()
  fs.promises.stat = originalStat
})
// When no filesStats are provided, extendFilesMap must stat every file in
// filesIndex itself and synthesize DIR entries for '.' and every ancestor dir.
test('without provided stats', async () => {
  prepareEmpty()
  const filePaths = [
    'deep/a/b/c/d/e/f.txt',
    'foo/foo.txt',
    'foo/bar.txt',
    'foo_bar.txt',
  ]
  const filesIndex: Record<string, string> = {}
  for (const filePath of filePaths) {
    filesIndex[filePath] = path.resolve(filePath)
    fs.mkdirSync(path.dirname(filePath), { recursive: true })
    fs.writeFileSync(filePath, '')
  }
  const statMethod = mockFsPromiseStat()
  // Files map to their real inode numbers; '.' and every ancestor map to DIR.
  expect(await extendFilesMap({ filesIndex })).toStrictEqual({
    '.': DIR,
    deep: DIR,
    'deep/a': DIR,
    'deep/a/b': DIR,
    'deep/a/b/c': DIR,
    'deep/a/b/c/d': DIR,
    'deep/a/b/c/d/e': DIR,
    'deep/a/b/c/d/e/f.txt': fs.statSync('deep/a/b/c/d/e/f.txt').ino,
    foo: DIR,
    'foo/foo.txt': fs.statSync('foo/foo.txt').ino,
    'foo/bar.txt': fs.statSync('foo/bar.txt').ino,
    'foo_bar.txt': fs.statSync('foo_bar.txt').ino,
  } as InodeMap)
  // Each file in the index was stat'ed via the (mocked) fs.promises.stat.
  for (const filePath of filePaths) {
    expect(statMethod).toHaveBeenCalledWith(filesIndex[filePath])
  }
})
// When filesStats are provided, extendFilesMap must take inode numbers from
// them and never call fs.promises.stat; no real files are created here.
test('with provided stats', async () => {
  prepareEmpty()
  const startingIno = 7000
  const inoIncrement = 100
  const filePaths = [
    'deep/a/b/c/d/e/f.txt',
    'foo/foo.txt',
    'foo/bar.txt',
    'foo_bar.txt',
  ]
  const filesIndex: Record<string, string> = {}
  const filesStats: Record<string, ExtendFilesMapStats> = {}
  // Assign synthetic, strictly increasing inode numbers so provenance is checkable.
  let ino = startingIno
  for (const filePath of filePaths) {
    filesIndex[filePath] = path.resolve(filePath)
    filesStats[filePath] = {
      ino,
      isDirectory: () => false,
      isFile: () => true,
    }
    ino += inoIncrement
  }
  const statMethod = mockFsPromiseStat()
  expect(await extendFilesMap({ filesIndex, filesStats })).toStrictEqual({
    '.': DIR,
    deep: DIR,
    'deep/a': DIR,
    'deep/a/b': DIR,
    'deep/a/b/c': DIR,
    'deep/a/b/c/d': DIR,
    'deep/a/b/c/d/e': DIR,
    'deep/a/b/c/d/e/f.txt': startingIno,
    foo: DIR,
    'foo/foo.txt': startingIno + inoIncrement,
    'foo/bar.txt': startingIno + 2 * inoIncrement,
    'foo_bar.txt': startingIno + 3 * inoIncrement,
  } as InodeMap)
  // The provided stats were reused, so stat was never hit.
  expect(statMethod).not.toHaveBeenCalled()
})

View File

@@ -0,0 +1,17 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"noEmit": false,
"outDir": "../test.lib",
"rootDir": "."
},
"include": [
"**/*.ts",
"../../../__typings__/**/*.d.ts"
],
"references": [
{
"path": ".."
}
]
}

View File

@@ -0,0 +1,28 @@
{
"extends": "@pnpm/tsconfig",
"compilerOptions": {
"outDir": "lib",
"rootDir": "src"
},
"include": [
"src/**/*.ts",
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../__utils__/prepare"
},
{
"path": "../../fetching/directory-fetcher"
},
{
"path": "../../packages/error"
},
{
"path": "../../packages/logger"
},
{
"path": "../../pkg-manager/modules-yaml"
}
]
}

View File

@@ -0,0 +1,8 @@
{
"extends": "./tsconfig.json",
"include": [
"src/**/*.ts",
"test/**/*.ts",
"../../__typings__/**/*.d.ts"
]
}