Mirror of https://github.com/pnpm/pnpm.git (synced 2025-12-23 23:29:17 -05:00)
refactor: use Maps instead of Records (#10312)
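This commit replaces plain-object file indexes (`Record<string, string>`) with `Map<string, string>` across the fetchers, importers, and store packages. A minimal sketch of the recurring conversion pattern (the identifiers below are illustrative, not taken from the diff):

```ts
// Before: a Record-based file index
const recordIndex: Record<string, string> = {}
recordIndex['package.json'] = '/store/abc/package.json'
const hasManifest = recordIndex['package.json'] != null
const names = Object.keys(recordIndex)

// After: the equivalent Map-based file index
const mapIndex = new Map<string, string>()
mapIndex.set('package.json', '/store/abc/package.json')
const hasManifestNow = mapIndex.has('package.json')
const namesNow = Array.from(mapIndex.keys())
```

Among the usual motivations for this swap: a `Map` has no prototype-key pitfalls (a file literally named `__proto__` is just another key), preserves insertion order, and exposes `size` without materializing a key array.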
env/node.fetcher/src/index.ts (vendored)
@@ -178,7 +178,7 @@ async function downloadAndUnpackTarballToDir (
   cafs.importPackage(targetDir, {
     filesResponse: {
-      filesIndex: filesIndex as Record<string, string>,
+      filesIndex,
       resolvedFrom: 'remote',
       requiresBuild: false,
     },
@@ -74,22 +74,19 @@ export async function runLifecycleHooksConcurrently (
   await Promise.all(
     targetDirs.map(async (targetDir) => {
       const targetModulesDir = path.join(targetDir, 'node_modules')
-      const nodeModulesIndex = {}
+      const newFilesIndex = new Map(filesResponse.filesIndex)
       if (fs.existsSync(targetModulesDir)) {
         // If the target directory contains a node_modules directory
         // (it may happen when the hoisted node linker is used)
         // then we need to preserve this node_modules.
         // So we scan this node_modules directory and pass it as part of the new package.
-        await scanDir('node_modules', targetModulesDir, targetModulesDir, nodeModulesIndex)
+        await scanDir('node_modules', targetModulesDir, targetModulesDir, newFilesIndex)
       }
       return opts.storeController.importPackage(targetDir, {
         filesResponse: {
           resolvedFrom: 'local-dir',
           ...filesResponse,
-          filesIndex: {
-            ...filesResponse.filesIndex,
-            ...nodeModulesIndex,
-          },
+          filesIndex: newFilesIndex,
         },
         force: false,
       })
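Note the copy semantics above: `new Map(filesResponse.filesIndex)` makes a shallow copy, so `scanDir` can add `node_modules/...` entries without mutating the shared `filesResponse.filesIndex`. An illustrative sketch (values are made up):

```ts
const original = new Map([['package.json', '/store/a']])
const copy = new Map(original)             // shallow copy of the entries
copy.set('node_modules/a/index.js', '/x')  // only the copy grows
original.size // 1 — the source map is untouched
```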
@@ -101,7 +98,7 @@ export async function runLifecycleHooksConcurrently (
   await runGroups(childConcurrency, groups)
 }

-async function scanDir (prefix: string, rootDir: string, currentDir: string, index: Record<string, string>): Promise<void> {
+async function scanDir (prefix: string, rootDir: string, currentDir: string, index: Map<string, string>): Promise<void> {
   const files = await fs.promises.readdir(currentDir)
   await Promise.all(files.map(async (file) => {
     const fullPath = path.join(currentDir, file)
@@ -111,7 +108,7 @@ async function scanDir (prefix: string, rootDir: string, currentDir: string, ind
     }
     if (stat.isFile()) {
       const relativePath = path.relative(rootDir, fullPath)
-      index[path.join(prefix, relativePath)] = fullPath
+      index.set(path.join(prefix, relativePath), fullPath)
     }
   }))
 }
@@ -1,6 +1,6 @@
 import { type DependencyManifest } from '@pnpm/types'

-export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefined, filesIndex: Record<string, unknown>): boolean {
+export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefined, filesIndex: Map<string, unknown>): boolean {
   return Boolean(
     manifest?.scripts != null && (
       Boolean(manifest.scripts.preinstall) ||
@@ -11,7 +11,14 @@ export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefi
   )
 }

-function filesIncludeInstallScripts (filesIndex: Record<string, unknown>): boolean {
-  return filesIndex['binding.gyp'] != null ||
-    Object.keys(filesIndex).some((filename) => !(filename.match(/^\.hooks[\\/]/) == null)) // TODO: optimize this
+function filesIncludeInstallScripts (filesIndex: Map<string, unknown>): boolean {
+  if (filesIndex.has('binding.gyp')) {
+    return true
+  }
+  for (const filename of filesIndex.keys()) {
+    if (filename.match(/^\.hooks[\\/]/) != null) {
+      return true
+    }
+  }
+  return false
 }
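A side effect of the loop rewrite above: iterating `Map.prototype.keys()` is lazy, so the scan stops at the first `.hooks/` match without first building a full key array the way `Object.keys(...).some(...)` did. A hedged illustration (the index contents are hypothetical):

```ts
const filesIndex = new Map<string, unknown>([
  ['.hooks/prepare.js', {}],
  ['index.js', {}],
])
// Stops after the first matching key; no intermediate string[] is allocated.
let hasHook = false
for (const filename of filesIndex.keys()) {
  if (/^\.hooks[\\/]/.test(filename)) {
    hasHook = true
    break
  }
}
```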
@@ -362,7 +362,7 @@ async function _rebuild (
     sideEffectsCacheKey = calcDepState(depGraph, depsStateCache, depPath, {
       includeDepGraphHash: true,
     })
-    if (pkgFilesIndex.sideEffects?.[sideEffectsCacheKey]) {
+    if (pkgFilesIndex.sideEffects?.has(sideEffectsCacheKey)) {
       pkgsThatWereRebuilt.add(depPath)
       return
     }
@@ -373,7 +373,7 @@ async function _rebuild (
   if (pgkManifest != null) {
     // This won't return the correct result for packages with binding.gyp as we don't pass the filesIndex to the function.
     // However, currently rebuild doesn't work for such packages at all, which should be fixed.
-    requiresBuild = pkgRequiresBuild(pgkManifest, {})
+    requiresBuild = pkgRequiresBuild(pgkManifest, new Map())
   }

   const hasSideEffects = requiresBuild && allowBuild(pkgInfo.name, pkgInfo.version, depPath) && await runPostinstallHooks({
@@ -88,8 +88,8 @@ test('rebuilds dependencies', async () => {
     }),
   },
 })}`
-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-  delete cacheIntegrity!.sideEffects![sideEffectsKey].added!['generated-by-postinstall.js']
+  expect(cacheIntegrity.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-postinstall.js')).toBeTruthy()
+  cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.delete('generated-by-postinstall.js')
 })

 test('skipIfHasSideEffectsCache', async () => {
@@ -112,17 +112,17 @@ test('skipIfHasSideEffectsCache', async () => {
   const cacheIntegrityPath = getIndexFilePathInCafs(path.join(storeDir, STORE_VERSION), getIntegrity('@pnpm.e2e/pre-and-postinstall-scripts-example', '1.0.0'), '@pnpm.e2e/pre-and-postinstall-scripts-example@1.0.0')
   let cacheIntegrity = readV8FileSync<PackageFilesIndex>(cacheIntegrityPath)!
   const sideEffectsKey = `${ENGINE_NAME};deps=${hashObject({ '@pnpm.e2e/hello-world-js-bin@1.0.0': {} })}`
-  cacheIntegrity.sideEffects = {
-    [sideEffectsKey]: {
-      added: {
-        foo: {
+  cacheIntegrity.sideEffects = new Map([
+    [sideEffectsKey, {
+      added: new Map([
+        ['foo', {
           integrity: 'bar',
           mode: 1,
           size: 1,
-        },
-      },
-    },
-  }
+        }],
+      ]),
+    }],
+  ])
   fs.writeFileSync(cacheIntegrityPath, v8.serialize(cacheIntegrity))

   let modules = project.readModulesManifest()
@@ -147,7 +147,7 @@ test('skipIfHasSideEffectsCache', async () => {

   cacheIntegrity = readV8FileSync<PackageFilesIndex>(cacheIntegrityPath)!
   expect(cacheIntegrity!.sideEffects).toBeTruthy()
-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'foo'])
+  expect(cacheIntegrity.sideEffects!.get(sideEffectsKey)!.added!.get('foo')).toBeTruthy()
 })

 test('rebuild does not fail when a linked package is present', async () => {
@@ -35,7 +35,7 @@ export type FetchFromDirOptions = Omit<DirectoryFetcherOptions, 'lockfileDir'> &

 export interface FetchResult {
   local: true
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   filesStats?: Record<string, Stats | null>
   packageImportMethod: 'hardlink'
   manifest: DependencyManifest
@@ -75,7 +75,7 @@ async function _fetchAllFilesFromDir (
   dir: string,
   relativeDir = ''
 ): Promise<Pick<FetchResult, 'filesIndex' | 'filesStats'>> {
-  const filesIndex: Record<string, string> = {}
+  const filesIndex = new Map<string, string>()
   const filesStats: Record<string, Stats | null> = {}
   const files = await fs.readdir(dir)
   await Promise.all(files
@@ -87,10 +87,12 @@ async function _fetchAllFilesFromDir (
       const relativeSubdir = `${relativeDir}${relativeDir ? '/' : ''}${file}`
       if (stat.isDirectory()) {
         const subFetchResult = await _fetchAllFilesFromDir(readFileStat, filePath, relativeSubdir)
-        Object.assign(filesIndex, subFetchResult.filesIndex)
+        for (const [key, value] of subFetchResult.filesIndex) {
+          filesIndex.set(key, value)
+        }
         Object.assign(filesStats, subFetchResult.filesStats)
       } else {
-        filesIndex[relativeSubdir] = filePath
+        filesIndex.set(relativeSubdir, filePath)
         filesStats[relativeSubdir] = fileStatResult.stat
       }
     })
@@ -142,7 +144,7 @@ async function fileStat (filePath: string): Promise<FileStatResult | null> {

 async function fetchPackageFilesFromDir (dir: string): Promise<FetchResult> {
   const files = await packlist(dir)
-  const filesIndex: Record<string, string> = Object.fromEntries(files.map((file) => [file, path.join(dir, file)]))
+  const filesIndex = new Map<string, string>(files.map((file) => [file, path.join(dir, file)]))
   // In a regular pnpm workspace it will probably never happen that a dependency has no package.json file.
   // Safe read was added to support the Bit workspace in which the components have no package.json files.
   // Related PR in Bit: https://github.com/teambit/bit/pull/5251
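`new Map(entries)` accepts the same `[key, value][]` pairs that `Object.fromEntries` does, which is why the one-line change above is behavior-preserving. A small sketch with hypothetical values:

```ts
import path from 'path'

const dir = '/tmp/pkg'
const files = ['package.json', 'index.js']
const entries: Array<[string, string]> = files.map((file) => [file, path.join(dir, file)])

const asRecord = Object.fromEntries(entries) // { 'package.json': '/tmp/pkg/package.json', ... }
const asMap = new Map(entries)               // Map(2) { 'package.json' => '/tmp/pkg/package.json', ... }
```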
@@ -27,10 +27,10 @@ test('fetch including only package files', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
   ])
@@ -50,10 +50,10 @@ test('fetch including all files', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
     'test.js',
@@ -76,10 +76,10 @@ test('fetch a directory that has no package.json', async () => {
   expect(fetchResult.manifest).toBeUndefined()
   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['index.js']).toBe(path.resolve('index.js'))
+  expect(fetchResult.filesIndex.get('index.js')).toBe(path.resolve('index.js'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
   ])
 })
@@ -99,10 +99,10 @@ test('fetch does not fail on package with broken symlink', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
   ])
@@ -131,9 +131,9 @@ describe('fetch resolves symlinked files to their real locations', () => {

     expect(fetchResult.local).toBe(true)
     expect(fetchResult.packageImportMethod).toBe('hardlink')
-    expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
-    expect(fetchResult.filesIndex['index.js']).toBe(indexJsPath)
-    expect(fetchResult.filesIndex['src/index.js']).toBe(path.join(srcPath, 'index.js'))
+    expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))
+    expect(fetchResult.filesIndex.get('index.js')).toBe(indexJsPath)
+    expect(fetchResult.filesIndex.get('src/index.js')).toBe(path.join(srcPath, 'index.js'))
   })

   test('fetch does not resolve symlinked files to their real locations by default', async () => {
     const fetcher = createDirectoryFetcher()
@@ -148,8 +148,8 @@ describe('fetch resolves symlinked files to their real locations', () => {

     expect(fetchResult.local).toBe(true)
     expect(fetchResult.packageImportMethod).toBe('hardlink')
-    expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
-    expect(fetchResult.filesIndex['index.js']).toBe(path.resolve('index.js'))
-    expect(fetchResult.filesIndex['src/index.js']).toBe(path.resolve('src/index.js'))
+    expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))
+    expect(fetchResult.filesIndex.get('index.js')).toBe(path.resolve('index.js'))
+    expect(fetchResult.filesIndex.get('src/index.js')).toBe(path.resolve('src/index.js'))
   })
 })
@@ -31,7 +31,7 @@ export type FetchFunction<FetcherResolution = Resolution, Options = FetchOptions
 export interface FetchResult {
   local?: boolean
   manifest?: DependencyManifest
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   requiresBuild: boolean
   integrity?: string
 }
@@ -44,7 +44,7 @@ export interface GitFetcherOptions {
 }

 export interface GitFetcherResult {
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   manifest?: DependencyManifest
   requiresBuild: boolean
 }
@@ -60,7 +60,7 @@ export interface DirectoryFetcherOptions {

 export interface DirectoryFetcherResult {
   local: true
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   packageImportMethod: 'hardlink'
   manifest?: DependencyManifest
   requiresBuild: boolean
@@ -48,7 +48,7 @@ test('fetch', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   expect(manifest?.name).toBe('is-positive')
 })
@@ -67,7 +67,7 @@ test('fetch a package from Git sub folder', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['public/index.html']).toBeTruthy()
+  expect(filesIndex.has('public/index.html')).toBeTruthy()
 })

 test('prevent directory traversal attack when using Git sub folder', async () => {
@@ -129,7 +129,7 @@ test('fetch a package from Git that has a prepare script', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['dist/index.js']).toBeTruthy()
+  expect(filesIndex.has('dist/index.js')).toBeTruthy()
 })

 // Test case for https://github.com/pnpm/pnpm/issues/1866
@@ -148,7 +148,7 @@ test('fetch a package without a package.json', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['denolib.json']).toBeTruthy()
+  expect(filesIndex.has('denolib.json')).toBeTruthy()
 })

 // Covers the regression reported in https://github.com/pnpm/pnpm/issues/4064
@@ -191,7 +191,7 @@ test('still able to shallow fetch for allowed hosts', async () => {
     // Discard final argument as it passes temporary directory
     expect(calls[i].slice(0, -1)).toEqual(expectedCalls[i])
   }
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   expect(manifest?.name).toBe('is-positive')
 })
@@ -224,8 +224,8 @@ test('do not build the package when scripts are ignored', async () => {
   }, {
     filesIndexFile: path.join(storeDir, 'index.json'),
   })
-  expect(filesIndex['package.json']).toBeTruthy()
-  expect(filesIndex['prepare.txt']).toBeFalsy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('prepare.txt')).toBeFalsy()
   expect(globalWarn).toHaveBeenCalledWith('The git-hosted package fetched from "https://github.com/pnpm-e2e/prepare-script-works.git" has to be built but the build scripts were ignored.')
 })
@@ -261,7 +261,7 @@ test('allow git package with prepare script', async () => {
     allowBuild: (pkgName) => pkgName === '@pnpm.e2e/prepare-script-works',
     filesIndexFile: path.join(storeDir, 'index.json'),
   })
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   // Note: prepare.txt is in .gitignore so it won't be in the files index
   // The fact that no error was thrown proves the prepare script was allowed to run
 })
@@ -284,7 +284,7 @@ test('fetch only the included files', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(Object.keys(filesIndex).sort()).toStrictEqual([
+  expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
     'README.md',
     'dist/index.js',
     'package.json',
@@ -60,7 +60,7 @@ describe('custom fetcher implementation examples', () => {
 describe('basic custom fetcher contract', () => {
   test('should successfully return FetchResult with manifest and filesIndex', async () => {
     const mockManifest = { name: 'test-package', version: '1.0.0' }
-    const mockFilesIndex = { 'package.json': '/path/to/store/package.json' }
+    const mockFilesIndex = new Map([['package.json', '/path/to/store/package.json']])

     const customFetcher = createMockCustomFetcher(
       () => true,
@@ -92,7 +92,7 @@ describe('custom fetcher implementation examples', () => {
     const customFetcher = createMockCustomFetcher(
       () => true,
       async () => ({
-        filesIndex: {},
+        filesIndex: new Map(),
         manifest: { name: 'pkg', version: '1.0.0', scripts: { install: 'node install.js' } },
         requiresBuild: true,
       })
@@ -144,7 +144,7 @@ describe('custom fetcher implementation examples', () => {
       async (cafs) => {
         receivedCafs = cafs
         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -179,7 +179,7 @@ describe('custom fetcher implementation examples', () => {
         ;(opts.onProgress as any)?.({ done: 50, total: 100 }) // eslint-disable-line @typescript-eslint/no-explicit-any

         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -216,7 +216,7 @@ describe('custom fetcher implementation examples', () => {
         expect((resolution as any).cdnUrl).toBe('https://cdn.example.com/pkg.tgz') // eslint-disable-line @typescript-eslint/no-explicit-any

         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -236,7 +236,7 @@ describe('custom fetcher implementation examples', () => {
     const customFetcher = createMockCustomFetcher(
       () => true,
       async () => ({
-        filesIndex: {},
+        filesIndex: new Map(),
         requiresBuild: false,
         // Manifest is optional in FetchResult
       })
@@ -315,7 +315,7 @@ describe('custom fetcher implementation examples', () => {
       createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
     expect(scope.isDone()).toBeTruthy()
   })
@@ -361,7 +361,7 @@ describe('custom fetcher implementation examples', () => {
       createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
   })

   test('custom fetcher can transform resolution before delegating to tarball fetcher', async () => {
@@ -415,7 +415,7 @@ describe('custom fetcher implementation examples', () => {
      createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
     expect(scope.isDone()).toBeTruthy()
   })
@@ -484,7 +484,7 @@ describe('custom fetcher implementation examples', () => {

       // Simulate fetch
       const result = {
-        filesIndex: { 'package.json': '/store/pkg.json' },
+        filesIndex: new Map([['package.json', '/store/pkg.json']]),
         manifest: { name: 'cached-pkg', version: (resolution as any).version }, // eslint-disable-line @typescript-eslint/no-explicit-any
       }
@@ -530,7 +530,7 @@ describe('custom fetcher implementation examples', () => {
       }

       return {
-        filesIndex: {},
+        filesIndex: new Map(),
         manifest: { name: 'auth-pkg', version: '1.0.0' },
         requiresBuild: false,
         authToken, // Could store for future use
@@ -48,7 +48,7 @@ test('should fail to pick fetcher if the type is not defined', async () => {

 describe('custom fetcher support', () => {
   test('should use custom fetcher when canFetch returns true', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)
     const remoteTarball = jest.fn() as FetchFunction
@@ -87,7 +87,7 @@ describe('custom fetcher support', () => {
   })

   test('should use custom fetcher when canFetch returns promise resolving to true', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)

     const customFetcher: Partial<CustomFetcher> = {
@@ -150,8 +150,8 @@ describe('custom fetcher support', () => {
   })

   test('should check custom fetchers in order and use first match', async () => {
-    const mockFetchResult1 = { filesIndex: {}, manifest: { name: 'fetcher1', version: '1.0.0' }, requiresBuild: false }
-    const mockFetchResult2 = { filesIndex: {}, manifest: { name: 'fetcher2', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult1 = { filesIndex: new Map(), manifest: { name: 'fetcher1', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult2 = { filesIndex: new Map(), manifest: { name: 'fetcher2', version: '1.0.0' }, requiresBuild: false }

     const fetcher1: Partial<CustomFetcher> = {
       canFetch: () => true,
@@ -184,7 +184,7 @@ describe('custom fetcher support', () => {
   })

   test('should handle custom resolution types', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)

     const customFetcher: Partial<CustomFetcher> = {
@@ -217,7 +217,7 @@ describe('custom fetcher support', () => {
   })

   test('should pass all fetch options to custom fetcher.fetch', async () => {
-    const customFetch = jest.fn(async () => ({ filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }))
+    const customFetch = jest.fn(async () => ({ filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }))

     const customFetcher: Partial<CustomFetcher> = {
       canFetch: () => true,
@@ -32,7 +32,7 @@ export function createGitHostedTarballFetcher (fetchRemoteTarball: FetchFunction
     filesIndexFile: tempIndexFile,
   })
   try {
-    const prepareResult = await prepareGitHostedPkg(filesIndex as Record<string, string>, cafs, tempIndexFile, opts.filesIndexFile, fetcherOpts, opts, resolution)
+    const prepareResult = await prepareGitHostedPkg(filesIndex, cafs, tempIndexFile, opts.filesIndexFile, fetcherOpts, opts, resolution)
     if (prepareResult.ignoredBuild) {
       globalWarn(`The git-hosted package fetched from "${resolution.tarball}" has to be built but the build scripts were ignored.`)
     }
@@ -52,13 +52,13 @@ export function createGitHostedTarballFetcher (fetchRemoteTarball: FetchFunction
 }

 interface PrepareGitHostedPkgResult {
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   manifest?: DependencyManifest
   ignoredBuild: boolean
 }

 async function prepareGitHostedPkg (
-  filesIndex: Record<string, string>,
+  filesIndex: Map<string, string>,
   cafs: Cafs,
   filesIndexFileNonBuilt: string,
   filesIndexFile: string,
@@ -80,7 +80,7 @@ async function prepareGitHostedPkg (
     allowBuild: fetcherOpts.allowBuild,
   }, tempLocation, resolution.path ?? '')
   const files = await packlist(pkgDir)
-  if (!resolution.path && files.length === Object.keys(filesIndex).length) {
+  if (!resolution.path && files.length === filesIndex.size) {
     if (!shouldBeBuilt) {
       if (filesIndexFileNonBuilt !== filesIndexFile) {
         await renameOverwrite(filesIndexFileNonBuilt, filesIndexFile)
@@ -223,7 +223,7 @@ test("don't fail when integrity check of local file succeeds", async () => {
     pkg,
   })

-  expect(typeof filesIndex['package.json']).toBe('string')
+  expect(typeof filesIndex.get('package.json')).toBe('string')
 })

 test("don't fail when fetching a local tarball in offline mode", async () => {
@@ -250,7 +250,7 @@ test("don't fail when fetching a local tarball in offline mode", async () => {
     pkg,
   })

-  expect(typeof filesIndex['package.json']).toBe('string')
+  expect(typeof filesIndex.get('package.json')).toBe('string')
 })

 test('fail when trying to fetch a non-local tarball in offline mode', async () => {
@@ -464,7 +464,7 @@ test('take only the files included in the package, when fetching a git-hosted pa
     pkg,
   })

-  expect(Object.keys(result.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(result.filesIndex.keys()).sort()).toStrictEqual([
     'README.md',
     'dist/index.js',
     'package.json',
@@ -515,8 +515,8 @@ test('do not build the package when scripts are ignored', async () => {
     pkg,
   })

-  expect(filesIndex).toHaveProperty(['package.json'])
-  expect(filesIndex).not.toHaveProperty(['prepare.txt'])
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('prepare.txt')).toBeFalsy()
   expect(globalWarn).toHaveBeenCalledWith(`The git-hosted package fetched from "${tarball}" has to be built but the build scripts were ignored.`)
 })
@@ -532,7 +532,7 @@ test('when extracting files with the same name, pick the last ones', async () =>
     readManifest: true,
     pkg,
   })
-  const pkgJson = JSON.parse(fs.readFileSync(filesIndex['package.json'], 'utf8'))
+  const pkgJson = JSON.parse(fs.readFileSync(filesIndex.get('package.json')!, 'utf8'))
   expect(pkgJson.name).toBe('pkg2')
   expect(manifest?.name).toBe('pkg2')
 })
@@ -560,8 +560,8 @@ test('use the subfolder when path is present', async () => {
     pkg,
   })

-  expect(filesIndex).toHaveProperty(['package.json'])
-  expect(filesIndex).not.toHaveProperty(['lerna.json'])
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('lerna.json')).toBeFalsy()
 })

 test('prevent directory traversal attack when path is present', async () => {
@@ -17,7 +17,7 @@ export type ImportFile = (src: string, dest: string) => void
 export function importIndexedDir (
   importFile: ImportFile,
   newDir: string,
-  filenames: Record<string, string>,
+  filenames: Map<string, string>,
   opts: {
     keepModulesDir?: boolean
   }
@@ -36,16 +36,16 @@ export function importIndexedDir (
     } catch {} // eslint-disable-line:no-empty
     if (util.types.isNativeError(err) && 'code' in err && err.code === 'EEXIST') {
       const { uniqueFileMap, conflictingFileNames } = getUniqueFileMap(filenames)
-      if (Object.keys(conflictingFileNames).length === 0) throw err
+      if (conflictingFileNames.size === 0) throw err
       filenameConflictsLogger.debug({
-        conflicts: conflictingFileNames,
+        conflicts: Object.fromEntries(conflictingFileNames),
         writingTo: newDir,
       })
       globalWarn(
         `Not all files were linked to "${path.relative(process.cwd(), newDir)}". ` +
        'Some of the files have equal names in different case, ' +
        'which is an issue on case-insensitive filesystems. ' +
-        `The conflicting file names are: ${JSON.stringify(conflictingFileNames)}`
+        `The conflicting file names are: ${JSON.stringify(Object.fromEntries(conflictingFileNames))}`
       )
       importIndexedDir(importFile, newDir, uniqueFileMap, opts)
       return
@@ -65,27 +65,27 @@ They were renamed.`)
 }

 interface SanitizeFilenamesResult {
-  sanitizedFilenames: Record<string, string>
+  sanitizedFilenames: Map<string, string>
   invalidFilenames: string[]
 }

-function sanitizeFilenames (filenames: Record<string, string>): SanitizeFilenamesResult {
-  const sanitizedFilenames: Record<string, string> = {}
+function sanitizeFilenames (filenames: Map<string, string>): SanitizeFilenamesResult {
+  const sanitizedFilenames = new Map<string, string>()
   const invalidFilenames: string[] = []
-  for (const [filename, src] of Object.entries(filenames)) {
+  for (const [filename, src] of filenames) {
     const sanitizedFilename = filename.split('/').map((f) => sanitizeFilename(f)).join('/')
     if (sanitizedFilename !== filename) {
       invalidFilenames.push(filename)
     }
-    sanitizedFilenames[sanitizedFilename] = src
+    sanitizedFilenames.set(sanitizedFilename, src)
   }
   return { sanitizedFilenames, invalidFilenames }
 }

-function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames: Record<string, string>): void {
+function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames: Map<string, string>): void {
   makeEmptyDir(newDir, { recursive: true })
   const allDirs = new Set<string>()
-  for (const f in filenames) {
+  for (const f of filenames.keys()) {
     const dir = path.dirname(f)
     if (dir === '.') continue
     allDirs.add(dir)
@@ -93,29 +93,29 @@ function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames:
   Array.from(allDirs)
     .sort((d1, d2) => d1.length - d2.length) // from shortest to longest
     .forEach((dir) => fs.mkdirSync(path.join(newDir, dir), { recursive: true }))
-  for (const [f, src] of Object.entries(filenames)) {
+  for (const [f, src] of filenames) {
     const dest = path.join(newDir, f)
     importFile(src, dest)
   }
 }

 interface GetUniqueFileMapResult {
-  conflictingFileNames: Record<string, string>
-  uniqueFileMap: Record<string, string>
+  conflictingFileNames: Map<string, string>
+  uniqueFileMap: Map<string, string>
 }

-function getUniqueFileMap (fileMap: Record<string, string>): GetUniqueFileMapResult {
+function getUniqueFileMap (fileMap: Map<string, string>): GetUniqueFileMapResult {
   const lowercaseFiles = new Map<string, string>()
-  const conflictingFileNames: Record<string, string> = {}
-  const uniqueFileMap: Record<string, string> = {}
-  for (const filename of Object.keys(fileMap).sort()) {
+  const conflictingFileNames = new Map<string, string>()
+  const uniqueFileMap = new Map<string, string>()
+  for (const filename of Array.from(fileMap.keys()).sort()) {
     const lowercaseFilename = filename.toLowerCase()
     if (lowercaseFiles.has(lowercaseFilename)) {
-      conflictingFileNames[filename] = lowercaseFiles.get(lowercaseFilename)!
+      conflictingFileNames.set(filename, lowercaseFiles.get(lowercaseFilename)!)
       continue
     }
     lowercaseFiles.set(lowercaseFilename, filename)
-    uniqueFileMap[filename] = fileMap[filename]
+    uniqueFileMap.set(filename, fileMap.get(filename)!)
   }
   return {
     conflictingFileNames,
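For reference, the rewritten `getUniqueFileMap` keeps the same behavior on a case conflict as the Record version. A hedged usage sketch (treating the module-internal function as callable; the inputs are made up):

```ts
const fileMap = new Map([
  ['Foo.js', '/store/hash-a'],
  ['foo.js', '/store/hash-b'],
])
const { uniqueFileMap, conflictingFileNames } = getUniqueFileMap(fileMap)
// uniqueFileMap keeps the first name in sorted order: Map { 'Foo.js' => '/store/hash-a' }
// conflictingFileNames maps the loser to the kept name: Map { 'foo.js' => 'Foo.js' }
```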
@@ -134,14 +134,13 @@ function pickFileFromFilesMap (filesMap: FilesMap): string {
   // A package might not have a package.json file.
   // For instance, the Node.js package.
   // Or injected packages in a Bit workspace.
-  if (filesMap['package.json']) {
+  if (filesMap.has('package.json')) {
     return 'package.json'
   }
-  const files = Object.keys(filesMap)
-  if (files.length === 0) {
+  if (filesMap.size === 0) {
     throw new Error('pickFileFromFilesMap cannot pick a file from an empty FilesMap')
   }
-  return files[0]
+  return filesMap.keys().next().value!
 }

 function createCloneFunction (): CloneFunction {
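`filesMap.keys().next().value` in `pickFileFromFilesMap` pulls the first key straight from the iterator; since a `Map` preserves insertion order, this returns the same file that `Object.keys(filesMap)[0]` did in the common case, without allocating an array. A sketch with a hypothetical map:

```ts
const filesMap = new Map([
  ['readme.md', '/store/1'],
  ['index.js', '/store/2'],
])
const first = filesMap.keys().next().value // 'readme.md' — no array is allocated
```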
@@ -220,7 +219,7 @@ function pkgLinkedToStore (filesMap: FilesMap, linkedPkgDir: string): boolean {
   } catch (err: unknown) {
     if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') return false
   }
-  const stats1 = fs.statSync(filesMap[filename])
+  const stats1 = fs.statSync(filesMap.get(filename)!)
   if (stats0.ino === stats1.ino) return true
   globalInfo(`Relinking ${linkedPkgDir} from the store`)
   return false
@@ -50,10 +50,10 @@ beforeEach(() => {
 testOnLinuxOnly('packageImportMethod=auto: clone files by default', () => {
   const importPackage = createIndexedPkgImporter('auto')
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('clone')
@@ -75,10 +75,10 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails', () => {
     throw new Error('This file system does not support cloning')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -89,10 +89,10 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails', () => {

   // The copy function will not be called again
   expect(importPackage('project2/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -114,9 +114,9 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails and even
     }
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -136,9 +136,9 @@ testOnLinuxOnly('packageImportMethod=auto: chooses copying if cloning and hard l
     throw new Error('EXDEV: cross-device link not permitted')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('copy')
@@ -155,11 +155,11 @@ testOnLinuxOnly('packageImportMethod=hardlink: fall back to copying if hardlinki
     throw new Error('This file system does not support hard linking')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-      license: 'hash3',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+      ['license', 'hash3'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -173,10 +173,10 @@ test('packageImportMethod=hardlink does not relink package from store if package
   const importPackage = createIndexedPkgImporter('hardlink')
   jest.mocked(gfs.statSync).mockReturnValue({ ino: BigInt(1) } as fs.BigIntStats)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBeUndefined()
@@ -187,10 +187,10 @@ test('packageImportMethod=hardlink relinks package from store if package.json is
   let ino = 0
   jest.mocked(gfs.statSync as jest.Mock).mockImplementation(() => ({ ino: ++ino }))
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBe('hardlink')
@@ -204,9 +204,9 @@ test('packageImportMethod=hardlink does not relink package from store if package
     return { ino: BigInt(1) } as BigIntStats
   }) as unknown as typeof gfs.statSync)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBeUndefined()
@@ -221,10 +221,10 @@ test('packageImportMethod=hardlink links packages when they are not found', () =
     return { ino: BigInt(0) } as BigIntStats
   }) as unknown as typeof gfs.statSync)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBe('hardlink')
@@ -12,10 +12,10 @@ test('importIndexedDir() keepModulesDir merges node_modules', async () => {
   fs.writeFileSync(path.join(tmp, 'dest/node_modules/b/index.js'), 'module.exports = 1')

   const newDir = path.join(tmp, 'dest')
-  const filenames = {
-    'node_modules/a/index.js': path.join(tmp, 'src/node_modules/a/index.js'),
-  }
+  const filenames = new Map([
+    ['node_modules/a/index.js', path.join(tmp, 'src/node_modules/a/index.js')],
+  ])
   importIndexedDir(fs.linkSync, newDir, filenames, { keepModulesDir: true })

-  expect(fs.readdirSync(path.join(newDir, 'node_modules'))).toEqual(['a', 'b'])
+  expect(fs.readdirSync(path.join(newDir, 'node_modules')).sort()).toEqual(['a', 'b'])
 })
@@ -8,10 +8,10 @@ test('importing a package with invalid files', () => {
   const importPackage = createIndexedPkgImporter('copy')
   const target = path.resolve('target')
   importPackage(target, {
-    filesMap: {
-      'foo?bar/qar>zoo.txt': import.meta.filename,
-      '1*2.txt': import.meta.filename,
-    },
+    filesMap: new Map([
+      ['foo?bar/qar>zoo.txt', import.meta.filename],
+      ['1*2.txt', import.meta.filename],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })
@@ -3,7 +3,7 @@ import { type PackageFilesIndex } from '@pnpm/store.cafs'
 export function readdir (index: PackageFilesIndex, dir: string): string[] {
   const dirs = new Set<string>()
   const prefix = dir ? `${dir}/` : ''
-  for (const filePath of Object.keys(index.files)) {
+  for (const filePath of index.files.keys()) {
     if (filePath.startsWith(prefix)) {
       const parts = filePath.substring(dir.length).split('/')
       dirs.add(parts[0] || parts[1])
@@ -15,7 +15,7 @@ export function readdir (index: PackageFilesIndex, dir: string): string[] {
 export type DirEntityType = 'file' | 'directory'

 export function dirEntityType (index: PackageFilesIndex, p: string): DirEntityType | undefined {
-  if (index.files[p]) return 'file'
+  if (index.files.has(p)) return 'file'
   const prefix = `${p}/`
-  return Object.keys(index.files).some((k) => k.startsWith(prefix)) ? 'directory' : undefined
+  return Array.from(index.files.keys()).some((k) => k.startsWith(prefix)) ? 'directory' : undefined
 }
@@ -48,7 +48,7 @@ export function createFuseHandlersFromLockfile (lockfile: LockfileObject, storeD
     cb(-1)
     return
   }
-  const fileInfo = dirEnt.index.files[dirEnt.subPath]
+  const fileInfo = dirEnt.index.files.get(dirEnt.subPath)
   if (!fileInfo) {
     cb(-1)
     return
@@ -112,7 +112,7 @@ export function createFuseHandlersFromLockfile (lockfile: LockfileObject, storeD
   if (dirEnt.entryType === 'index') {
     switch (cafsExplorer.dirEntityType(dirEnt.index, dirEnt.subPath)) {
       case 'file': {
-        const { size, mode } = dirEnt.index.files[dirEnt.subPath]
+        const { size, mode } = dirEnt.index.files.get(dirEnt.subPath)!
        // eslint-disable-next-line n/no-callback-literal
        cb(0, schemas.Stat.file({
          ...STAT_DEFAULT,
@@ -58,10 +58,13 @@ test('patch package with exact version', async () => {

   const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
   const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+  expect(filesIndex.sideEffects).toBeTruthy()
   const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-  const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
   expect(patchedFileIntegrity).toBeTruthy()
-  const originalFileIntegrity = filesIndex.files['index.js'].integrity
+  const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
   expect(originalFileIntegrity).toBeTruthy()
   // The integrity of the original file differs from the integrity of the patched file
   expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
@@ -153,10 +156,13 @@ test('patch package with version range', async () => {

   const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
   const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+  expect(filesIndex.sideEffects).toBeTruthy()
   const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-  const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
   expect(patchedFileIntegrity).toBeTruthy()
-  const originalFileIntegrity = filesIndex.files['index.js'].integrity
+  const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
   expect(originalFileIntegrity).toBeTruthy()
   // The integrity of the original file differs from the integrity of the patched file
   expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
@@ -320,10 +326,13 @@ test('patch package when scripts are ignored', async () => {

   const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
   const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+  expect(filesIndex.sideEffects).toBeTruthy()
   const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-  const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
   expect(patchedFileIntegrity).toBeTruthy()
-  const originalFileIntegrity = filesIndex.files['index.js'].integrity
+  const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
   expect(originalFileIntegrity).toBeTruthy()
   // The integrity of the original file differs from the integrity of the patched file
   expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
@@ -408,10 +417,13 @@ test('patch package when the package is not in onlyBuiltDependencies list', asyn

   const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
   const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+  expect(filesIndex.sideEffects).toBeTruthy()
   const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-  const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
   expect(patchedFileIntegrity).toBeTruthy()
-  const originalFileIntegrity = filesIndex.files['index.js'].integrity
+  const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
   expect(originalFileIntegrity).toBeTruthy()
   // The integrity of the original file differs from the integrity of the patched file
   expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
@@ -94,9 +94,13 @@ test('using side effects cache', async () => {
     }),
   },
 })}`
-  expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-preinstall.js'])
-  expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-  delete filesIndex.sideEffects![sideEffectsKey].added?.['generated-by-postinstall.js']
+  expect(filesIndex.sideEffects).toBeTruthy()
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  const addedFiles = filesIndex.sideEffects!.get(sideEffectsKey)!.added!
+  expect(addedFiles.get('generated-by-preinstall.js')).toBeTruthy()
+  expect(addedFiles.get('generated-by-postinstall.js')).toBeTruthy()
+  addedFiles.delete('generated-by-postinstall.js')
   fs.writeFileSync(filesIndexFile, v8.serialize(filesIndex))

   rimraf('node_modules')
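These tests keep writing the index back with `v8.serialize`, which is what makes Maps viable in the index files: the V8 structured-clone format round-trips `Map` instances natively, unlike `JSON.stringify`. A minimal sketch (the entry contents are hypothetical):

```ts
import v8 from 'v8'

const index = new Map([['package.json', { integrity: 'sha512-…', mode: 420, size: 123 }]])
const buf = v8.serialize(index)
const roundTripped = v8.deserialize(buf) as typeof index
roundTripped.get('package.json') // => { integrity: 'sha512-…', mode: 420, size: 123 }
```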
@@ -182,12 +186,17 @@ test('a postinstall script does not modify the original sources added to the sto

   const filesIndexFile = getIndexFilePathInCafs(opts.storeDir, getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0'), '@pnpm/postinstall-modifies-source@1.0.0')
   const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
-  const patchedFileIntegrity = filesIndex.sideEffects?.[`${ENGINE_NAME};deps=${hashObject({
+  expect(filesIndex.sideEffects).toBeTruthy()
+  expect(filesIndex.sideEffects!.has(`${ENGINE_NAME};deps=${hashObject({
     id: `@pnpm/postinstall-modifies-source@1.0.0:${getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0')}`,
     deps: {},
-  })}`].added?.['empty-file.txt']?.integrity
+  })}`)).toBeTruthy()
+  const patchedFileIntegrity = filesIndex.sideEffects!.get(`${ENGINE_NAME};deps=${hashObject({
+    id: `@pnpm/postinstall-modifies-source@1.0.0:${getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0')}`,
+    deps: {},
+  })}`)!.added!.get('empty-file.txt')?.integrity
   expect(patchedFileIntegrity).toBeTruthy()
-  const originalFileIntegrity = filesIndex.files['empty-file.txt'].integrity
+  const originalFileIntegrity = filesIndex.files.get('empty-file.txt')!.integrity
   expect(originalFileIntegrity).toBeTruthy()
   // The integrity of the original file differs from the integrity of the patched file
   expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
@@ -218,8 +227,11 @@ test('a corrupted side-effects cache is ignored', async () => {
   },
 })}`

-  expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-preinstall.js'])
-  const sideEffectFileStat = filesIndex.sideEffects![sideEffectsKey].added!['generated-by-preinstall.js']
+  expect(filesIndex.sideEffects).toBeTruthy()
+  expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+  expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-preinstall.js')).toBeTruthy()
+  const sideEffectFileStat = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('generated-by-preinstall.js')!
   const sideEffectFile = getFilePathByModeInCafs(opts.storeDir, sideEffectFileStat.integrity, sideEffectFileStat.mode)
   expect(fs.existsSync(sideEffectFile)).toBeTruthy()
   rimraf(sideEffectFile) // we remove the side effect file to break the store
@@ -692,10 +692,10 @@ test.each([['isolated'], ['hoisted']])('using side effects cache with nodeLinker
     }),
   },
 })}`
-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-  delete cacheIntegrity!.sideEffects![sideEffectsKey].added!['generated-by-postinstall.js']
+  expect(cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-postinstall.js')).toBeTruthy()
+  cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.delete('generated-by-postinstall.js')

-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-preinstall.js'])
+  expect(cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-preinstall.js')).toBeTruthy()
   fs.writeFileSync(cacheIntegrityPath, v8.serialize(cacheIntegrity))

   prefix = f.prepare('side-effects')
@@ -505,12 +505,12 @@ function fetchToStore (
   if (opts.fetchRawManifest && !result.fetchRawManifest) {
     result.fetching = removeKeyOnFail(
       result.fetching.then(async ({ files }) => {
-        if (!files.filesIndex['package.json']) return {
+        if (!files.filesIndex.get('package.json')) return {
           files,
           bundledManifest: undefined,
         }
         if (files.unprocessed) {
-          const { integrity, mode } = files.filesIndex['package.json']
+          const { integrity, mode } = files.filesIndex.get('package.json')!
           const manifestPath = ctx.getFilePathByModeInCafs(integrity, mode)
           return {
             files,
@@ -519,7 +519,7 @@ function fetchToStore (
         }
         return {
           files,
-          bundledManifest: await readBundledManifest(files.filesIndex['package.json']),
+          bundledManifest: await readBundledManifest(files.filesIndex.get('package.json')!),
         }
       })
     )
@@ -70,7 +70,7 @@ test('request package', async () => {
   })

   const { files } = await pkgResponse.fetching!()
-  expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+  expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
   expect(files.resolvedFrom).toBe('remote')
 })
@@ -382,12 +382,12 @@ test('fetchPackageToStore()', async () => {

   const { files, bundledManifest } = await fetchResult.fetching()
   expect(bundledManifest).toBeTruthy() // we always read the bundled manifest
-  expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+  expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
   expect(files.resolvedFrom).toBe('remote')

   const indexFile = readV8FileStrictSync<PackageFilesIndex>(fetchResult.filesIndexFile)
   expect(indexFile).toBeTruthy()
-  expect(typeof indexFile.files['package.json'].checkedAt).toBeTruthy()
+  expect(typeof indexFile.files.get('package.json')!.checkedAt).toBeTruthy()

   const fetchResult2 = packageRequester.fetchPackageToStore({
     fetchRawManifest: true,
@@ -470,9 +470,9 @@ test('fetchPackageToStore() concurrency check', async () => {
     const fetchResult = fetchResults[0]
     const { files } = await fetchResult.fetching()

-    ino1 = fs.statSync(files.filesIndex['package.json'] as string).ino
+    ino1 = fs.statSync(files.filesIndex.get('package.json') as string).ino

-    expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+    expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
     expect(files.resolvedFrom).toBe('remote')
   }
@@ -480,9 +480,9 @@ test('fetchPackageToStore() concurrency check', async () => {
     const fetchResult = fetchResults[1]
     const { files } = await fetchResult.fetching()

-    ino2 = fs.statSync(files.filesIndex['package.json'] as string).ino
+    ino2 = fs.statSync(files.filesIndex.get('package.json') as string).ino

-    expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+    expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
     expect(files.resolvedFrom).toBe('remote')
   }
@@ -549,7 +549,7 @@ test('fetchPackageToStore() does not cache errors', async () => {
     },
   })
   const { files } = await fetchResult.fetching()
-  expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+  expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
   expect(files.resolvedFrom).toBe('remote')

   expect(nock.isDone()).toBeTruthy()
@@ -699,7 +699,7 @@ test('refetch package to store if it has been modified', async () => {
|
||||
})
|
||||
|
||||
const { filesIndex } = (await fetchResult.fetching()).files
|
||||
indexJsFile = filesIndex['index.js'] as string
|
||||
indexJsFile = filesIndex.get('index.js') as string
|
||||
}
|
||||
|
||||
// We should restart the workers otherwise the locker cache will still try to read the file
|
||||
|
||||
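The test updates above are mechanical, and the same three rewrites recur throughout the commit. A short sketch:

const index = new Map<string, string>([['package.json', '/store/abc']])

// 1. Enumerating keys:   Object.keys(obj)         ->  Array.from(map.keys())
const keys = Array.from(index.keys()).sort()

// 2. Indexed access:     obj['package.json']      ->  map.get('package.json')
const storePath = index.get('package.json') as string

// 3. Counting entries:   Object.keys(obj).length  ->  map.size
const count = index.size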
9
pnpm-lock.yaml
generated
@@ -8494,6 +8494,9 @@ importers:
      '@pnpm/types':
        specifier: workspace:*
        version: link:../../packages/types
      '@pnpm/util.lex-comparator':
        specifier: 'catalog:'
        version: 3.0.2
      chalk:
        specifier: 'catalog:'
        version: 5.6.0
@@ -8531,6 +8534,9 @@ importers:
      promise-share:
        specifier: 'catalog:'
        version: 1.0.0
      ramda:
        specifier: 'catalog:'
        version: '@pnpm/ramda@0.28.1'
      uuid:
        specifier: 'catalog:'
        version: 9.0.1
@@ -8556,6 +8562,9 @@ importers:
      '@pnpm/store.cafs':
        specifier: workspace:*
        version: link:../cafs
      '@types/ramda':
        specifier: 'catalog:'
        version: 0.29.12
      '@types/uuid':
        specifier: 'catalog:'
        version: 8.3.4
@@ -134,7 +134,7 @@ test('importPackage hooks', async () => {
    module.exports = { hooks: { importPackage } }

    function importPackage (to, opts) {
      fs.writeFileSync('args.json', JSON.stringify([to, opts]), 'utf8')
      fs.writeFileSync('args.json', JSON.stringify([to, Array.from(opts.filesMap.keys()).sort()]), 'utf8')
      return {}
    }
  `
@@ -147,10 +147,10 @@ test('importPackage hooks', async () => {

  await execPnpm(['add', 'is-positive@1.0.0'])

  const [to, opts] = loadJsonFileSync<any>('args.json') // eslint-disable-line
  const [to, files] = loadJsonFileSync<any>('args.json') // eslint-disable-line

  expect(typeof to).toBe('string')
  expect(Object.keys(opts.filesMap).sort()).toStrictEqual([
  expect(files).toStrictEqual([
    'index.js',
    'license',
    'package.json',
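The hook fixture changed because a Map does not survive JSON.stringify: it serializes as {}. A minimal sketch in plain Node.js, independent of pnpm's hook API:

// JSON.stringify cannot serialize Map contents: a Map becomes '{}'.
const filesMap = new Map([['package.json', '/store/abc'], ['index.js', '/store/def']])

console.log(JSON.stringify(filesMap))
// => {}

// So anything crossing a JSON boundary must serialize the entries explicitly:
console.log(JSON.stringify(Array.from(filesMap.keys()).sort()))
// => ["index.js","package.json"]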
@@ -160,7 +160,7 @@ test("don't fail on case insensitive filesystems when package has 2 files with s
  project.has('@pnpm.e2e/with-same-file-in-different-cases')

  const { files: integrityFile } = readV8FileStrictSync<PackageFilesIndex>(project.getPkgIndexFilePath('@pnpm.e2e/with-same-file-in-different-cases', '1.0.0'))
  const packageFiles = Object.keys(integrityFile).sort()
  const packageFiles = Array.from(integrityFile.keys()).sort()

  expect(packageFiles).toStrictEqual(['Foo.js', 'foo.js', 'package.json'])
  const files = fs.readdirSync('node_modules/@pnpm.e2e/with-same-file-in-different-cases')
@@ -155,7 +155,7 @@ async function parseLicense (
  pkg: {
    manifest: PackageManifest
    files:
    | { local: true, files: Record<string, string> }
    | { local: true, files: Map<string, string> }
    | { local: false, files: PackageFiles }
  },
  opts: { storeDir: string }
@@ -173,9 +173,9 @@ async function parseLicense (
  // check if we discovered a license, if not attempt to parse the LICENSE file
  if (!license || /see license/i.test(license)) {
    const { files: pkgFileIndex } = pkg.files
    const licenseFile = LICENSE_FILES.find((licenseFile) => licenseFile in pkgFileIndex)
    const licenseFile = LICENSE_FILES.find((licenseFile) => pkgFileIndex.has(licenseFile))
    if (licenseFile) {
      const licensePackageFileInfo = pkgFileIndex[licenseFile]
      const licensePackageFileInfo = pkgFileIndex.get(licenseFile)
      let licenseContents: Buffer | undefined
      if (pkg.files.local) {
        licenseContents = await readFile(licensePackageFileInfo as string)
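Replacing the `in` operator with Map.has() is more than a syntax change: `in` also matches properties inherited from Object.prototype. A small illustration (plain TypeScript, not pnpm code):

const asRecord: Record<string, string> = { LICENSE: '/store/xyz' }
console.log('LICENSE' in asRecord)  // true
console.log('toString' in asRecord) // true -- inherited property, a classic Record pitfall

const asMap = new Map([['LICENSE', '/store/xyz']])
console.log(asMap.has('LICENSE'))   // true
console.log(asMap.has('toString'))  // false -- only entries that were actually set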
@@ -216,7 +216,7 @@ async function readLicenseFileFromCafs (storeDir: string, { integrity, mode }: P

export type ReadPackageIndexFileResult =
  | { local: false, files: PackageFiles }
  | { local: true, files: Record<string, string> }
  | { local: true, files: Map<string, string> }

export interface ReadPackageIndexFileOptions {
  storeDir: string
@@ -344,13 +344,10 @@ export async function getPkgInfo (
  // Fetch the package manifest
  let packageManifestDir!: string
  if (packageFileIndexInfo.local) {
    packageManifestDir = packageFileIndexInfo.files['package.json']
    packageManifestDir = packageFileIndexInfo.files.get('package.json') as string
  } else {
    const packageFileIndex = packageFileIndexInfo.files as Record<
    string,
    PackageFileInfo
    >
    const packageManifestFile = packageFileIndex['package.json']
    const packageFileIndex = packageFileIndexInfo.files
    const packageManifestFile = packageFileIndex.get('package.json') as PackageFileInfo
    packageManifestDir = getFilePathByModeInCafs(
      opts.storeDir,
      packageManifestFile.integrity,
@@ -1,7 +1,7 @@
import type { IntegrityLike } from 'ssri'
import type { DependencyManifest } from '@pnpm/types'

export type PackageFiles = Record<string, PackageFileInfo>
export type PackageFiles = Map<string, PackageFileInfo>

export interface PackageFileInfo {
  checkedAt?: number // Nullable for backward compatibility
@@ -10,7 +10,7 @@ export interface PackageFileInfo {
  size: number
}

export type SideEffects = Record<string, SideEffectsDiff>
export type SideEffects = Map<string, SideEffectsDiff>

export interface SideEffectsDiff {
  deleted?: string[]
@@ -26,7 +26,7 @@ export type PackageFilesResponse = {
  requiresBuild: boolean
} & ({
  unprocessed?: false
  filesIndex: Record<string, string>
  filesIndex: Map<string, string>
} | {
  unprocessed: true
  filesIndex: PackageFiles
@@ -53,12 +53,10 @@ export type ImportPackageFunctionAsync = (

export type FileType = 'exec' | 'nonexec' | 'index'

export interface FilesIndex {
  [filename: string]: {
export type FilesIndex = Map<string, {
  mode: number
  size: number
  } & FileWriteResult
}
} & FileWriteResult>

export interface FileWriteResult {
  checkedAt: number
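A sketch of how the Map-based index types above compose in practice; the values are invented and FileWriteResult is reduced to a few plausible fields, so treat this as an assumption rather than pnpm's exact shape:

interface FileWriteResult {
  checkedAt: number
  filePath: string
  integrity: string
}

type FilesIndex = Map<string, { mode: number, size: number } & FileWriteResult>

const filesIndex: FilesIndex = new Map()
filesIndex.set('package.json', {
  mode: 0o644,
  size: 1121,
  checkedAt: Date.now(),
  filePath: '/store/files/ab/cdef', // hypothetical content-addressed path
  integrity: 'sha512-',             // truncated for the example
})

// Unlike an index signature, a Map gives a precise size and safe lookups:
console.log(filesIndex.size)                     // 1
console.log(filesIndex.get('missing') ?? 'none') // 'none'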
@@ -18,7 +18,7 @@ export function addFilesFromDir (
    readManifest?: boolean
  } = {}
): AddToStoreResult {
  const filesIndex: FilesIndex = {}
  const filesIndex = new Map() as FilesIndex
  let manifest: DependencyManifest | undefined
  let files: File[]
  if (opts.files) {
@@ -50,11 +50,11 @@ export function addFilesFromDir (
    }
    // Remove the file type information (regular file, directory, etc.) and leave just the permission bits (rwx for owner, group, and others)
    const mode = stat.mode & 0o777
    filesIndex[relativePath] = {
    filesIndex.set(relativePath, {
      mode,
      size: stat.size,
      ...addBuffer(buffer, mode),
    }
    })
  }
  return { manifest, filesIndex }
}
@@ -18,7 +18,7 @@ export function addFilesFromTarball (
  const ignore = _ignore ?? (() => false)
  const tarContent = isGzip(tarballBuffer) ? gunzipSync(tarballBuffer) : (Buffer.isBuffer(tarballBuffer) ? tarballBuffer : Buffer.from(tarballBuffer))
  const { files } = parseTarball(tarContent)
  const filesIndex: FilesIndex = {}
  const filesIndex = new Map() as FilesIndex
  let manifestBuffer: Buffer | undefined

  for (const [relativePath, { mode, offset, size }] of files) {
@@ -28,11 +28,11 @@ export function addFilesFromTarball (
    if (readManifest && relativePath === 'package.json') {
      manifestBuffer = fileBuffer
    }
    filesIndex[relativePath] = {
    filesIndex.set(relativePath, {
      mode,
      size,
      ...addBufferToCafs(fileBuffer, mode),
    }
    })
  }
  return {
    filesIndex,
@@ -49,11 +49,11 @@ export function checkPkgFilesIntegrity (
  // We verify all side effects cache. We could optimize it to verify only the side effects cache
  // that satisfies the current os/arch/platform.
  // However, it likely won't make a big difference.
  for (const [sideEffectName, { added }] of Object.entries(pkgIndex.sideEffects)) {
  for (const [sideEffectName, { added }] of pkgIndex.sideEffects) {
    if (added) {
      const { passed } = _checkFilesIntegrity(added)
      if (!passed) {
        delete pkgIndex.sideEffects![sideEffectName]
        pkgIndex.sideEffects!.delete(sideEffectName)
      }
    }
  }
@@ -69,7 +69,7 @@ function checkFilesIntegrity (
): VerifyResult {
  let allVerified = true
  let manifest: DependencyManifest | undefined
  for (const [f, fstat] of Object.entries(files)) {
  for (const [f, fstat] of files) {
    if (!fstat.integrity) {
      throw new Error(`Integrity checksum is missing for ${f}`)
    }
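A sketch of the iteration and deletion pattern this hunk adopts: Maps iterate directly as [key, value] pairs and are mutated with .delete() instead of the delete operator. The data and the verify() check below are placeholders, not pnpm's real integrity logic:

const sideEffects = new Map<string, { added?: Map<string, string> }>([
  ['linux-x64', { added: new Map([['side-effect.js', 'sha512-']]) }],
  ['darwin-arm64', {}],
])

for (const [name, { added }] of sideEffects) { // was: Object.entries(obj)
  if (added && !verify(added)) {
    sideEffects.delete(name)                   // was: delete obj[name]
  }
}

function verify (files: Map<string, string>): boolean {
  return files.size > 0 // placeholder check for the example
}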
@@ -5,7 +5,7 @@ import { getFilePathByModeInCafs } from './getFilePathInCafs.js'
import { parseJsonBufferSync } from './parseJson.js'

export function readManifestFromStore (storeDir: string, pkgIndex: PackageFilesIndex): PackageManifest | undefined {
  const pkg = pkgIndex.files['package.json']
  const pkg = pkgIndex.files.get('package.json')
  if (pkg) {
    const fileName = getFilePathByModeInCafs(storeDir, pkg.integrity, pkg.mode)
    return parseJsonBufferSync(gfs.readFileSync(fileName)) as PackageManifest
@@ -18,12 +18,12 @@ describe('cafs', () => {
    const { filesIndex } = cafs.addFilesFromTarball(
      fs.readFileSync(f.find('node-gyp-6.1.0.tgz'))
    )
    expect(Object.keys(filesIndex)).toHaveLength(121)
    const pkgFile = filesIndex['package.json']
    expect(pkgFile.size).toBe(1121)
    expect(pkgFile.mode).toBe(420)
    expect(typeof pkgFile.checkedAt).toBe('number')
    expect(pkgFile.integrity.toString()).toBe('sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==')
    expect(filesIndex.size).toBe(121)
    const pkgFile = filesIndex.get('package.json')
    expect(pkgFile!.size).toBe(1121)
    expect(pkgFile!.mode).toBe(420)
    expect(typeof pkgFile!.checkedAt).toBe('number')
    expect(pkgFile!.integrity.toString()).toBe('sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==')
  })

  it('replaces an already existing file, if the integrity of it was broken', () => {
@@ -34,7 +34,7 @@ describe('cafs', () => {
    let addFilesResult = addFiles()

    // Modifying the file in the store
    const filePath = getFilePathByModeInCafs(storeDir, addFilesResult.filesIndex['foo.txt'].integrity, 420)
    const filePath = getFilePathByModeInCafs(storeDir, addFilesResult.filesIndex.get('foo.txt')!.integrity, 420)
    fs.appendFileSync(filePath, 'bar')

    addFilesResult = addFiles()
@@ -48,7 +48,7 @@ describe('cafs', () => {
    const addFiles = () => createCafs(storeDir).addFilesFromDir(srcDir)

    const { filesIndex } = addFiles()
    expect(filesIndex['subdir/should-exist.txt']).toBeDefined()
    expect(filesIndex.get('subdir/should-exist.txt')).toBeDefined()
  })

  it('symlinks are resolved and added as regular files', async () => {
@@ -63,10 +63,10 @@ describe('cafs', () => {
    await symlinkDir(path.join(srcDir, 'lib'), path.join(srcDir, 'lib-symlink'))

    const { filesIndex } = createCafs(storeDir).addFilesFromDir(srcDir)
    expect(filesIndex['symlink.js']).toBeDefined()
    expect(filesIndex['symlink.js']).toStrictEqual(filesIndex['index.js'])
    expect(filesIndex['lib/index.js']).toBeDefined()
    expect(filesIndex['lib/index.js']).toStrictEqual(filesIndex['lib-symlink/index.js'])
    expect(filesIndex.get('symlink.js')).toBeDefined()
    expect(filesIndex.get('symlink.js')).toStrictEqual(filesIndex.get('index.js'))
    expect(filesIndex.get('lib/index.js')).toBeDefined()
    expect(filesIndex.get('lib/index.js')).toStrictEqual(filesIndex.get('lib-symlink/index.js'))
  })
})
@@ -74,13 +74,13 @@ describe('checkPkgFilesIntegrity()', () => {
  it("doesn't fail if file was removed from the store", () => {
    const storeDir = temporaryDirectory()
    expect(checkPkgFilesIntegrity(storeDir, {
      files: {
        foo: {
      files: new Map([
        ['foo', {
          integrity: 'sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==',
          mode: 420,
          size: 10,
        },
      },
        }],
      ]),
    }).passed).toBeFalsy()
  })
})
@@ -91,7 +91,7 @@ test('file names are normalized when unpacking a tarball', () => {
  const { filesIndex } = cafs.addFilesFromTarball(
    fs.readFileSync(f.find('colorize-semver-diff.tgz'))
  )
  expect(Object.keys(filesIndex).sort()).toStrictEqual([
  expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
    'LICENSE',
    'README.md',
    'lib/index.d.ts',
@@ -114,7 +114,7 @@ test('unpack an older version of tar that prefixes with spaces', () => {
  const { filesIndex } = cafs.addFilesFromTarball(
    fs.readFileSync(f.find('parsers-3.0.0-rc.48.1.tgz'))
  )
  expect(Object.keys(filesIndex).sort()).toStrictEqual([
  expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
    'lib/grammars/resolution.d.ts',
    'lib/grammars/resolution.js',
    'lib/grammars/resolution.pegjs',
@@ -142,7 +142,7 @@ test('unpack a tarball that contains hard links', () => {
  const { filesIndex } = cafs.addFilesFromTarball(
    fs.readFileSync(f.find('vue.examples.todomvc.todo-store-0.0.1.tgz'))
  )
  expect(Object.keys(filesIndex).length).toBeGreaterThan(0)
  expect(filesIndex.size).toBeGreaterThan(0)
})

// Related issue: https://github.com/pnpm/pnpm/issues/7120
@@ -152,5 +152,5 @@ test('unpack should not fail when the tarball format seems to be not USTAR or GN
  const { filesIndex } = cafs.addFilesFromTarball(
    fs.readFileSync(f.find('devextreme-17.1.6.tgz'))
  )
  expect(Object.keys(filesIndex).length).toBeGreaterThan(0)
  expect(filesIndex.size).toBeGreaterThan(0)
})
@@ -15,7 +15,6 @@ import {
} from '@pnpm/store-controller-types'
import memoize from 'mem'
import pathTemp from 'path-temp'
import { map as mapValues } from 'ramda'

export { type CafsLocker }

@@ -83,13 +82,13 @@ function getFlatMap (
  storeDir: string,
  filesResponse: PackageFilesResponse,
  targetEngine?: string
): { filesMap: Record<string, string>, isBuilt: boolean } {
): { filesMap: Map<string, string>, isBuilt: boolean } {
  let isBuilt!: boolean
  let filesIndex!: PackageFiles
  if (targetEngine && ((filesResponse.sideEffects?.[targetEngine]) != null)) {
    filesIndex = applySideEffectsDiff(filesResponse.filesIndex as PackageFiles, filesResponse.sideEffects?.[targetEngine])
  if (targetEngine && filesResponse.sideEffects?.has(targetEngine)) {
    filesIndex = applySideEffectsDiff(filesResponse.filesIndex as PackageFiles, filesResponse.sideEffects.get(targetEngine)!)
    isBuilt = true
  } else if (!filesResponse.unprocessed) {
  } else if (filesResponse.unprocessed !== true) {
    return {
      filesMap: filesResponse.filesIndex,
      isBuilt: false,
@@ -98,15 +97,18 @@ function getFlatMap (
    filesIndex = filesResponse.filesIndex
    isBuilt = false
  }
  const filesMap = mapValues(({ integrity, mode }) => getFilePathByModeInCafs(storeDir, integrity, mode), filesIndex)
  const filesMap = new Map<string, string>()
  for (const [fileName, { integrity, mode }] of filesIndex) {
    filesMap.set(fileName, getFilePathByModeInCafs(storeDir, integrity, mode))
  }
  return { filesMap, isBuilt }
}

function applySideEffectsDiff (baseFiles: PackageFiles, { added, deleted }: SideEffectsDiff): PackageFiles {
  const filesWithSideEffects: PackageFiles = { ...added }
  for (const fileName in baseFiles) {
    if (!deleted?.includes(fileName) && !filesWithSideEffects[fileName]) {
      filesWithSideEffects[fileName] = baseFiles[fileName]
  const filesWithSideEffects: PackageFiles = new Map(added)
  for (const [fileName, fileInfo] of baseFiles) {
    if (!deleted?.includes(fileName) && !filesWithSideEffects.has(fileName)) {
      filesWithSideEffects.set(fileName, fileInfo)
    }
  }
  return filesWithSideEffects
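The rewritten applySideEffectsDiff keeps the same semantics with Map primitives: added entries win, deleted names are dropped, everything else is carried over from the base. A self-contained sketch with file info reduced to a string (note that new Map(undefined) is simply an empty Map, so a missing `added` is harmless):

interface Diff { added?: Map<string, string>, deleted?: string[] }

function applyDiff (base: Map<string, string>, { added, deleted }: Diff): Map<string, string> {
  const result = new Map(added) // entries produced by the build win
  for (const [name, info] of base) {
    if (!deleted?.includes(name) && !result.has(name)) {
      result.set(name, info)    // carry over untouched base files
    }
  }
  return result
}

const base = new Map([['index.js', 'v1'], ['binding.gyp', 'v1']])
const patched = applyDiff(base, {
  added: new Map([['build/Release/addon.node', 'built']]),
  deleted: ['binding.gyp'],
})
console.log(Array.from(patched.keys()))
// => [ 'build/Release/addon.node', 'index.js' ]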
@@ -76,7 +76,7 @@ export async function prune ({ cacheDir, storeDir }: PruneOptions, removeAlienFi
  await Promise.all(pkgIndexFiles.map(async (pkgIndexFilePath) => {
    const { files: pkgFilesIndex } = await readV8FileStrictAsync<PackageFilesIndex>(pkgIndexFilePath)
    // TODO: implement prune of Node.js packages, they don't have a package.json file
    if (pkgFilesIndex['package.json'] && removedHashes.has(pkgFilesIndex['package.json'].integrity)) {
    if (pkgFilesIndex.has('package.json') && removedHashes.has(pkgFilesIndex.get('package.json')!.integrity)) {
      await fs.unlink(pkgIndexFilePath)
      pkgCounter++
    }
@@ -42,6 +42,7 @@
    "@pnpm/store-path": "workspace:*",
    "@pnpm/store.cafs": "workspace:*",
    "@pnpm/types": "workspace:*",
    "@pnpm/util.lex-comparator": "catalog:",
    "chalk": "catalog:",
    "render-help": "catalog:"
  },
@@ -1,4 +1,5 @@
import { type Config } from '@pnpm/config'
import util from 'util'
import { createResolver } from '@pnpm/client'
import { type TarballResolution } from '@pnpm/lockfile.types'

@@ -8,6 +9,7 @@ import { sortDeepKeys } from '@pnpm/object.key-sorting'
import { getStorePath } from '@pnpm/store-path'
import { getIndexFilePathInCafs, type PackageFilesIndex } from '@pnpm/store.cafs'
import { parseWantedDependency } from '@pnpm/parse-wanted-dependency'
import { lexCompare } from '@pnpm/util.lex-comparator'

import renderHelp from 'render-help'

@@ -87,7 +89,7 @@ export async function handler (opts: CatIndexCommandOptions, params: string[]):
  )
  try {
    const pkgFilesIndex = await readV8FileStrictAsync<PackageFilesIndex>(filesIndexFile)
    return JSON.stringify(sortDeepKeys(pkgFilesIndex), null, 2)
    return JSON.stringify(sortDeepKeys(pkgFilesIndex), replacer, 2)
  } catch {
    throw new PnpmError(
      'INVALID_PACKAGE',
@@ -95,3 +97,12 @@ export async function handler (opts: CatIndexCommandOptions, params: string[]):
    )
  }
}

function replacer (key: string, value: unknown) {
  if (util.types.isMap(value)) {
    const entries = Array.from((value as Map<string, unknown>).entries())
    entries.sort(([key1], [key2]) => lexCompare(key1, key2))
    return Object.fromEntries(entries)
  }
  return value
}
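The replacer exists because JSON.stringify renders a Map as {}. A usage sketch of the same idea, with String.prototype.localeCompare standing in for pnpm's lexCompare:

import util from 'util'

// Turn each Map into a sorted plain object before JSON.stringify sees it.
function replacer (key: string, value: unknown) {
  if (util.types.isMap(value)) {
    const entries = Array.from((value as Map<string, unknown>).entries())
    entries.sort(([a], [b]) => a.localeCompare(b))
    return Object.fromEntries(entries)
  }
  return value
}

const index = { files: new Map([['readme.md', { size: 10 }], ['index.js', { size: 5 }]]) }
console.log(JSON.stringify(index, replacer, 2))
// {
//   "files": {
//     "index.js": { "size": 5 },
//     "readme.md": { "size": 10 }
//   }
// }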
@@ -1,5 +1,6 @@
import path from 'path'
import fs from 'fs'
import util from 'util'
import chalk from 'chalk'

import { type Config } from '@pnpm/config'
@@ -69,7 +70,8 @@ export async function handler (opts: FindHashCommandOptions, params: string[]):
      continue
    }

    for (const [, file] of Object.entries(pkgFilesIndex.files)) {
    if (util.types.isMap(pkgFilesIndex.files)) {
      for (const [, file] of pkgFilesIndex.files) {
        if (file?.integrity === hash) {
          result.push({ name: pkgFilesIndex.name ?? 'unknown', version: pkgFilesIndex?.version ?? 'unknown', filesIndexFile: filesIndexFile.replace(indexDir, '') })

@@ -77,11 +79,12 @@ export async function handler (opts: FindHashCommandOptions, params: string[]):
          continue
        }
      }
    }

    if (pkgFilesIndex?.sideEffects) {
    for (const { added } of Object.values(pkgFilesIndex.sideEffects)) {
    if (pkgFilesIndex?.sideEffects && util.types.isMap(pkgFilesIndex.sideEffects)) {
      for (const { added } of pkgFilesIndex.sideEffects.values()) {
        if (!added) continue
        for (const file of Object.values(added)) {
        for (const file of added.values()) {
          if (file?.integrity === hash) {
            result.push({ name: pkgFilesIndex.name ?? 'unknown', version: pkgFilesIndex?.version ?? 'unknown', filesIndexFile: filesIndexFile.replace(indexDir, '') })
@@ -53,7 +53,7 @@ export async function storeStatus (maybeOpts: StoreStatusOptions): Promise<strin
      ? getIndexFilePathInCafs(storeDir, integrity, id)
      : path.join(storeDir, dp.depPathToFilename(id, maybeOpts.virtualStoreDirMaxLength), 'integrity.json')
    const { files } = await readV8FileStrictAsync<PackageFilesIndex>(pkgIndexFilePath)
    return (await dint.check(path.join(virtualStoreDir, dp.depPathToFilename(depPath, maybeOpts.virtualStoreDirMaxLength), 'node_modules', name), files)) === false
    return (await dint.check(path.join(virtualStoreDir, dp.depPathToFilename(depPath, maybeOpts.virtualStoreDirMaxLength), 'node_modules', name), Object.fromEntries(files))) === false
  }, { concurrency: 8 })

  if ((reporter != null) && typeof reporter === 'function') {
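Where a third-party API still expects a plain object (here, dint's check), Object.fromEntries converts the Map at the boundary. A minimal sketch with a made-up Record-based consumer:

const files = new Map([
  ['package.json', { integrity: 'sha512-', mode: 420, size: 100 }],
])

// legacyCheck is a hypothetical stand-in for any Record-based consumer.
function legacyCheck (index: Record<string, { integrity: string }>): boolean {
  return 'package.json' in index
}

console.log(legacyCheck(Object.fromEntries(files))) // true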
@@ -37,6 +37,7 @@
    "@pnpm/types": "workspace:*",
    "p-limit": "catalog:",
    "promise-share": "catalog:",
    "ramda": "catalog:",
    "uuid": "catalog:"
  },
  "peerDependencies": {
@@ -50,6 +51,7 @@
    "@pnpm/package-store": "workspace:*",
    "@pnpm/server": "workspace:*",
    "@pnpm/store.cafs": "workspace:*",
    "@types/ramda": "catalog:",
    "@types/uuid": "catalog:",
    "@zkochan/rimraf": "catalog:",
    "get-port": "catalog:",
@@ -11,6 +11,8 @@ import {

import pLimit from 'p-limit'
import pShare from 'promise-share'
import { omit } from 'ramda'
import v8 from 'v8'
import { v4 as uuidv4 } from 'uuid'

export interface StoreServerController extends StoreController {
@@ -66,8 +68,8 @@ function limitFetch<T>(limit: (fn: () => PromiseLike<T>) => Promise<T>, url: str
    url = url.replace('http://unix:', 'unix:')
  }
  const response = await fetch(url, {
    body: JSON.stringify(body),
    headers: { 'Content-Type': 'application/json' },
    body: v8.serialize(body),
    headers: { 'Content-Type': 'application/octet-stream' },
    method: 'POST',
    retry: {
      retries: 100,
@@ -76,7 +78,8 @@ function limitFetch<T>(limit: (fn: () => PromiseLike<T>) => Promise<T>, url: str
  if (!response.ok) {
    throw await response.json()
  }
  const json = await response.json() as any // eslint-disable-line
  const arrayBuffer = await response.arrayBuffer()
  const json = v8.deserialize(Buffer.from(arrayBuffer)) as any // eslint-disable-line
  if (json.error) {
    throw json.error
  }
@@ -93,7 +96,7 @@ async function requestPackage (
  const msgId = uuidv4()
  const packageResponseBody = await limitedFetch(`${remotePrefix}/requestPackage`, {
    msgId,
    options,
    options: omit(['allowBuild', 'onFetchError'], options),
    wantedDependency,
  })
  if (options.skipFetch === true) {
@@ -121,7 +124,7 @@ async function fetchPackage (

  const fetchResponseBody = await limitedFetch(`${remotePrefix}/fetchPackage`, {
    msgId,
    options,
    options: omit(['allowBuild', 'onFetchError'], options),
  }) as object & { filesIndexFile: string, inStoreLocation: string }
  const fetching = limitedFetch(`${remotePrefix}/packageFilesResponse`, {
    msgId,
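Switching the store-server protocol from JSON to Node's v8 structured serialization is what lets Maps cross the process boundary at all; it also explains the omit() calls, since function-valued options such as onFetchError cannot be serialized. A round-trip sketch:

import v8 from 'v8'

// JSON loses the Map entirely; v8's structured clone keeps it.
const payload = { filesIndex: new Map([['package.json', '/store/abc']]) }

const viaJson = JSON.parse(JSON.stringify(payload))
console.log(viaJson.filesIndex)                   // {} -- entries gone

const viaV8 = v8.deserialize(v8.serialize(payload))
console.log(viaV8.filesIndex.get('package.json')) // '/store/abc'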
@@ -1,6 +1,7 @@
import assert from 'assert'
import http, { type IncomingMessage, type Server, type ServerResponse } from 'http'
import util from 'util'
import v8 from 'v8'
import { globalInfo } from '@pnpm/logger'
import {
  type PkgRequestFetchResult,
@@ -57,14 +58,16 @@ export function createServer (
    }

    const bodyPromise = new Promise<RequestBody>((resolve, reject) => {
      let body: any = '' // eslint-disable-line
      req.on('data', (data) => {
        body += data
      const chunks: Buffer[] = []
      req.on('data', (chunk) => {
        chunks.push(chunk)
      })
      req.on('end', async () => {
        try {
          if (body.length > 0) {
            body = JSON.parse(body)
          const bodyBuffer = Buffer.concat(chunks)
          let body: any // eslint-disable-line
          if (bodyBuffer.byteLength > 0) {
            body = v8.deserialize(bodyBuffer)
          } else {
            body = {}
          }
@@ -85,10 +88,10 @@ export function createServer (
          if (pkgResponse.fetching) {
            filesPromises[body.msgId] = pkgResponse.fetching
          }
          res.end(JSON.stringify(pkgResponse.body))
          res.end(v8.serialize(pkgResponse.body))
        } catch (err: unknown) {
          assert(util.types.isNativeError(err))
          res.end(JSON.stringify({
          res.end(v8.serialize({
            error: {
              message: err.message,
              ...JSON.parse(JSON.stringify(err)),
@@ -102,10 +105,10 @@ export function createServer (
          body = await bodyPromise
          const pkgResponse = (store.fetchPackage as FetchPackageToStoreFunction)(body.options as any) // eslint-disable-line
          filesPromises[body.msgId] = pkgResponse.fetching
          res.end(JSON.stringify({ filesIndexFile: pkgResponse.filesIndexFile }))
          res.end(v8.serialize({ filesIndexFile: pkgResponse.filesIndexFile }))
        } catch (err: unknown) {
          assert(util.types.isNativeError(err))
          res.end(JSON.stringify({
          res.end(v8.serialize({
            error: {
              message: err.message,
              ...JSON.parse(JSON.stringify(err)),
@@ -118,7 +121,7 @@ export function createServer (
          body = await bodyPromise
          const filesResponse = await filesPromises[body.msgId]()
          delete filesPromises[body.msgId]
          res.end(JSON.stringify(filesResponse))
          res.end(v8.serialize(filesResponse))
          break
        }
        case '/prune':
@@ -129,7 +132,7 @@ export function createServer (
        case '/importPackage': {
          const importPackageBody = (await bodyPromise) as any // eslint-disable-line @typescript-eslint/no-explicit-any
          await store.importPackage(importPackageBody.to, importPackageBody.opts)
          res.end(JSON.stringify('OK'))
          res.end(v8.serialize('OK'))
          break
        }
        case '/upload': {
@@ -141,7 +144,7 @@ export function createServer (
          }
          const uploadBody = (await bodyPromise) as any // eslint-disable-line @typescript-eslint/no-explicit-any
          await lock(uploadBody.builtPkgLocation, async () => store.upload(uploadBody.builtPkgLocation, uploadBody.opts))
          res.end(JSON.stringify('OK'))
          res.end(v8.serialize('OK'))
          break
        }
        case '/stop':
@@ -152,20 +155,20 @@ export function createServer (
          }
          globalInfo('Got request to stop the server')
          await close()
          res.end(JSON.stringify('OK'))
          res.end(v8.serialize('OK'))
          globalInfo('Server stopped')
          break
        default: {
          res.statusCode = 404
          const error = { error: `${req.url!} does not match any route` }
          res.end(JSON.stringify(error))
          res.end(v8.serialize(error))
        }
      }
    } catch (e: any) { // eslint-disable-line
      res.statusCode = 503
      const jsonErr = JSON.parse(JSON.stringify(e))
      jsonErr.message = e.message
      res.end(JSON.stringify(jsonErr))
      res.end(v8.serialize(jsonErr))
    }
  })
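On the receiving side the pattern is the mirror image: buffer the raw chunks, deserialize once the stream ends, serialize the reply. A minimal standalone sketch of such a server (routing and error handling omitted; not pnpm's actual server):

import http from 'http'
import v8 from 'v8'

const server = http.createServer((req, res) => {
  const chunks: Buffer[] = []
  req.on('data', (chunk) => chunks.push(chunk))
  req.on('end', () => {
    const bodyBuffer = Buffer.concat(chunks)
    const body = bodyBuffer.byteLength > 0 ? v8.deserialize(bodyBuffer) : {}
    // Echo the request back; Maps inside `body` survive the round trip.
    res.setHeader('Content-Type', 'application/octet-stream')
    res.end(v8.serialize({ received: body }))
  })
})

server.listen(0) // hypothetical usage; the real server also routes by req.url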
@@ -72,7 +72,7 @@ test('server', async () => {
  expect(response.body.manifest!.version).toBe('1.0.0')

  expect(files.resolvedFrom).toBe('remote')
  expect(files.filesIndex).toHaveProperty(['package.json'])
  expect(files.filesIndex.has('package.json')).toBeTruthy()

  await server.close()
  await storeCtrl.close()
@@ -112,7 +112,7 @@ test('fetchPackage', async () => {
  expect(bundledManifest).toBeTruthy()

  expect(files.resolvedFrom).toBe('remote')
  expect(files.filesIndex).toHaveProperty(['package.json'])
  expect(files.filesIndex.has('package.json')).toBeTruthy()

  await server.close()
  await storeCtrl.close()
@@ -177,7 +177,7 @@ test('server upload', async () => {
  fs.writeFileSync(filesIndexFile, v8.serialize({
    name: 'fake-pkg',
    version: '1.0.0',
    files: {},
    files: new Map(),
  }))

  await storeCtrl.upload(path.join(import.meta.dirname, '__fixtures__/side-effect-fake-dir'), {
@@ -186,7 +186,7 @@ test('server upload', async () => {
  })

  const cacheIntegrity = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
  expect(Object.keys(cacheIntegrity.sideEffects![fakeEngine].added!).sort()).toStrictEqual(['side-effect.js', 'side-effect.txt'])
  expect(Array.from(cacheIntegrity.sideEffects!.get(fakeEngine)!.added!.keys()).sort()).toStrictEqual(['side-effect.js', 'side-effect.txt'])

  await server.close()
  await storeCtrl.close()
@@ -337,7 +337,7 @@ test('server route not found', async () => {
  const response = await fetch(`${remotePrefix}/a-random-endpoint`, { method: 'POST' })
  // Ensure error is correct
  expect(response.status).toBe(404)
  expect((await response.json() as any).error).toBeTruthy() // eslint-disable-line
  expect((v8.deserialize(Buffer.from(await response.arrayBuffer())) as any).error).toBeTruthy() // eslint-disable-line

  await server.close()
  await storeCtrlForServer.close()
@@ -175,7 +175,7 @@ export interface PackageResponse {
  )
}

export type FilesMap = Record<string, string>
export type FilesMap = Map<string, string>

export interface ImportOptions {
  disableRelinkLocalDirDeps?: boolean
@@ -67,7 +67,7 @@ function availableParallelism (): number {
}

interface AddFilesResult {
  filesIndex: Record<string, string>
  filesIndex: Map<string, string>
  manifest: DependencyManifest
  requiresBuild: boolean
  integrity?: string
@@ -80,7 +80,7 @@ export async function addFilesFromDir (opts: AddFilesFromDirOptions): Promise<Ad
    workerPool = createTarballWorkerPool()
  }
  const localWorker = await workerPool.checkoutWorkerAsync(true)
  return new Promise<{ filesIndex: Record<string, string>, manifest: DependencyManifest, requiresBuild: boolean }>((resolve, reject) => {
  return new Promise<{ filesIndex: Map<string, string>, manifest: DependencyManifest, requiresBuild: boolean }>((resolve, reject) => {
    localWorker.once('message', ({ status, error, value }) => {
      workerPool!.checkinWorker(localWorker)
      if (status === 'error') {
@@ -81,7 +81,10 @@ async function handleMessage (
  let { storeDir, filesIndexFile, readManifest, verifyStoreIntegrity } = message
  let pkgFilesIndex: PackageFilesIndex | undefined
  try {
    pkgFilesIndex = readV8FileStrictSync(filesIndexFile)
    pkgFilesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
    if (pkgFilesIndex?.files && !(pkgFilesIndex.files instanceof Map)) {
      pkgFilesIndex = undefined
    }
  } catch {
    // ignoring. It is fine if the integrity file is not present. Just refetch the package
  }
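The instanceof Map guard is a backward-compatibility check: an index file written by an older pnpm deserializes with `files` as a plain object, and treating it as absent forces a clean refetch. A sketch of the same guard in isolation:

import v8 from 'v8'

interface IndexFile { files: Map<string, unknown> }

function readIndex (buf: Buffer): IndexFile | undefined {
  let index: IndexFile | undefined
  try {
    index = v8.deserialize(buf) as IndexFile
    if (index?.files && !(index.files instanceof Map)) {
      index = undefined // old Record-based format: ignore and refetch
    }
  } catch {
    index = undefined   // unreadable file: ignore and refetch
  }
  return index
}

console.log(readIndex(v8.serialize({ files: { 'a.js': {} } })))        // undefined
console.log(readIndex(v8.serialize({ files: new Map() }))?.files.size) // 0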
@@ -185,7 +188,7 @@ function calcIntegrity (buffer: Buffer): string {
interface AddFilesFromDirResult {
  status: string
  value: {
    filesIndex: Record<string, string>
    filesIndex: Map<string, string>
    manifest?: DependencyManifest
    requiresBuild: boolean
  }
@@ -237,8 +240,8 @@ function addFilesFromDir ({ dir, storeDir, filesIndexFile, sideEffectsCacheKey,
      },
    }
  }
  filesIndex.sideEffects = filesIndex.sideEffects ?? {}
  filesIndex.sideEffects[sideEffectsCacheKey] = calculateDiff(filesIndex.files, filesIntegrity)
  filesIndex.sideEffects ??= new Map()
  filesIndex.sideEffects.set(sideEffectsCacheKey, calculateDiff(filesIndex.files, filesIntegrity))
  if (filesIndex.requiresBuild == null) {
    requiresBuild = pkgRequiresBuild(manifest, filesIntegrity)
  } else {
@@ -253,23 +256,23 @@ function addFilesFromDir ({ dir, storeDir, filesIndexFile, sideEffectsCacheKey,

function calculateDiff (baseFiles: PackageFiles, sideEffectsFiles: PackageFiles): SideEffectsDiff {
  const deleted: string[] = []
  const added: PackageFiles = {}
  for (const file of new Set([...Object.keys(baseFiles), ...Object.keys(sideEffectsFiles)])) {
    if (!sideEffectsFiles[file]) {
  const added: PackageFiles = new Map()
  for (const file of new Set([...baseFiles.keys(), ...sideEffectsFiles.keys()])) {
    if (!sideEffectsFiles.has(file)) {
      deleted.push(file)
    } else if (
      !baseFiles[file] ||
      baseFiles[file].integrity !== sideEffectsFiles[file].integrity ||
      baseFiles[file].mode !== sideEffectsFiles[file].mode
      !baseFiles.has(file) ||
      baseFiles.get(file)!.integrity !== sideEffectsFiles.get(file)!.integrity ||
      baseFiles.get(file)!.mode !== sideEffectsFiles.get(file)!.mode
    ) {
      added[file] = sideEffectsFiles[file]
      added.set(file, sideEffectsFiles.get(file)!)
    }
  }
  const diff: SideEffectsDiff = {}
  if (deleted.length > 0) {
    diff.deleted = deleted
  }
  if (Object.keys(added).length > 0) {
  if (added.size > 0) {
    diff.added = added
  }
  return diff
@@ -277,20 +280,20 @@ function calculateDiff (baseFiles: PackageFiles, sideEffectsFiles: PackageFiles)

interface ProcessFilesIndexResult {
  filesIntegrity: PackageFiles
  filesMap: Record<string, string>
  filesMap: Map<string, string>
}

function processFilesIndex (filesIndex: FilesIndex): ProcessFilesIndexResult {
  const filesIntegrity: PackageFiles = {}
  const filesMap: Record<string, string> = {}
  for (const [k, { checkedAt, filePath, integrity, mode, size }] of Object.entries(filesIndex)) {
    filesIntegrity[k] = {
  const filesIntegrity: PackageFiles = new Map()
  const filesMap = new Map<string, string>()
  for (const [k, { checkedAt, filePath, integrity, mode, size }] of filesIndex) {
    filesIntegrity.set(k, {
      checkedAt,
      integrity: integrity.toString(), // TODO: use the raw Integrity object
      mode,
      size,
    }
    filesMap[k] = filePath
    })
    filesMap.set(k, filePath)
  }
  return { filesIntegrity, filesMap }
}
@@ -128,7 +128,7 @@ export type ExtendFilesMapStats = Pick<fs.Stats, 'ino' | 'isFile' | 'isDirectory

export interface ExtendFilesMapOptions {
  /** Map relative path of each file to their real path */
  filesIndex: Record<string, string>
  filesIndex: Map<string, string>
  /** Map relative path of each file to their stats */
  filesStats?: Record<string, ExtendFilesMapStats | null>
}
@@ -150,7 +150,7 @@ export async function extendFilesMap ({ filesIndex, filesStats }: ExtendFilesMap
    }
  }

  await Promise.all(Object.entries(filesIndex).map(async ([relativePath, realPath]) => {
  await Promise.all(Array.from(filesIndex.entries()).map(async ([relativePath, realPath]) => {
    const stats = filesStats?.[relativePath] ?? await fs.promises.stat(realPath)
    if (stats.isFile()) {
      addInodeAndAncestors(relativePath, stats.ino)
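Since Map has no .map(), fanning out asynchronous work per entry goes through Array.from first. A runnable sketch of the Promise.all pattern above; the store paths are fictional, hence the try/catch:

import fs from 'fs'

const filesIndex = new Map<string, string>([
  ['package.json', '/store/ab/cd'], // made-up paths for illustration only
  ['index.js', '/store/ef/01'],
])

async function statAll (index: Map<string, string>): Promise<void> {
  await Promise.all(Array.from(index.entries()).map(async ([relativePath, realPath]) => {
    try {
      const stats = await fs.promises.stat(realPath)
      console.log(relativePath, stats.ino)
    } catch {
      console.log(relativePath, 'not on disk (the paths in this sketch are fictional)')
    }
  }))
}

void statAll(filesIndex)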
@@ -103,7 +103,7 @@ test('optimally synchronizes source and target', async () => {

  const sourceFetchResult = await fetchFromDir(sourceDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Array.from(targetFetchResultBefore.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
  expect(
    filesToModify
      .map(suffix => path.resolve(targetDir, suffix))
@@ -128,8 +128,8 @@ test('optimally synchronizes source and target', async () => {
  await patchers[0].apply()

  const targetFetchResultAfter = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(targetFetchResultBefore.filesIndex.keys()).sort())
  expect(
    filesToModify
      .map(suffix => path.resolve(targetDir, suffix))
@@ -200,19 +200,20 @@ test('multiple patchers', async () => {
  const targetFetchResultBefore1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).toStrictEqual([])
  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).toStrictEqual([])
  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).toStrictEqual([])
  const expected = Array.from(sourceFetchResult.filesIndex.keys()).sort()
  expect(Array.from(targetFetchResultBefore1.filesIndex.keys()).sort()).not.toStrictEqual(expected)
  expect(Array.from(targetFetchResultBefore2.filesIndex.keys()).sort()).not.toStrictEqual(expected)
  expect(Array.from(targetFetchResultBefore3.filesIndex.keys()).sort()).not.toStrictEqual(expected)
  expect(Array.from(targetFetchResultBefore1.filesIndex.keys()).sort()).toStrictEqual([])
  expect(Array.from(targetFetchResultBefore2.filesIndex.keys()).sort()).toStrictEqual([])
  expect(Array.from(targetFetchResultBefore3.filesIndex.keys()).sort()).toStrictEqual([])

  await Promise.all(patchers.map(patcher => patcher.apply()))

  const targetFetchResultAfter1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultAfter2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultAfter3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultAfter1.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter2.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter3.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Array.from(targetFetchResultAfter1.filesIndex.keys()).sort()).toStrictEqual(expected)
  expect(Array.from(targetFetchResultAfter2.filesIndex.keys()).sort()).toStrictEqual(expected)
  expect(Array.from(targetFetchResultAfter3.filesIndex.keys()).sort()).toStrictEqual(expected)
})
@@ -132,7 +132,7 @@ test('applies a patch on a directory', async () => {

  const sourceFetchResult = await fetchFromDir('source', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  const targetFetchResultBefore = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Array.from(targetFetchResultBefore.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
  expect(
    filesToModify
      .map(suffix => `target/${suffix}`)
@@ -148,8 +148,8 @@ test('applies a patch on a directory', async () => {
  await applyPatch(optimizedDirPath, path.resolve('source'), path.resolve('target'))

  const targetFetchResultAfter = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(targetFetchResultBefore.filesIndex.keys()).sort())
  expect(
    filesToModify
      .map(suffix => `target/${suffix}`)
@@ -26,9 +26,9 @@ test('without provided stats', async () => {
    'foo/bar.txt',
    'foo_bar.txt',
  ]
  const filesIndex: Record<string, string> = {}
  const filesIndex = new Map<string, string>()
  for (const filePath of filePaths) {
    filesIndex[filePath] = path.resolve(filePath)
    filesIndex.set(filePath, path.resolve(filePath))
    fs.mkdirSync(path.dirname(filePath), { recursive: true })
    fs.writeFileSync(filePath, '')
  }
@@ -51,7 +51,7 @@ test('without provided stats', async () => {
  } as InodeMap)

  for (const filePath of filePaths) {
    expect(statMethod).toHaveBeenCalledWith(filesIndex[filePath])
    expect(statMethod).toHaveBeenCalledWith(filesIndex.get(filePath))
  }
})

@@ -66,11 +66,11 @@ test('with provided stats', async () => {
    'foo/bar.txt',
    'foo_bar.txt',
  ]
  const filesIndex: Record<string, string> = {}
  const filesIndex = new Map<string, string>()
  const filesStats: Record<string, ExtendFilesMapStats> = {}
  let ino = startingIno
  for (const filePath of filePaths) {
    filesIndex[filePath] = path.resolve(filePath)
    filesIndex.set(filePath, path.resolve(filePath))
    filesStats[filePath] = {
      ino,
      isDirectory: () => false,