mirror of https://github.com/pnpm/pnpm.git
refactor: use Maps instead of Records (#10312)
This commit is contained in:

env/node.fetcher/src/index.ts (vendored, 2 changes)
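The commit swaps plain `Record<string, string>` file indexes for `Map<string, string>` across the fetcher and importer packages. As a reader's reference (not part of the commit itself), this sketch pairs each Record idiom with the Map idiom the hunks below substitute for it:

```ts
// Side-by-side of the translations applied throughout this diff.
const record: Record<string, string> = { 'package.json': '/store/a' }
const index = new Map<string, string>([['package.json', '/store/a']])

record['index.js'] = '/store/b'                // write
index.set('index.js', '/store/b')

const fromRecord = record['package.json']      // read
const fromMap = index.get('package.json')      // string | undefined

const inRecord = record['package.json'] != null // membership test
const inMap = index.has('package.json')

const recordSize = Object.keys(record).length  // size
const mapSize = index.size

for (const [key, value] of index) {            // iteration (was Object.entries)
  console.log(key, value)
}
const sorted = Array.from(index.keys()).sort() // sorted key list (was Object.keys)
```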
env/node.fetcher/src/index.ts

@@ -178,7 +178,7 @@ async function downloadAndUnpackTarballToDir (
   cafs.importPackage(targetDir, {
     filesResponse: {
-      filesIndex: filesIndex as Record<string, string>,
+      filesIndex,
       resolvedFrom: 'remote',
       requiresBuild: false,
     },
@@ -74,22 +74,19 @@ export async function runLifecycleHooksConcurrently (
   await Promise.all(
     targetDirs.map(async (targetDir) => {
       const targetModulesDir = path.join(targetDir, 'node_modules')
-      const nodeModulesIndex = {}
+      const newFilesIndex = new Map(filesResponse.filesIndex)
       if (fs.existsSync(targetModulesDir)) {
         // If the target directory contains a node_modules directory
         // (it may happen when the hoisted node linker is used)
         // then we need to preserve this node_modules.
         // So we scan this node_modules directory and pass it as part of the new package.
-        await scanDir('node_modules', targetModulesDir, targetModulesDir, nodeModulesIndex)
+        await scanDir('node_modules', targetModulesDir, targetModulesDir, newFilesIndex)
       }
       return opts.storeController.importPackage(targetDir, {
         filesResponse: {
           resolvedFrom: 'local-dir',
           ...filesResponse,
-          filesIndex: {
-            ...filesResponse.filesIndex,
-            ...nodeModulesIndex,
-          },
+          filesIndex: newFilesIndex,
         },
         force: false,
       })
@@ -101,7 +98,7 @@ export async function runLifecycleHooksConcurrently (
   await runGroups(childConcurrency, groups)
 }

-async function scanDir (prefix: string, rootDir: string, currentDir: string, index: Record<string, string>): Promise<void> {
+async function scanDir (prefix: string, rootDir: string, currentDir: string, index: Map<string, string>): Promise<void> {
   const files = await fs.promises.readdir(currentDir)
   await Promise.all(files.map(async (file) => {
     const fullPath = path.join(currentDir, file)
@@ -111,7 +108,7 @@ async function scanDir (prefix: string, rootDir: string, currentDir: string, ind
     }
     if (stat.isFile()) {
       const relativePath = path.relative(rootDir, fullPath)
-      index[path.join(prefix, relativePath)] = fullPath
+      index.set(path.join(prefix, relativePath), fullPath)
     }
   }))
 }
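With the Map-based index, `runLifecycleHooksConcurrently` clones the incoming index once and lets `scanDir` add the preserved `node_modules` entries to the clone, instead of building a second Record and spread-merging the two. A minimal sketch of that clone-and-extend pattern (the names here are illustrative, not from the commit):

```ts
// new Map(base) is a shallow copy, so the caller's index is left untouched;
// subsequent set() calls play the role the object spread used to play,
// with later writes winning on duplicate keys.
function extendIndex (
  base: Map<string, string>,
  extraEntries: Iterable<[string, string]>
): Map<string, string> {
  const merged = new Map(base)
  for (const [key, value] of extraEntries) {
    merged.set(key, value)
  }
  return merged
}

const merged = extendIndex(
  new Map([['index.js', '/store/abc']]),
  [['node_modules/a/index.js', '/tmp/a/index.js']]
)
console.log(merged.size) // 2
```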
@@ -1,6 +1,6 @@
 import { type DependencyManifest } from '@pnpm/types'

-export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefined, filesIndex: Record<string, unknown>): boolean {
+export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefined, filesIndex: Map<string, unknown>): boolean {
   return Boolean(
     manifest?.scripts != null && (
       Boolean(manifest.scripts.preinstall) ||
@@ -11,7 +11,14 @@ export function pkgRequiresBuild (manifest: Partial<DependencyManifest> | undefi
   )
 }

-function filesIncludeInstallScripts (filesIndex: Record<string, unknown>): boolean {
-  return filesIndex['binding.gyp'] != null ||
-    Object.keys(filesIndex).some((filename) => !(filename.match(/^\.hooks[\\/]/) == null)) // TODO: optimize this
+function filesIncludeInstallScripts (filesIndex: Map<string, unknown>): boolean {
+  if (filesIndex.has('binding.gyp')) {
+    return true
+  }
+  for (const filename of filesIndex.keys()) {
+    if (filename.match(/^\.hooks[\\/]/) != null) {
+      return true
+    }
+  }
+  return false
 }
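Beyond style, `Map.has()` avoids a subtle hazard of `record[key] != null` checks: a plain object inherits members from `Object.prototype`, so certain key names test positive even when they were never inserted. A small illustration of the difference (my example, not taken from the commit):

```ts
const record: Record<string, unknown> = {}
console.log(record['toString'] != null) // true: inherited from Object.prototype

const map = new Map<string, unknown>()
console.log(map.has('toString'))        // false, as expected
```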
@@ -362,7 +362,7 @@ async function _rebuild (
     sideEffectsCacheKey = calcDepState(depGraph, depsStateCache, depPath, {
       includeDepGraphHash: true,
     })
-    if (pkgFilesIndex.sideEffects?.[sideEffectsCacheKey]) {
+    if (pkgFilesIndex.sideEffects?.has(sideEffectsCacheKey)) {
       pkgsThatWereRebuilt.add(depPath)
       return
     }
@@ -373,7 +373,7 @@ async function _rebuild (
     if (pgkManifest != null) {
       // This won't return the correct result for packages with binding.gyp as we don't pass the filesIndex to the function.
       // However, currently rebuild doesn't work for such packages at all, which should be fixed.
-      requiresBuild = pkgRequiresBuild(pgkManifest, {})
+      requiresBuild = pkgRequiresBuild(pgkManifest, new Map())
     }

     const hasSideEffects = requiresBuild && allowBuild(pkgInfo.name, pkgInfo.version, depPath) && await runPostinstallHooks({
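In `_rebuild`, `pkgFilesIndex.sideEffects` is now a Map keyed by the side-effects cache key, so a cache hit becomes a `?.has()` call. A hedged sketch of the lookup shape (the real `PackageFilesIndex` carries more fields than shown here):

```ts
// sideEffects maps a cache key (engine name + deps hash) to the files a build added.
interface PkgFilesIndexLike {
  sideEffects?: Map<string, { added?: Map<string, { integrity: string }> }>
}

function hasCachedBuild (index: PkgFilesIndexLike, cacheKey: string): boolean {
  // ?. guards the absent-Map case; has() checks the key itself.
  return index.sideEffects?.has(cacheKey) ?? false
}
```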
@@ -88,8 +88,8 @@ test('rebuilds dependencies', async () => {
     }),
   },
 })}`
-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-  delete cacheIntegrity!.sideEffects![sideEffectsKey].added!['generated-by-postinstall.js']
+  expect(cacheIntegrity.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-postinstall.js')).toBeTruthy()
+  cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.delete('generated-by-postinstall.js')
 })

 test('skipIfHasSideEffectsCache', async () => {
@@ -112,17 +112,17 @@ test('skipIfHasSideEffectsCache', async () => {
   const cacheIntegrityPath = getIndexFilePathInCafs(path.join(storeDir, STORE_VERSION), getIntegrity('@pnpm.e2e/pre-and-postinstall-scripts-example', '1.0.0'), '@pnpm.e2e/pre-and-postinstall-scripts-example@1.0.0')
   let cacheIntegrity = readV8FileSync<PackageFilesIndex>(cacheIntegrityPath)!
   const sideEffectsKey = `${ENGINE_NAME};deps=${hashObject({ '@pnpm.e2e/hello-world-js-bin@1.0.0': {} })}`
-  cacheIntegrity.sideEffects = {
-    [sideEffectsKey]: {
-      added: {
-        foo: {
+  cacheIntegrity.sideEffects = new Map([
+    [sideEffectsKey, {
+      added: new Map([
+        ['foo', {
           integrity: 'bar',
           mode: 1,
           size: 1,
-        },
-      },
-    },
-  }
+        }],
+      ]),
+    }],
+  ])
   fs.writeFileSync(cacheIntegrityPath, v8.serialize(cacheIntegrity))

   let modules = project.readModulesManifest()
@@ -147,7 +147,7 @@ test('skipIfHasSideEffectsCache', async () => {

   cacheIntegrity = readV8FileSync<PackageFilesIndex>(cacheIntegrityPath)!
   expect(cacheIntegrity!.sideEffects).toBeTruthy()
-  expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'foo'])
+  expect(cacheIntegrity.sideEffects!.get(sideEffectsKey)!.added!.get('foo')).toBeTruthy()
 })

 test('rebuild does not fail when a linked package is present', async () => {
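The test changes are forced by the data-structure change: Jest's `toHaveProperty` walks plain-object property paths and, to my knowledge, does not descend into Map entries, so the assertions switch to explicit `get`/`has` calls. Roughly:

```ts
// A Map entry is not an own property of the Map object, so
// toHaveProperty(['sideEffects', 'key1']) would no longer match.
const sideEffects = new Map([
  ['key1', { added: new Map([['foo.js', { integrity: 'abc' }]]) }],
])

expect(sideEffects.has('key1')).toBeTruthy()
expect(sideEffects.get('key1')!.added.has('foo.js')).toBeTruthy()
```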
@@ -35,7 +35,7 @@ export type FetchFromDirOptions = Omit<DirectoryFetcherOptions, 'lockfileDir'> &

 export interface FetchResult {
   local: true
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   filesStats?: Record<string, Stats | null>
   packageImportMethod: 'hardlink'
   manifest: DependencyManifest
@@ -75,7 +75,7 @@ async function _fetchAllFilesFromDir (
   dir: string,
   relativeDir = ''
 ): Promise<Pick<FetchResult, 'filesIndex' | 'filesStats'>> {
-  const filesIndex: Record<string, string> = {}
+  const filesIndex = new Map<string, string>()
   const filesStats: Record<string, Stats | null> = {}
   const files = await fs.readdir(dir)
   await Promise.all(files
@@ -87,10 +87,12 @@ async function _fetchAllFilesFromDir (
       const relativeSubdir = `${relativeDir}${relativeDir ? '/' : ''}${file}`
       if (stat.isDirectory()) {
         const subFetchResult = await _fetchAllFilesFromDir(readFileStat, filePath, relativeSubdir)
-        Object.assign(filesIndex, subFetchResult.filesIndex)
+        for (const [key, value] of subFetchResult.filesIndex) {
+          filesIndex.set(key, value)
+        }
         Object.assign(filesStats, subFetchResult.filesStats)
       } else {
-        filesIndex[relativeSubdir] = filePath
+        filesIndex.set(relativeSubdir, filePath)
         filesStats[relativeSubdir] = fileStatResult.stat
       }
     })
@@ -142,7 +144,7 @@ async function fileStat (filePath: string): Promise<FileStatResult | null> {

 async function fetchPackageFilesFromDir (dir: string): Promise<FetchResult> {
   const files = await packlist(dir)
-  const filesIndex: Record<string, string> = Object.fromEntries(files.map((file) => [file, path.join(dir, file)]))
+  const filesIndex = new Map<string, string>(files.map((file) => [file, path.join(dir, file)]))
   // In a regular pnpm workspace it will probably never happen that a dependency has no package.json file.
   // Safe read was added to support the Bit workspace in which the components have no package.json files.
   // Related PR in Bit: https://github.com/teambit/bit/pull/5251
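The directory fetcher now produces its index directly as a Map: the `Map` constructor accepts the same `[key, value]` pairs that `Object.fromEntries` did, and nested results are merged with `set` instead of `Object.assign`. A condensed, self-contained sketch (the real code gets `files` from `packlist(dir)` asynchronously):

```ts
import path from 'path'

function buildFilesIndex (dir: string, files: string[]): Map<string, string> {
  // Drop-in replacement for Object.fromEntries(files.map(...)).
  return new Map<string, string>(files.map((file) => [file, path.join(dir, file)]))
}

const index = buildFilesIndex('/pkg', ['package.json', 'index.js'])
console.log(index.get('index.js')) // /pkg/index.js
```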
@@ -27,10 +27,10 @@ test('fetch including only package files', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
   ])
@@ -50,10 +50,10 @@ test('fetch including all files', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
     'test.js',
@@ -76,10 +76,10 @@ test('fetch a directory that has no package.json', async () => {
   expect(fetchResult.manifest).toBeUndefined()
   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['index.js']).toBe(path.resolve('index.js'))
+  expect(fetchResult.filesIndex.get('index.js')).toBe(path.resolve('index.js'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
   ])
 })
@@ -99,10 +99,10 @@ test('fetch does not fail on package with broken symlink', async () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))

   // Only those files are included which would get published
-  expect(Object.keys(fetchResult.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(fetchResult.filesIndex.keys()).sort()).toStrictEqual([
     'index.js',
     'package.json',
   ])
@@ -131,9 +131,9 @@ describe('fetch resolves symlinked files to their real locations', () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
-  expect(fetchResult.filesIndex['index.js']).toBe(indexJsPath)
-  expect(fetchResult.filesIndex['src/index.js']).toBe(path.join(srcPath, 'index.js'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('index.js')).toBe(indexJsPath)
+  expect(fetchResult.filesIndex.get('src/index.js')).toBe(path.join(srcPath, 'index.js'))
 })
 test('fetch does not resolve symlinked files to their real locations by default', async () => {
   const fetcher = createDirectoryFetcher()
@@ -148,8 +148,8 @@ describe('fetch resolves symlinked files to their real locations', () => {

   expect(fetchResult.local).toBe(true)
   expect(fetchResult.packageImportMethod).toBe('hardlink')
-  expect(fetchResult.filesIndex['package.json']).toBe(path.resolve('package.json'))
-  expect(fetchResult.filesIndex['index.js']).toBe(path.resolve('index.js'))
-  expect(fetchResult.filesIndex['src/index.js']).toBe(path.resolve('src/index.js'))
+  expect(fetchResult.filesIndex.get('package.json')).toBe(path.resolve('package.json'))
+  expect(fetchResult.filesIndex.get('index.js')).toBe(path.resolve('index.js'))
+  expect(fetchResult.filesIndex.get('src/index.js')).toBe(path.resolve('src/index.js'))
 })
 })
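A small recurring pattern in the updated tests: `Map.keys()` returns an iterator, not an array, so it is materialized with `Array.from` before sorting for a deterministic comparison:

```ts
const filesIndex = new Map([['package.json', '/a'], ['index.js', '/b']])
// Array.from (or [...filesIndex.keys()]) turns the key iterator into an array.
console.log(Array.from(filesIndex.keys()).sort()) // ['index.js', 'package.json']
```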
@@ -31,7 +31,7 @@ export type FetchFunction<FetcherResolution = Resolution, Options = FetchOptions
 export interface FetchResult {
   local?: boolean
   manifest?: DependencyManifest
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   requiresBuild: boolean
   integrity?: string
 }
@@ -44,7 +44,7 @@ export interface GitFetcherOptions {
 }

 export interface GitFetcherResult {
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   manifest?: DependencyManifest
   requiresBuild: boolean
 }
@@ -60,7 +60,7 @@ export interface DirectoryFetcherOptions {

 export interface DirectoryFetcherResult {
   local: true
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   packageImportMethod: 'hardlink'
   manifest?: DependencyManifest
   requiresBuild: boolean
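Since `FetchResult`, `GitFetcherResult`, and `DirectoryFetcherResult` are public fetcher contracts, this is a breaking change for custom fetchers: they now have to return a Map. A minimal conforming result under the changed interface (simplified here to what the hunk above shows):

```ts
interface FetchResultLike {
  local?: boolean
  manifest?: { name: string, version: string }
  filesIndex: Map<string, string>   // was Record<string, string>
  requiresBuild: boolean
  integrity?: string
}

async function fetchStub (): Promise<FetchResultLike> {
  return {
    filesIndex: new Map(),          // an empty {} would no longer type-check
    manifest: { name: 'pkg', version: '1.0.0' },
    requiresBuild: false,
  }
}
```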
@@ -48,7 +48,7 @@ test('fetch', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   expect(manifest?.name).toBe('is-positive')
 })

@@ -67,7 +67,7 @@ test('fetch a package from Git sub folder', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['public/index.html']).toBeTruthy()
+  expect(filesIndex.has('public/index.html')).toBeTruthy()
 })

 test('prevent directory traversal attack when using Git sub folder', async () => {
@@ -129,7 +129,7 @@ test('fetch a package from Git that has a prepare script', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['dist/index.js']).toBeTruthy()
+  expect(filesIndex.has('dist/index.js')).toBeTruthy()
 })

 // Test case for https://github.com/pnpm/pnpm/issues/1866
@@ -148,7 +148,7 @@ test('fetch a package without a package.json', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(filesIndex['denolib.json']).toBeTruthy()
+  expect(filesIndex.has('denolib.json')).toBeTruthy()
 })

 // Covers the regression reported in https://github.com/pnpm/pnpm/issues/4064
@@ -191,7 +191,7 @@ test('still able to shallow fetch for allowed hosts', async () => {
     // Discard final argument as it passes temporary directory
     expect(calls[i].slice(0, -1)).toEqual(expectedCalls[i])
   }
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   expect(manifest?.name).toBe('is-positive')
 })

@@ -224,8 +224,8 @@ test('do not build the package when scripts are ignored', async () => {
   }, {
     filesIndexFile: path.join(storeDir, 'index.json'),
   })
-  expect(filesIndex['package.json']).toBeTruthy()
-  expect(filesIndex['prepare.txt']).toBeFalsy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('prepare.txt')).toBeFalsy()
   expect(globalWarn).toHaveBeenCalledWith('The git-hosted package fetched from "https://github.com/pnpm-e2e/prepare-script-works.git" has to be built but the build scripts were ignored.')
 })

@@ -261,7 +261,7 @@ test('allow git package with prepare script', async () => {
     allowBuild: (pkgName) => pkgName === '@pnpm.e2e/prepare-script-works',
     filesIndexFile: path.join(storeDir, 'index.json'),
   })
-  expect(filesIndex['package.json']).toBeTruthy()
+  expect(filesIndex.has('package.json')).toBeTruthy()
   // Note: prepare.txt is in .gitignore so it won't be in the files index
   // The fact that no error was thrown proves the prepare script was allowed to run
 })
@@ -284,7 +284,7 @@ test('fetch only the included files', async () => {
       filesIndexFile: path.join(storeDir, 'index.json'),
     }
   )
-  expect(Object.keys(filesIndex).sort()).toStrictEqual([
+  expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
     'README.md',
     'dist/index.js',
     'package.json',
@@ -60,7 +60,7 @@ describe('custom fetcher implementation examples', () => {
 describe('basic custom fetcher contract', () => {
   test('should successfully return FetchResult with manifest and filesIndex', async () => {
     const mockManifest = { name: 'test-package', version: '1.0.0' }
-    const mockFilesIndex = { 'package.json': '/path/to/store/package.json' }
+    const mockFilesIndex = new Map([['package.json', '/path/to/store/package.json']])

     const customFetcher = createMockCustomFetcher(
       () => true,
@@ -92,7 +92,7 @@ describe('custom fetcher implementation examples', () => {
     const customFetcher = createMockCustomFetcher(
       () => true,
       async () => ({
-        filesIndex: {},
+        filesIndex: new Map(),
         manifest: { name: 'pkg', version: '1.0.0', scripts: { install: 'node install.js' } },
         requiresBuild: true,
       })
@@ -144,7 +144,7 @@ describe('custom fetcher implementation examples', () => {
       async (cafs) => {
         receivedCafs = cafs
         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -179,7 +179,7 @@ describe('custom fetcher implementation examples', () => {
         ;(opts.onProgress as any)?.({ done: 50, total: 100 }) // eslint-disable-line @typescript-eslint/no-explicit-any

         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -216,7 +216,7 @@ describe('custom fetcher implementation examples', () => {
         expect((resolution as any).cdnUrl).toBe('https://cdn.example.com/pkg.tgz') // eslint-disable-line @typescript-eslint/no-explicit-any

         return {
-          filesIndex: {},
+          filesIndex: new Map(),
           manifest: { name: 'pkg', version: '1.0.0' },
           requiresBuild: false,
         }
@@ -236,7 +236,7 @@ describe('custom fetcher implementation examples', () => {
     const customFetcher = createMockCustomFetcher(
       () => true,
       async () => ({
-        filesIndex: {},
+        filesIndex: new Map(),
         requiresBuild: false,
         // Manifest is optional in FetchResult
       })
@@ -315,7 +315,7 @@ describe('custom fetcher implementation examples', () => {
       createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
     expect(scope.isDone()).toBeTruthy()
   })

@@ -361,7 +361,7 @@ describe('custom fetcher implementation examples', () => {
       createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
   })

   test('custom fetcher can transform resolution before delegating to tarball fetcher', async () => {
@@ -415,7 +415,7 @@ describe('custom fetcher implementation examples', () => {
       createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
     )

-    expect(result.filesIndex['package.json']).toBeTruthy()
+    expect(result.filesIndex.get('package.json')).toBeTruthy()
     expect(scope.isDone()).toBeTruthy()
   })

@@ -484,7 +484,7 @@ describe('custom fetcher implementation examples', () => {

     // Simulate fetch
     const result = {
-      filesIndex: { 'package.json': '/store/pkg.json' },
+      filesIndex: new Map([['package.json', '/store/pkg.json']]),
       manifest: { name: 'cached-pkg', version: (resolution as any).version }, // eslint-disable-line @typescript-eslint/no-explicit-any
     }

@@ -530,7 +530,7 @@ describe('custom fetcher implementation examples', () => {
     }

     return {
-      filesIndex: {},
+      filesIndex: new Map(),
       manifest: { name: 'auth-pkg', version: '1.0.0' },
       requiresBuild: false,
       authToken, // Could store for future use
@@ -48,7 +48,7 @@ test('should fail to pick fetcher if the type is not defined', async () => {

 describe('custom fetcher support', () => {
   test('should use custom fetcher when canFetch returns true', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)
     const remoteTarball = jest.fn() as FetchFunction

@@ -87,7 +87,7 @@ describe('custom fetcher support', () => {
   })

   test('should use custom fetcher when canFetch returns promise resolving to true', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)

     const customFetcher: Partial<CustomFetcher> = {
@@ -150,8 +150,8 @@ describe('custom fetcher support', () => {
   })

   test('should check custom fetchers in order and use first match', async () => {
-    const mockFetchResult1 = { filesIndex: {}, manifest: { name: 'fetcher1', version: '1.0.0' }, requiresBuild: false }
-    const mockFetchResult2 = { filesIndex: {}, manifest: { name: 'fetcher2', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult1 = { filesIndex: new Map(), manifest: { name: 'fetcher1', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult2 = { filesIndex: new Map(), manifest: { name: 'fetcher2', version: '1.0.0' }, requiresBuild: false }

     const fetcher1: Partial<CustomFetcher> = {
       canFetch: () => true,
@@ -184,7 +184,7 @@ describe('custom fetcher support', () => {
   })

   test('should handle custom resolution types', async () => {
-    const mockFetchResult = { filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
+    const mockFetchResult = { filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }
     const customFetch = jest.fn(async () => mockFetchResult)

     const customFetcher: Partial<CustomFetcher> = {
@@ -217,7 +217,7 @@ describe('custom fetcher support', () => {
   })

   test('should pass all fetch options to custom fetcher.fetch', async () => {
-    const customFetch = jest.fn(async () => ({ filesIndex: {}, manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }))
+    const customFetch = jest.fn(async () => ({ filesIndex: new Map(), manifest: { name: 'test', version: '1.0.0' }, requiresBuild: false }))

     const customFetcher: Partial<CustomFetcher> = {
       canFetch: () => true,
@@ -32,7 +32,7 @@ export function createGitHostedTarballFetcher (fetchRemoteTarball: FetchFunction
     filesIndexFile: tempIndexFile,
   })
   try {
-    const prepareResult = await prepareGitHostedPkg(filesIndex as Record<string, string>, cafs, tempIndexFile, opts.filesIndexFile, fetcherOpts, opts, resolution)
+    const prepareResult = await prepareGitHostedPkg(filesIndex, cafs, tempIndexFile, opts.filesIndexFile, fetcherOpts, opts, resolution)
     if (prepareResult.ignoredBuild) {
       globalWarn(`The git-hosted package fetched from "${resolution.tarball}" has to be built but the build scripts were ignored.`)
     }
@@ -52,13 +52,13 @@ export function createGitHostedTarballFetcher (fetchRemoteTarball: FetchFunction
 }

 interface PrepareGitHostedPkgResult {
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   manifest?: DependencyManifest
   ignoredBuild: boolean
 }

 async function prepareGitHostedPkg (
-  filesIndex: Record<string, string>,
+  filesIndex: Map<string, string>,
   cafs: Cafs,
   filesIndexFileNonBuilt: string,
   filesIndexFile: string,
@@ -80,7 +80,7 @@ async function prepareGitHostedPkg (
     allowBuild: fetcherOpts.allowBuild,
   }, tempLocation, resolution.path ?? '')
   const files = await packlist(pkgDir)
-  if (!resolution.path && files.length === Object.keys(filesIndex).length) {
+  if (!resolution.path && files.length === filesIndex.size) {
     if (!shouldBeBuilt) {
       if (filesIndexFileNonBuilt !== filesIndexFile) {
         await renameOverwrite(filesIndexFileNonBuilt, filesIndexFile)
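The packlist comparison in `prepareGitHostedPkg` also gets slightly cheaper: `Map.size` is a constant-time property read, while `Object.keys(record).length` had to allocate an array of keys first. Equivalent checks:

```ts
const filesIndex = new Map([['a.js', '/store/1'], ['b.js', '/store/2']])
const files = ['a.js', 'b.js']

// Before: files.length === Object.keys(recordIndex).length
// After:
console.log(files.length === filesIndex.size) // true
```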
@@ -223,7 +223,7 @@ test("don't fail when integrity check of local file succeeds", async () => {
     pkg,
   })

-  expect(typeof filesIndex['package.json']).toBe('string')
+  expect(typeof filesIndex.get('package.json')).toBe('string')
 })

 test("don't fail when fetching a local tarball in offline mode", async () => {
@@ -250,7 +250,7 @@ test("don't fail when fetching a local tarball in offline mode", async () => {
     pkg,
   })

-  expect(typeof filesIndex['package.json']).toBe('string')
+  expect(typeof filesIndex.get('package.json')).toBe('string')
 })

 test('fail when trying to fetch a non-local tarball in offline mode', async () => {
@@ -464,7 +464,7 @@ test('take only the files included in the package, when fetching a git-hosted pa
     pkg,
   })

-  expect(Object.keys(result.filesIndex).sort()).toStrictEqual([
+  expect(Array.from(result.filesIndex.keys()).sort()).toStrictEqual([
     'README.md',
     'dist/index.js',
     'package.json',
@@ -515,8 +515,8 @@ test('do not build the package when scripts are ignored', async () => {
     pkg,
   })

-  expect(filesIndex).toHaveProperty(['package.json'])
-  expect(filesIndex).not.toHaveProperty(['prepare.txt'])
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('prepare.txt')).toBeFalsy()
   expect(globalWarn).toHaveBeenCalledWith(`The git-hosted package fetched from "${tarball}" has to be built but the build scripts were ignored.`)
 })

@@ -532,7 +532,7 @@ test('when extracting files with the same name, pick the last ones', async () =>
     readManifest: true,
     pkg,
   })
-  const pkgJson = JSON.parse(fs.readFileSync(filesIndex['package.json'], 'utf8'))
+  const pkgJson = JSON.parse(fs.readFileSync(filesIndex.get('package.json')!, 'utf8'))
   expect(pkgJson.name).toBe('pkg2')
   expect(manifest?.name).toBe('pkg2')
 })
@@ -560,8 +560,8 @@ test('use the subfolder when path is present', async () => {
     pkg,
   })

-  expect(filesIndex).toHaveProperty(['package.json'])
-  expect(filesIndex).not.toHaveProperty(['lerna.json'])
+  expect(filesIndex.has('package.json')).toBeTruthy()
+  expect(filesIndex.has('lerna.json')).toBeFalsy()
 })

 test('prevent directory traversal attack when path is present', async () => {
@@ -17,7 +17,7 @@ export type ImportFile = (src: string, dest: string) => void
 export function importIndexedDir (
   importFile: ImportFile,
   newDir: string,
-  filenames: Record<string, string>,
+  filenames: Map<string, string>,
   opts: {
     keepModulesDir?: boolean
   }
@@ -36,16 +36,16 @@ export function importIndexedDir (
   } catch {} // eslint-disable-line:no-empty
   if (util.types.isNativeError(err) && 'code' in err && err.code === 'EEXIST') {
     const { uniqueFileMap, conflictingFileNames } = getUniqueFileMap(filenames)
-    if (Object.keys(conflictingFileNames).length === 0) throw err
+    if (conflictingFileNames.size === 0) throw err
     filenameConflictsLogger.debug({
-      conflicts: conflictingFileNames,
+      conflicts: Object.fromEntries(conflictingFileNames),
       writingTo: newDir,
     })
     globalWarn(
       `Not all files were linked to "${path.relative(process.cwd(), newDir)}". ` +
       'Some of the files have equal names in different case, ' +
       'which is an issue on case-insensitive filesystems. ' +
-      `The conflicting file names are: ${JSON.stringify(conflictingFileNames)}`
+      `The conflicting file names are: ${JSON.stringify(Object.fromEntries(conflictingFileNames))}`
     )
     importIndexedDir(importFile, newDir, uniqueFileMap, opts)
     return
@@ -65,27 +65,27 @@ They were renamed.`)
 }

 interface SanitizeFilenamesResult {
-  sanitizedFilenames: Record<string, string>
+  sanitizedFilenames: Map<string, string>
   invalidFilenames: string[]
 }

-function sanitizeFilenames (filenames: Record<string, string>): SanitizeFilenamesResult {
-  const sanitizedFilenames: Record<string, string> = {}
+function sanitizeFilenames (filenames: Map<string, string>): SanitizeFilenamesResult {
+  const sanitizedFilenames = new Map<string, string>()
   const invalidFilenames: string[] = []
-  for (const [filename, src] of Object.entries(filenames)) {
+  for (const [filename, src] of filenames) {
     const sanitizedFilename = filename.split('/').map((f) => sanitizeFilename(f)).join('/')
     if (sanitizedFilename !== filename) {
       invalidFilenames.push(filename)
     }
-    sanitizedFilenames[sanitizedFilename] = src
+    sanitizedFilenames.set(sanitizedFilename, src)
   }
   return { sanitizedFilenames, invalidFilenames }
 }

-function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames: Record<string, string>): void {
+function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames: Map<string, string>): void {
   makeEmptyDir(newDir, { recursive: true })
   const allDirs = new Set<string>()
-  for (const f in filenames) {
+  for (const f of filenames.keys()) {
     const dir = path.dirname(f)
     if (dir === '.') continue
     allDirs.add(dir)
@@ -93,29 +93,29 @@ function tryImportIndexedDir (importFile: ImportFile, newDir: string, filenames:
   Array.from(allDirs)
     .sort((d1, d2) => d1.length - d2.length) // from shortest to longest
     .forEach((dir) => fs.mkdirSync(path.join(newDir, dir), { recursive: true }))
-  for (const [f, src] of Object.entries(filenames)) {
+  for (const [f, src] of filenames) {
     const dest = path.join(newDir, f)
     importFile(src, dest)
   }
 }

 interface GetUniqueFileMapResult {
-  conflictingFileNames: Record<string, string>
-  uniqueFileMap: Record<string, string>
+  conflictingFileNames: Map<string, string>
+  uniqueFileMap: Map<string, string>
 }

-function getUniqueFileMap (fileMap: Record<string, string>): GetUniqueFileMapResult {
+function getUniqueFileMap (fileMap: Map<string, string>): GetUniqueFileMapResult {
   const lowercaseFiles = new Map<string, string>()
-  const conflictingFileNames: Record<string, string> = {}
-  const uniqueFileMap: Record<string, string> = {}
-  for (const filename of Object.keys(fileMap).sort()) {
+  const conflictingFileNames = new Map<string, string>()
+  const uniqueFileMap = new Map<string, string>()
+  for (const filename of Array.from(fileMap.keys()).sort()) {
     const lowercaseFilename = filename.toLowerCase()
     if (lowercaseFiles.has(lowercaseFilename)) {
-      conflictingFileNames[filename] = lowercaseFiles.get(lowercaseFilename)!
+      conflictingFileNames.set(filename, lowercaseFiles.get(lowercaseFilename)!)
       continue
     }
     lowercaseFiles.set(lowercaseFilename, filename)
-    uniqueFileMap[filename] = fileMap[filename]
+    uniqueFileMap.set(filename, fileMap.get(filename)!)
   }
   return {
     conflictingFileNames,
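Note the `Object.fromEntries` conversions kept inside `importIndexedDir`: they are needed because `JSON.stringify` serializes a Map as `{}` (its entries are not enumerable properties), so both the log message and the debug payload convert back to a plain object. Illustration:

```ts
const conflicts = new Map([['File.js', 'file.js']])

console.log(JSON.stringify(conflicts))                     // '{}'
console.log(JSON.stringify(Object.fromEntries(conflicts))) // '{"File.js":"file.js"}'
```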
@@ -134,14 +134,13 @@ function pickFileFromFilesMap (filesMap: FilesMap): string {
   // A package might not have a package.json file.
   // For instance, the Node.js package.
   // Or injected packages in a Bit workspace.
-  if (filesMap['package.json']) {
+  if (filesMap.has('package.json')) {
     return 'package.json'
   }
-  const files = Object.keys(filesMap)
-  if (files.length === 0) {
+  if (filesMap.size === 0) {
     throw new Error('pickFileFromFilesMap cannot pick a file from an empty FilesMap')
   }
-  return files[0]
+  return filesMap.keys().next().value!
 }

 function createCloneFunction (): CloneFunction {
@@ -220,7 +219,7 @@ function pkgLinkedToStore (filesMap: FilesMap, linkedPkgDir: string): boolean {
   } catch (err: unknown) {
     if (util.types.isNativeError(err) && 'code' in err && err.code === 'ENOENT') return false
   }
-  const stats1 = fs.statSync(filesMap[filename])
+  const stats1 = fs.statSync(filesMap.get(filename)!)
   if (stats0.ino === stats1.ino) return true
   globalInfo(`Relinking ${linkedPkgDir} from the store`)
   return false
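`pickFileFromFilesMap` now takes the first file straight off the key iterator. Map iteration follows insertion order, which matches `Object.keys(...)[0]` in practice for file names (plain objects reorder only integer-like keys), so the behavior should be unchanged. Sketch:

```ts
const filesMap = new Map([['readme.md', '/a'], ['index.js', '/b']])
// keys() yields keys in insertion order; next().value is the first one.
const first = filesMap.keys().next().value // 'readme.md'
console.log(first)
```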
@@ -50,10 +50,10 @@ beforeEach(() => {
 testOnLinuxOnly('packageImportMethod=auto: clone files by default', () => {
   const importPackage = createIndexedPkgImporter('auto')
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('clone')
@@ -75,10 +75,10 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails', () => {
     throw new Error('This file system does not support cloning')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -89,10 +89,10 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails', () => {

   // The copy function will not be called again
   expect(importPackage('project2/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -114,9 +114,9 @@ testOnLinuxOnly('packageImportMethod=auto: link files if cloning fails and even
     }
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -136,9 +136,9 @@ testOnLinuxOnly('packageImportMethod=auto: chooses copying if cloning and hard l
     throw new Error('EXDEV: cross-device link not permitted')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('copy')
@@ -155,11 +155,11 @@ testOnLinuxOnly('packageImportMethod=hardlink: fall back to copying if hardlinki
     throw new Error('This file system does not support hard linking')
   })
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-      license: 'hash3',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+      ['license', 'hash3'],
+    ]),
     force: false,
     resolvedFrom: 'remote',
   })).toBe('hardlink')
@@ -173,10 +173,10 @@ test('packageImportMethod=hardlink does not relink package from store if package
   const importPackage = createIndexedPkgImporter('hardlink')
   jest.mocked(gfs.statSync).mockReturnValue({ ino: BigInt(1) } as fs.BigIntStats)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBeUndefined()
@@ -187,10 +187,10 @@ test('packageImportMethod=hardlink relinks package from store if package.json is
   let ino = 0
   jest.mocked(gfs.statSync as jest.Mock).mockImplementation(() => ({ ino: ++ino }))
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBe('hardlink')
@@ -204,9 +204,9 @@ test('packageImportMethod=hardlink does not relink package from store if package
     return { ino: BigInt(1) } as BigIntStats
   }) as unknown as typeof gfs.statSync)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBeUndefined()
@@ -221,10 +221,10 @@ test('packageImportMethod=hardlink links packages when they are not found', () =
     return { ino: BigInt(0) } as BigIntStats
   }) as unknown as typeof gfs.statSync)
   expect(importPackage('project/package', {
-    filesMap: {
-      'index.js': 'hash2',
-      'package.json': 'hash1',
-    },
+    filesMap: new Map([
+      ['index.js', 'hash2'],
+      ['package.json', 'hash1'],
+    ]),
     force: false,
     resolvedFrom: 'store',
   })).toBe('hardlink')
@@ -12,10 +12,10 @@ test('importIndexedDir() keepModulesDir merges node_modules', async () => {
|
|||||||
fs.writeFileSync(path.join(tmp, 'dest/node_modules/b/index.js'), 'module.exports = 1')
|
fs.writeFileSync(path.join(tmp, 'dest/node_modules/b/index.js'), 'module.exports = 1')
|
||||||
|
|
||||||
const newDir = path.join(tmp, 'dest')
|
const newDir = path.join(tmp, 'dest')
|
||||||
const filenames = {
|
const filenames = new Map([
|
||||||
'node_modules/a/index.js': path.join(tmp, 'src/node_modules/a/index.js'),
|
['node_modules/a/index.js', path.join(tmp, 'src/node_modules/a/index.js')],
|
||||||
}
|
])
|
||||||
importIndexedDir(fs.linkSync, newDir, filenames, { keepModulesDir: true })
|
importIndexedDir(fs.linkSync, newDir, filenames, { keepModulesDir: true })
|
||||||
|
|
||||||
expect(fs.readdirSync(path.join(newDir, 'node_modules'))).toEqual(['a', 'b'])
|
expect(fs.readdirSync(path.join(newDir, 'node_modules')).sort()).toEqual(['a', 'b'])
|
||||||
})
|
})
|
||||||

@@ -8,10 +8,10 @@ test('importing a package with invalid files', () => {
 const importPackage = createIndexedPkgImporter('copy')
 const target = path.resolve('target')
 importPackage(target, {
-  filesMap: {
-    'foo?bar/qar>zoo.txt': import.meta.filename,
-    '1*2.txt': import.meta.filename,
-  },
+  filesMap: new Map([
+    ['foo?bar/qar>zoo.txt', import.meta.filename],
+    ['1*2.txt', import.meta.filename],
+  ]),
   force: false,
   resolvedFrom: 'remote',
 })

@@ -3,7 +3,7 @@ import { type PackageFilesIndex } from '@pnpm/store.cafs'
 export function readdir (index: PackageFilesIndex, dir: string): string[] {
   const dirs = new Set<string>()
   const prefix = dir ? `${dir}/` : ''
-  for (const filePath of Object.keys(index.files)) {
+  for (const filePath of index.files.keys()) {
     if (filePath.startsWith(prefix)) {
       const parts = filePath.substring(dir.length).split('/')
       dirs.add(parts[0] || parts[1])

@@ -15,7 +15,7 @@ export function readdir (index: PackageFilesIndex, dir: string): string[] {
 export type DirEntityType = 'file' | 'directory'

 export function dirEntityType (index: PackageFilesIndex, p: string): DirEntityType | undefined {
-  if (index.files[p]) return 'file'
+  if (index.files.has(p)) return 'file'
   const prefix = `${p}/`
-  return Object.keys(index.files).some((k) => k.startsWith(prefix)) ? 'directory' : undefined
+  return Array.from(index.files.keys()).some((k) => k.startsWith(prefix)) ? 'directory' : undefined
 }
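
Since `index.files` is now a Map, the explorer walks keys directly instead of materializing them with `Object.keys`, and membership is a `has()` call rather than a truthy index. A sketch of that lookup style, assuming a hypothetical index with two files:

    const files = new Map<string, { size: number }>([
      ['lib/index.js', { size: 10 }],
      ['package.json', { size: 20 }],
    ])

    // Map keys are directly iterable; no Object.keys() detour.
    const topLevel = new Set<string>()
    for (const filePath of files.keys()) {
      topLevel.add(filePath.split('/')[0])
    }
    console.log([...topLevel]) // [ 'lib', 'package.json' ]

    // Membership is an explicit has() instead of `files[p]` truthiness.
    console.log(files.has('package.json')) // true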

@@ -48,7 +48,7 @@ export function createFuseHandlersFromLockfile (lockfile: LockfileObject, storeD
     cb(-1)
     return
   }
-  const fileInfo = dirEnt.index.files[dirEnt.subPath]
+  const fileInfo = dirEnt.index.files.get(dirEnt.subPath)
   if (!fileInfo) {
     cb(-1)
     return

@@ -112,7 +112,7 @@ export function createFuseHandlersFromLockfile (lockfile: LockfileObject, storeD
   if (dirEnt.entryType === 'index') {
     switch (cafsExplorer.dirEntityType(dirEnt.index, dirEnt.subPath)) {
       case 'file': {
-        const { size, mode } = dirEnt.index.files[dirEnt.subPath]
+        const { size, mode } = dirEnt.index.files.get(dirEnt.subPath)!
         // eslint-disable-next-line n/no-callback-literal
         cb(0, schemas.Stat.file({
           ...STAT_DEFAULT,

@@ -58,10 +58,13 @@ test('patch package with exact version', async () => {

 const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
 const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+expect(filesIndex.sideEffects).toBeTruthy()
 const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
 expect(patchedFileIntegrity).toBeTruthy()
-const originalFileIntegrity = filesIndex.files['index.js'].integrity
+const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
 expect(originalFileIntegrity).toBeTruthy()
 // The integrity of the original file differs from the integrity of the patched file
 expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)

@@ -153,10 +156,13 @@ test('patch package with version range', async () => {

 const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
 const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+expect(filesIndex.sideEffects).toBeTruthy()
 const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
 expect(patchedFileIntegrity).toBeTruthy()
-const originalFileIntegrity = filesIndex.files['index.js'].integrity
+const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
 expect(originalFileIntegrity).toBeTruthy()
 // The integrity of the original file differs from the integrity of the patched file
 expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)

@@ -320,10 +326,13 @@ test('patch package when scripts are ignored', async () => {

 const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
 const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+expect(filesIndex.sideEffects).toBeTruthy()
 const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
 expect(patchedFileIntegrity).toBeTruthy()
-const originalFileIntegrity = filesIndex.files['index.js'].integrity
+const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
 expect(originalFileIntegrity).toBeTruthy()
 // The integrity of the original file differs from the integrity of the patched file
 expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)

@@ -408,10 +417,13 @@ test('patch package when the package is not in onlyBuiltDependencies list', asyn

 const filesIndexFile = path.join(opts.storeDir, 'index/c7/1ccf199e0fdae37aad13946b937d67bcd35fa111b84d21b3a19439cfdc2812-is-positive@1.0.0.v8')
 const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+expect(filesIndex.sideEffects).toBeTruthy()
 const sideEffectsKey = `${ENGINE_NAME};patch=${patchFileHash}`
-const patchedFileIntegrity = filesIndex.sideEffects?.[sideEffectsKey].added?.['index.js']?.integrity
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+const patchedFileIntegrity = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('index.js')?.integrity
 expect(patchedFileIntegrity).toBeTruthy()
-const originalFileIntegrity = filesIndex.files['index.js'].integrity
+const originalFileIntegrity = filesIndex.files.get('index.js')!.integrity
 expect(originalFileIntegrity).toBeTruthy()
 // The integrity of the original file differs from the integrity of the patched file
 expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)
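
The assertion rewrites above all follow one pattern: the optional index chain `sideEffects?.[key].added?.[file]` becomes a `has()` check followed by `get()` calls, with non-null assertions once presence has been verified. A reduced sketch with hypothetical keys and integrity values:

    interface SideEffectsDiff { added?: Map<string, { integrity: string }> }

    const sideEffects = new Map<string, SideEffectsDiff>([
      ['linux-x64;patch=abc', { added: new Map([['index.js', { integrity: 'sha512-xxx' }]]) }],
    ])

    const key = 'linux-x64;patch=abc'
    if (sideEffects.has(key)) {
      // After has() succeeds, get() cannot return undefined, so `!` is sound.
      const added = sideEffects.get(key)!.added
      console.log(added?.get('index.js')?.integrity) // 'sha512-xxx'
    }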

@@ -94,9 +94,13 @@ test('using side effects cache', async () => {
   }),
 },
 })}`
-expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-preinstall.js'])
-expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-delete filesIndex.sideEffects![sideEffectsKey].added?.['generated-by-postinstall.js']
+expect(filesIndex.sideEffects).toBeTruthy()
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+const addedFiles = filesIndex.sideEffects!.get(sideEffectsKey)!.added!
+expect(addedFiles.get('generated-by-preinstall.js')).toBeTruthy()
+expect(addedFiles.get('generated-by-postinstall.js')).toBeTruthy()
+addedFiles.delete('generated-by-postinstall.js')
 fs.writeFileSync(filesIndexFile, v8.serialize(filesIndex))

 rimraf('node_modules')

@@ -182,12 +186,17 @@ test('a postinstall script does not modify the original sources added to the sto

 const filesIndexFile = getIndexFilePathInCafs(opts.storeDir, getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0'), '@pnpm/postinstall-modifies-source@1.0.0')
 const filesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
-const patchedFileIntegrity = filesIndex.sideEffects?.[`${ENGINE_NAME};deps=${hashObject({
+expect(filesIndex.sideEffects).toBeTruthy()
+expect(filesIndex.sideEffects!.has(`${ENGINE_NAME};deps=${hashObject({
   id: `@pnpm/postinstall-modifies-source@1.0.0:${getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0')}`,
   deps: {},
-})}`].added?.['empty-file.txt']?.integrity
+})}`)).toBeTruthy()
+const patchedFileIntegrity = filesIndex.sideEffects!.get(`${ENGINE_NAME};deps=${hashObject({
+  id: `@pnpm/postinstall-modifies-source@1.0.0:${getIntegrity('@pnpm/postinstall-modifies-source', '1.0.0')}`,
+  deps: {},
+})}`)!.added!.get('empty-file.txt')?.integrity
 expect(patchedFileIntegrity).toBeTruthy()
-const originalFileIntegrity = filesIndex.files['empty-file.txt'].integrity
+const originalFileIntegrity = filesIndex.files.get('empty-file.txt')!.integrity
 expect(originalFileIntegrity).toBeTruthy()
 // The integrity of the original file differs from the integrity of the patched file
 expect(originalFileIntegrity).not.toEqual(patchedFileIntegrity)

@@ -218,8 +227,11 @@ test('a corrupted side-effects cache is ignored', async () => {
   },
 })}`

-expect(filesIndex.sideEffects).toHaveProperty([sideEffectsKey, 'added', 'generated-by-preinstall.js'])
-const sideEffectFileStat = filesIndex.sideEffects![sideEffectsKey].added!['generated-by-preinstall.js']
+expect(filesIndex.sideEffects).toBeTruthy()
+expect(filesIndex.sideEffects!.has(sideEffectsKey)).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added).toBeTruthy()
+expect(filesIndex.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-preinstall.js')).toBeTruthy()
+const sideEffectFileStat = filesIndex.sideEffects!.get(sideEffectsKey)!.added!.get('generated-by-preinstall.js')!
 const sideEffectFile = getFilePathByModeInCafs(opts.storeDir, sideEffectFileStat.integrity, sideEffectFileStat.mode)
 expect(fs.existsSync(sideEffectFile)).toBeTruthy()
 rimraf(sideEffectFile) // we remove the side effect file to break the store

@@ -692,10 +692,10 @@ test.each([['isolated'], ['hoisted']])('using side effects cache with nodeLinker
   }),
 },
 })}`
-expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-postinstall.js'])
-delete cacheIntegrity!.sideEffects![sideEffectsKey].added!['generated-by-postinstall.js']
+expect(cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-postinstall.js')).toBeTruthy()
+cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.delete('generated-by-postinstall.js')

-expect(cacheIntegrity).toHaveProperty(['sideEffects', sideEffectsKey, 'added', 'generated-by-preinstall.js'])
+expect(cacheIntegrity!.sideEffects!.get(sideEffectsKey)!.added!.has('generated-by-preinstall.js')).toBeTruthy()
 fs.writeFileSync(cacheIntegrityPath, v8.serialize(cacheIntegrity))

 prefix = f.prepare('side-effects')
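
Mutation follows the same translation: the `delete obj[key]` operator becomes `Map.prototype.delete`. A short sketch with hypothetical entries:

    const added = new Map<string, string>([
      ['generated-by-preinstall.js', 'sha512-aaa'],
      ['generated-by-postinstall.js', 'sha512-bbb'],
    ])

    // `delete added['generated-by-postinstall.js']` becomes a method call.
    added.delete('generated-by-postinstall.js')
    console.log(added.size) // 1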

@@ -265,17 +265,17 @@ async function resolveAndFetch (

 const isInstallable = (
   ctx.force === true ||
   (
     manifest == null
       ? undefined
       : packageIsInstallable(id, manifest, {
         engineStrict: ctx.engineStrict,
         lockfileDir: options.lockfileDir,
         nodeVersion: ctx.nodeVersion,
         optional: wantedDependency.optional === true,
         supportedArchitectures: options.supportedArchitectures,
       })
   )
 )
 // We can skip fetching the package only if the manifest
 // is present after resolution

@@ -505,12 +505,12 @@ function fetchToStore (
 if (opts.fetchRawManifest && !result.fetchRawManifest) {
   result.fetching = removeKeyOnFail(
     result.fetching.then(async ({ files }) => {
-      if (!files.filesIndex['package.json']) return {
+      if (!files.filesIndex.get('package.json')) return {
         files,
         bundledManifest: undefined,
       }
       if (files.unprocessed) {
-        const { integrity, mode } = files.filesIndex['package.json']
+        const { integrity, mode } = files.filesIndex.get('package.json')!
         const manifestPath = ctx.getFilePathByModeInCafs(integrity, mode)
         return {
           files,

@@ -519,7 +519,7 @@ function fetchToStore (
       }
       return {
         files,
-        bundledManifest: await readBundledManifest(files.filesIndex['package.json']),
+        bundledManifest: await readBundledManifest(files.filesIndex.get('package.json')!),
       }
     })
   )

@@ -70,7 +70,7 @@ test('request package', async () => {
 })

 const { files } = await pkgResponse.fetching!()
-expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
 expect(files.resolvedFrom).toBe('remote')
 })

@@ -382,12 +382,12 @@ test('fetchPackageToStore()', async () => {

 const { files, bundledManifest } = await fetchResult.fetching()
 expect(bundledManifest).toBeTruthy() // we always read the bundled manifest
-expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
 expect(files.resolvedFrom).toBe('remote')

 const indexFile = readV8FileStrictSync<PackageFilesIndex>(fetchResult.filesIndexFile)
 expect(indexFile).toBeTruthy()
-expect(typeof indexFile.files['package.json'].checkedAt).toBeTruthy()
+expect(typeof indexFile.files.get('package.json')!.checkedAt).toBeTruthy()

 const fetchResult2 = packageRequester.fetchPackageToStore({
   fetchRawManifest: true,

@@ -470,9 +470,9 @@ test('fetchPackageToStore() concurrency check', async () => {
 const fetchResult = fetchResults[0]
 const { files } = await fetchResult.fetching()

-ino1 = fs.statSync(files.filesIndex['package.json'] as string).ino
+ino1 = fs.statSync(files.filesIndex.get('package.json') as string).ino

-expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
 expect(files.resolvedFrom).toBe('remote')
 }

@@ -480,9 +480,9 @@ test('fetchPackageToStore() concurrency check', async () => {
 const fetchResult = fetchResults[1]
 const { files } = await fetchResult.fetching()

-ino2 = fs.statSync(files.filesIndex['package.json'] as string).ino
+ino2 = fs.statSync(files.filesIndex.get('package.json') as string).ino

-expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
 expect(files.resolvedFrom).toBe('remote')
 }

@@ -549,7 +549,7 @@ test('fetchPackageToStore() does not cache errors', async () => {
   },
 })
 const { files } = await fetchResult.fetching()
-expect(Object.keys(files.filesIndex).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
+expect(Array.from(files.filesIndex.keys()).sort()).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort())
 expect(files.resolvedFrom).toBe('remote')

 expect(nock.isDone()).toBeTruthy()

@@ -699,7 +699,7 @@ test('refetch package to store if it has been modified', async () => {
 })

 const { filesIndex } = (await fetchResult.fetching()).files
-indexJsFile = filesIndex['index.js'] as string
+indexJsFile = filesIndex.get('index.js') as string
 }

 // We should restart the workers otherwise the locker cache will still try to read the file

pnpm-lock.yaml (generated)

@@ -8494,6 +8494,9 @@ importers:
       '@pnpm/types':
         specifier: workspace:*
         version: link:../../packages/types
+      '@pnpm/util.lex-comparator':
+        specifier: 'catalog:'
+        version: 3.0.2
       chalk:
         specifier: 'catalog:'
         version: 5.6.0

@@ -8531,6 +8534,9 @@ importers:
       promise-share:
         specifier: 'catalog:'
         version: 1.0.0
+      ramda:
+        specifier: 'catalog:'
+        version: '@pnpm/ramda@0.28.1'
       uuid:
         specifier: 'catalog:'
         version: 9.0.1

@@ -8556,6 +8562,9 @@ importers:
       '@pnpm/store.cafs':
         specifier: workspace:*
         version: link:../cafs
+      '@types/ramda':
+        specifier: 'catalog:'
+        version: 0.29.12
       '@types/uuid':
         specifier: 'catalog:'
         version: 8.3.4

@@ -134,7 +134,7 @@ test('importPackage hooks', async () => {
 module.exports = { hooks: { importPackage } }

 function importPackage (to, opts) {
-  fs.writeFileSync('args.json', JSON.stringify([to, opts]), 'utf8')
+  fs.writeFileSync('args.json', JSON.stringify([to, Array.from(opts.filesMap.keys()).sort()]), 'utf8')
   return {}
 }
 `

@@ -147,10 +147,10 @@ test('importPackage hooks', async () => {

 await execPnpm(['add', 'is-positive@1.0.0'])

-const [to, opts] = loadJsonFileSync<any>('args.json') // eslint-disable-line
+const [to, files] = loadJsonFileSync<any>('args.json') // eslint-disable-line

 expect(typeof to).toBe('string')
-expect(Object.keys(opts.filesMap).sort()).toStrictEqual([
+expect(files).toStrictEqual([
   'index.js',
   'license',
   'package.json',

@@ -160,7 +160,7 @@ test("don't fail on case insensitive filesystems when package has 2 files with s
 project.has('@pnpm.e2e/with-same-file-in-different-cases')

 const { files: integrityFile } = readV8FileStrictSync<PackageFilesIndex>(project.getPkgIndexFilePath('@pnpm.e2e/with-same-file-in-different-cases', '1.0.0'))
-const packageFiles = Object.keys(integrityFile).sort()
+const packageFiles = Array.from(integrityFile.keys()).sort()

 expect(packageFiles).toStrictEqual(['Foo.js', 'foo.js', 'package.json'])
 const files = fs.readdirSync('node_modules/@pnpm.e2e/with-same-file-in-different-cases')

@@ -155,7 +155,7 @@ async function parseLicense (
 pkg: {
   manifest: PackageManifest
   files:
-    | { local: true, files: Record<string, string> }
+    | { local: true, files: Map<string, string> }
     | { local: false, files: PackageFiles }
 },
 opts: { storeDir: string }

@@ -173,9 +173,9 @@ async function parseLicense (
 // check if we discovered a license, if not attempt to parse the LICENSE file
 if (!license || /see license/i.test(license)) {
   const { files: pkgFileIndex } = pkg.files
-  const licenseFile = LICENSE_FILES.find((licenseFile) => licenseFile in pkgFileIndex)
+  const licenseFile = LICENSE_FILES.find((licenseFile) => pkgFileIndex.has(licenseFile))
   if (licenseFile) {
-    const licensePackageFileInfo = pkgFileIndex[licenseFile]
+    const licensePackageFileInfo = pkgFileIndex.get(licenseFile)
     let licenseContents: Buffer | undefined
     if (pkg.files.local) {
       licenseContents = await readFile(licensePackageFileInfo as string)

@@ -216,7 +216,7 @@ async function readLicenseFileFromCafs (storeDir: string, { integrity, mode }: P

 export type ReadPackageIndexFileResult =
   | { local: false, files: PackageFiles }
-  | { local: true, files: Record<string, string> }
+  | { local: true, files: Map<string, string> }

 export interface ReadPackageIndexFileOptions {
   storeDir: string

@@ -344,13 +344,10 @@ export async function getPkgInfo (
 // Fetch the package manifest
 let packageManifestDir!: string
 if (packageFileIndexInfo.local) {
-  packageManifestDir = packageFileIndexInfo.files['package.json']
+  packageManifestDir = packageFileIndexInfo.files.get('package.json') as string
 } else {
-  const packageFileIndex = packageFileIndexInfo.files as Record<
-    string,
-    PackageFileInfo
-  >
-  const packageManifestFile = packageFileIndex['package.json']
+  const packageFileIndex = packageFileIndexInfo.files
+  const packageManifestFile = packageFileIndex.get('package.json') as PackageFileInfo
   packageManifestDir = getFilePathByModeInCafs(
     opts.storeDir,
     packageManifestFile.integrity,

@@ -1,7 +1,7 @@
 import type { IntegrityLike } from 'ssri'
 import type { DependencyManifest } from '@pnpm/types'

-export type PackageFiles = Record<string, PackageFileInfo>
+export type PackageFiles = Map<string, PackageFileInfo>

 export interface PackageFileInfo {
   checkedAt?: number // Nullable for backward compatibility

@@ -10,7 +10,7 @@ export interface PackageFileInfo {
   size: number
 }

-export type SideEffects = Record<string, SideEffectsDiff>
+export type SideEffects = Map<string, SideEffectsDiff>

 export interface SideEffectsDiff {
   deleted?: string[]

@@ -26,7 +26,7 @@ export type PackageFilesResponse = {
   requiresBuild: boolean
 } & ({
   unprocessed?: false
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
 } | {
   unprocessed: true
   filesIndex: PackageFiles

@@ -53,12 +53,10 @@ export type ImportPackageFunctionAsync = (

 export type FileType = 'exec' | 'nonexec' | 'index'

-export interface FilesIndex {
-  [filename: string]: {
-    mode: number
-    size: number
-  } & FileWriteResult
-}
+export type FilesIndex = Map<string, {
+  mode: number
+  size: number
+} & FileWriteResult>

 export interface FileWriteResult {
   checkedAt: number
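
At the type level the migration swaps `Record<string, V>` aliases for `Map<string, V>` and turns the indexed `FilesIndex` interface into a Map type alias. A simplified sketch of how consumer code reads under the new shapes (reduced field set, hypothetical values):

    interface PackageFileInfo { integrity: string, mode: number, size: number }

    // Old: an object type indexed by file name.
    type PackageFilesOld = Record<string, PackageFileInfo>
    const legacy: PackageFilesOld = { 'package.json': { integrity: 'sha512-xxx', mode: 420, size: 100 } }

    // New: the same key/value structure as a Map; Object.entries bridges old data in.
    type PackageFiles = Map<string, PackageFileInfo>
    const files: PackageFiles = new Map(Object.entries(legacy))

    console.log(files.get('package.json')?.mode) // 420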

@@ -18,7 +18,7 @@ export function addFilesFromDir (
   readManifest?: boolean
 } = {}
 ): AddToStoreResult {
-  const filesIndex: FilesIndex = {}
+  const filesIndex = new Map() as FilesIndex
   let manifest: DependencyManifest | undefined
   let files: File[]
   if (opts.files) {

@@ -50,11 +50,11 @@ export function addFilesFromDir (
   }
   // Remove the file type information (regular file, directory, etc.) and leave just the permission bits (rwx for owner, group, and others)
   const mode = stat.mode & 0o777
-  filesIndex[relativePath] = {
+  filesIndex.set(relativePath, {
     mode,
     size: stat.size,
     ...addBuffer(buffer, mode),
-  }
+  })
 }
 return { manifest, filesIndex }
 }

@@ -18,7 +18,7 @@ export function addFilesFromTarball (
 const ignore = _ignore ?? (() => false)
 const tarContent = isGzip(tarballBuffer) ? gunzipSync(tarballBuffer) : (Buffer.isBuffer(tarballBuffer) ? tarballBuffer : Buffer.from(tarballBuffer))
 const { files } = parseTarball(tarContent)
-const filesIndex: FilesIndex = {}
+const filesIndex = new Map() as FilesIndex
 let manifestBuffer: Buffer | undefined

 for (const [relativePath, { mode, offset, size }] of files) {

@@ -28,11 +28,11 @@ export function addFilesFromTarball (
   if (readManifest && relativePath === 'package.json') {
     manifestBuffer = fileBuffer
   }
-  filesIndex[relativePath] = {
+  filesIndex.set(relativePath, {
     mode,
     size,
     ...addBufferToCafs(fileBuffer, mode),
-  }
+  })
 }
 return {
   filesIndex,

@@ -49,11 +49,11 @@ export function checkPkgFilesIntegrity (
 // We verify all side effects cache. We could optimize it to verify only the side effects cache
 // that satisfies the current os/arch/platform.
 // However, it likely won't make a big difference.
-for (const [sideEffectName, { added }] of Object.entries(pkgIndex.sideEffects)) {
+for (const [sideEffectName, { added }] of pkgIndex.sideEffects) {
   if (added) {
     const { passed } = _checkFilesIntegrity(added)
     if (!passed) {
-      delete pkgIndex.sideEffects![sideEffectName]
+      pkgIndex.sideEffects!.delete(sideEffectName)
     }
   }
 }

@@ -69,7 +69,7 @@ function checkFilesIntegrity (
 ): VerifyResult {
   let allVerified = true
   let manifest: DependencyManifest | undefined
-  for (const [f, fstat] of Object.entries(files)) {
+  for (const [f, fstat] of files) {
     if (!fstat.integrity) {
       throw new Error(`Integrity checksum is missing for ${f}`)
     }

@@ -5,7 +5,7 @@ import { getFilePathByModeInCafs } from './getFilePathInCafs.js'
 import { parseJsonBufferSync } from './parseJson.js'

 export function readManifestFromStore (storeDir: string, pkgIndex: PackageFilesIndex): PackageManifest | undefined {
-  const pkg = pkgIndex.files['package.json']
+  const pkg = pkgIndex.files.get('package.json')
   if (pkg) {
     const fileName = getFilePathByModeInCafs(storeDir, pkg.integrity, pkg.mode)
     return parseJsonBufferSync(gfs.readFileSync(fileName)) as PackageManifest

@@ -18,12 +18,12 @@ describe('cafs', () => {
 const { filesIndex } = cafs.addFilesFromTarball(
   fs.readFileSync(f.find('node-gyp-6.1.0.tgz'))
 )
-expect(Object.keys(filesIndex)).toHaveLength(121)
-const pkgFile = filesIndex['package.json']
-expect(pkgFile.size).toBe(1121)
-expect(pkgFile.mode).toBe(420)
-expect(typeof pkgFile.checkedAt).toBe('number')
-expect(pkgFile.integrity.toString()).toBe('sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==')
+expect(filesIndex.size).toBe(121)
+const pkgFile = filesIndex.get('package.json')
+expect(pkgFile!.size).toBe(1121)
+expect(pkgFile!.mode).toBe(420)
+expect(typeof pkgFile!.checkedAt).toBe('number')
+expect(pkgFile!.integrity.toString()).toBe('sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==')
 })

 it('replaces an already existing file, if the integrity of it was broken', () => {

@@ -34,7 +34,7 @@ describe('cafs', () => {
 let addFilesResult = addFiles()

 // Modifying the file in the store
-const filePath = getFilePathByModeInCafs(storeDir, addFilesResult.filesIndex['foo.txt'].integrity, 420)
+const filePath = getFilePathByModeInCafs(storeDir, addFilesResult.filesIndex.get('foo.txt')!.integrity, 420)
 fs.appendFileSync(filePath, 'bar')

 addFilesResult = addFiles()

@@ -48,7 +48,7 @@ describe('cafs', () => {
 const addFiles = () => createCafs(storeDir).addFilesFromDir(srcDir)

 const { filesIndex } = addFiles()
-expect(filesIndex['subdir/should-exist.txt']).toBeDefined()
+expect(filesIndex.get('subdir/should-exist.txt')).toBeDefined()
 })

 it('symlinks are resolved and added as regular files', async () => {

@@ -63,10 +63,10 @@ describe('cafs', () => {
 await symlinkDir(path.join(srcDir, 'lib'), path.join(srcDir, 'lib-symlink'))

 const { filesIndex } = createCafs(storeDir).addFilesFromDir(srcDir)
-expect(filesIndex['symlink.js']).toBeDefined()
-expect(filesIndex['symlink.js']).toStrictEqual(filesIndex['index.js'])
-expect(filesIndex['lib/index.js']).toBeDefined()
-expect(filesIndex['lib/index.js']).toStrictEqual(filesIndex['lib-symlink/index.js'])
+expect(filesIndex.get('symlink.js')).toBeDefined()
+expect(filesIndex.get('symlink.js')).toStrictEqual(filesIndex.get('index.js'))
+expect(filesIndex.get('lib/index.js')).toBeDefined()
+expect(filesIndex.get('lib/index.js')).toStrictEqual(filesIndex.get('lib-symlink/index.js'))
 })
 })

@@ -74,13 +74,13 @@ describe('checkPkgFilesIntegrity()', () => {
 it("doesn't fail if file was removed from the store", () => {
 const storeDir = temporaryDirectory()
 expect(checkPkgFilesIntegrity(storeDir, {
-  files: {
-    foo: {
+  files: new Map([
+    ['foo', {
       integrity: 'sha512-8xCvrlC7W3TlwXxetv5CZTi53szYhmT7tmpXF/ttNthtTR9TC7Y7WJFPmJToHaSQ4uObuZyOARdOJYNYuTSbXA==',
       mode: 420,
       size: 10,
-    },
-  },
+    }],
+  ]),
 }).passed).toBeFalsy()
 })
 })

@@ -91,7 +91,7 @@ test('file names are normalized when unpacking a tarball', () => {
 const { filesIndex } = cafs.addFilesFromTarball(
   fs.readFileSync(f.find('colorize-semver-diff.tgz'))
 )
-expect(Object.keys(filesIndex).sort()).toStrictEqual([
+expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
   'LICENSE',
   'README.md',
   'lib/index.d.ts',

@@ -114,7 +114,7 @@ test('unpack an older version of tar that prefixes with spaces', () => {
 const { filesIndex } = cafs.addFilesFromTarball(
   fs.readFileSync(f.find('parsers-3.0.0-rc.48.1.tgz'))
 )
-expect(Object.keys(filesIndex).sort()).toStrictEqual([
+expect(Array.from(filesIndex.keys()).sort()).toStrictEqual([
   'lib/grammars/resolution.d.ts',
   'lib/grammars/resolution.js',
   'lib/grammars/resolution.pegjs',

@@ -142,7 +142,7 @@ test('unpack a tarball that contains hard links', () => {
 const { filesIndex } = cafs.addFilesFromTarball(
   fs.readFileSync(f.find('vue.examples.todomvc.todo-store-0.0.1.tgz'))
 )
-expect(Object.keys(filesIndex).length).toBeGreaterThan(0)
+expect(filesIndex.size).toBeGreaterThan(0)
 })

 // Related issue: https://github.com/pnpm/pnpm/issues/7120

@@ -152,5 +152,5 @@ test('unpack should not fail when the tarball format seems to be not USTAR or GN
 const { filesIndex } = cafs.addFilesFromTarball(
   fs.readFileSync(f.find('devextreme-17.1.6.tgz'))
 )
-expect(Object.keys(filesIndex).length).toBeGreaterThan(0)
+expect(filesIndex.size).toBeGreaterThan(0)
 })

@@ -15,7 +15,6 @@ import {
 } from '@pnpm/store-controller-types'
 import memoize from 'mem'
 import pathTemp from 'path-temp'
-import { map as mapValues } from 'ramda'

 export { type CafsLocker }

@@ -83,13 +82,13 @@ function getFlatMap (
 storeDir: string,
 filesResponse: PackageFilesResponse,
 targetEngine?: string
-): { filesMap: Record<string, string>, isBuilt: boolean } {
+): { filesMap: Map<string, string>, isBuilt: boolean } {
 let isBuilt!: boolean
 let filesIndex!: PackageFiles
-if (targetEngine && ((filesResponse.sideEffects?.[targetEngine]) != null)) {
-  filesIndex = applySideEffectsDiff(filesResponse.filesIndex as PackageFiles, filesResponse.sideEffects?.[targetEngine])
+if (targetEngine && filesResponse.sideEffects?.has(targetEngine)) {
+  filesIndex = applySideEffectsDiff(filesResponse.filesIndex as PackageFiles, filesResponse.sideEffects.get(targetEngine)!)
   isBuilt = true
-} else if (!filesResponse.unprocessed) {
+} else if (filesResponse.unprocessed !== true) {
   return {
     filesMap: filesResponse.filesIndex,
     isBuilt: false,

@@ -98,15 +97,18 @@ function getFlatMap (
   filesIndex = filesResponse.filesIndex
   isBuilt = false
 }
-const filesMap = mapValues(({ integrity, mode }) => getFilePathByModeInCafs(storeDir, integrity, mode), filesIndex)
+const filesMap = new Map<string, string>()
+for (const [fileName, { integrity, mode }] of filesIndex) {
+  filesMap.set(fileName, getFilePathByModeInCafs(storeDir, integrity, mode))
+}
 return { filesMap, isBuilt }
 }

 function applySideEffectsDiff (baseFiles: PackageFiles, { added, deleted }: SideEffectsDiff): PackageFiles {
-  const filesWithSideEffects: PackageFiles = { ...added }
-  for (const fileName in baseFiles) {
-    if (!deleted?.includes(fileName) && !filesWithSideEffects[fileName]) {
-      filesWithSideEffects[fileName] = baseFiles[fileName]
+  const filesWithSideEffects: PackageFiles = new Map(added)
+  for (const [fileName, fileInfo] of baseFiles) {
+    if (!deleted?.includes(fileName) && !filesWithSideEffects.has(fileName)) {
+      filesWithSideEffects.set(fileName, fileInfo)
     }
   }
   return filesWithSideEffects
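
With `filesIndex` a Map, ramda's `map` (aliased `mapValues`) no longer applies, so the value transformation in `getFlatMap` above becomes an explicit loop. A standalone sketch with a hypothetical stand-in for `getFilePathByModeInCafs`:

    const filesIndex = new Map([
      ['index.js', { integrity: 'sha512-xxx', mode: 420 }],
    ])

    // Hypothetical stand-in for getFilePathByModeInCafs(storeDir, integrity, mode).
    const toPath = (integrity: string, mode: number) => `/store/${integrity}-${mode.toString(8)}`

    // mapValues(fn, record) becomes a plain for..of over Map entries.
    const filesMap = new Map<string, string>()
    for (const [fileName, { integrity, mode }] of filesIndex) {
      filesMap.set(fileName, toPath(integrity, mode))
    }
    console.log(filesMap.get('index.js')) // '/store/sha512-xxx-644'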

@@ -76,7 +76,7 @@ export async function prune ({ cacheDir, storeDir }: PruneOptions, removeAlienFi
 await Promise.all(pkgIndexFiles.map(async (pkgIndexFilePath) => {
   const { files: pkgFilesIndex } = await readV8FileStrictAsync<PackageFilesIndex>(pkgIndexFilePath)
   // TODO: implement prune of Node.js packages, they don't have a package.json file
-  if (pkgFilesIndex['package.json'] && removedHashes.has(pkgFilesIndex['package.json'].integrity)) {
+  if (pkgFilesIndex.has('package.json') && removedHashes.has(pkgFilesIndex.get('package.json')!.integrity)) {
     await fs.unlink(pkgIndexFilePath)
     pkgCounter++
   }

@@ -42,6 +42,7 @@
 "@pnpm/store-path": "workspace:*",
 "@pnpm/store.cafs": "workspace:*",
 "@pnpm/types": "workspace:*",
+"@pnpm/util.lex-comparator": "catalog:",
 "chalk": "catalog:",
 "render-help": "catalog:"
 },

@@ -1,4 +1,5 @@
 import { type Config } from '@pnpm/config'
+import util from 'util'
 import { createResolver } from '@pnpm/client'
 import { type TarballResolution } from '@pnpm/lockfile.types'

@@ -8,6 +9,7 @@ import { sortDeepKeys } from '@pnpm/object.key-sorting'
 import { getStorePath } from '@pnpm/store-path'
 import { getIndexFilePathInCafs, type PackageFilesIndex } from '@pnpm/store.cafs'
 import { parseWantedDependency } from '@pnpm/parse-wanted-dependency'
+import { lexCompare } from '@pnpm/util.lex-comparator'

 import renderHelp from 'render-help'

@@ -87,7 +89,7 @@ export async function handler (opts: CatIndexCommandOptions, params: string[]):
 )
 try {
   const pkgFilesIndex = await readV8FileStrictAsync<PackageFilesIndex>(filesIndexFile)
-  return JSON.stringify(sortDeepKeys(pkgFilesIndex), null, 2)
+  return JSON.stringify(sortDeepKeys(pkgFilesIndex), replacer, 2)
 } catch {
   throw new PnpmError(
     'INVALID_PACKAGE',

@@ -95,3 +97,12 @@ export async function handler (opts: CatIndexCommandOptions, params: string[]):
   )
 }
 }

+function replacer (key: string, value: unknown) {
+  if (util.types.isMap(value)) {
+    const entries = Array.from((value as Map<string, unknown>).entries())
+    entries.sort(([key1], [key2]) => lexCompare(key1, key2))
+    return Object.fromEntries(entries)
+  }
+  return value
+}
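
`JSON.stringify` renders a Map as `{}`, so `cat-index` now supplies a replacer that expands each Map into a key-sorted plain object. The same idea in isolation, using a plain `localeCompare` where the command uses `lexCompare`:

    import util from 'util'

    function replacer (_key: string, value: unknown) {
      if (util.types.isMap(value)) {
        const entries = Array.from((value as Map<string, unknown>).entries())
        entries.sort(([a], [b]) => a.localeCompare(b))
        return Object.fromEntries(entries) // Maps become sorted plain objects
      }
      return value
    }

    const index = { files: new Map([['b.js', 1], ['a.js', 2]]) }
    console.log(JSON.stringify(index, replacer, 2)) // "files": { "a.js": 2, "b.js": 1 }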

@@ -1,5 +1,6 @@
 import path from 'path'
 import fs from 'fs'
+import util from 'util'
 import chalk from 'chalk'

 import { type Config } from '@pnpm/config'

@@ -69,19 +70,21 @@ export async function handler (opts: FindHashCommandOptions, params: string[]):
   continue
 }

-for (const [, file] of Object.entries(pkgFilesIndex.files)) {
-  if (file?.integrity === hash) {
-    result.push({ name: pkgFilesIndex.name ?? 'unknown', version: pkgFilesIndex?.version ?? 'unknown', filesIndexFile: filesIndexFile.replace(indexDir, '') })
+if (util.types.isMap(pkgFilesIndex.files)) {
+  for (const [, file] of pkgFilesIndex.files) {
+    if (file?.integrity === hash) {
+      result.push({ name: pkgFilesIndex.name ?? 'unknown', version: pkgFilesIndex?.version ?? 'unknown', filesIndexFile: filesIndexFile.replace(indexDir, '') })

       // a package is only found once.
       continue
+    }
   }
 }

-if (pkgFilesIndex?.sideEffects) {
-  for (const { added } of Object.values(pkgFilesIndex.sideEffects)) {
+if (pkgFilesIndex?.sideEffects && util.types.isMap(pkgFilesIndex.sideEffects)) {
+  for (const { added } of pkgFilesIndex.sideEffects.values()) {
     if (!added) continue
-    for (const file of Object.values(added)) {
+    for (const file of added.values()) {
       if (file?.integrity === hash) {
         result.push({ name: pkgFilesIndex.name ?? 'unknown', version: pkgFilesIndex?.version ?? 'unknown', filesIndexFile: filesIndexFile.replace(indexDir, '') })

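
Old index files on disk may still hold plain objects, so `find-hash` guards with `util.types.isMap` before iterating. A sketch of the defensive read (hypothetical shapes and hashes):

    import util from 'util'

    function containsIntegrity (files: unknown, hash: string): boolean {
      // Iterate only when the deserialized value really is a Map;
      // indexes written by older pnpm versions store plain objects.
      if (!util.types.isMap(files)) return false
      for (const [, file] of files as Map<string, { integrity?: string }>) {
        if (file?.integrity === hash) return true
      }
      return false
    }

    console.log(containsIntegrity(new Map([['a.js', { integrity: 'h1' }]]), 'h1')) // true
    console.log(containsIntegrity({ 'a.js': { integrity: 'h1' } }, 'h1')) // false: legacy object shape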

@@ -53,7 +53,7 @@ export async function storeStatus (maybeOpts: StoreStatusOptions): Promise<strin
   ? getIndexFilePathInCafs(storeDir, integrity, id)
   : path.join(storeDir, dp.depPathToFilename(id, maybeOpts.virtualStoreDirMaxLength), 'integrity.json')
 const { files } = await readV8FileStrictAsync<PackageFilesIndex>(pkgIndexFilePath)
-return (await dint.check(path.join(virtualStoreDir, dp.depPathToFilename(depPath, maybeOpts.virtualStoreDirMaxLength), 'node_modules', name), files)) === false
+return (await dint.check(path.join(virtualStoreDir, dp.depPathToFilename(depPath, maybeOpts.virtualStoreDirMaxLength), 'node_modules', name), Object.fromEntries(files))) === false
 }, { concurrency: 8 })

 if ((reporter != null) && typeof reporter === 'function') {
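
`dint.check` still expects a plain object, so the call above converts the Map back with `Object.fromEntries`. The bridge in isolation, against a hypothetical Record-taking checker:

    const files = new Map([['index.js', { integrity: 'sha512-xxx' }]])

    // Stand-in for an API that still takes a Record, like dint.check's file index.
    function check (_dir: string, fileIndex: Record<string, { integrity: string }>): boolean {
      return 'index.js' in fileIndex
    }

    // Object.fromEntries consumes the Map's [key, value] pairs directly.
    console.log(check('pkg', Object.fromEntries(files))) // true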

@@ -37,6 +37,7 @@
 "@pnpm/types": "workspace:*",
 "p-limit": "catalog:",
 "promise-share": "catalog:",
+"ramda": "catalog:",
 "uuid": "catalog:"
 },
 "peerDependencies": {

@@ -50,6 +51,7 @@
 "@pnpm/package-store": "workspace:*",
 "@pnpm/server": "workspace:*",
 "@pnpm/store.cafs": "workspace:*",
+"@types/ramda": "catalog:",
 "@types/uuid": "catalog:",
 "@zkochan/rimraf": "catalog:",
 "get-port": "catalog:",
|
|||||||
@@ -11,6 +11,8 @@ import {
|
|||||||
|
|
||||||
import pLimit from 'p-limit'
|
import pLimit from 'p-limit'
|
||||||
import pShare from 'promise-share'
|
import pShare from 'promise-share'
|
||||||
|
import { omit } from 'ramda'
|
||||||
|
import v8 from 'v8'
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
|
|
||||||
export interface StoreServerController extends StoreController {
|
export interface StoreServerController extends StoreController {
|
||||||
@@ -66,8 +68,8 @@ function limitFetch<T>(limit: (fn: () => PromiseLike<T>) => Promise<T>, url: str
     url = url.replace('http://unix:', 'unix:')
   }
   const response = await fetch(url, {
-    body: JSON.stringify(body),
-    headers: { 'Content-Type': 'application/json' },
+    body: v8.serialize(body),
+    headers: { 'Content-Type': 'application/octet-stream' },
     method: 'POST',
     retry: {
       retries: 100,
@@ -76,7 +78,8 @@ function limitFetch<T>(limit: (fn: () => PromiseLike<T>) => Promise<T>, url: str
   if (!response.ok) {
     throw await response.json()
   }
-  const json = await response.json() as any // eslint-disable-line
+  const arrayBuffer = await response.arrayBuffer()
+  const json = v8.deserialize(Buffer.from(arrayBuffer)) as any // eslint-disable-line
   if (json.error) {
     throw json.error
   }
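The hunks above switch the client half of the store server protocol from JSON to Node's v8 serializer. The motivation follows from the commit's theme: the structured-clone format used by v8.serialize round-trips Map values, which JSON.stringify silently flattens. A minimal standalone sketch (the store path value is hypothetical):

    import v8 from 'v8'

    const index = new Map([['package.json', '/store/v10/files/00/abc']])
    JSON.stringify(index)           // '{}': the Map entries are lost
    const buf = v8.serialize(index) // Buffer in the structured-clone format
    v8.deserialize(buf)             // Map(1) { 'package.json' => '/store/v10/files/00/abc' }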
@@ -93,7 +96,7 @@ async function requestPackage (
   const msgId = uuidv4()
   const packageResponseBody = await limitedFetch(`${remotePrefix}/requestPackage`, {
     msgId,
-    options,
+    options: omit(['allowBuild', 'onFetchError'], options),
     wantedDependency,
   })
   if (options.skipFetch === true) {
@@ -121,7 +124,7 @@ async function fetchPackage (

   const fetchResponseBody = await limitedFetch(`${remotePrefix}/fetchPackage`, {
     msgId,
-    options,
+    options: omit(['allowBuild', 'onFetchError'], options),
   }) as object & { filesIndexFile: string, inStoreLocation: string }
   const fetching = limitedFetch(`${remotePrefix}/packageFilesResponse`, {
     msgId,
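Both request bodies now strip allowBuild and onFetchError before serialization. A hedged reading: JSON.stringify used to drop function-valued properties silently, while v8.serialize throws when it meets one, so function-valued options have to be removed explicitly. A sketch with a hypothetical options object:

    import v8 from 'v8'
    import { omit } from 'ramda'

    const options = { skipFetch: false, onFetchError: (err: Error) => err } // hypothetical shape
    // v8.serialize(options) would throw here: functions cannot be structured-cloned
    const body = v8.serialize(omit(['onFetchError'], options)) // fine: only clonable values remain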
@@ -1,6 +1,7 @@
 import assert from 'assert'
 import http, { type IncomingMessage, type Server, type ServerResponse } from 'http'
 import util from 'util'
+import v8 from 'v8'
 import { globalInfo } from '@pnpm/logger'
 import {
   type PkgRequestFetchResult,
@@ -57,14 +58,16 @@ export function createServer (
     }

     const bodyPromise = new Promise<RequestBody>((resolve, reject) => {
-      let body: any = '' // eslint-disable-line
-      req.on('data', (data) => {
-        body += data
+      const chunks: Buffer[] = []
+      req.on('data', (chunk) => {
+        chunks.push(chunk)
       })
       req.on('end', async () => {
         try {
-          if (body.length > 0) {
-            body = JSON.parse(body)
+          const bodyBuffer = Buffer.concat(chunks)
+          let body: any // eslint-disable-line
+          if (bodyBuffer.byteLength > 0) {
+            body = v8.deserialize(bodyBuffer)
           } else {
             body = {}
           }
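On the receiving side the body can no longer be accumulated as a string, because the v8 wire format is binary: chunks are collected as Buffers and concatenated before deserializing. A stripped-down sketch of the same receive-and-respond pattern (the echo handler is illustrative, not the actual route table):

    import http from 'http'
    import v8 from 'v8'

    http.createServer((req, res) => {
      const chunks: Buffer[] = []
      req.on('data', (chunk: Buffer) => { chunks.push(chunk) })
      req.on('end', () => {
        const bodyBuffer = Buffer.concat(chunks)
        const body = bodyBuffer.byteLength > 0 ? v8.deserialize(bodyBuffer) : {}
        res.end(v8.serialize({ echoed: body })) // reply in the same binary format
      })
    }).listen(0) // ephemeral port, for the sketch only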
@@ -85,10 +88,10 @@ export function createServer (
             if (pkgResponse.fetching) {
               filesPromises[body.msgId] = pkgResponse.fetching
             }
-            res.end(JSON.stringify(pkgResponse.body))
+            res.end(v8.serialize(pkgResponse.body))
           } catch (err: unknown) {
             assert(util.types.isNativeError(err))
-            res.end(JSON.stringify({
+            res.end(v8.serialize({
               error: {
                 message: err.message,
                 ...JSON.parse(JSON.stringify(err)),
@@ -102,10 +105,10 @@ export function createServer (
             body = await bodyPromise
             const pkgResponse = (store.fetchPackage as FetchPackageToStoreFunction)(body.options as any) // eslint-disable-line
             filesPromises[body.msgId] = pkgResponse.fetching
-            res.end(JSON.stringify({ filesIndexFile: pkgResponse.filesIndexFile }))
+            res.end(v8.serialize({ filesIndexFile: pkgResponse.filesIndexFile }))
           } catch (err: unknown) {
             assert(util.types.isNativeError(err))
-            res.end(JSON.stringify({
+            res.end(v8.serialize({
               error: {
                 message: err.message,
                 ...JSON.parse(JSON.stringify(err)),
@@ -118,7 +121,7 @@ export function createServer (
           body = await bodyPromise
           const filesResponse = await filesPromises[body.msgId]()
           delete filesPromises[body.msgId]
-          res.end(JSON.stringify(filesResponse))
+          res.end(v8.serialize(filesResponse))
           break
         }
         case '/prune':
@@ -129,7 +132,7 @@ export function createServer (
         case '/importPackage': {
           const importPackageBody = (await bodyPromise) as any // eslint-disable-line @typescript-eslint/no-explicit-any
           await store.importPackage(importPackageBody.to, importPackageBody.opts)
-          res.end(JSON.stringify('OK'))
+          res.end(v8.serialize('OK'))
           break
         }
         case '/upload': {
@@ -141,7 +144,7 @@ export function createServer (
           }
           const uploadBody = (await bodyPromise) as any // eslint-disable-line @typescript-eslint/no-explicit-any
           await lock(uploadBody.builtPkgLocation, async () => store.upload(uploadBody.builtPkgLocation, uploadBody.opts))
-          res.end(JSON.stringify('OK'))
+          res.end(v8.serialize('OK'))
           break
         }
         case '/stop':
@@ -152,20 +155,20 @@ export function createServer (
           }
           globalInfo('Got request to stop the server')
           await close()
-          res.end(JSON.stringify('OK'))
+          res.end(v8.serialize('OK'))
           globalInfo('Server stopped')
           break
         default: {
           res.statusCode = 404
           const error = { error: `${req.url!} does not match any route` }
-          res.end(JSON.stringify(error))
+          res.end(v8.serialize(error))
         }
       }
     } catch (e: any) { // eslint-disable-line
       res.statusCode = 503
       const jsonErr = JSON.parse(JSON.stringify(e))
       jsonErr.message = e.message
-      res.end(JSON.stringify(jsonErr))
+      res.end(v8.serialize(jsonErr))
     }
   })

@@ -72,7 +72,7 @@ test('server', async () => {
   expect(response.body.manifest!.version).toBe('1.0.0')

   expect(files.resolvedFrom).toBe('remote')
-  expect(files.filesIndex).toHaveProperty(['package.json'])
+  expect(files.filesIndex.has('package.json')).toBeTruthy()

   await server.close()
   await storeCtrl.close()
@@ -112,7 +112,7 @@ test('fetchPackage', async () => {
   expect(bundledManifest).toBeTruthy()

   expect(files.resolvedFrom).toBe('remote')
-  expect(files.filesIndex).toHaveProperty(['package.json'])
+  expect(files.filesIndex.has('package.json')).toBeTruthy()

   await server.close()
   await storeCtrl.close()
@@ -177,7 +177,7 @@ test('server upload', async () => {
   fs.writeFileSync(filesIndexFile, v8.serialize({
     name: 'fake-pkg',
     version: '1.0.0',
-    files: {},
+    files: new Map(),
   }))

   await storeCtrl.upload(path.join(import.meta.dirname, '__fixtures__/side-effect-fake-dir'), {
@@ -186,7 +186,7 @@ test('server upload', async () => {
   })

   const cacheIntegrity = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
-  expect(Object.keys(cacheIntegrity.sideEffects![fakeEngine].added!).sort()).toStrictEqual(['side-effect.js', 'side-effect.txt'])
+  expect(Array.from(cacheIntegrity.sideEffects!.get(fakeEngine)!.added!.keys()).sort()).toStrictEqual(['side-effect.js', 'side-effect.txt'])

   await server.close()
   await storeCtrl.close()
@@ -337,7 +337,7 @@ test('server route not found', async () => {
   const response = await fetch(`${remotePrefix}/a-random-endpoint`, { method: 'POST' })
   // Ensure error is correct
   expect(response.status).toBe(404)
-  expect((await response.json() as any).error).toBeTruthy() // eslint-disable-line
+  expect((v8.deserialize(Buffer.from(await response.arrayBuffer())) as any).error).toBeTruthy() // eslint-disable-line

   await server.close()
   await storeCtrlForServer.close()
@@ -175,7 +175,7 @@ export interface PackageResponse {
   )
 }

-export type FilesMap = Record<string, string>
+export type FilesMap = Map<string, string>

 export interface ImportOptions {
   disableRelinkLocalDirDeps?: boolean
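This type change drives most of the mechanical edits in the rest of the commit: bracket reads become .get, writes become .set, existence checks become .has, and key listing goes through the iterator. A quick reference sketch of the recurring before/after pattern:

    const filesMap: Map<string, string> = new Map()
    filesMap.set('package.json', '/real/path/package.json') // was: filesMap['package.json'] = '/real/path/package.json'
    filesMap.has('package.json')                            // was: Boolean(filesMap['package.json'])
    Array.from(filesMap.keys()).sort()                      // was: Object.keys(filesMap).sort()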
@@ -67,7 +67,7 @@ function availableParallelism (): number {
 }

 interface AddFilesResult {
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   manifest: DependencyManifest
   requiresBuild: boolean
   integrity?: string
@@ -80,7 +80,7 @@ export async function addFilesFromDir (opts: AddFilesFromDirOptions): Promise<Ad
     workerPool = createTarballWorkerPool()
   }
   const localWorker = await workerPool.checkoutWorkerAsync(true)
-  return new Promise<{ filesIndex: Record<string, string>, manifest: DependencyManifest, requiresBuild: boolean }>((resolve, reject) => {
+  return new Promise<{ filesIndex: Map<string, string>, manifest: DependencyManifest, requiresBuild: boolean }>((resolve, reject) => {
     localWorker.once('message', ({ status, error, value }) => {
       workerPool!.checkinWorker(localWorker)
       if (status === 'error') {
@@ -81,7 +81,10 @@ async function handleMessage (
       let { storeDir, filesIndexFile, readManifest, verifyStoreIntegrity } = message
       let pkgFilesIndex: PackageFilesIndex | undefined
       try {
-        pkgFilesIndex = readV8FileStrictSync(filesIndexFile)
+        pkgFilesIndex = readV8FileStrictSync<PackageFilesIndex>(filesIndexFile)
+        if (pkgFilesIndex?.files && !(pkgFilesIndex.files instanceof Map)) {
+          pkgFilesIndex = undefined
+        }
       } catch {
         // ignoring. It is fine if the integrity file is not present. Just refetch the package
       }
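A hedged reading of the new guard: an index file written before this change stored files as a plain object, and v8.deserialize returns exactly what was stored, so a legacy index does not come back as a Map. Treating it as undefined makes the worker refetch the package and rewrite the index in the new shape. Illustration:

    import v8 from 'v8'

    // Simulate an index written by an older version (plain object instead of Map):
    const legacy = v8.deserialize(v8.serialize({ files: { 'package.json': { mode: 420 } } }))
    console.log(legacy.files instanceof Map) // false, so the index is discarded and refetched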
@@ -185,7 +188,7 @@ function calcIntegrity (buffer: Buffer): string {
 interface AddFilesFromDirResult {
   status: string
   value: {
-    filesIndex: Record<string, string>
+    filesIndex: Map<string, string>
     manifest?: DependencyManifest
     requiresBuild: boolean
   }
@@ -237,8 +240,8 @@ function addFilesFromDir ({ dir, storeDir, filesIndexFile, sideEffectsCacheKey,
         },
       }
     }
-    filesIndex.sideEffects = filesIndex.sideEffects ?? {}
-    filesIndex.sideEffects[sideEffectsCacheKey] = calculateDiff(filesIndex.files, filesIntegrity)
+    filesIndex.sideEffects ??= new Map()
+    filesIndex.sideEffects.set(sideEffectsCacheKey, calculateDiff(filesIndex.files, filesIntegrity))
     if (filesIndex.requiresBuild == null) {
       requiresBuild = pkgRequiresBuild(manifest, filesIntegrity)
     } else {
@@ -253,23 +256,23 @@ function addFilesFromDir ({ dir, storeDir, filesIndexFile, sideEffectsCacheKey,

 function calculateDiff (baseFiles: PackageFiles, sideEffectsFiles: PackageFiles): SideEffectsDiff {
   const deleted: string[] = []
-  const added: PackageFiles = {}
-  for (const file of new Set([...Object.keys(baseFiles), ...Object.keys(sideEffectsFiles)])) {
-    if (!sideEffectsFiles[file]) {
+  const added: PackageFiles = new Map()
+  for (const file of new Set([...baseFiles.keys(), ...sideEffectsFiles.keys()])) {
+    if (!sideEffectsFiles.has(file)) {
       deleted.push(file)
     } else if (
-      !baseFiles[file] ||
-      baseFiles[file].integrity !== sideEffectsFiles[file].integrity ||
-      baseFiles[file].mode !== sideEffectsFiles[file].mode
+      !baseFiles.has(file) ||
+      baseFiles.get(file)!.integrity !== sideEffectsFiles.get(file)!.integrity ||
+      baseFiles.get(file)!.mode !== sideEffectsFiles.get(file)!.mode
     ) {
-      added[file] = sideEffectsFiles[file]
+      added.set(file, sideEffectsFiles.get(file)!)
     }
   }
   const diff: SideEffectsDiff = {}
   if (deleted.length > 0) {
     diff.deleted = deleted
   }
-  if (Object.keys(added).length > 0) {
+  if (added.size > 0) {
     diff.added = added
   }
   return diff
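The Map rewrite of calculateDiff keeps the original semantics: a file present in the base index but missing from the side-effects index is recorded as deleted, while a file that is new or whose integrity or mode changed lands in added. A small worked example with hypothetical entries:

    const base = new Map([
      ['lib/a.js', { integrity: 'sha512-aaa', mode: 420 }],
      ['lib/b.js', { integrity: 'sha512-bbb', mode: 420 }],
    ])
    const withSideEffects = new Map([
      ['lib/a.js', { integrity: 'sha512-patched', mode: 420 }], // integrity changed -> added
      ['build/c.node', { integrity: 'sha512-ccc', mode: 493 }], // not in base -> added
    ])
    // calculateDiff(base, withSideEffects) yields:
    //   deleted: ['lib/b.js']
    //   added:   a Map with keys 'lib/a.js' and 'build/c.node'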
@@ -277,20 +280,20 @@ function calculateDiff (baseFiles: PackageFiles, sideEffectsFiles: PackageFiles)

 interface ProcessFilesIndexResult {
   filesIntegrity: PackageFiles
-  filesMap: Record<string, string>
+  filesMap: Map<string, string>
 }

 function processFilesIndex (filesIndex: FilesIndex): ProcessFilesIndexResult {
-  const filesIntegrity: PackageFiles = {}
-  const filesMap: Record<string, string> = {}
-  for (const [k, { checkedAt, filePath, integrity, mode, size }] of Object.entries(filesIndex)) {
-    filesIntegrity[k] = {
+  const filesIntegrity: PackageFiles = new Map()
+  const filesMap = new Map<string, string>()
+  for (const [k, { checkedAt, filePath, integrity, mode, size }] of filesIndex) {
+    filesIntegrity.set(k, {
       checkedAt,
       integrity: integrity.toString(), // TODO: use the raw Integrity object
       mode,
       size,
-    }
-    filesMap[k] = filePath
+    })
+    filesMap.set(k, filePath)
   }
   return { filesIntegrity, filesMap }
 }
@@ -128,7 +128,7 @@ export type ExtendFilesMapStats = Pick<fs.Stats, 'ino' | 'isFile' | 'isDirectory

 export interface ExtendFilesMapOptions {
   /** Map relative path of each file to their real path */
-  filesIndex: Record<string, string>
+  filesIndex: Map<string, string>
   /** Map relative path of each file to their stats */
   filesStats?: Record<string, ExtendFilesMapStats | null>
 }
@@ -150,7 +150,7 @@ export async function extendFilesMap ({ filesIndex, filesStats }: ExtendFilesMap
     }
   }

-  await Promise.all(Object.entries(filesIndex).map(async ([relativePath, realPath]) => {
+  await Promise.all(Array.from(filesIndex.entries()).map(async ([relativePath, realPath]) => {
     const stats = filesStats?.[relativePath] ?? await fs.promises.stat(realPath)
     if (stats.isFile()) {
       addInodeAndAncestors(relativePath, stats.ino)
@@ -103,7 +103,7 @@ test('optimally synchronizes source and target', async () => {

   const sourceFetchResult = await fetchFromDir(sourceDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultBefore = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
+  expect(Array.from(targetFetchResultBefore.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
   expect(
     filesToModify
       .map(suffix => path.resolve(targetDir, suffix))
@@ -128,8 +128,8 @@ test('optimally synchronizes source and target', async () => {
   await patchers[0].apply()

   const targetFetchResultAfter = await fetchFromDir(targetDir, { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
+  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
+  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(targetFetchResultBefore.filesIndex.keys()).sort())
   expect(
     filesToModify
       .map(suffix => path.resolve(targetDir, suffix))
@@ -200,19 +200,20 @@ test('multiple patchers', async () => {
   const targetFetchResultBefore1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultBefore2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultBefore3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultBefore1.filesIndex).sort()).toStrictEqual([])
-  expect(Object.keys(targetFetchResultBefore2.filesIndex).sort()).toStrictEqual([])
-  expect(Object.keys(targetFetchResultBefore3.filesIndex).sort()).toStrictEqual([])
+  const expected = Array.from(sourceFetchResult.filesIndex.keys()).sort()
+  expect(Array.from(targetFetchResultBefore1.filesIndex.keys()).sort()).not.toStrictEqual(expected)
+  expect(Array.from(targetFetchResultBefore2.filesIndex.keys()).sort()).not.toStrictEqual(expected)
+  expect(Array.from(targetFetchResultBefore3.filesIndex.keys()).sort()).not.toStrictEqual(expected)
+  expect(Array.from(targetFetchResultBefore1.filesIndex.keys()).sort()).toStrictEqual([])
+  expect(Array.from(targetFetchResultBefore2.filesIndex.keys()).sort()).toStrictEqual([])
+  expect(Array.from(targetFetchResultBefore3.filesIndex.keys()).sort()).toStrictEqual([])

   await Promise.all(patchers.map(patcher => patcher.apply()))

   const targetFetchResultAfter1 = await fetchFromDir('target1', { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultAfter2 = await fetchFromDir('target2', { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultAfter3 = await fetchFromDir('target3', { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultAfter1.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultAfter2.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultAfter3.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
+  expect(Array.from(targetFetchResultAfter1.filesIndex.keys()).sort()).toStrictEqual(expected)
+  expect(Array.from(targetFetchResultAfter2.filesIndex.keys()).sort()).toStrictEqual(expected)
+  expect(Array.from(targetFetchResultAfter3.filesIndex.keys()).sort()).toStrictEqual(expected)
 })
@@ -132,7 +132,7 @@ test('applies a patch on a directory', async () => {

   const sourceFetchResult = await fetchFromDir('source', { includeOnlyPackageFiles: false, resolveSymlinks: true })
   const targetFetchResultBefore = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultBefore.filesIndex).sort()).not.toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
+  expect(Array.from(targetFetchResultBefore.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
   expect(
     filesToModify
       .map(suffix => `target/${suffix}`)
@@ -148,8 +148,8 @@ test('applies a patch on a directory', async () => {
   await applyPatch(optimizedDirPath, path.resolve('source'), path.resolve('target'))

   const targetFetchResultAfter = await fetchFromDir('target', { includeOnlyPackageFiles: false, resolveSymlinks: true })
-  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).toStrictEqual(Object.keys(sourceFetchResult.filesIndex).sort())
-  expect(Object.keys(targetFetchResultAfter.filesIndex).sort()).not.toStrictEqual(Object.keys(targetFetchResultBefore.filesIndex).sort())
+  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).toStrictEqual(Array.from(sourceFetchResult.filesIndex.keys()).sort())
+  expect(Array.from(targetFetchResultAfter.filesIndex.keys()).sort()).not.toStrictEqual(Array.from(targetFetchResultBefore.filesIndex.keys()).sort())
   expect(
     filesToModify
       .map(suffix => `target/${suffix}`)
@@ -26,9 +26,9 @@ test('without provided stats', async () => {
     'foo/bar.txt',
     'foo_bar.txt',
   ]
-  const filesIndex: Record<string, string> = {}
+  const filesIndex = new Map<string, string>()
   for (const filePath of filePaths) {
-    filesIndex[filePath] = path.resolve(filePath)
+    filesIndex.set(filePath, path.resolve(filePath))
     fs.mkdirSync(path.dirname(filePath), { recursive: true })
     fs.writeFileSync(filePath, '')
   }
@@ -51,7 +51,7 @@ test('without provided stats', async () => {
   } as InodeMap)

   for (const filePath of filePaths) {
-    expect(statMethod).toHaveBeenCalledWith(filesIndex[filePath])
+    expect(statMethod).toHaveBeenCalledWith(filesIndex.get(filePath))
   }
 })

@@ -66,11 +66,11 @@ test('with provided stats', async () => {
     'foo/bar.txt',
     'foo_bar.txt',
   ]
-  const filesIndex: Record<string, string> = {}
+  const filesIndex = new Map<string, string>()
   const filesStats: Record<string, ExtendFilesMapStats> = {}
   let ino = startingIno
   for (const filePath of filePaths) {
-    filesIndex[filePath] = path.resolve(filePath)
+    filesIndex.set(filePath, path.resolve(filePath))
     filesStats[filePath] = {
       ino,
       isDirectory: () => false,