feat: add support for side-effects cache in the new content-addressable store
The new content-addressable store should support a side-effects cache. ref #2470, PR #2562
.changeset/plenty-schools-wonder.md (new file, 7 lines)
@@ -0,0 +1,7 @@
---
"@pnpm/package-requester": minor
"@pnpm/resolve-dependencies": minor
"@pnpm/store-controller-types": minor
---

Package request response contains the path to the files index file.
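A minimal sketch (not part of this commit) of how a consumer might pick up the new `filesIndexFile` path from a package response. It assumes the workspace packages and types shown later in this diff (`PackageResponse` from `@pnpm/store-controller-types`, `PackageFilesIndex` from `@pnpm/cafs`); the helper name is made up.

```ts
import { PackageFilesIndex } from '@pnpm/cafs'
import { PackageResponse } from '@pnpm/store-controller-types'
import loadJsonFile = require('load-json-file')

// Hypothetical consumer: once a package request has resolved, the new
// `filesIndexFile` field points at the package's files index on disk.
async function readFilesIndex (response: PackageResponse): Promise<PackageFilesIndex | undefined> {
  if (!response.filesIndexFile) return undefined
  return loadJsonFile<PackageFilesIndex>(response.filesIndexFile)
}
```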
.changeset/slimy-ravens-matter.md (new file, 6 lines)
@@ -0,0 +1,6 @@
---
"@pnpm/cafs": minor
"@pnpm/store-controller-types": minor
---

sideEffects property added to files index file.
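For illustration only, this is roughly what a files index file looks like once an engine-specific build has been recorded under the new `sideEffects` property. The file names, integrity strings, sizes, and the engine key are placeholders; in real indexes the key is the `ENGINE_NAME` constant from `@pnpm/constants`.

```ts
import { PackageFilesIndex } from '@pnpm/cafs'

// Hypothetical contents of a <hash>-index.json after one engine-specific build.
const example: PackageFilesIndex = {
  files: {
    'package.json': { integrity: 'sha512-<placeholder>', mode: 420, size: 321 },
    'index.js': { integrity: 'sha512-<placeholder>', mode: 420, size: 1024 },
  },
  sideEffects: {
    // keyed by engine, e.g. the value of ENGINE_NAME
    '<ENGINE_NAME>': {
      'build/binding.node': { integrity: 'sha512-<placeholder>', mode: 493, size: 20480 },
    },
  },
}
```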
.changeset/smooth-grapes-fold.md (new file, 9 lines)
@@ -0,0 +1,9 @@
---
"@pnpm/headless": major
"@pnpm/package-store": major
"@pnpm/server": major
"@pnpm/store-controller-types": major
"supi": minor
---

The importPackage function of the store controller imports packages directly from the side-effects cache.
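A rough usage sketch of the updated `importPackage` contract (the surrounding function and variable names are assumptions; the option and return shapes follow the `ImportPackageFunction` type changed later in this diff). When `targetEngine` is passed and the files response carries a matching `sideEffects` entry, the prebuilt files are linked and `isBuilt` comes back `true`.

```ts
import { ENGINE_NAME } from '@pnpm/constants'
import { PackageFilesResponse, StoreController } from '@pnpm/store-controller-types'

// Hypothetical call site; in real code these values come from the installer's dependency graph.
async function linkOne (
  storeController: StoreController,
  targetDir: string,
  filesResponse: PackageFilesResponse
) {
  const { isBuilt } = await storeController.importPackage(targetDir, {
    filesResponse,
    force: false,
    targetEngine: ENGINE_NAME, // only reuse side effects built for the current engine
  })
  return isBuilt // true when the side-effects cache was used
}
```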
.changeset/stupid-apples-sleep.md (new file, 8 lines)
@@ -0,0 +1,8 @@
---
"@pnpm/build-modules": major
"@pnpm/package-store": major
"@pnpm/server": major
"@pnpm/store-controller-types": major
---

The upload function of the store controller accepts `opts.filesIndexFile` instead of `opts.packageId`.
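The build-modules hunk right below shows the real call site; as a standalone sketch (wrapper name assumed), the new `upload` contract looks roughly like this:

```ts
import { ENGINE_NAME } from '@pnpm/constants'
import { StoreController } from '@pnpm/store-controller-types'

// Hypothetical wrapper: after running a package's build scripts in builtPkgLocation,
// record the build output in the store, keyed by the package's files index file.
async function uploadSideEffects (
  storeController: StoreController,
  builtPkgLocation: string,
  filesIndexFile: string
) {
  await storeController.upload(builtPkgLocation, {
    engine: ENGINE_NAME,
    filesIndexFile, // previously this option was `packageId`
  })
}
```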
@@ -90,7 +90,7 @@ async function buildDependency (
try {
await opts.storeController.upload(depNode.peripheralLocation, {
engine: ENGINE_NAME,
packageId: depNode.packageId,
filesIndexFile: depNode.filesIndexFile,
})
} catch (err) {
if (err.statusCode === 403) {
@@ -153,6 +153,7 @@ function getSubgraphToBuild (

export interface DependenciesGraphNode {
fetchingBundledManifest?: () => Promise<PackageManifest>,
filesIndexFile: string,
hasBundledDependencies: boolean,
peripheralLocation: string,
children: {[alias: string]: string},

@@ -15,6 +15,7 @@
"license": "MIT",
"dependencies": {
"@pnpm/fetcher-base": "workspace:7.0.0-alpha.3",
"@pnpm/store-controller-types": "workspace:8.0.0-alpha.3",
"@zkochan/rimraf": "1.0.0",
"concat-stream": "^2.0.0",
"decompress-maybe": "^1.0.0",

@@ -1,4 +1,5 @@
import { DeferredManifestPromise } from '@pnpm/fetcher-base'
import { PackageFileInfo } from '@pnpm/store-controller-types'
import rimraf = require('@zkochan/rimraf')
import fs = require('mz/fs')
import pLimit from 'p-limit'
@@ -9,14 +10,11 @@ import { parseJsonBuffer } from './parseJson'
const limit = pLimit(20)
const MAX_BULK_SIZE = 1 * 1024 * 1024 // 1MB

export type PackageFileInfo = {
integrity: string,
mode: number,
size: number,
export type PackageFilesIndex = {
files: Record<string, PackageFileInfo>,
sideEffects?: Record<string, Record<string, PackageFileInfo>>
}

export type PackageFilesIndex = { files: Record<string, PackageFileInfo> }

export default async function (
cafsDir: string,
pkgIndex: Record<string, PackageFileInfo>,

@@ -1,3 +1,4 @@
import { PackageFileInfo } from '@pnpm/store-controller-types'
import getStream = require('get-stream')
import path = require('path')
import exists = require('path-exists')
@@ -7,7 +8,6 @@ import ssri = require('ssri')
import addFilesFromDir from './addFilesFromDir'
import addFilesFromTarball from './addFilesFromTarball'
import checkFilesIntegrity, {
PackageFileInfo,
PackageFilesIndex,
} from './checkFilesIntegrity'
import getFilePathInCafs, {

@@ -11,6 +11,9 @@
"references": [
{
"path": "../fetcher-base"
},
{
"path": "../store-controller-types"
}
]
}

@@ -224,7 +224,10 @@ export default async (opts: HeadlessOptions) => {
lockfileDir,
optional: opts.include.optionalDependencies,
}),
linkAllPkgs(opts.storeController, depNodes, opts),
linkAllPkgs(opts.storeController, depNodes, {
force: opts.force,
targetEngine: opts.sideEffectsCacheRead && ENGINE_NAME || undefined,
}),
])

stageLogger.debug({
@@ -543,11 +546,11 @@ async function lockfileToDepGraph (
children: {},
depPath,
fetchingFiles: fetchResponse.files,
filesIndexFile: fetchResponse.filesIndexFile,
finishing: fetchResponse.finishing,
hasBin: pkgSnapshot.hasBin === true,
hasBundledDependencies: !!pkgSnapshot.bundledDependencies,
independent,
isBuilt: pkgLocation.isBuilt,
modules,
name: pkgName,
optional: !!pkgSnapshot.optional,
@@ -659,10 +662,11 @@ export interface DependenciesGraphNode {
optional: boolean,
depPath: string, // this option is only needed for saving pendingBuild when running with --ignore-scripts flag
packageId: string, // TODO: this option is currently only needed when running postinstall scripts but even there it should be not used
isBuilt: boolean,
isBuilt?: boolean,
requiresBuild: boolean,
prepare: boolean,
hasBin: boolean,
filesIndexFile: string,
}

export interface DependenciesGraph {
@@ -676,16 +680,19 @@ async function linkAllPkgs (
depNodes: DependenciesGraphNode[],
opts: {
force: boolean,
targetEngine?: string,
}
) {
return Promise.all(
depNodes.map(async (depNode) => {
const filesResponse = await depNode.fetchingFiles()

return storeController.importPackage(depNode.peripheralLocation, {
const { isBuilt } = await storeController.importPackage(depNode.peripheralLocation, {
filesResponse,
force: opts.force,
targetEngine: opts.targetEngine,
})
depNode.isBuilt = isBuilt
})
)
}

@@ -1,6 +1,6 @@
///<reference path="../../../typings/index.d.ts" />
import assertProject from '@pnpm/assert-project'
import { WANTED_LOCKFILE } from '@pnpm/constants'
import { ENGINE_NAME, WANTED_LOCKFILE } from '@pnpm/constants'
import {
PackageManifestLog,
RootLog,
@@ -712,7 +712,7 @@ test('installing with hoistPattern=* and shamefullyHoist=true', async (t) => {

const ENGINE_DIR = `${process.platform}-${process.arch}-node-${process.version.split('.')[0]}`

test.skip('using side effects cache', async (t) => {
test('using side effects cache', async (t) => {
const prefix = path.join(fixtures, 'side-effects')

// Right now, hardlink does not work with side effects, so we specify copy as the packageImportMethod
@@ -725,16 +725,25 @@ test.skip('using side effects cache', async (t) => {
}, {}, {}, { packageImportMethod: 'copy' })
await headless(opts)

const cacheIntegrityPath = path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}/diskusage@1.1.3/side_effects/${ENGINE_DIR}/integrity.json`)
t.comment(opts.storeDir)
const cacheIntegrityPath = path.join(opts.storeDir, 'files/10/0c9ac65f21cb83e1d3b9339731937e96d930d0000075d266d3443307659d27759e81f3bc0e87b202ade1f10c4af6845d060b4a985ee6b3ccc4de163a3d2171-index.json')
const cacheIntegrity = await loadJsonFile(cacheIntegrityPath)
t.ok(cacheIntegrity['build/Makefile'])
delete cacheIntegrity['build/Makefile']
t.ok(cacheIntegrity['sideEffects'], 'files index has side effects')
t.ok(cacheIntegrity['sideEffects'][ENGINE_NAME]['build/Makefile'])
delete cacheIntegrity['sideEffects'][ENGINE_NAME]['build/Makefile']

t.ok(cacheIntegrity['build/binding.Makefile'])
t.ok(cacheIntegrity['sideEffects'][ENGINE_NAME]['build/binding.Makefile'])
await writeJsonFile(cacheIntegrityPath, cacheIntegrity)

await rimraf(path.join(prefix, 'node_modules'))
await headless(opts)
const opts2 = await testDefaults({
lockfileDir: prefix,
sideEffectsCacheRead: true,
sideEffectsCacheWrite: true,
storeDir: opts.storeDir,
verifyStoreIntegrity: false,
}, {}, {}, { packageImportMethod: 'copy' })
await headless(opts2)

t.notOk(await exists(path.join(prefix, 'node_modules/diskusage/build/Makefile')), 'side effects cache correctly used')
t.ok(await exists(path.join(prefix, 'node_modules/diskusage/build/binding.Makefile')), 'side effects cache correctly used')

@@ -234,17 +234,19 @@ async function resolveAndFetch (
},
bundledManifest: fetchResult.bundledManifest,
files: fetchResult.files,
filesIndexFile: fetchResult.filesIndexFile,
finishing: fetchResult.finishing,
} as PackageResponse
}
} catch (err) {
throw err
}
}

type FetchLock = {
finishing: Promise<void>,
files: Promise<PackageFilesResponse>,
bundledManifest?: Promise<BundledManifest>,
files: Promise<PackageFilesResponse>,
filesIndexFile: string,
finishing: Promise<void>,
inStoreLocation: string,
}

@@ -274,8 +276,9 @@ function fetchToStore (
resolution: Resolution,
}
): {
files: () => Promise<PackageFilesResponse>,
bundledManifest?: () => Promise<BundledManifest>,
filesIndexFile: string,
files: () => Promise<PackageFilesResponse>,
finishing: () => Promise<void>,
inStoreLocation: string,
} {
@@ -286,19 +289,24 @@ function fetchToStore (
const bundledManifest = pDefer<BundledManifest>()
const files = pDefer<PackageFilesResponse>()
const finishing = pDefer<void>()
const filesIndexFile = opts.resolution['integrity']
? ctx.getFilePathInCafs(opts.resolution['integrity'], 'index')
: path.join(target, 'integrity.json')

doFetchToStore(bundledManifest, files, finishing) // tslint:disable-line
doFetchToStore(filesIndexFile, bundledManifest, files, finishing) // tslint:disable-line

if (opts.fetchRawManifest) {
ctx.fetchingLocker.set(opts.pkgId, {
bundledManifest: removeKeyOnFail(bundledManifest.promise),
files: removeKeyOnFail(files.promise),
filesIndexFile,
finishing: removeKeyOnFail(finishing.promise),
inStoreLocation: target,
})
} else {
ctx.fetchingLocker.set(opts.pkgId, {
files: removeKeyOnFail(files.promise),
filesIndexFile,
finishing: removeKeyOnFail(finishing.promise),
inStoreLocation: target,
})
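The location rule for the files index can be read off this hunk: when the resolution carries an integrity hash, the index lives in the content-addressable store; for resolutions without integrity (for example local directories) it falls back to an `integrity.json` next to the package's store target. A small standalone restatement, with names of my own choosing and the CAFS path lookup passed in rather than imported:

```ts
import path = require('path')

// Hypothetical helper mirroring the filesIndexFile logic in fetchToStore above.
// `getIndexPathInCafs` stands in for the cafsDir-bound ctx.getFilePathInCafs(integrity, 'index').
function resolveFilesIndexFile (
  target: string,                                     // package directory inside the store
  getIndexPathInCafs: (integrity: string) => string,  // maps an integrity hash to its index file path
  integrity?: string                                  // resolution integrity, when known
): string {
  return integrity
    ? getIndexPathInCafs(integrity)
    : path.join(target, 'integrity.json')
}
```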
@@ -310,9 +318,9 @@ function fetchToStore (
// Changing the value of fromStore is needed for correct reporting of `pnpm server`.
// Otherwise, if a package was not in store when the server started, it will be always
// reported as "downloaded" instead of "reused".
files.promise.then(({ filesIndex, fromStore }) => { // tslint:disable-line
files.promise.then((cache) => { // tslint:disable-line
// If it's already in the store, we don't need to update the cache
if (fromStore) {
if (cache.fromStore) {
return
}

@@ -323,13 +331,11 @@ function fetchToStore (
if (!tmp) return

ctx.fetchingLocker.set(opts.pkgId, {
bundledManifest: tmp.bundledManifest,
...tmp,
files: Promise.resolve({
filesIndex,
...cache,
fromStore: true,
}),
finishing: tmp.finishing,
inStoreLocation: tmp.inStoreLocation,
})
})
.catch(() => {
@@ -352,6 +358,7 @@ function fetchToStore (
return {
bundledManifest: result.bundledManifest ? pShare(result.bundledManifest) : undefined,
files: pShare(result.files),
filesIndexFile: result.filesIndexFile,
finishing: pShare(result.finishing),
inStoreLocation: result.inStoreLocation,
}
@@ -366,15 +373,13 @@ function fetchToStore (
}

async function doFetchToStore (
filesIndexFile: string,
bundledManifest: pDefer.DeferredPromise<BundledManifest>,
files: pDefer.DeferredPromise<PackageFilesResponse>,
finishing: pDefer.DeferredPromise<void>
) {
try {
const isLocalTarballDep = opts.pkgId.startsWith('file:')
const pkgIndexFilePath = opts.resolution['integrity']
? ctx.getFilePathInCafs(opts.resolution['integrity'], 'index')
: path.join(target, 'integrity.json')

if (
!opts.force &&
@@ -385,7 +390,7 @@ function fetchToStore (
) {
let pkgFilesIndex
try {
pkgFilesIndex = await loadJsonFile<PackageFilesIndex>(pkgIndexFilePath)
pkgFilesIndex = await loadJsonFile<PackageFilesIndex>(filesIndexFile)
} catch (err) {
// ignoring. It is fine if the integrity file is not present. Just refetch the package
}
@@ -400,6 +405,7 @@ function fetchToStore (
files.resolve({
filesIndex: pkgFilesIndex.files,
fromStore: true,
sideEffects: pkgFilesIndex.sideEffects,
})
if (manifest) {
manifest()
@@ -475,7 +481,7 @@ function fetchToStore (
}
})
)
await writeJsonFile(pkgIndexFilePath, { files: integrity }, { indent: undefined })
await writeJsonFile(filesIndexFile, { files: integrity }, { indent: undefined })
finishing.resolve(undefined)

if (isLocalTarballDep && opts.resolution['integrity']) { // tslint:disable-line:no-string-literal

@@ -1,6 +1,6 @@
import { importingLogger } from '@pnpm/core-loggers'
import { globalInfo, globalWarn } from '@pnpm/logger'
import { ImportPackageFunction } from '@pnpm/store-controller-types'
import { PackageFilesResponse } from '@pnpm/store-controller-types'
import fs = require('mz/fs')
import pLimit from 'p-limit'
import path = require('path')
@@ -11,7 +11,16 @@ import importIndexedDir from '../fs/importIndexedDir'

const limitLinking = pLimit(16)

export default (packageImportMethod?: 'auto' | 'hardlink' | 'copy' | 'clone'): (to: string, opts: { filesMap: Record<string, string>, fromStore: boolean, force: boolean }) => ReturnType<ImportPackageFunction> => {
export default (
packageImportMethod?: 'auto' | 'hardlink' | 'copy' | 'clone'
): (
to: string,
opts: {
filesMap: Record<string, string>,
fromStore: boolean,
force: boolean
}
) => ReturnType<(to: string, opts: { filesResponse: PackageFilesResponse, force: boolean }) => Promise<void>> => {
const importPackage = createImportPackage(packageImportMethod)
return (to, opts) => limitLinking(() => importPackage(to, opts))
}

@@ -1,4 +1,8 @@
import { getFilePathByModeInCafs as _getFilePathByModeInCafs } from '@pnpm/cafs'
import {
getFilePathByModeInCafs as _getFilePathByModeInCafs,
PackageFileInfo,
PackageFilesIndex,
} from '@pnpm/cafs'
import { FetchFunction } from '@pnpm/fetcher-base'
import createPackageRequester, { getCacheByEngine } from '@pnpm/package-requester'
import pkgIdToFilename from '@pnpm/pkgid-to-filename'
@@ -8,6 +12,7 @@ import {
StoreController,
} from '@pnpm/store-controller-types'
import rimraf = require('@zkochan/rimraf')
import loadJsonFile = require('load-json-file')
import pFilter = require('p-filter')
import path = require('path')
import exists = require('path-exists')
@@ -38,12 +43,22 @@ export default async function (
const impPkg = createImportPackage(initOpts.packageImportMethod)
const cafsDir = path.join(storeDir, 'files')
const getFilePathByModeInCafs = _getFilePathByModeInCafs.bind(null, cafsDir)
const importPackage: ImportPackageFunction = (to, opts) => {
const importPackage: ImportPackageFunction = async (to, opts) => {
const filesMap = {} as Record<string, string>
for (const [fileName, fileMeta] of Object.entries(opts.filesResponse.filesIndex)) {
let isBuilt!: boolean
let filesIndex!: Record<string, PackageFileInfo>
if (opts.targetEngine && opts.filesResponse.sideEffects?.[opts.targetEngine]) {
filesIndex = opts.filesResponse.sideEffects?.[opts.targetEngine]
isBuilt = true
} else {
filesIndex = opts.filesResponse.filesIndex
isBuilt = false
}
for (const [fileName, fileMeta] of Object.entries(filesIndex)) {
filesMap[fileName] = getFilePathByModeInCafs(fileMeta.integrity, fileMeta.mode)
}
return impPkg(to, { filesMap, fromStore: opts.filesResponse.fromStore, force: opts.force })
await impPkg(to, { filesMap, fromStore: opts.filesResponse.fromStore, force: opts.force })
return { isBuilt }
}

return {
@@ -80,23 +95,30 @@ export default async function (
}
}

async function upload (builtPkgLocation: string, opts: {packageId: string, engine: string}) {
const filesIndex = await packageRequester.cafs.addFilesFromDir(builtPkgLocation)
async function upload (builtPkgLocation: string, opts: {filesIndexFile: string, engine: string}) {
const sideEffectsIndex = await packageRequester.cafs.addFilesFromDir(builtPkgLocation)
// TODO: move this to a function
// This is duplicated in @pnpm/package-requester
const integrity = {}
await Promise.all(
Object.keys(filesIndex)
Object.keys(sideEffectsIndex)
.map(async (filename) => {
const fileIntegrity = await filesIndex[filename].generatingIntegrity
const fileIntegrity = await sideEffectsIndex[filename].generatingIntegrity
integrity[filename] = {
integrity: fileIntegrity.toString(), // TODO: use the raw Integrity object
mode: filesIndex[filename].mode,
size: filesIndex[filename].size,
mode: sideEffectsIndex[filename].mode,
size: sideEffectsIndex[filename].size,
}
})
)
const cachePath = path.join(storeDir, opts.packageId, 'side_effects', opts.engine)
await writeJsonFile(path.join(cachePath, 'integrity.json'), integrity, { indent: undefined })
let filesIndex!: PackageFilesIndex
try {
filesIndex = await loadJsonFile<PackageFilesIndex>(opts.filesIndexFile)
} catch (err) {
filesIndex = { files: integrity }
}
filesIndex.sideEffects = filesIndex.sideEffects ?? {}
filesIndex.sideEffects[opts.engine] = integrity
await writeJsonFile(opts.filesIndexFile, filesIndex, { indent: undefined })
}
}

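The `// TODO: move this to a function` comment refers to the integrity-map construction that is duplicated here and in `@pnpm/package-requester`. A possible shared helper would look roughly like the sketch below; the value shape of the added-files index (`generatingIntegrity`, `mode`, `size`) is taken from the calls in the hunk above, while the type alias and function name are assumptions of mine.

```ts
import { PackageFileInfo } from '@pnpm/store-controller-types'

// Shape produced by cafs.addFilesFromDir, as used in the upload function above.
type AddedFilesIndex = Record<string, {
  generatingIntegrity: Promise<{ toString (): string }>,
  mode: number,
  size: number,
}>

// Hypothetical shared helper for the duplicated block above.
async function toPackageFilesIndex (added: AddedFilesIndex): Promise<Record<string, PackageFileInfo>> {
  const integrity: Record<string, PackageFileInfo> = {}
  await Promise.all(
    Object.keys(added).map(async (filename) => {
      const fileIntegrity = await added[filename].generatingIntegrity
      integrity[filename] = {
        integrity: fileIntegrity.toString(),
        mode: added[filename].mode,
        size: added[filename].size,
      }
    })
  )
  return integrity
}
```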
@@ -177,6 +177,7 @@ export interface ResolvedPackage {
optional: boolean,
fetchingFiles: () => Promise<PackageFilesResponse>,
fetchingBundledManifest?: () => Promise<DependencyManifest>,
filesIndexFile: string,
finishing: () => Promise<void>,
path: string,
name: string,
@@ -790,6 +791,7 @@ function getResolvedPackage (
engineCache: !options.force && options.pkgResponse.body.cacheByEngine?.[ENGINE_NAME],
fetchingBundledManifest: options.pkgResponse.bundledManifest,
fetchingFiles: options.pkgResponse.files!,
filesIndexFile: options.pkgResponse.filesIndexFile!,
finishing: options.pkgResponse.finishing!,
hasBin: options.hasBin,
hasBundledDependencies: !!(options.pkg.bundledDependencies || options.pkg.bundleDependencies),

@@ -48,17 +48,17 @@ export default function (
filesResponse: PackageFilesResponse,
force: boolean,
}) => {
await limitedFetch(`${remotePrefix}/importPackage`, {
return limitedFetch(`${remotePrefix}/importPackage`, {
opts,
to,
})
}) as Promise<{ isBuilt: boolean }>
},
prune: async () => {
await limitedFetch(`${remotePrefix}/prune`, {})
},
requestPackage: requestPackage.bind(null, remotePrefix, limitedFetch),
stop: async () => { await limitedFetch(`${remotePrefix}/stop`, {}) },
upload: async (builtPkgLocation: string, opts: {packageId: string, engine: string}) => {
upload: async (builtPkgLocation: string, opts: {filesIndexFile: string, engine: string}) => {
await limitedFetch(`${remotePrefix}/upload`, {
builtPkgLocation,
opts,
@@ -139,8 +139,9 @@ function fetchPackage (
limitedFetch: (url: string, body: object) => any, // tslint:disable-line
options: FetchPackageToStoreOptions
): {
files: () => Promise<PackageFilesResponse>,
bundledManifest?: () => Promise<DependencyManifest>,
files: () => Promise<PackageFilesResponse>,
filesIndexFile: string,
finishing: () => Promise<void>,
inStoreLocation: string,
} {
@@ -150,7 +151,7 @@ function fetchPackage (
msgId,
options,
})
.then((fetchResponseBody: object & {inStoreLocation: string}) => {
.then((fetchResponseBody: object & {filesIndexFile: string, inStoreLocation: string}) => {
const fetchingBundledManifest = options.fetchRawManifest
? limitedFetch(`${remotePrefix}/rawManifestResponse`, { msgId })
: undefined
@@ -161,6 +162,7 @@ function fetchPackage (
return {
bundledManifest: fetchingBundledManifest && pShare(fetchingBundledManifest),
files: pShare(fetchingFiles),
filesIndexFile: fetchResponseBody.filesIndexFile,
finishing: pShare(Promise.all([fetchingBundledManifest, fetchingFiles]).then(() => undefined)),
inStoreLocation: fetchResponseBody.inStoreLocation,
}

@@ -183,15 +183,15 @@ test('server upload', async t => {
const storeCtrl = await connectStoreController({ remotePrefix, concurrency: 100 })

const fakeEngine = 'client-engine'
const fakePkgId = 'test.example.com/fake-pkg/1.0.0'
const filesIndexFile = path.join(storeDir, 'test.example.com/fake-pkg/1.0.0.json')

await storeCtrl.upload(path.join(__dirname, 'side-effect-fake-dir'), {
engine: fakeEngine,
packageId: fakePkgId,
filesIndexFile,
})

const cacheIntegrity = await loadJsonFile(path.join(storeDir, fakePkgId, 'side_effects', fakeEngine, 'integrity.json'))
t.deepEqual(Object.keys(cacheIntegrity).sort(), ['side-effect.js', 'side-effect.txt'], 'all files uploaded to cache')
const cacheIntegrity = await loadJsonFile(filesIndexFile)
t.deepEqual(Object.keys(cacheIntegrity['sideEffects'][fakeEngine]).sort(), ['side-effect.js', 'side-effect.txt'], 'all files uploaded to cache')

await server.close()
await storeCtrl.close()
@@ -213,21 +213,21 @@ test('disable server upload', async t => {
const storeCtrl = await connectStoreController({ remotePrefix, concurrency: 100 })

const fakeEngine = 'client-engine'
const fakePkgId = 'test.example.com/fake-pkg/1.0.0'
const storeDir = tempy.directory()
const filesIndexFile = path.join(storeDir, 'test.example.com/fake-pkg/1.0.0.json')

let thrown = false
try {
await storeCtrl.upload(path.join(__dirname, 'side-effect-fake-dir'), {
engine: fakeEngine,
packageId: fakePkgId,
filesIndexFile,
})
} catch (e) {
thrown = true
}
t.ok(thrown, 'error is thrown when trying to upload')

const cachePath = path.join('.store', fakePkgId, 'side_effects', fakeEngine, 'package')
t.notOk(await fs.exists(cachePath), 'cache directory not created')
t.notOk(await fs.exists(filesIndexFile), 'cache directory not created')

await server.close()
await storeCtrl.close()

@@ -42,13 +42,14 @@ export interface StoreController {
importPackage: ImportPackageFunction,
close (): Promise<void>,
prune (): Promise<void>,
upload (builtPkgLocation: string, opts: {packageId: string, engine: string}): Promise<void>,
upload (builtPkgLocation: string, opts: {filesIndexFile: string, engine: string}): Promise<void>,
}

export type FetchPackageToStoreFunction = (
opts: FetchPackageToStoreOptions
) => {
bundledManifest?: () => Promise<BundledManifest>,
filesIndexFile: string,
files: () => Promise<PackageFilesResponse>,
finishing: () => Promise<void>,
inStoreLocation: string,
@@ -65,14 +66,22 @@ export interface FetchPackageToStoreOptions {
export type ImportPackageFunction = (
to: string,
opts: {
targetEngine?: string,
filesResponse: PackageFilesResponse,
force: boolean,
}
) => Promise<void>
) => Promise<{ isBuilt: boolean }>

export type PackageFileInfo = {
integrity: string,
mode: number,
size: number,
}

export interface PackageFilesResponse {
fromStore: boolean,
filesIndex: Record<string, { mode: number, integrity: string }>,
filesIndex: Record<string, PackageFileInfo>,
sideEffects?: Record<string, Record<string, PackageFileInfo>>
}

export type RequestPackageFunction = (
@@ -99,6 +108,7 @@ export interface RequestPackageOptions {
export type PackageResponse = {
bundledManifest?: () => Promise<BundledManifest>,
files?: () => Promise<PackageFilesResponse>,
filesIndexFile?: string,
finishing?: () => Promise<void>, // a package request is finished once its integrity is generated and saved
body: {
isLocal: boolean,

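To make the new contract concrete, here is a deliberately simplified implementation of `ImportPackageFunction` that only demonstrates the typing changes above (it mirrors the `@pnpm/package-store` hunk earlier in this diff; the actual file linking is omitted and not part of this sketch):

```ts
import { ImportPackageFunction, PackageFileInfo } from '@pnpm/store-controller-types'

// Minimal sketch: pick the engine-specific side-effects index when available
// and report whether prebuilt files were used.
const importPackage: ImportPackageFunction = async (to, opts) => {
  const sideEffectsIndex = opts.targetEngine
    ? opts.filesResponse.sideEffects?.[opts.targetEngine]
    : undefined
  const filesIndex: Record<string, PackageFileInfo> = sideEffectsIndex ?? opts.filesResponse.filesIndex
  const isBuilt = sideEffectsIndex != null
  // ...link every file of `filesIndex` into `to` here (omitted)...
  return { isBuilt }
}
```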
@@ -190,6 +190,7 @@ export default async function linkPackages (
lockfileDir: opts.lockfileDir,
optional: opts.include.optionalDependencies,
registries: opts.registries,
sideEffectsCacheRead: opts.sideEffectsCacheRead,
storeController: opts.storeController,
virtualStoreDir: opts.virtualStoreDir,
}
@@ -365,6 +366,7 @@ async function linkNewPackages (
optional: boolean,
registries: Registries,
lockfileDir: string,
sideEffectsCacheRead: boolean,
storeController: StoreController,
virtualStoreDir: string,
}
@@ -423,7 +425,10 @@ async function linkNewPackages (
lockfileDir: opts.lockfileDir,
optional: opts.optional,
}),
linkAllPkgs(opts.storeController, newPkgs, opts),
linkAllPkgs(opts.storeController, newPkgs, {
force: opts.force,
targetEngine: opts.sideEffectsCacheRead && ENGINE_NAME || undefined,
}),
])

return newDepPaths
@@ -466,16 +471,19 @@ async function linkAllPkgs (
depNodes: DependenciesGraphNode[],
opts: {
force: boolean,
targetEngine?: string,
}
) {
return Promise.all(
depNodes.map(async ({ fetchingFiles, independent, peripheralLocation }) => {
const filesResponse = await fetchingFiles()
depNodes.map(async (depNode) => {
const filesResponse = await depNode.fetchingFiles()

return storeController.importPackage(peripheralLocation, {
const { isBuilt } = await storeController.importPackage(depNode.peripheralLocation, {
filesResponse,
force: opts.force,
targetEngine: opts.targetEngine,
})
depNode.isBuilt = isBuilt
})
)
}

@@ -27,6 +27,7 @@ export interface DependenciesGraphNode {
modules: string,
fetchingBundledManifest?: () => Promise<DependencyManifest>,
fetchingFiles: () => Promise<PackageFilesResponse>,
filesIndexFile: string,
resolution: Resolution,
peripheralLocation: string,
children: {[alias: string]: string},
@@ -240,6 +241,7 @@ function resolvePeersOfNode (
dev: resolvedPackage.dev,
fetchingBundledManifest: resolvedPackage.fetchingBundledManifest,
fetchingFiles: resolvedPackage.fetchingFiles,
filesIndexFile: resolvedPackage.filesIndexFile,
hasBin: resolvedPackage.hasBin,
hasBundledDependencies: resolvedPackage.hasBundledDependencies,
independent,

@@ -1,12 +1,15 @@
import { ENGINE_NAME } from '@pnpm/constants'
import { prepareEmpty } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import rimraf = require('@zkochan/rimraf')
import loadJsonFile = require('load-json-file')
import fs = require('mz/fs')
import path = require('path')
import exists = require('path-exists')
import { addDependenciesToPackage } from 'supi'
import tape = require('tape')
import promisifyTape from 'tape-promise'
import writeJsonFile = require('write-json-file')
import { testDefaults } from '../utils'

const test = promisifyTape(tape)
@@ -67,7 +70,7 @@ test.skip('caching side effects of native package when hoisting is used', async
await project.has('.pnpm/node_modules/es6-promise') // verifying that a flat node_modules was created
})

test.skip('using side effects cache', async (t) => {
test('using side effects cache', async (t) => {
prepareEmpty(t)

// Right now, hardlink does not work with side effects, so we specify copy as the packageImportMethod
@@ -80,13 +83,26 @@ test.skip('using side effects cache', async (t) => {
}, {}, {}, { packageImportMethod: 'copy' })
const manifest = await addDependenciesToPackage({}, ['diskusage@1.1.3'], opts)

const cacheBuildDir = path.join(opts.storeDir, `localhost+${REGISTRY_MOCK_PORT}/diskusage/1.1.3/side_effects/${ENGINE_DIR}/package/build`)
await fs.writeFile(path.join(cacheBuildDir, 'new-file.txt'), 'some new content')
const filesIndexFile = path.join(opts.storeDir, 'files/10/0c9ac65f21cb83e1d3b9339731937e96d930d0000075d266d3443307659d27759e81f3bc0e87b202ade1f10c4af6845d060b4a985ee6b3ccc4de163a3d2171-index.json')
const filesIndex = await loadJsonFile(filesIndexFile)
t.ok(filesIndex['sideEffects'], 'files index has side effects')
t.ok(filesIndex['sideEffects'][ENGINE_NAME]['build/Makefile'])
delete filesIndex['sideEffects'][ENGINE_NAME]['build/Makefile']
await writeJsonFile(filesIndexFile, filesIndex)

await rimraf('node_modules')
await addDependenciesToPackage(manifest, ['diskusage@1.1.3'], opts)
await rimraf('pnpm-lock.yaml') // to avoid headless install
const opts2 = await testDefaults({
fastUnpack: false,
sideEffectsCacheRead: true,
sideEffectsCacheWrite: true,
storeDir: opts.storeDir,
verifyStoreIntegrity: false,
}, {}, {}, { packageImportMethod: 'copy' })
await addDependenciesToPackage(manifest, ['diskusage@1.1.3'], opts2)

t.ok(await exists('node_modules/diskusage/build/new-file.txt'), 'side effects cache correctly used')
t.notOk(await exists(path.resolve('node_modules/diskusage/build/Makefile')), 'side effects cache correctly used')
t.ok(await exists(path.resolve('node_modules/diskusage/build/binding.Makefile')), 'side effects cache correctly used')
})

test.skip('readonly side effects cache', async (t) => {

pnpm-lock.yaml (generated, 2 lines)
@@ -97,6 +97,7 @@ importers:
packages/cafs:
dependencies:
'@pnpm/fetcher-base': 'link:../fetcher-base'
'@pnpm/store-controller-types': 'link:../store-controller-types'
'@zkochan/rimraf': 1.0.0
concat-stream: 2.0.0
decompress-maybe: 1.0.0
@@ -117,6 +118,7 @@ importers:
tempy: 0.5.0
specifiers:
'@pnpm/fetcher-base': 'workspace:7.0.0-alpha.3'
'@pnpm/store-controller-types': 'workspace:8.0.0-alpha.3'
'@types/concat-stream': ^1.6.0
'@types/mz': ^2.7.1
'@types/node': ^13.13.6