feat: install Node.js runtime as a dependency (#9755)
5 .changeset/angry-streets-repair.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/types": minor
---

Added "devEngines" to the manifest fields.
5 .changeset/few-lands-admire.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/link-bins": minor
---

Create a command shim for the Node.js binary.
5 .changeset/fresh-eggs-agree.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/read-project-manifest": major
---

Added @pnpm/logger to peer deps.
5 .changeset/hip-badgers-wait.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/crypto.shasums-file": major
---

Initial release.
5 .changeset/little-rockets-hug.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/default-reporter": patch
---

Print the ID of the dependency in the installation summary, if no version is found.
6 .changeset/sharp-coats-lead.md (Normal file)
@@ -0,0 +1,6 @@
---
"@pnpm/package-requester": major
"@pnpm/store-controller-types": major
---

Removed `expectedPkg` from the options of the fetch-package-to-store function.
38 .changeset/smooth-experts-type.md (Normal file)
@@ -0,0 +1,38 @@
---
"@pnpm/read-project-manifest": minor
"@pnpm/resolve-dependencies": minor
"@pnpm/package-requester": minor
"@pnpm/resolver-base": minor
"@pnpm/fetcher-base": minor
"@pnpm/pick-fetcher": minor
"@pnpm/headless": minor
"@pnpm/client": minor
"@pnpm/node.resolver": minor
"@pnpm/node.fetcher": minor
"@pnpm/core": minor
"@pnpm/lockfile.types": minor
"@pnpm/lockfile.utils": minor
"pnpm": minor
---

Added support for resolving and downloading the Node.js runtime specified in the [devEngines](https://github.com/openjs-foundation/package-metadata-interoperability-collab-space/issues/15) field of `package.json`.

Usage example:

```json
{
  "devEngines": {
    "runtime": {
      "name": "node",
      "version": "^24.4.0",
      "onFail": "download"
    }
  }
}
```

When running `pnpm install`, pnpm will resolve Node.js to the latest version that satisfies the specified range and install it as a dependency of the project. As a result, when running scripts, the locally installed Node.js version will be used.

Unlike the existing options, `useNodeVersion` and `executionEnv.nodeVersion`, this new field supports version ranges, which are locked to exact versions during installation. The resolved version is stored in the pnpm lockfile, along with an integrity checksum that is used to validate the downloaded Node.js content in the future.

Related PR: [#9755](https://github.com/pnpm/pnpm/pull/9755).
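To make that mapping concrete, here is a small illustrative sketch (not pnpm's actual implementation) of how a `devEngines.runtime` entry with `onFail: "download"` translates into the internal `node@runtime:<range>` dependency handled by the resolver and fetcher added in this PR; the `DevEngineDependency` type is the one introduced in `@pnpm/types` below, everything else is a hypothetical helper:

```ts
import { type DevEngineDependency } from '@pnpm/types'

// Illustrative helper: derive the wanted dependency that the new Node.js
// runtime resolver recognizes (alias "node", bare specifier "runtime:<range>").
function nodeRuntimeWantedDep (
  runtime?: DevEngineDependency | DevEngineDependency[]
): { alias: string, bareSpecifier: string } | undefined {
  const entries = Array.isArray(runtime) ? runtime : runtime ? [runtime] : []
  const node = entries.find((dep) => dep.name === 'node' && dep.onFail === 'download')
  if (!node?.version) return undefined
  return { alias: 'node', bareSpecifier: `runtime:${node.version}` }
}

// nodeRuntimeWantedDep({ name: 'node', version: '^24.4.0', onFail: 'download' })
// => { alias: 'node', bareSpecifier: 'runtime:^24.4.0' }
```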
6 .changeset/twenty-lions-rush.md (Normal file)
@@ -0,0 +1,6 @@
---
"@pnpm/fetcher-base": major
"@pnpm/tarball-fetcher": major
---

Added a new required field (`pkg.id`) to the fetcher function's options.
5 .changeset/violet-bananas-peel.md (Normal file)
@@ -0,0 +1,5 @@
---
"@pnpm/constants": minor
---

Add getNodeBinLocationForCurrentOS.
@@ -89,7 +89,7 @@ export function getPkgsDiff (
latest: log.latest,
name: log.name,
realName: log.realName,
version: log.version,
version: log.version ?? log.id,
}
return pkgsDiff
}, {
13 crypto/shasums-file/README.md (Normal file)
@@ -0,0 +1,13 @@
# @pnpm/crypto.shasums-file

> Utils for working with shasums files

## Installation

```sh
pnpm add @pnpm/crypto.shasums-file
```

## License

MIT
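A minimal usage sketch of the two functions this package exports (see `src/index.ts` further down); the `createFetchFromRegistry({})` construction and the mirror URL are assumptions based on the defaults used elsewhere in this PR:

```ts
import { createFetchFromRegistry } from '@pnpm/fetch'
import { fetchShasumsFile, pickFileChecksumFromShasumsFile } from '@pnpm/crypto.shasums-file'

async function nodeTarballIntegrity (version: string, fileName: string): Promise<string> {
  const fetch = createFetchFromRegistry({})
  // Download the SHASUMS256.txt published next to the Node.js release artifacts.
  const shasums = await fetchShasumsFile(fetch, `https://nodejs.org/download/release/v${version}/SHASUMS256.txt`)
  // Pick the hex SHA-256 of one artifact and convert it to an SRI-style "sha256-<base64>" string.
  return pickFileChecksumFromShasumsFile(shasums, fileName)
}

// Example: nodeTarballIntegrity('22.0.0', 'node-v22.0.0-linux-x64.tar.gz')
```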
48 crypto/shasums-file/package.json (Normal file)
@@ -0,0 +1,48 @@
{
  "name": "@pnpm/crypto.shasums-file",
  "version": "1000.0.0-0",
  "description": "Utils for working with shasums files",
  "keywords": [
    "pnpm",
    "pnpm10",
    "crypto",
    "shasums-file"
  ],
  "license": "MIT",
  "funding": "https://opencollective.com/pnpm",
  "repository": "https://github.com/pnpm/pnpm/blob/main/crypto/shasums-file",
  "homepage": "https://github.com/pnpm/pnpm/blob/main/crypto/shasums-file#readme",
  "bugs": {
    "url": "https://github.com/pnpm/pnpm/issues"
  },
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "exports": {
    ".": "./lib/index.js"
  },
  "files": [
    "lib",
    "!*.map"
  ],
  "scripts": {
    "lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\"",
    "_test": "jest",
    "test": "pnpm run compile && pnpm run _test",
    "prepublishOnly": "pnpm run compile",
    "compile": "tsc --build && pnpm run lint --fix"
  },
  "dependencies": {
    "@pnpm/crypto.hash": "workspace:*",
    "@pnpm/error": "workspace:*",
    "@pnpm/fetching-types": "workspace:*"
  },
  "devDependencies": {
    "@pnpm/crypto.shasums-file": "workspace:*"
  },
  "engines": {
    "node": ">=18.12"
  },
  "jest": {
    "preset": "@pnpm/jest-config"
  }
}
53 crypto/shasums-file/src/index.ts (Normal file)
@@ -0,0 +1,53 @@
import { createHash } from '@pnpm/crypto.hash'
import { PnpmError } from '@pnpm/error'
import {
  type FetchFromRegistry,
} from '@pnpm/fetching-types'

export async function fetchShasumsFile (
  fetch: FetchFromRegistry,
  shasumsUrl: string,
  expectedVersionIntegrity?: string
): Promise<string> {
  const res = await fetch(shasumsUrl)
  if (!res.ok) {
    throw new PnpmError(
      'NODE_FETCH_INTEGRITY_FAILED',
      `Failed to fetch integrity file: ${shasumsUrl} (status: ${res.status})`
    )
  }

  const body = await res.text()
  if (expectedVersionIntegrity) {
    const actualVersionIntegrity = createHash(body)
    if (expectedVersionIntegrity !== actualVersionIntegrity) {
      throw new PnpmError('NODE_VERSION_INTEGRITY_MISMATCH', `The integrity of ${shasumsUrl} failed. Expected: ${expectedVersionIntegrity}. Actual: ${actualVersionIntegrity}`)
    }
  }
  return body
}

const SHA256_REGEX = /^[a-f0-9]{64}$/

export function pickFileChecksumFromShasumsFile (body: string, fileName: string): string {
  const line = body.split('\n').find(line => line.trim().endsWith(` ${fileName}`))

  if (!line) {
    throw new PnpmError(
      'NODE_INTEGRITY_HASH_NOT_FOUND',
      `SHA-256 hash not found in SHASUMS256.txt for: ${fileName}`
    )
  }

  const [sha256] = line.trim().split(/\s+/)
  if (!SHA256_REGEX.test(sha256)) {
    throw new PnpmError(
      'NODE_MALFORMED_INTEGRITY_HASH',
      `Malformed SHA-256 for ${fileName}: ${sha256}`
    )
  }

  const buffer = Buffer.from(sha256, 'hex')
  const base64 = buffer.toString('base64')
  return `sha256-${base64}`
}
16 crypto/shasums-file/test/index.ts (Normal file)
@@ -0,0 +1,16 @@
import { pickFileChecksumFromShasumsFile } from '@pnpm/crypto.shasums-file'

describe('pickFileChecksumFromShasumsFile', () => {
  it('picks the right checksum for a file', () => {
    expect(pickFileChecksumFromShasumsFile(`ed52239294ad517fbe91a268146d5d2aa8a17d2d62d64873e43219078ba71c4e foo.tar.gz
be127be1d98cad94c56f46245d0f2de89934d300028694456861a6d5ac558bf3 foo.msi`, 'foo.tar.gz')).toEqual('sha256-7VIjkpStUX++kaJoFG1dKqihfS1i1khz5DIZB4unHE4=')
  })
  it('throws an error if no integrity found', () => {
    expect(() => pickFileChecksumFromShasumsFile(`ed52239294ad517fbe91a268146d5d2aa8a17d2d62d64873e43219078ba71c4e foo.tar.gz
be127be1d98cad94c56f46245d0f2de89934d300028694456861a6d5ac558bf3 foo.msi`, 'bar.zip')).toThrow(/SHA-256 hash not found in SHASUMS256.txt for: bar.zip/)
  })
  it('throws an error if a malformed integrity is found', () => {
    expect(() => pickFileChecksumFromShasumsFile(`ed52239294ad517fbe91 foo.tar.gz
be127be1d98cad94c56f46245d0f2de89934d300028694456861a6d5ac558bf3 foo.msi`, 'foo.tar.gz')).toThrow(/Malformed SHA-256 for foo.tar.gz: ed52239294ad517fbe91/)
  })
})
17 crypto/shasums-file/test/tsconfig.json (Normal file)
@@ -0,0 +1,17 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "noEmit": false,
    "outDir": "../test.lib",
    "rootDir": "."
  },
  "include": [
    "**/*.ts",
    "../../../__typings__/**/*.d.ts"
  ],
  "references": [
    {
      "path": ".."
    }
  ]
}
22 crypto/shasums-file/tsconfig.json (Normal file)
@@ -0,0 +1,22 @@
{
  "extends": "@pnpm/tsconfig",
  "compilerOptions": {
    "outDir": "lib",
    "rootDir": "src"
  },
  "include": [
    "src/**/*.ts",
    "../../__typings__/**/*.d.ts"
  ],
  "references": [
    {
      "path": "../../network/fetching-types"
    },
    {
      "path": "../../packages/error"
    },
    {
      "path": "../hash"
    }
  ]
}
8 crypto/shasums-file/tsconfig.lint.json (Normal file)
@@ -0,0 +1,8 @@
{
  "extends": "./tsconfig.json",
  "include": [
    "src/**/*.ts",
    "test/**/*.ts",
    "../../__typings__/**/*.d.ts"
  ]
}
3 deps/graph-builder/src/lockfileToDepGraph.ts (vendored)
@@ -220,8 +220,7 @@ async function buildGraphFromPackages (
force: false,
lockfileDir: opts.lockfileDir,
ignoreScripts: opts.ignoreScripts,
pkg: { id: packageId, resolution },
expectedPkg: { name: pkgName, version: pkgVersion },
pkg: { name: pkgName, version: pkgVersion, id: packageId, resolution },
})
} catch (err) {
if (pkgSnapshot.optional) return
5 env/node.fetcher/package.json (vendored)
@@ -32,12 +32,15 @@
"compile": "tsc --build && pnpm run lint --fix"
},
"dependencies": {
"@pnpm/constants": "workspace:*",
"@pnpm/create-cafs-store": "workspace:*",
"@pnpm/crypto.shasums-file": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/fetcher-base": "workspace:*",
"@pnpm/fetching-types": "workspace:*",
"@pnpm/pick-fetcher": "workspace:*",
"@pnpm/node.resolver": "workspace:*",
"@pnpm/tarball-fetcher": "workspace:*",
"@pnpm/worker": "workspace:*",
"adm-zip": "catalog:",
"detect-libc": "catalog:",
"rename-overwrite": "catalog:",
187 env/node.fetcher/src/index.ts (vendored)
@@ -1,15 +1,19 @@
import fs from 'fs'
import fsPromises from 'fs/promises'
import path from 'path'
import { getNodeBinLocationForCurrentOS } from '@pnpm/constants'
import { PnpmError } from '@pnpm/error'
import { fetchShasumsFile, pickFileChecksumFromShasumsFile } from '@pnpm/crypto.shasums-file'
import {
type FetchFromRegistry,
type RetryTimeoutOptions,
type Response,
} from '@pnpm/fetching-types'
import { pickFetcher } from '@pnpm/pick-fetcher'
import { createCafsStore } from '@pnpm/create-cafs-store'
import { type Cafs } from '@pnpm/cafs-types'
import { createTarballFetcher } from '@pnpm/tarball-fetcher'
import { type FetchFunction } from '@pnpm/fetcher-base'
import { type NodeRuntimeFetcher, type FetchResult } from '@pnpm/fetcher-base'
import { getNodeMirror, parseEnvSpecifier } from '@pnpm/node.resolver'
import { addFilesFromDir } from '@pnpm/worker'
import AdmZip from 'adm-zip'
import renameOverwrite from 'rename-overwrite'
import tempy from 'tempy'
@@ -17,12 +21,74 @@ import { isNonGlibcLinux } from 'detect-libc'
import ssri from 'ssri'
import { getNodeArtifactAddress } from './getNodeArtifactAddress'

export function createNodeRuntimeFetcher (ctx: {
fetch: FetchFromRegistry
rawConfig: Record<string, string>
offline?: boolean
}): { nodeRuntime: NodeRuntimeFetcher } {
const fetchNodeRuntime: NodeRuntimeFetcher = async (cafs, resolution, opts) => {
if (!opts.pkg.version && !opts.pkg.id) {
throw new PnpmError('CANNOT_FETCH_NODE_WITHOUT_VERSION', 'Cannot fetch Node.js without a version')
}
if (ctx.offline) {
throw new PnpmError('CANNOT_DOWNLOAD_NODE_OFFLINE', 'Cannot download Node.js because offline mode is enabled.')
}
// Sometimes the id comes in as runtime:<version> and sometimes as node@runtime:<version>.
// It would be nice to normalize this but unfortunately some parts of the code rely on IDs that start with the protocol.
const version = opts.pkg.version ?? opts.pkg.id.replace(/(?:node@)?runtime:/, '')
const { releaseChannel } = parseEnvSpecifier(version)

await validateSystemCompatibility()

const nodeMirrorBaseUrl = getNodeMirror(ctx.rawConfig, releaseChannel)
const artifactInfo = await getNodeArtifactInfo(ctx.fetch, version, {
nodeMirrorBaseUrl,
expectedVersionIntegrity: resolution.integrity,
cachedShasumsFile: resolution._shasumsFileContent,
})
const manifest = {
name: 'node',
version,
bin: getNodeBinLocationForCurrentOS(),
}

if (artifactInfo.isZip) {
const tempLocation = await cafs.tempDir()
await downloadAndUnpackZip(ctx.fetch, artifactInfo, tempLocation)
return {
...await addFilesFromDir({
storeDir: cafs.storeDir,
dir: tempLocation,
filesIndexFile: opts.filesIndexFile,
readManifest: false,
}),
manifest,
}
}

return {
...await downloadAndUnpackTarball(ctx.fetch, artifactInfo, { cafs, filesIndexFile: opts.filesIndexFile }),
manifest,
}
}
return {
nodeRuntime: fetchNodeRuntime,
}
}

// Constants
const DEFAULT_NODE_MIRROR_BASE_URL = 'https://nodejs.org/download/release/'
const SHA256_REGEX = /^[a-f0-9]{64}$/

export interface FetchNodeOptionsToDir {
storeDir: string
fetchTimeout?: number
nodeMirrorBaseUrl?: string
retry?: RetryTimeoutOptions
}

export interface FetchNodeOptions {
storeDir: string
cafs: Cafs
filesIndexFile: string
fetchTimeout?: number
nodeMirrorBaseUrl?: string
retry?: RetryTimeoutOptions
@@ -48,19 +114,19 @@ export async function fetchNode (
fetch: FetchFromRegistry,
version: string,
targetDir: string,
opts: FetchNodeOptions
opts: FetchNodeOptionsToDir
): Promise<void> {
await validateSystemCompatibility()

const nodeMirrorBaseUrl = opts.nodeMirrorBaseUrl ?? DEFAULT_NODE_MIRROR_BASE_URL
const artifactInfo = await getNodeArtifactInfo(fetch, version, nodeMirrorBaseUrl)
const artifactInfo = await getNodeArtifactInfo(fetch, version, { nodeMirrorBaseUrl })

if (artifactInfo.isZip) {
await downloadAndUnpackZip(fetch, artifactInfo, targetDir)
return
}

await downloadAndUnpackTarball(fetch, artifactInfo, targetDir, opts)
await downloadAndUnpackTarballToDir(fetch, artifactInfo, targetDir, opts)
}

/**
@@ -89,11 +155,15 @@ async function validateSystemCompatibility (): Promise<void> {
async function getNodeArtifactInfo (
fetch: FetchFromRegistry,
version: string,
nodeMirrorBaseUrl: string
opts: {
nodeMirrorBaseUrl: string
expectedVersionIntegrity?: string
cachedShasumsFile?: string
}
): Promise<NodeArtifactInfo> {
const tarball = getNodeArtifactAddress({
version,
baseUrl: nodeMirrorBaseUrl,
baseUrl: opts.nodeMirrorBaseUrl,
platform: process.platform,
arch: process.arch,
})
@@ -102,7 +172,11 @@ async function getNodeArtifactInfo (
const shasumsFileUrl = `${tarball.dirname}/SHASUMS256.txt`
const url = `${tarball.dirname}/${tarballFileName}`

const integrity = await loadArtifactIntegrity(fetch, shasumsFileUrl, tarballFileName)
const integrity = opts.cachedShasumsFile
? pickFileChecksumFromShasumsFile(opts.cachedShasumsFile, tarballFileName)
: await loadArtifactIntegrity(fetch, tarballFileName, shasumsFileUrl, {
expectedVersionIntegrity: opts.expectedVersionIntegrity,
})

return {
url,
@@ -112,49 +186,28 @@
}
}

interface LoadArtifactIntegrityOptions {
expectedVersionIntegrity?: string
}

/**
* Loads and verifies the integrity hash for a Node.js artifact.
* Loads and extracts the integrity hash for a specific Node.js artifact.
*
* @param fetch - Function to fetch resources from registry
* @param integritiesFileUrl - URL of the SHASUMS256.txt file
* @param fileName - Name of the file to find integrity for
* @param shasumsUrl - URL of the SHASUMS256.txt file
* @param options - Optional configuration for integrity verification
* @returns Promise resolving to the integrity hash in base64 format
* @throws {PnpmError} When integrity file cannot be fetched or parsed
*/
async function loadArtifactIntegrity (
fetch: FetchFromRegistry,
integritiesFileUrl: string,
fileName: string
fileName: string,
shasumsUrl: string,
options?: LoadArtifactIntegrityOptions
): Promise<string> {
const res = await fetch(integritiesFileUrl)
if (!res.ok) {
throw new PnpmError(
'NODE_FETCH_INTEGRITY_FAILED',
`Failed to fetch integrity file: ${integritiesFileUrl} (status: ${res.status})`
)
}

const body = await res.text()
const line = body.split('\n').find(line => line.trim().endsWith(` ${fileName}`))

if (!line) {
throw new PnpmError(
'NODE_INTEGRITY_HASH_NOT_FOUND',
`SHA-256 hash not found in SHASUMS256.txt for: ${fileName}`
)
}

const [sha256] = line.trim().split(/\s+/)
if (!SHA256_REGEX.test(sha256)) {
throw new PnpmError(
'NODE_MALFORMED_INTEGRITY_HASH',
`Malformed SHA-256 for ${fileName}: ${sha256}`
)
}

const buffer = Buffer.from(sha256, 'hex')
const base64 = buffer.toString('base64')
return `sha256-${base64}`
const body = await fetchShasumsFile(fetch, shasumsUrl, options?.expectedVersionIntegrity)
return pickFileChecksumFromShasumsFile(body, fileName)
}

/**
@@ -165,11 +218,11 @@ async function loadArtifactIntegrity (
* @param targetDir - Directory where Node.js should be installed
* @param opts - Configuration options for the fetch operation
*/
async function downloadAndUnpackTarball (
async function downloadAndUnpackTarballToDir (
fetch: FetchFromRegistry,
artifactInfo: NodeArtifactInfo,
targetDir: string,
opts: FetchNodeOptions
opts: FetchNodeOptionsToDir
): Promise<void> {
const getAuthHeader = () => undefined
const fetchers = createTarballFetcher(fetch, getAuthHeader, {
@@ -181,19 +234,20 @@ async function downloadAndUnpackTarball (
})

const cafs = createCafsStore(opts.storeDir)
const fetchTarball = pickFetcher(fetchers, { tarball: artifactInfo.url }) as FetchFunction

// Create a unique index file name for Node.js tarballs
const indexFileName = `node-${encodeURIComponent(artifactInfo.url)}`
const filesIndexFile = path.join(opts.storeDir, indexFileName)

const { filesIndex } = await fetchTarball(cafs, {
const { filesIndex } = await fetchers.remoteTarball(cafs, {
tarball: artifactInfo.url,
integrity: artifactInfo.integrity,
}, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg: {
id: '',
},
})

cafs.importPackage(targetDir, {
@@ -206,6 +260,32 @@
})
}

async function downloadAndUnpackTarball (
fetch: FetchFromRegistry,
artifactInfo: NodeArtifactInfo,
opts: FetchNodeOptions
): Promise<FetchResult> {
const getAuthHeader = () => undefined
const fetchers = createTarballFetcher(fetch, getAuthHeader, {
retry: opts.retry,
timeout: opts.fetchTimeout,
// These are not needed for fetching Node.js
rawConfig: {},
unsafePerm: false,
})

return fetchers.remoteTarball(opts.cafs, {
tarball: artifactInfo.url,
integrity: artifactInfo.integrity,
}, {
filesIndexFile: opts.filesIndexFile,
lockfileDir: process.cwd(),
pkg: {
id: '',
},
})
}

/**
* Downloads and unpacks a zip file containing Node.js.
*
@@ -223,12 +303,12 @@ async function downloadAndUnpackZip (
const tmp = path.join(tempy.directory(), 'pnpm.zip')

try {
await downloadWithIntegrityCheck(response, tmp, artifactInfo.integrity, artifactInfo.url)
await downloadWithIntegrityCheck(response, tmp, artifactInfo.integrity)
await extractZipToTarget(tmp, artifactInfo.basename, targetDir)
} finally {
// Clean up temporary file
try {
await fs.promises.unlink(tmp)
await fsPromises.unlink(tmp)
} catch {
// Ignore cleanup errors
}
@@ -247,8 +327,7 @@
async function downloadWithIntegrityCheck (
response: Response,
tmpPath: string,
expectedIntegrity: string,
url: string
expectedIntegrity: string
): Promise<void> {
// Collect all chunks from the response
const chunks: Buffer[] = []
@@ -261,7 +340,7 @@ async function downloadWithIntegrityCheck (
ssri.checkData(data, expectedIntegrity, { error: true })

// Write the verified data to file
await fs.promises.writeFile(tmpPath, data)
await fsPromises.writeFile(tmpPath, data)
}

/**
2 env/node.fetcher/test/node.test.ts (vendored)
@@ -2,7 +2,7 @@ import AdmZip from 'adm-zip'
import { Response } from 'node-fetch'
import path from 'path'
import { Readable } from 'stream'
import { fetchNode, type FetchNodeOptions } from '@pnpm/node.fetcher'
import { fetchNode, type FetchNodeOptionsToDir as FetchNodeOptions } from '@pnpm/node.fetcher'
import { tempDir } from '@pnpm/prepare'
import { isNonGlibcLinux } from 'detect-libc'
13 env/node.fetcher/tsconfig.json (vendored)
@@ -13,10 +13,10 @@
"path": "../../__utils__/prepare"
},
{
"path": "../../fetching/fetcher-base"
"path": "../../crypto/shasums-file"
},
{
"path": "../../fetching/pick-fetcher"
"path": "../../fetching/fetcher-base"
},
{
"path": "../../fetching/tarball-fetcher"
@@ -24,6 +24,9 @@
{
"path": "../../network/fetching-types"
},
{
"path": "../../packages/constants"
},
{
"path": "../../packages/error"
},
@@ -32,6 +35,12 @@
},
{
"path": "../../store/create-cafs-store"
},
{
"path": "../../worker"
},
{
"path": "../node.resolver"
}
]
}
8 env/node.resolver/package.json (vendored)
@@ -32,8 +32,14 @@
"compile": "tsc --build && pnpm run lint --fix"
},
"dependencies": {
"@pnpm/config": "workspace:*",
"@pnpm/constants": "workspace:*",
"@pnpm/crypto.hash": "workspace:*",
"@pnpm/crypto.shasums-file": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/fetching-types": "workspace:*",
"@pnpm/node.fetcher": "workspace:*",
"@pnpm/resolver-base": "workspace:*",
"@pnpm/types": "workspace:*",
"semver": "catalog:",
"version-selector-type": "catalog:"
},
65 env/node.resolver/src/index.ts (vendored)
@@ -1,6 +1,71 @@
import { getNodeBinLocationForCurrentOS } from '@pnpm/constants'
import { createHash } from '@pnpm/crypto.hash'
import { fetchShasumsFile } from '@pnpm/crypto.shasums-file'
import { PnpmError } from '@pnpm/error'
import { type FetchFromRegistry } from '@pnpm/fetching-types'
import { type WantedDependency, type NodeRuntimeResolution, type ResolveResult } from '@pnpm/resolver-base'
import semver from 'semver'
import versionSelectorType from 'version-selector-type'
import { type PkgResolutionId } from '@pnpm/types'
import { parseEnvSpecifier } from './parseEnvSpecifier'
import { getNodeMirror } from './getNodeMirror'

export { getNodeMirror, parseEnvSpecifier }

export interface NodeRuntimeResolveResult extends ResolveResult {
resolution: NodeRuntimeResolution
resolvedVia: 'nodejs.org'
}

export async function resolveNodeRuntime (
ctx: {
fetchFromRegistry: FetchFromRegistry
rawConfig: Record<string, string>
offline?: boolean
},
wantedDependency: WantedDependency
): Promise<NodeRuntimeResolveResult | null> {
if (wantedDependency.alias !== 'node' || !wantedDependency.bareSpecifier?.startsWith('runtime:')) return null
if (ctx.offline) throw new PnpmError('NO_OFFLINE_NODEJS_RESOLUTION', 'Offline Node.js resolution is not supported')
const versionSpec = wantedDependency.bareSpecifier.substring('runtime:'.length)
const { releaseChannel, versionSpecifier } = parseEnvSpecifier(versionSpec)
const nodeMirrorBaseUrl = getNodeMirror(ctx.rawConfig, releaseChannel)
const version = await resolveNodeVersion(ctx.fetchFromRegistry, versionSpecifier, nodeMirrorBaseUrl)
if (!version) {
throw new PnpmError('NODEJS_VERSION_NOT_FOUND', `Could not find a Node.js version that satisfies ${versionSpec}`)
}
const { versionIntegrity: integrity, shasumsFileContent } = await loadShasumsFile(ctx.fetchFromRegistry, nodeMirrorBaseUrl, version)
return {
id: `node@runtime:${version}` as PkgResolutionId,
normalizedBareSpecifier: `runtime:${versionSpec}`,
resolvedVia: 'nodejs.org',
manifest: {
name: 'node',
version,
bin: getNodeBinLocationForCurrentOS(),
},
resolution: {
type: 'nodeRuntime',
integrity,
_shasumsFileContent: shasumsFileContent,
},
}
}

async function loadShasumsFile (fetch: FetchFromRegistry, nodeMirrorBaseUrl: string, version: string): Promise<{
shasumsFileContent: string
versionIntegrity: string
}> {
const integritiesFileUrl = `${nodeMirrorBaseUrl}/v${version}/SHASUMS256.txt`
const shasumsFileContent = await fetchShasumsFile(fetch, integritiesFileUrl)

const versionIntegrity = createHash(shasumsFileContent)

return {
shasumsFileContent,
versionIntegrity,
}
}

interface NodeVersion {
version: string
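For orientation, a hedged sketch of calling the new resolver directly; the `createFetchFromRegistry({})` construction is an assumption, and the wanted-dependency shape follows `@pnpm/resolver-base` as used above:

```ts
import { createFetchFromRegistry } from '@pnpm/fetch'
import { resolveNodeRuntime } from '@pnpm/node.resolver'

async function example (): Promise<void> {
  const result = await resolveNodeRuntime(
    { fetchFromRegistry: createFetchFromRegistry({}), rawConfig: {} },
    { alias: 'node', bareSpecifier: 'runtime:^24.4.0' }
  )
  // result?.id is the exact resolved version (e.g. 'node@runtime:24.x.y'), and
  // result?.resolution carries { type: 'nodeRuntime', integrity, _shasumsFileContent }.
  console.log(result?.id, result?.resolution.integrity)
}
```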
20 env/node.resolver/tsconfig.json (vendored)
@@ -9,6 +9,15 @@
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../config/config"
},
{
"path": "../../crypto/hash"
},
{
"path": "../../crypto/shasums-file"
},
{
"path": "../../network/fetch"
},
@@ -16,7 +25,16 @@
"path": "../../network/fetching-types"
},
{
"path": "../node.fetcher"
"path": "../../packages/constants"
},
{
"path": "../../packages/error"
},
{
"path": "../../packages/types"
},
{
"path": "../../resolving/resolver-base"
}
]
}
@@ -1,7 +1,5 @@
import { resolveNodeVersion } from '@pnpm/node.resolver'
import { getNodeMirror } from './getNodeMirror'
import { resolveNodeVersion, parseEnvSpecifier, getNodeMirror } from '@pnpm/node.resolver'
import { getNodeDir, type NvmNodeCommandOptions } from './node'
import { parseEnvSpecifier } from './parseEnvSpecifier'
import { createFetchFromRegistry } from '@pnpm/fetch'
import { globalInfo } from '@pnpm/logger'
4 env/plugin-commands-env/src/envList.ts (vendored)
@@ -1,12 +1,10 @@
import { promises as fs, existsSync } from 'fs'
import path from 'path'
import { createFetchFromRegistry } from '@pnpm/fetch'
import { resolveNodeVersions } from '@pnpm/node.resolver'
import { resolveNodeVersions, parseEnvSpecifier, getNodeMirror } from '@pnpm/node.resolver'
import { PnpmError } from '@pnpm/error'
import semver from 'semver'
import { getNodeMirror } from './getNodeMirror'
import { getNodeVersionsBaseDir, type NvmNodeCommandOptions } from './node'
import { parseEnvSpecifier } from './parseEnvSpecifier'
import { getNodeExecPathAndTargetDir, getNodeExecPathInNodeDir } from './utils'

export async function envList (opts: NvmNodeCommandOptions, params: string[]): Promise<string> {
2 env/plugin-commands-env/src/node.ts (vendored)
@@ -6,11 +6,11 @@ import { getSystemNodeVersion } from '@pnpm/env.system-node-version'
import { createFetchFromRegistry, type FetchFromRegistry } from '@pnpm/fetch'
import { globalInfo, globalWarn } from '@pnpm/logger'
import { fetchNode } from '@pnpm/node.fetcher'
import { getNodeMirror } from '@pnpm/node.resolver'
import { getStorePath } from '@pnpm/store-path'
import { type PrepareExecutionEnvOptions, type PrepareExecutionEnvResult } from '@pnpm/types'
import loadJsonFile from 'load-json-file'
import writeJsonFile from 'write-json-file'
import { getNodeMirror } from './getNodeMirror'
import { isValidVersion, parseNodeSpecifier } from './parseNodeSpecifier'

export type NvmNodeCommandOptions = Pick<Config,
@@ -1,10 +1,16 @@
import { type Resolution, type GitResolution, type DirectoryResolution } from '@pnpm/resolver-base'
import {
type Resolution,
type GitResolution,
type DirectoryResolution,
type NodeRuntimeResolution,
} from '@pnpm/resolver-base'
import { type Cafs } from '@pnpm/cafs-types'
import { type DependencyManifest } from '@pnpm/types'

export interface PkgNameVersion {
name?: string
version?: string
id: string
}

export interface FetchOptions {
@@ -43,6 +49,8 @@ export interface GitFetcherResult {

export type GitFetcher = FetchFunction<GitResolution, GitFetcherOptions, GitFetcherResult>

export type NodeRuntimeFetcher = FetchFunction<NodeRuntimeResolution>

export interface DirectoryFetcherOptions {
lockfileDir: string
readManifest?: boolean
@@ -64,6 +72,7 @@ export interface Fetchers {
gitHostedTarball: FetchFunction
directory: DirectoryFetcher
git: GitFetcher
nodeRuntime: NodeRuntimeFetcher
}

interface CustomFetcherFactoryOptions {
@@ -1,7 +1,7 @@
import type { Resolution } from '@pnpm/resolver-base'
import type { Fetchers, FetchFunction, DirectoryFetcher, GitFetcher } from '@pnpm/fetcher-base'
import type { Fetchers, FetchFunction, DirectoryFetcher, GitFetcher, NodeRuntimeFetcher } from '@pnpm/fetcher-base'

export function pickFetcher (fetcherByHostingType: Partial<Fetchers>, resolution: Resolution): FetchFunction | DirectoryFetcher | GitFetcher {
export function pickFetcher (fetcherByHostingType: Partial<Fetchers>, resolution: Resolution): FetchFunction | DirectoryFetcher | GitFetcher | NodeRuntimeFetcher {
let fetcherType: keyof Fetchers | undefined = resolution.type

if (resolution.type == null) {
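A short sketch of the dispatch this change enables: `pickFetcher` keys into the `Fetchers` map by `resolution.type`, so a `nodeRuntime` resolution is routed to the `nodeRuntime` fetcher registered in the diffs above (illustrative only):

```ts
import { pickFetcher } from '@pnpm/pick-fetcher'
import { type Fetchers } from '@pnpm/fetcher-base'

// With resolution.type === 'nodeRuntime', pickFetcher returns fetchers.nodeRuntime,
// i.e. the fetcher produced by createNodeRuntimeFetcher().
function pickNodeRuntimeFetcher (fetchers: Partial<Fetchers>, integrity: string) {
  return pickFetcher(fetchers, { type: 'nodeRuntime', integrity })
}
```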
@@ -46,6 +46,7 @@ const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
retries: 1,
},
})
const pkg = { id: '' }

test('fail when tarball size does not match content-length', async () => {
const scope = nock(registry)
@@ -70,7 +71,7 @@ test('fail when tarball size does not match content-length', async () => {
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new BadTarballError({
@@ -102,7 +103,7 @@ test('retry when tarball size does not match content-length', async () => {
const result = await fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(result.filesIndex).toBeTruthy()
@@ -128,7 +129,7 @@ test('fail when integrity check fails two times in a row', async () => {
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new TarballIntegrityError({
@@ -167,7 +168,7 @@ test('retry when integrity check fails', async () => {
onStart (size, attempts) {
params.push([size, attempts])
},
pkg: {},
pkg,
})

expect(params[0]).toStrictEqual([1194, 1])
@@ -190,7 +191,7 @@ test('fail when integrity check of local file fails', async () => {
fetch.localTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new TarballIntegrityError({
@@ -216,7 +217,7 @@ test("don't fail when integrity check of local file succeeds", async () => {
const { filesIndex } = await fetch.localTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(typeof filesIndex['package.json']).toBe('string')
@@ -243,7 +244,7 @@ test("don't fail when fetching a local tarball in offline mode", async () => {
const { filesIndex } = await fetch.localTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(typeof filesIndex['package.json']).toBe('string')
@@ -271,7 +272,7 @@ test('fail when trying to fetch a non-local tarball in offline mode', async () =
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new PnpmError('NO_OFFLINE_TARBALL',
@@ -299,7 +300,7 @@ test('retry on server error', async () => {
const index = await fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(index).toBeTruthy()
@@ -323,7 +324,7 @@ test('throw error when accessing private package w/o authorization', async () =>
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new FetchError(
@@ -356,7 +357,7 @@ test('do not retry when package does not exist', async () => {
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(
new FetchError(
@@ -407,7 +408,7 @@ test('accessing private packages', async () => {
const index = await fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(index).toBeTruthy()
@@ -428,7 +429,7 @@ test('fetch a big repository', async () => {
const result = await fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(result.filesIndex).toBeTruthy()
@@ -443,7 +444,7 @@ test('fail when preparing a git-hosted package', async () => {
fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow('Failed to prepare git-hosted package fetched from "https://codeload.github.com/pnpm-e2e/prepare-script-fails/tar.gz/ba58874aae1210a777eb309dd01a9fdacc7e54e7": @pnpm.e2e/prepare-script-fails@1.0.0 npm-install: `npm install`')
})
@@ -456,7 +457,7 @@ test('take only the files included in the package, when fetching a git-hosted pa
const result = await fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(Object.keys(result.filesIndex).sort()).toStrictEqual([
@@ -482,7 +483,7 @@ test('fail when extracting a broken tarball', async () => {
fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})
).rejects.toThrow(`Failed to add tarball from "${registry}foo.tgz" to store: Invalid checksum for TAR header at offset 0. Expected 0, got NaN`
)
@@ -507,7 +508,7 @@ test('do not build the package when scripts are ignored', async () => {
const { filesIndex } = await fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(filesIndex).toHaveProperty(['package.json'])
@@ -525,7 +526,7 @@ test('when extracting files with the same name, pick the last ones', async () =>
filesIndexFile,
lockfileDir: process.cwd(),
readManifest: true,
pkg: {},
pkg,
})
const pkgJson = JSON.parse(fs.readFileSync(filesIndex['package.json'], 'utf8'))
expect(pkgJson.name).toBe('pkg2')
@@ -552,7 +553,7 @@ test('use the subfolder when path is present', async () => {
const { filesIndex } = await fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})

expect(filesIndex).toHaveProperty(['package.json'])
@@ -579,7 +580,7 @@ test('prevent directory traversal attack when path is present', async () => {
await expect(() => fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})).rejects.toThrow(`Failed to prepare git-hosted package fetched from "${tarball}": Path "${path}" should be a sub directory`)
})

@@ -603,6 +604,6 @@ test('fail when path is not exists', async () => {
await expect(() => fetch.gitHostedTarball(cafs, resolution, {
filesIndexFile,
lockfileDir: process.cwd(),
pkg: {},
pkg,
})).rejects.toThrow(`Failed to prepare git-hosted package fetched from "${tarball}": Path "${path}" is not a directory`)
})
@@ -109,10 +109,16 @@ export interface GitRepositoryResolution {
path?: string
}

export interface NodeRuntimeResolution {
type: 'nodeRuntime'
integrity: string
}

export type Resolution =
TarballResolution |
GitRepositoryResolution |
DirectoryResolution
DirectoryResolution |
NodeRuntimeResolution

export type LockfileResolution = Resolution | {
integrity: string
@@ -7,5 +7,8 @@ export function packageIdFromSnapshot (
pkgSnapshot: PackageSnapshot
): PkgId {
if (pkgSnapshot.id) return pkgSnapshot.id as PkgId
if (depPath.startsWith('node@runtime:')) {
return depPath as unknown as PkgId
}
return dp.tryGetPackageId(depPath) ?? depPath
}
@@ -18,3 +18,7 @@ export const FULL_META_DIR = 'metadata-full-v1.3' // This is currently not used
export const FULL_FILTERED_META_DIR = 'metadata-v1.3'

export const USEFUL_NON_ROOT_PNPM_FIELDS = ['executionEnv'] as const

export function getNodeBinLocationForCurrentOS (): string {
return process.platform === 'win32' ? 'node.exe' : 'bin/node'
}
@@ -6,7 +6,18 @@ jest.mock('@pnpm/os.env.path-extender', () => ({
addDirToEnvPath: jest.fn(),
}))

jest.mock('fs')
jest.mock('fs', () => {
const actualFs = jest.createMockFromModule('fs')
return {
// @ts-expect-error
...actualFs,
promises: {
// @ts-expect-error
...actualFs.promises,
writeFile: jest.fn(),
},
}
})

test('setup makes no changes', async () => {
(addDirToEnvPath as jest.Mock).mockReturnValue(Promise.resolve<PathExtenderReport>({
@@ -54,6 +54,20 @@ export interface DependenciesMeta {
}
}

export interface DevEngineDependency {
name: string
version?: string
onFail?: 'ignore' | 'warn' | 'error' | 'download'
}

export interface DevEngines {
os?: DevEngineDependency | DevEngineDependency[]
cpu?: DevEngineDependency | DevEngineDependency[]
libc?: DevEngineDependency | DevEngineDependency[]
runtime?: DevEngineDependency | DevEngineDependency[]
packageManager?: DevEngineDependency | DevEngineDependency[]
}

export interface PublishConfig extends Record<string, unknown> {
directory?: string
linkDirectory?: boolean
@@ -101,6 +115,7 @@ export interface BaseManifest {
npm?: string
pnpm?: string
}
devEngines?: DevEngines
cpu?: string[]
os?: string[]
libc?: string[]
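For reference, the changeset's JSON example expressed with these new types (a sketch; it assumes `DevEngines` is re-exported from `@pnpm/types` alongside the other manifest types):

```ts
import { type DevEngines } from '@pnpm/types'

// The devEngines value from the usage example, now statically typed.
const devEngines: DevEngines = {
  runtime: {
    name: 'node',
    version: '^24.4.0',
    onFail: 'download',
  },
}
```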
@@ -38,6 +38,7 @@
"@pnpm/fetching-types": "workspace:*",
"@pnpm/git-fetcher": "workspace:*",
"@pnpm/network.auth-header": "workspace:*",
"@pnpm/node.fetcher": "workspace:*",
"@pnpm/resolver-base": "workspace:*",
"@pnpm/tarball-fetcher": "workspace:*",
"@pnpm/types": "workspace:*",
@@ -6,11 +6,12 @@ import {
import { type AgentOptions, createFetchFromRegistry } from '@pnpm/fetch'
import { type SslConfig } from '@pnpm/types'
import { type FetchFromRegistry, type GetAuthHeader, type RetryTimeoutOptions } from '@pnpm/fetching-types'
import type { CustomFetchers, GitFetcher, DirectoryFetcher } from '@pnpm/fetcher-base'
import type { CustomFetchers, GitFetcher, DirectoryFetcher, NodeRuntimeFetcher } from '@pnpm/fetcher-base'
import { createDirectoryFetcher } from '@pnpm/directory-fetcher'
import { createGitFetcher } from '@pnpm/git-fetcher'
import { createTarballFetcher, type TarballFetchers } from '@pnpm/tarball-fetcher'
import { createGetAuthHeaderByURI } from '@pnpm/network.auth-header'
import { createNodeRuntimeFetcher } from '@pnpm/node.fetcher'
import mapValues from 'ramda/src/map'

export type { ResolveFunction }
@@ -57,18 +58,24 @@ export function createResolver (opts: ClientOptions): { resolve: ResolveFunction
type Fetchers = {
git: GitFetcher
directory: DirectoryFetcher
nodeRuntime: NodeRuntimeFetcher
} & TarballFetchers

function createFetchers (
fetchFromRegistry: FetchFromRegistry,
getAuthHeader: GetAuthHeader,
opts: Pick<ClientOptions, 'rawConfig' | 'retry' | 'gitShallowHosts' | 'resolveSymlinksInInjectedDirs' | 'unsafePerm' | 'includeOnlyPackageFiles'>,
opts: Pick<ClientOptions, 'rawConfig' | 'retry' | 'gitShallowHosts' | 'resolveSymlinksInInjectedDirs' | 'unsafePerm' | 'includeOnlyPackageFiles' | 'offline'>,
customFetchers?: CustomFetchers
): Fetchers {
const defaultFetchers = {
...createTarballFetcher(fetchFromRegistry, getAuthHeader, opts),
...createGitFetcher(opts),
...createDirectoryFetcher({ resolveSymlinks: opts.resolveSymlinksInInjectedDirs, includeOnlyPackageFiles: opts.includeOnlyPackageFiles }),
...createNodeRuntimeFetcher({
fetch: fetchFromRegistry,
offline: opts.offline,
rawConfig: opts.rawConfig,
}),
}

const overwrites = mapValues(
@@ -9,6 +9,9 @@
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../env/node.fetcher"
},
{
"path": "../../fetching/directory-fetcher"
},
147 pkg-manager/core/test/install/nodeRuntime.ts (Normal file)
@@ -0,0 +1,147 @@
import { LOCKFILE_VERSION, WANTED_LOCKFILE } from '@pnpm/constants'
import { prepareEmpty } from '@pnpm/prepare'
import { addDependenciesToPackage, install } from '@pnpm/core'
import { getIntegrity } from '@pnpm/registry-mock'
import { sync as rimraf } from '@zkochan/rimraf'
import { sync as writeYamlFile } from 'write-yaml-file'
import { testDefaults } from '../utils'

test('installing Node.js runtime', async () => {
  const project = prepareEmpty()
  const { updatedManifest: manifest } = await addDependenciesToPackage({}, ['node@runtime:22.0.0'], testDefaults({ fastUnpack: false }))

  project.isExecutable('.bin/node')
  expect(project.readLockfile()).toStrictEqual({
    settings: {
      autoInstallPeers: true,
      excludeLinksFromLockfile: false,
    },
    importers: {
      '.': {
        dependencies: {
          node: {
            specifier: 'runtime:22.0.0',
            version: 'runtime:22.0.0',
          },
        },
      },
    },
    lockfileVersion: LOCKFILE_VERSION,
    packages: {
      'node@runtime:22.0.0': {
        hasBin: true,
        resolution: {
          integrity: 'sha256-NexAQ7DxOFuPb9J7KNeuLtuSeaxFVUGlTrqSqs7AEbo=',
          type: 'nodeRuntime',
        },
      },
    },
    snapshots: {
      'node@runtime:22.0.0': {},
    },
  })

  rimraf('node_modules')
  await install(manifest, testDefaults({ frozenLockfile: true }, {
    offline: true, // We want to verify that Node.js is resolved from cache.
  }))
  project.isExecutable('.bin/node')

  await addDependenciesToPackage(manifest, ['@pnpm.e2e/dep-of-pkg-with-1-dep@100.1.0'], testDefaults({ fastUnpack: false }))
  project.has('@pnpm.e2e/dep-of-pkg-with-1-dep')

  expect(project.readLockfile()).toStrictEqual({
    settings: {
      autoInstallPeers: true,
      excludeLinksFromLockfile: false,
    },
    importers: {
      '.': {
        dependencies: {
          node: {
            specifier: 'runtime:22.0.0',
            version: 'runtime:22.0.0',
          },
          '@pnpm.e2e/dep-of-pkg-with-1-dep': {
            specifier: '100.1.0',
            version: '100.1.0',
          },
        },
      },
    },
    lockfileVersion: LOCKFILE_VERSION,
    packages: {
      'node@runtime:22.0.0': {
        hasBin: true,
        resolution: {
          integrity: 'sha256-NexAQ7DxOFuPb9J7KNeuLtuSeaxFVUGlTrqSqs7AEbo=',
          type: 'nodeRuntime',
        },
      },
      '@pnpm.e2e/dep-of-pkg-with-1-dep@100.1.0': {
        resolution: {
          integrity: getIntegrity('@pnpm.e2e/dep-of-pkg-with-1-dep', '100.1.0'),
        },
      },
    },
    snapshots: {
      'node@runtime:22.0.0': {},
      '@pnpm.e2e/dep-of-pkg-with-1-dep@100.1.0': {},
    },
  })
})

test('installing node.js runtime fails if offline mode is used and node.js not found locally', async () => {
  prepareEmpty()
  await expect(
    addDependenciesToPackage({}, ['node@runtime:22.0.0'], testDefaults({ fastUnpack: false }, { offline: true }))
  ).rejects.toThrow(/Offline Node.js resolution is not supported/)
})

test('installing Node.js runtime from RC channel', async () => {
  const project = prepareEmpty()
  await addDependenciesToPackage({}, ['node@runtime:24.0.0-rc.4'], testDefaults({ fastUnpack: false }))

  project.isExecutable('.bin/node')
})

test('installing Node.js runtime fails if integrity check fails', async () => {
  prepareEmpty()

  writeYamlFile(WANTED_LOCKFILE, {
    settings: {
      autoInstallPeers: true,
      excludeLinksFromLockfile: false,
    },
    importers: {
      '.': {
        devDependencies: {
          node: {
            specifier: 'runtime:22.0.0',
            version: 'runtime:22.0.0',
          },
        },
      },
    },
    lockfileVersion: LOCKFILE_VERSION,
    packages: {
      'node@runtime:22.0.0': {
        hasBin: true,
        resolution: {
          integrity: 'sha256-nEXaq7dXofUpB9j7knEUlTUsEAXfvugLtRQsQS7aeBO=',
          type: 'nodeRuntime',
        },
      },
    },
    snapshots: {
      'node@runtime:22.0.0': {},
    },
  })

  const manifest = {
    devDependencies: {
      node: 'runtime:22.0.0',
    },
  }
  await expect(install(manifest, testDefaults({ frozenLockfile: true }))).rejects.toThrow(/The integrity of .* failed/)
})
@@ -213,6 +213,8 @@ async function fetchDeps (
const pkgResolution = {
id: packageId,
resolution,
name: pkgName,
version: pkgVersion,
}
if (skipFetch) {
const { filesIndexFile } = opts.storeController.getFilesIndexFilePath({
@@ -227,10 +229,6 @@
lockfileDir: opts.lockfileDir,
ignoreScripts: opts.ignoreScripts,
pkg: pkgResolution,
expectedPkg: {
name: pkgName,
version: pkgVersion,
},
}) as any // eslint-disable-line
if (fetchResponse instanceof Promise) fetchResponse = await fetchResponse
} catch (err: any) { // eslint-disable-line
@@ -32,6 +32,7 @@
"compile": "tsc --build && pnpm run lint --fix"
},
"dependencies": {
"@pnpm/constants": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/manifest-utils": "workspace:*",
"@pnpm/package-bins": "workspace:*",
@@ -1,6 +1,7 @@
import { promises as fs, existsSync } from 'fs'
import Module from 'module'
import path from 'path'
import { getNodeBinLocationForCurrentOS } from '@pnpm/constants'
import { PnpmError } from '@pnpm/error'
import { logger, globalWarn } from '@pnpm/logger'
import { getAllDependenciesFromManifest } from '@pnpm/manifest-utils'
@@ -205,6 +206,16 @@ async function getPackageBins (
: await safeReadPkgJson(target)

if (manifest == null) {
if (path.basename(target) === 'node') {
return [{
name: 'node',
path: path.join(target, getNodeBinLocationForCurrentOS()),
ownName: true,
pkgName: '',
pkgVersion: '',
makePowerShellShim: false,
}]
}
// There's a directory in node_modules without package.json: ${target}.
// This used to be a warning but it didn't really cause any issues.
return []
@@ -15,6 +15,9 @@
{
"path": "../../fs/read-modules-dir"
},
{
"path": "../../packages/constants"
},
{
"path": "../../packages/error"
},
@@ -293,13 +293,13 @@ async function resolveAndFetch (
ignoreScripts: options.ignoreScripts,
lockfileDir: options.lockfileDir,
pkg: {
...pkg,
...(options.expectedPkg?.name != null
? (updated ? { name: options.expectedPkg.name, version: pkg.version } : options.expectedPkg)
: pkg
),
id,
resolution,
},
expectedPkg: options.expectedPkg?.name != null
? (updated ? { name: options.expectedPkg.name, version: pkg.version } : options.expectedPkg)
: pkg,
onFetchError: options.onFetchError,
})
@@ -354,7 +354,6 @@ function fetchToStore (
readManifest?: boolean
) => Promise<{ verified: boolean, pkgFilesIndex: PackageFilesIndex, manifest?: DependencyManifest, requiresBuild: boolean }>
fetch: (
packageId: string,
resolution: Resolution,
opts: FetchOptions
) => Promise<FetchResult>
@@ -498,22 +497,22 @@
if (
(
pkgFilesIndex.name != null &&
opts.expectedPkg?.name != null &&
pkgFilesIndex.name.toLowerCase() !== opts.expectedPkg.name.toLowerCase()
opts.pkg?.name != null &&
pkgFilesIndex.name.toLowerCase() !== opts.pkg.name.toLowerCase()
) ||
(
pkgFilesIndex.version != null &&
opts.expectedPkg?.version != null &&
opts.pkg?.version != null &&
// We used to not normalize the package versions before writing them to the lockfile and store.
// So it may happen that the version will be in different formats.
// For instance, v1.0.0 and 1.0.0
// Hence, we need to use semver.eq() to compare them.
!equalOrSemverEqual(pkgFilesIndex.version, opts.expectedPkg.version)
!equalOrSemverEqual(pkgFilesIndex.version, opts.pkg.version)
)
) {
const msg = `Package name mismatch found while reading ${JSON.stringify(opts.pkg.resolution)} from the store.`
const hint = `This means that either the lockfile is broken or the package metadata (name and version) inside the package's package.json file doesn't match the metadata in the registry. \
Expected package: ${opts.expectedPkg.name}@${opts.expectedPkg.version}. \
Expected package: ${opts.pkg.name}@${opts.pkg.version}. \
Actual package in the store with the given integrity: ${pkgFilesIndex.name}@${pkgFilesIndex.version}.`
if (ctx.strictStorePkgContentCheck ?? true) {
throw new PnpmError('UNEXPECTED_PKG_CONTENT_IN_STORE', msg, {
@@ -553,7 +552,6 @@ Actual package in the store with the given integrity: ${pkgFilesIndex.name}@${pk
const priority = (++ctx.requestsQueue.counter % ctx.requestsQueue.concurrency === 0 ? -1 : 1) * 1000

const fetchedPackage = await ctx.requestsQueue.add(async () => ctx.fetch(
opts.pkg.id,
opts.pkg.resolution,
{
filesIndexFile,
@@ -577,6 +575,7 @@ Actual package in the store with the given integrity: ${pkgFilesIndex.name}@${pk
pkg: {
name: opts.pkg.name,
version: opts.pkg.version,
id: opts.pkg.id,
},
}
), { priority })
@@ -634,7 +633,6 @@ async function tarballIsUpToDate (
|
||||
async function fetcher (
|
||||
fetcherByHostingType: Fetchers,
|
||||
cafs: Cafs,
|
||||
packageId: string,
|
||||
resolution: Resolution,
|
||||
opts: FetchOptions
|
||||
): Promise<FetchResult> {
|
||||
@@ -643,7 +641,7 @@ async function fetcher (
|
||||
return await fetch(cafs, resolution as any, opts) // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
} catch (err: any) { // eslint-disable-line
|
||||
packageRequestLogger.warn({
|
||||
message: `Fetching ${packageId} failed!`,
|
||||
message: `Fetching ${opts.pkg.id} failed!`,
|
||||
prefix: opts.lockfileDir,
|
||||
})
|
||||
throw err
|
||||
|
||||
@@ -864,10 +864,6 @@ test('throw exception if the package data in the store differs from the expected
      id: pkgResponse.body.id,
      resolution: pkgResponse.body.resolution,
    },
    expectedPkg: {
      name: 'is-negative',
      version: '1.0.0',
    },
  })
  await expect(fetching()).rejects.toThrow(/Package name mismatch found while reading/)
}
@@ -892,10 +888,6 @@ test('throw exception if the package data in the store differs from the expected
      id: pkgResponse.body.id,
      resolution: pkgResponse.body.resolution,
    },
    expectedPkg: {
      name: 'is-negative',
      version: '2.0.0',
    },
  })
  await expect(fetching()).rejects.toThrow(/Package name mismatch found while reading/)
}
@@ -920,10 +912,6 @@ test('throw exception if the package data in the store differs from the expected
      id: pkgResponse.body.id,
      resolution: pkgResponse.body.resolution,
    },
    expectedPkg: {
      name: 'is-positive',
      version: 'v1.0.0',
    },
  })
  await expect(fetching()).resolves.toStrictEqual(expect.anything())
}
@@ -947,10 +935,6 @@ test('throw exception if the package data in the store differs from the expected
      id: pkgResponse.body.id,
      resolution: pkgResponse.body.resolution,
    },
    expectedPkg: {
      name: 'IS-positive',
      version: 'v1.0.0',
    },
  })
  await expect(fetching()).resolves.toStrictEqual(expect.anything())
}
@@ -1075,10 +1059,6 @@ test('should skip store integrity check and resolve manifest if fetchRawManifest
      id: pkgResponse.body.id,
      resolution: pkgResponse.body.resolution,
    },
    expectedPkg: {
      name: 'is-positive',
      version: '1.0.0',
    },
  })

  await fetchResult.fetching()

@@ -119,3 +119,58 @@ describeOnLinuxOnly('filters optional dependencies based on --libc', () => {
    expect(pkgDirs).not.toContain(notFound)
  })
})

test('install Node.js when devEngines runtime is set with onFail=download', async () => {
  const project = prepare({
    devEngines: {
      runtime: {
        name: 'node',
        version: '24.0.0',
        onFail: 'download',
      },
    },
  })

  await install.handler({
    ...DEFAULT_OPTS,
    dir: process.cwd(),
  })

  project.isExecutable('.bin/node')
  const lockfile = project.readLockfile()
  expect(lockfile.importers['.'].devDependencies).toStrictEqual({
    node: {
      specifier: 'runtime:24.0.0',
      version: 'runtime:24.0.0',
    },
  })

  await add.handler({
    ...DEFAULT_OPTS,
    dir: process.cwd(),
  }, ['is-positive@1.0.0'])

  await add.handler({
    ...DEFAULT_OPTS,
    dir: process.cwd(),
  }, ['is-even'])
})

test('do not install Node.js when devEngines runtime is not set to onFail=download', async () => {
  const project = prepare({
    devEngines: {
      runtime: {
        name: 'node',
        version: '24.0.0',
      },
    },
  })

  await install.handler({
    ...DEFAULT_OPTS,
    dir: process.cwd(),
  })

  const lockfile = project.readLockfile()
  expect(lockfile.importers['.'].devDependencies).toBeUndefined()
})

@@ -5,12 +5,13 @@ import {
  type PackageSnapshot,
  pruneSharedLockfile,
} from '@pnpm/lockfile.pruner'
import { type DirectoryResolution, type Resolution } from '@pnpm/resolver-base'
import { type Resolution } from '@pnpm/resolver-base'
import { type DepPath, type Registries } from '@pnpm/types'
import * as dp from '@pnpm/dependency-path'
import getNpmTarballUrl from 'get-npm-tarball-url'
import { type KeyValuePair } from 'ramda'
import partition from 'ramda/src/partition'
import omit from 'ramda/src/omit'
import { depPathToRef } from './depPathToRef'
import { type ResolvedPackage } from './resolveDependencies'
import { type DependenciesGraph } from '.'
@@ -80,7 +81,7 @@ function toLockfileDependency (
  if (opts.depPath.includes(':')) {
    // There is no guarantee that a non-npmjs.org-hosted package is going to have a version field.
    // Also, for local directory dependencies, the version is not needed.
    if (pkg.version && (lockfileResolution as DirectoryResolution).type !== 'directory') {
    if (pkg.version && (!('type' in lockfileResolution) || lockfileResolution.type !== 'directory' && lockfileResolution.type !== 'nodeRuntime')) {
      result['version'] = pkg.version
    }
  }
@@ -182,6 +183,9 @@ function toLockfileResolution (
  lockfileIncludeTarballUrl?: boolean
): LockfileResolution {
  if (resolution.type !== undefined || !resolution['integrity']) {
    if (resolution.type === 'nodeRuntime') {
      return omit(['_shasumsFileContent'], resolution)
    }
    return resolution as LockfileResolution
  }
  if (lockfileIncludeTarballUrl) {

@@ -0,0 +1,9 @@
{
  "devEngines": {
    "runtime": {
      "name": "node",
      "version": "24",
      "onFail": "download"
    }
  }
}

@@ -43,6 +43,9 @@
    "read-yaml-file": "catalog:",
    "strip-bom": "catalog:"
  },
  "peerDependencies": {
    "@pnpm/logger": "catalog:"
  },
  "devDependencies": {
    "@pnpm/read-project-manifest": "workspace:*",
    "@pnpm/test-fixtures": "workspace:*",

@@ -1,6 +1,7 @@
import { promises as fs, type Stats } from 'fs'
import path from 'path'
import { PnpmError } from '@pnpm/error'
import { globalWarn } from '@pnpm/logger'
import { type ProjectManifest } from '@pnpm/types'
import { extractComments, type CommentSpecifier } from '@pnpm/text.comments-parser'
import { writeProjectManifest } from '@pnpm/write-project-manifest'
@@ -58,7 +59,7 @@ export async function tryReadProjectManifest (projectDir: string): Promise<{
    const { data, text } = await readJsonFile(manifestPath)
    return {
      fileName: 'package.json',
      manifest: data,
      manifest: convertManifestAfterRead(data),
      writeProjectManifest: createManifestWriter({
        ...detectFileFormatting(text),
        initialManifest: data,
@@ -73,7 +74,7 @@ export async function tryReadProjectManifest (projectDir: string): Promise<{
    const { data, text } = await readJson5File(manifestPath)
    return {
      fileName: 'package.json5',
      manifest: data,
      manifest: convertManifestAfterRead(data),
      writeProjectManifest: createManifestWriter({
        ...detectFileFormattingAndComments(text),
        initialManifest: data,
@@ -88,7 +89,7 @@ export async function tryReadProjectManifest (projectDir: string): Promise<{
      const manifest = await readPackageYaml(manifestPath)
      return {
        fileName: 'package.yaml',
        manifest,
        manifest: convertManifestAfterRead(manifest),
        writeProjectManifest: createManifestWriter({ initialManifest: manifest, manifestPath }),
      }
    } catch (err: any) { // eslint-disable-line
@@ -155,7 +156,7 @@ export async function readExactProjectManifest (manifestPath: string): Promise<R
  case 'package.json': {
    const { data, text } = await readJsonFile(manifestPath)
    return {
      manifest: data,
      manifest: convertManifestAfterRead(data),
      writeProjectManifest: createManifestWriter({
        ...detectFileFormatting(text),
        initialManifest: data,
@@ -166,7 +167,7 @@ export async function readExactProjectManifest (manifestPath: string): Promise<R
  case 'package.json5': {
    const { data, text } = await readJson5File(manifestPath)
    return {
      manifest: data,
      manifest: convertManifestAfterRead(data),
      writeProjectManifest: createManifestWriter({
        ...detectFileFormattingAndComments(text),
        initialManifest: data,
@@ -177,7 +178,7 @@ export async function readExactProjectManifest (manifestPath: string): Promise<R
  case 'package.yaml': {
    const manifest = await readPackageYaml(manifestPath)
    return {
      manifest,
      manifest: convertManifestAfterRead(manifest),
      writeProjectManifest: createManifestWriter({ initialManifest: manifest, manifestPath }),
    }
  }
@@ -207,7 +208,7 @@ function createManifestWriter (
): WriteProjectManifest {
  let initialManifest = normalize(opts.initialManifest)
  return async (updatedManifest: ProjectManifest, force?: boolean) => {
    updatedManifest = normalize(updatedManifest)
    updatedManifest = convertManifestBeforeWrite(normalize(updatedManifest))
    if (force === true || !equal(initialManifest, updatedManifest)) {
      await writeProjectManifest(opts.manifestPath, updatedManifest, {
        comments: opts.comments,
@@ -221,6 +222,29 @@
  }
}

function convertManifestAfterRead (manifest: ProjectManifest): ProjectManifest {
  if (manifest.devEngines?.runtime && !manifest.devDependencies?.['node']) {
    const runtimes = Array.isArray(manifest.devEngines.runtime) ? manifest.devEngines.runtime : [manifest.devEngines.runtime]
    const nodeRuntime = runtimes.find((runtime) => runtime.name === 'node')
    if (nodeRuntime && nodeRuntime.onFail === 'download') {
      if ('webcontainer' in process.versions) {
        globalWarn('Installation of Node.js versions is not supported in WebContainer')
      } else {
        manifest.devDependencies ??= {}
        manifest.devDependencies['node'] = `runtime:${nodeRuntime.version}`
      }
    }
  }
  return manifest
}

function convertManifestBeforeWrite (manifest: ProjectManifest): ProjectManifest {
  if (manifest.devDependencies?.['node']?.startsWith('runtime:')) {
    delete manifest.devDependencies['node']
  }
  return manifest
}

const dependencyKeys = new Set([
  'dependencies',
  'devDependencies',

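For context, a minimal standalone sketch of the round-trip that `convertManifestAfterRead` and `convertManifestBeforeWrite` implement (illustrative local types, not pnpm's internal API; the real code above also handles an array of runtimes and skips the injection inside WebContainer): reading injects `node` as a devDependency with a `runtime:` specifier, and writing strips it again so the entry never reaches `package.json` on disk.

```ts
interface MinimalManifest {
  devDependencies?: Record<string, string>
  devEngines?: { runtime?: { name: string, version: string, onFail?: string } }
}

function afterRead (manifest: MinimalManifest): MinimalManifest {
  const runtime = manifest.devEngines?.runtime
  if (runtime?.name === 'node' && runtime.onFail === 'download' && !manifest.devDependencies?.['node']) {
    // Inject the runtime as a regular-looking dev dependency into the in-memory manifest.
    manifest.devDependencies = { ...manifest.devDependencies, node: `runtime:${runtime.version}` }
  }
  return manifest
}

function beforeWrite (manifest: MinimalManifest): MinimalManifest {
  // Strip the injected entry so it is never persisted.
  if (manifest.devDependencies?.['node']?.startsWith('runtime:')) {
    delete manifest.devDependencies['node']
  }
  return manifest
}

const inMemory = afterRead({ devEngines: { runtime: { name: 'node', version: '24', onFail: 'download' } } })
// inMemory.devDependencies -> { node: 'runtime:24' }
const toDisk = beforeWrite(inMemory)
// toDisk.devDependencies -> {}
```
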
@@ -1,5 +1,6 @@
/// <reference path="../../../__typings__/index.d.ts"/>
import fs from 'fs'
import path from 'path'
import { readProjectManifest, tryReadProjectManifest } from '@pnpm/read-project-manifest'
import { fixtures } from '@pnpm/test-fixtures'
import tempy from 'tempy'
@@ -30,6 +31,37 @@ test('readProjectManifest()', async () => {
  ).toStrictEqual(null)
})

test('readProjectManifest() converts devEngines runtime to devDependencies', async () => {
  const dir = f.prepare('package-json-with-dev-engines')
  const { manifest, writeProjectManifest } = await tryReadProjectManifest(dir)
  expect(manifest).toStrictEqual(
    {
      devDependencies: {
        node: 'runtime:24',
      },
      devEngines: {
        runtime: {
          name: 'node',
          version: '24',
          onFail: 'download',
        },
      },
    }
  )
  await writeProjectManifest(manifest!)
  const pkgJson = JSON.parse(fs.readFileSync(path.join(dir, 'package.json'), 'utf8'))
  expect(pkgJson).toStrictEqual({
    devDependencies: {},
    devEngines: {
      runtime: {
        name: 'node',
        version: '24',
        onFail: 'download',
      },
    },
  })
})

test('preserve tab indentation in json file', async () => {
  process.chdir(tempy.directory())

63
pnpm-lock.yaml
generated
@@ -1877,6 +1877,22 @@ importers:
        specifier: workspace:*
        version: 'link:'

  crypto/shasums-file:
    dependencies:
      '@pnpm/crypto.hash':
        specifier: workspace:*
        version: link:../hash
      '@pnpm/error':
        specifier: workspace:*
        version: link:../../packages/error
      '@pnpm/fetching-types':
        specifier: workspace:*
        version: link:../../network/fetching-types
    devDependencies:
      '@pnpm/crypto.shasums-file':
        specifier: workspace:*
        version: 'link:'

  dedupe/check:
    dependencies:
      '@pnpm/dedupe.types':
@@ -2048,9 +2064,15 @@ importers:

  env/node.fetcher:
    dependencies:
      '@pnpm/constants':
        specifier: workspace:*
        version: link:../../packages/constants
      '@pnpm/create-cafs-store':
        specifier: workspace:*
        version: link:../../store/create-cafs-store
      '@pnpm/crypto.shasums-file':
        specifier: workspace:*
        version: link:../../crypto/shasums-file
      '@pnpm/error':
        specifier: workspace:*
        version: link:../../packages/error
@@ -2060,12 +2082,15 @@ importers:
      '@pnpm/fetching-types':
        specifier: workspace:*
        version: link:../../network/fetching-types
      '@pnpm/pick-fetcher':
      '@pnpm/node.resolver':
        specifier: workspace:*
        version: link:../../fetching/pick-fetcher
        version: link:../node.resolver
      '@pnpm/tarball-fetcher':
        specifier: workspace:*
        version: link:../../fetching/tarball-fetcher
      '@pnpm/worker':
        specifier: workspace:*
        version: link:../../worker
      adm-zip:
        specifier: 'catalog:'
        version: 0.5.16
@@ -2103,12 +2128,30 @@ importers:

  env/node.resolver:
    dependencies:
      '@pnpm/config':
        specifier: workspace:*
        version: link:../../config/config
      '@pnpm/constants':
        specifier: workspace:*
        version: link:../../packages/constants
      '@pnpm/crypto.hash':
        specifier: workspace:*
        version: link:../../crypto/hash
      '@pnpm/crypto.shasums-file':
        specifier: workspace:*
        version: link:../../crypto/shasums-file
      '@pnpm/error':
        specifier: workspace:*
        version: link:../../packages/error
      '@pnpm/fetching-types':
        specifier: workspace:*
        version: link:../../network/fetching-types
      '@pnpm/node.fetcher':
      '@pnpm/resolver-base':
        specifier: workspace:*
        version: link:../node.fetcher
        version: link:../../resolving/resolver-base
      '@pnpm/types':
        specifier: workspace:*
        version: link:../../packages/types
      semver:
        specifier: 'catalog:'
        version: 7.7.1
@@ -4474,6 +4517,9 @@ importers:
      '@pnpm/network.auth-header':
        specifier: workspace:*
        version: link:../../network/auth-header
      '@pnpm/node.fetcher':
        specifier: workspace:*
        version: link:../../env/node.fetcher
      '@pnpm/resolver-base':
        specifier: workspace:*
        version: link:../../resolving/resolver-base
@@ -5074,6 +5120,9 @@ importers:

  pkg-manager/link-bins:
    dependencies:
      '@pnpm/constants':
        specifier: workspace:*
        version: link:../../packages/constants
      '@pnpm/error':
        specifier: workspace:*
        version: link:../../packages/error
@@ -5948,6 +5997,9 @@ importers:
      '@pnpm/graceful-fs':
        specifier: workspace:*
        version: link:../../fs/graceful-fs
      '@pnpm/logger':
        specifier: 'catalog:'
        version: 1001.0.0
      '@pnpm/text.comments-parser':
        specifier: workspace:*
        version: link:../../text/comments-parser
@@ -6670,6 +6722,9 @@ importers:
      '@pnpm/local-resolver':
        specifier: workspace:*
        version: link:../local-resolver
      '@pnpm/node.resolver':
        specifier: workspace:*
        version: link:../../env/node.resolver
      '@pnpm/npm-resolver':
        specifier: workspace:*
        version: link:../npm-resolver

@@ -36,6 +36,7 @@
    "@pnpm/fetching-types": "workspace:*",
    "@pnpm/git-resolver": "workspace:*",
    "@pnpm/local-resolver": "workspace:*",
    "@pnpm/node.resolver": "workspace:*",
    "@pnpm/npm-resolver": "workspace:*",
    "@pnpm/resolver-base": "workspace:*",
    "@pnpm/tarball-resolver": "workspace:*"

@@ -2,6 +2,7 @@ import { PnpmError } from '@pnpm/error'
import { type FetchFromRegistry, type GetAuthHeader } from '@pnpm/fetching-types'
import { type GitResolveResult, createGitResolver } from '@pnpm/git-resolver'
import { type LocalResolveResult, resolveFromLocal } from '@pnpm/local-resolver'
import { resolveNodeRuntime, type NodeRuntimeResolveResult } from '@pnpm/node.resolver'
import {
  createNpmResolver,
  type JsrResolveResult,
@@ -33,16 +34,20 @@ export type DefaultResolveResult =
  | LocalResolveResult
  | TarballResolveResult
  | WorkspaceResolveResult
  | NodeRuntimeResolveResult

export type DefaultResolver = (wantedDependency: WantedDependency, opts: ResolveOptions) => Promise<DefaultResolveResult>

export function createResolver (
  fetchFromRegistry: FetchFromRegistry,
  getAuthHeader: GetAuthHeader,
  pnpmOpts: ResolverFactoryOptions
  pnpmOpts: ResolverFactoryOptions & {
    rawConfig: Record<string, string>
  }
): { resolve: DefaultResolver, clearCache: () => void } {
  const { resolveFromNpm, resolveFromJsr, clearCache } = createNpmResolver(fetchFromRegistry, getAuthHeader, pnpmOpts)
  const resolveFromGit = createGitResolver(pnpmOpts)
  const _resolveNodeRuntime = resolveNodeRuntime.bind(null, { fetchFromRegistry, offline: pnpmOpts.offline, rawConfig: pnpmOpts.rawConfig })
  return {
    resolve: async (wantedDependency, opts) => {
      const resolution = await resolveFromNpm(wantedDependency, opts as ResolveFromNpmOptions) ??
@@ -51,7 +56,8 @@ export function createResolver (
        await resolveFromTarball(fetchFromRegistry, wantedDependency as { bareSpecifier: string }) ??
        await resolveFromGit(wantedDependency as { bareSpecifier: string }) ??
        await resolveFromLocal(wantedDependency as { bareSpecifier: string }, opts)
        ))
        )) ??
        await _resolveNodeRuntime(wantedDependency)
      if (!resolution) {
        throw new PnpmError(
          'SPEC_NOT_SUPPORTED_BY_ANY_RESOLVER',

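The wiring above adds the Node.js runtime resolver as the last step of the resolver chain: every earlier resolver either claims the dependency or yields nothing, and only then is the spec handed to `_resolveNodeRuntime`. A minimal sketch of that nullish-coalescing fallback pattern, with hypothetical stub resolvers rather than pnpm's actual ones:

```ts
type StubResult = { id: string } | undefined

// Each stub either claims the spec or returns undefined so the next resolver can try.
const resolveFromRegistryStub = async (spec: string): Promise<StubResult> =>
  spec.startsWith('npm:') ? { id: `registry:${spec}` } : undefined
const resolveRuntimeStub = async (spec: string): Promise<StubResult> =>
  spec.startsWith('runtime:') ? { id: spec } : undefined

async function resolveSpec (spec: string): Promise<StubResult> {
  // The runtime resolver deliberately comes last, mirroring the hunk above.
  return await resolveFromRegistryStub(spec) ?? await resolveRuntimeStub(spec)
}

// resolveSpec('runtime:^24.4.0') resolves to { id: 'runtime:^24.4.0' }
```
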
@@ -9,6 +9,7 @@ test('createResolver()', () => {
    registries: {
      default: 'https://registry.npmjs.org/',
    },
    rawConfig: {},
  })
  expect(typeof resolve).toEqual('function')
})

@@ -9,6 +9,9 @@
    "../../__typings__/**/*.d.ts"
  ],
  "references": [
    {
      "path": "../../env/node.resolver"
    },
    {
      "path": "../../network/fetch"
    },

@@ -32,10 +32,17 @@ export interface GitResolution {
  type: 'git'
}

export interface NodeRuntimeResolution {
  type: 'nodeRuntime'
  integrity: string
  _shasumsFileContent?: string
}

export type Resolution =
  | TarballResolution
  | DirectoryResolution
  | GitResolution
  | NodeRuntimeResolution

export interface ResolveResult {
  id: PkgResolutionId

@@ -92,10 +92,6 @@ export interface FetchPackageToStoreOptions {
    id: string
    resolution: Resolution
  }
  /**
   * Expected package is the package name and version that are found in the lockfile.
   */
  expectedPkg?: PkgNameVersion
  onFetchError?: OnFetchError
}