perf: replace node-fetch with undici (#10537)

Replace node-fetch with native undici for HTTP requests throughout pnpm.

Key changes:
- Replace node-fetch with undici's fetch() and dispatcher system
- Replace @pnpm/network.agent with a new dispatcher module in @pnpm/network.fetch
- Cache dispatchers via LRU cache keyed by connection parameters
- Handle proxies via undici ProxyAgent instead of http/https-proxy-agent
- Convert test mocking from nock to undici MockAgent where applicable
- Add minimatch@9 override to fix ESM incompatibility with brace-expansion
This commit is contained in:
Zoltan Kochan
2026-03-29 12:44:00 +02:00
committed by GitHub
parent 978062f143
commit 6c480a4375
75 changed files with 4411 additions and 3289 deletions

View File

@@ -0,0 +1,13 @@
---
"@pnpm/network.fetch": major
"@pnpm/fetching.types": major
"pnpm": minor
---
Replace node-fetch with undici as the HTTP client [#10537](https://github.com/pnpm/pnpm/pull/10537).
- Use undici's native `fetch()` with dispatcher-based connection management
- Support HTTP, HTTPS, SOCKS4, and SOCKS5 proxies
- Cache dispatchers via LRU cache keyed by connection parameters
- Handle per-registry client certificates via nerf-dart URL matching
- Convert test HTTP mocking from nock to undici MockAgent

View File

@@ -11,7 +11,10 @@ export function reportRequestRetry (
return requestRetry$.pipe(
map((log) => {
const retriesLeft = log.maxRetries - log.attempt + 1
const errorCode = log.error.httpStatusCode ?? log.error.status ?? log.error.errno ?? log.error.code
// Extract error code from various possible locations
// HTTP status codes are numeric, system error codes are strings
const errorCode = log.error.status ?? log.error.statusCode ?? log.error.code ?? log.error.errno ??
log.error.cause?.code ?? log.error.cause?.errno ?? 'unknown'
const msg = `${log.method} ${log.url} error (${errorCode}). \
Will retry in ${prettyMilliseconds(log.timeout, { verbose: true })}. \
${retriesLeft} retries left.`

View File

@@ -17,7 +17,11 @@ test('print warning about request retry', async () => {
requestRetryLogger.debug({
attempt: 2,
error: new Error(),
error: {
name: 'Error',
message: 'Connection failed',
code: 'ECONNREFUSED',
},
maxRetries: 5,
method: 'GET',
timeout: 12500,
@@ -27,5 +31,5 @@ test('print warning about request retry', async () => {
expect.assertions(1)
const output = await firstValueFrom(output$)
expect(output).toBe(formatWarn('GET https://foo.bar/qar error (undefined). Will retry in 12.5 seconds. 4 retries left.'))
expect(output).toBe(formatWarn('GET https://foo.bar/qar error (ECONNREFUSED). Will retry in 12.5 seconds. 4 retries left.'))
})

View File

@@ -5,11 +5,20 @@ import {
export const requestRetryLogger = logger<RequestRetryMessage>('request-retry')
export interface RequestRetryError extends Error {
httpStatusCode?: string
status?: string
// Loose shape of the error attached to a request-retry log entry.
// Deliberately not `extends Error`: callers may log plain objects
// (see the request-retry reporter test, which passes a literal object),
// and undici rejections carry the useful fields on `cause`.
export interface RequestRetryError {
name?: string
message?: string
// HTTP status codes (numeric)
status?: number
statusCode?: number
// System error properties
errno?: number
code?: string
// undici wraps the actual error in a cause property
cause?: {
code?: string
errno?: number
}
}
export interface RequestRetryMessage {

View File

@@ -2,6 +2,8 @@
"words": [
"adduser",
"adipiscing",
"agentkeepalive",
"agentkeepalive's",
"amet",
"andreineculau",
"appdata",

View File

@@ -52,8 +52,8 @@
"@pnpm/deps.compliance.audit": "workspace:*",
"@pnpm/logger": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@types/ramda": "catalog:",
"nock": "catalog:"
"@pnpm/testing.mock-agent": "workspace:*",
"@types/ramda": "catalog:"
},
"engines": {
"node": ">=22.13"

View File

@@ -1,7 +1,7 @@
import { PnpmError } from '@pnpm/error'
import type { GetAuthHeader } from '@pnpm/fetching.types'
import type { EnvLockfile, LockfileObject } from '@pnpm/lockfile.types'
import { type AgentOptions, fetchWithAgent, type RetryTimeoutOptions } from '@pnpm/network.fetch'
import { type DispatcherOptions, fetchWithDispatcher, type RetryTimeoutOptions } from '@pnpm/network.fetch'
import type { DependenciesField } from '@pnpm/types'
import { lockfileToAuditTree } from './lockfileToAuditTree.js'
@@ -13,7 +13,7 @@ export async function audit (
lockfile: LockfileObject,
getAuthHeader: GetAuthHeader,
opts: {
agentOptions?: AgentOptions
dispatcherOptions?: DispatcherOptions
envLockfile?: EnvLockfile | null
include?: { [dependenciesField in DependenciesField]: boolean }
lockfileDir: string
@@ -34,7 +34,7 @@ export async function audit (
...getAuthHeaders(authHeaderValue),
}
const requestOptions = {
agentOptions: opts.agentOptions ?? {},
dispatcherOptions: opts.dispatcherOptions ?? {},
body: requestBody,
headers: requestHeaders,
method: 'POST',
@@ -42,13 +42,13 @@ export async function audit (
timeout: opts.timeout,
}
const quickRes = await fetchWithAgent(quickAuditUrl, requestOptions)
const quickRes = await fetchWithDispatcher(quickAuditUrl, requestOptions)
if (quickRes.status === 200) {
return (quickRes.json() as Promise<AuditReport>)
}
const res = await fetchWithAgent(auditUrl, requestOptions)
const res = await fetchWithDispatcher(auditUrl, requestOptions)
if (res.status === 200) {
return (res.json() as Promise<AuditReport>)
}

View File

@@ -2,8 +2,8 @@ import { LOCKFILE_VERSION } from '@pnpm/constants'
import { audit } from '@pnpm/deps.compliance.audit'
import type { PnpmError } from '@pnpm/error'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import type { DepPath, ProjectId } from '@pnpm/types'
import nock from 'nock'
import { lockfileToAuditTree } from '../lib/lockfileToAuditTree.js'
@@ -460,53 +460,51 @@ describe('audit', () => {
test('an error is thrown if the audit endpoint responds with a non-OK code', async () => {
const registry = 'http://registry.registry/'
const getAuthHeader = () => undefined
nock(registry, {
badheaders: ['authorization'],
})
.post('/-/npm/v1/security/audits/quick')
await setupMockAgent()
getMockAgent().get('http://registry.registry')
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(500, { message: 'Something bad happened' })
nock(registry, {
badheaders: ['authorization'],
})
.post('/-/npm/v1/security/audits')
getMockAgent().get('http://registry.registry')
.intercept({ path: '/-/npm/v1/security/audits', method: 'POST' })
.reply(500, { message: 'Fallback failed too' })
let err!: PnpmError
try {
await audit({
importers: {},
lockfileVersion: LOCKFILE_VERSION,
},
getAuthHeader,
{
lockfileDir: f.find('one-project'),
registry,
retry: {
retries: 0,
let err!: PnpmError
try {
await audit({
importers: {},
lockfileVersion: LOCKFILE_VERSION,
},
virtualStoreDirMaxLength: 120,
})
} catch (_err: any) { // eslint-disable-line
err = _err
}
getAuthHeader,
{
lockfileDir: f.find('one-project'),
registry,
retry: {
retries: 0,
},
virtualStoreDirMaxLength: 120,
})
} catch (_err: any) { // eslint-disable-line
err = _err
}
expect(err).toBeDefined()
expect(err.code).toBe('ERR_PNPM_AUDIT_BAD_RESPONSE')
expect(err.message).toBe('The audit endpoint (at http://registry.registry/-/npm/v1/security/audits/quick) responded with 500: {"message":"Something bad happened"}. Fallback endpoint (at http://registry.registry/-/npm/v1/security/audits) responded with 500: {"message":"Fallback failed too"}')
expect(err).toBeDefined()
expect(err.code).toBe('ERR_PNPM_AUDIT_BAD_RESPONSE')
expect(err.message).toBe('The audit endpoint (at http://registry.registry/-/npm/v1/security/audits/quick) responded with 500: {"message":"Something bad happened"}. Fallback endpoint (at http://registry.registry/-/npm/v1/security/audits) responded with 500: {"message":"Fallback failed too"}')
} finally {
await teardownMockAgent()
}
})
test('falls back to /audits if /audits/quick fails', async () => {
const registry = 'http://registry.registry/'
const getAuthHeader = () => undefined
nock(registry, {
badheaders: ['authorization'],
})
.post('/-/npm/v1/security/audits/quick')
await setupMockAgent()
getMockAgent().get('http://registry.registry')
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(500, { message: 'Something bad happened' })
nock(registry, {
badheaders: ['authorization'],
})
.post('/-/npm/v1/security/audits')
getMockAgent().get('http://registry.registry')
.intercept({ path: '/-/npm/v1/security/audits', method: 'POST' })
.reply(200, {
actions: [],
advisories: {},
@@ -526,35 +524,88 @@ describe('audit', () => {
muted: [],
})
expect(await audit({
importers: {},
lockfileVersion: LOCKFILE_VERSION,
},
getAuthHeader,
{
lockfileDir: f.find('one-project'),
registry,
retry: {
retries: 0,
try {
expect(await audit({
importers: {},
lockfileVersion: LOCKFILE_VERSION,
},
virtualStoreDirMaxLength: 120,
})).toEqual({
actions: [],
advisories: {},
metadata: {
dependencies: 0,
devDependencies: 0,
optionalDependencies: 0,
totalDependencies: 0,
vulnerabilities: {
critical: 0,
high: 0,
info: 0,
low: 0,
moderate: 0,
getAuthHeader,
{
lockfileDir: f.find('one-project'),
registry,
retry: {
retries: 0,
},
},
muted: [],
})
virtualStoreDirMaxLength: 120,
})).toEqual({
actions: [],
advisories: {},
metadata: {
dependencies: 0,
devDependencies: 0,
optionalDependencies: 0,
totalDependencies: 0,
vulnerabilities: {
critical: 0,
high: 0,
info: 0,
low: 0,
moderate: 0,
},
},
muted: [],
})
} finally {
await teardownMockAgent()
}
})
// Verifies that the value returned by getAuthHeader is forwarded as the
// `authorization` header on the audit request made through undici.
test('sends authorization header when getAuthHeader returns a value', async () => {
const registry = 'http://registry.registry/'
const getAuthHeader = () => 'Bearer test-token'
await setupMockAgent()
// intercept will only match if the authorization header is present and correct
// — if audit() dropped the header, the request would go unmatched and the
// call below would reject, failing the test.
getMockAgent().get('http://registry.registry')
.intercept({
path: '/-/npm/v1/security/audits/quick',
method: 'POST',
headers: { authorization: 'Bearer test-token' },
})
// Minimal empty audit report; the body content is irrelevant here —
// the header match on the intercept is the real assertion.
.reply(200, { actions: [], advisories: {}, metadata: { dependencies: 0, devDependencies: 0, optionalDependencies: 0, totalDependencies: 0, vulnerabilities: { critical: 0, high: 0, info: 0, low: 0, moderate: 0 } }, muted: [] })
try {
const result = await audit(
{ importers: {}, lockfileVersion: LOCKFILE_VERSION },
getAuthHeader,
{ lockfileDir: f.find('one-project'), registry, retry: { retries: 0 }, virtualStoreDirMaxLength: 120 }
)
expect(result.advisories).toEqual({})
} finally {
// Restore the global dispatcher even if the assertions above throw.
await teardownMockAgent()
}
})
// Verifies that no `authorization` header is attached when getAuthHeader
// returns undefined (the MockAgent reply callback captures the headers
// actually sent so they can be inspected after the request completes).
test('does not send authorization header when getAuthHeader returns undefined', async () => {
const registry = 'http://registry.registry/'
const getAuthHeader = () => undefined
await setupMockAgent()
let capturedHeaders: Record<string, string> = {}
getMockAgent().get('http://registry.registry')
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, (opts) => {
// Snapshot the request headers for the assertion below.
// NOTE(review): assumes MockAgent exposes headers as a plain object here
// — undici can also represent them as an array; verify against the
// installed undici version if this cast ever misbehaves.
capturedHeaders = opts.headers as Record<string, string>
return { actions: [], advisories: {}, metadata: { dependencies: 0, devDependencies: 0, optionalDependencies: 0, totalDependencies: 0, vulnerabilities: { critical: 0, high: 0, info: 0, low: 0, moderate: 0 } }, muted: [] }
})
try {
await audit(
{ importers: {}, lockfileVersion: LOCKFILE_VERSION },
getAuthHeader,
{ lockfileDir: f.find('one-project'), registry, retry: { retries: 0 }, virtualStoreDirMaxLength: 120 }
)
expect(capturedHeaders).not.toHaveProperty('authorization')
} finally {
// Restore the global dispatcher even if the assertions above throw.
await teardownMockAgent()
}
})
})

View File

@@ -45,6 +45,9 @@
{
"path": "../../../network/fetch"
},
{
"path": "../../../testing/mock-agent"
},
{
"path": "../../../workspace/project-manifest-reader"
}

View File

@@ -68,6 +68,7 @@
"@pnpm/prepare": "workspace:*",
"@pnpm/registry-mock": "catalog:",
"@pnpm/test-fixtures": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@pnpm/workspace.projects-filter": "workspace:*",
"@types/ramda": "catalog:",
"@types/semver": "catalog:",

View File

@@ -190,7 +190,7 @@ export async function handler (opts: AuditOptions): Promise<{ exitCode: number,
const getAuthHeader = createGetAuthHeaderByURI({ allSettings: opts.rawConfig, userSettings: opts.userConfig })
try {
auditReport = await audit(lockfile, getAuthHeader, {
agentOptions: {
dispatcherOptions: {
ca: opts.ca,
cert: opts.cert,
httpProxy: opts.httpProxy,

View File

@@ -2,7 +2,7 @@ import path from 'node:path'
import { audit } from '@pnpm/deps.compliance.commands'
import { fixtures } from '@pnpm/test-fixtures'
import nock from 'nock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { readYamlFileSync } from 'read-yaml-file'
import { AUDIT_REGISTRY, AUDIT_REGISTRY_OPTS } from './utils/options.js'
@@ -10,11 +10,19 @@ import * as responses from './utils/responses/index.js'
const f = fixtures(import.meta.dirname)
beforeEach(async () => {
await setupMockAgent()
})
afterEach(async () => {
await teardownMockAgent()
})
test('overrides are added for vulnerable dependencies', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -36,8 +44,8 @@ test('overrides are added for vulnerable dependencies', async () => {
test('no overrides are added if no vulnerabilities are found', async () => {
const tmp = f.prepare('fixture')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.NO_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -55,8 +63,8 @@ test('no overrides are added if no vulnerabilities are found', async () => {
test('CVEs found in the allow list are not added as overrides', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({

View File

@@ -5,11 +5,11 @@ import { audit } from '@pnpm/deps.compliance.commands'
import { readWantedLockfile } from '@pnpm/lockfile.fs'
import { addDistTag } from '@pnpm/registry-mock'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import type { DepPath } from '@pnpm/types'
import { readProjectManifest } from '@pnpm/workspace.project-manifest-reader'
import { filterProjectsFromDir } from '@pnpm/workspace.projects-filter'
import chalk from 'chalk'
import nock from 'nock'
import { readYamlFileSync } from 'read-yaml-file'
import { MOCK_REGISTRY, MOCK_REGISTRY_OPTS } from './utils/options.js'
@@ -17,7 +17,14 @@ import { MOCK_REGISTRY, MOCK_REGISTRY_OPTS } from './utils/options.js'
const f = fixtures(import.meta.dirname)
describe('audit fix with update', () => {
afterEach(() => nock.cleanAll())
beforeEach(async () => {
await setupMockAgent()
// These tests need real connections to the mock registry
getMockAgent().enableNetConnect(/localhost/)
})
afterEach(async () => {
await teardownMockAgent()
})
test('top-level vulnerability is fixed by updating the vulnerable package', async () => {
const tmp = f.prepare('update-single-depth-2')
@@ -38,8 +45,8 @@ describe('audit fix with update', () => {
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -104,8 +111,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -165,8 +172,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'depth-2-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -216,8 +223,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'depth-3-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -270,8 +277,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'unfixable-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -334,8 +341,8 @@ The remaining vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'form-data-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const { exitCode, output } = await audit.handler({
@@ -400,8 +407,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const {
@@ -474,8 +481,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'depth-2-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const {
@@ -555,8 +562,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const {
@@ -646,8 +653,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const {
@@ -734,8 +741,8 @@ The fixed vulnerabilities are:
const mockResponse = await readFile(join(tmp, 'responses', 'top-level-vulnerability.json'), 'utf-8')
expect(mockResponse).toBeTruthy()
nock(MOCK_REGISTRY, { allowUnmocked: true })
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(MOCK_REGISTRY)
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, mockResponse)
const {

View File

@@ -2,7 +2,7 @@ import path from 'node:path'
import { audit } from '@pnpm/deps.compliance.commands'
import { fixtures } from '@pnpm/test-fixtures'
import nock from 'nock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { readYamlFileSync } from 'read-yaml-file'
import { AUDIT_REGISTRY, AUDIT_REGISTRY_OPTS } from './utils/options.js'
@@ -10,11 +10,19 @@ import * as responses from './utils/responses/index.js'
const f = fixtures(import.meta.dirname)
beforeEach(async () => {
await setupMockAgent()
})
afterEach(async () => {
await teardownMockAgent()
})
test('ignores are added for vulnerable dependencies with no resolutions', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -38,8 +46,8 @@ test('ignores are added for vulnerable dependencies with no resolutions', async
test('the specified vulnerabilities are ignored', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -61,8 +69,8 @@ test('the specified vulnerabilities are ignored', async () => {
test('no ignores are added if no vulnerabilities are found', async () => {
const tmp = f.prepare('fixture')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.NO_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -87,8 +95,8 @@ test('ignored CVEs are not duplicated', async () => {
'CVE-2017-16024',
]
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({

View File

@@ -5,7 +5,7 @@ import { AuditEndpointNotExistsError } from '@pnpm/deps.compliance.audit'
import { audit } from '@pnpm/deps.compliance.commands'
import { install } from '@pnpm/installing.commands'
import { fixtures } from '@pnpm/test-fixtures'
import nock from 'nock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { AUDIT_REGISTRY, AUDIT_REGISTRY_OPTS, DEFAULT_OPTS } from './utils/options.js'
import * as responses from './utils/responses/index.js'
@@ -21,12 +21,15 @@ describe('plugin-commands-audit', () => {
dir: hasVulnerabilitiesDir,
})
})
afterEach(() => {
nock.cleanAll()
beforeEach(async () => {
await setupMockAgent()
})
afterEach(async () => {
await teardownMockAgent()
})
test('audit', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { output, exitCode } = await audit.handler({
@@ -39,8 +42,8 @@ describe('plugin-commands-audit', () => {
})
test('audit --dev', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.DEV_VULN_ONLY_RESP)
const { output, exitCode } = await audit.handler({
@@ -56,8 +59,8 @@ describe('plugin-commands-audit', () => {
})
test('audit --audit-level', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { output, exitCode } = await audit.handler({
@@ -72,8 +75,8 @@ describe('plugin-commands-audit', () => {
})
test('audit: no vulnerabilities', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.NO_VULN_RESP)
const { output, exitCode } = await audit.handler({
@@ -87,8 +90,8 @@ describe('plugin-commands-audit', () => {
})
test('audit --json', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { output, exitCode } = await audit.handler({
@@ -104,8 +107,8 @@ describe('plugin-commands-audit', () => {
})
test.skip('audit does not exit with code 1 if the found vulnerabilities are having lower severity then what we asked for', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.DEV_VULN_ONLY_RESP)
const { output, exitCode } = await audit.handler({
@@ -122,8 +125,8 @@ describe('plugin-commands-audit', () => {
})
test('audit --json respects audit-level', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.DEV_VULN_ONLY_RESP)
const { exitCode, output } = await audit.handler({
@@ -141,8 +144,8 @@ describe('plugin-commands-audit', () => {
})
test('audit --json filters advisories by audit-level', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.DEV_VULN_ONLY_RESP)
const { exitCode, output } = await audit.handler({
@@ -165,11 +168,11 @@ describe('plugin-commands-audit', () => {
})
test('audit does not exit with code 1 if the registry responds with a non-200 response and ignoreRegistryErrors is used', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(500, { message: 'Something bad happened' })
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits', method: 'POST' })
.reply(500, { message: 'Fallback failed too' })
const { output, exitCode } = await audit.handler({
...AUDIT_REGISTRY_OPTS,
@@ -186,10 +189,12 @@ describe('plugin-commands-audit', () => {
})
test('audit sends authToken', async () => {
nock(AUDIT_REGISTRY, {
reqheaders: { authorization: 'Bearer 123' },
})
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({
path: '/-/npm/v1/security/audits/quick',
method: 'POST',
headers: { authorization: 'Bearer 123' },
})
.reply(200, responses.NO_VULN_RESP)
const { output, exitCode } = await audit.handler({
@@ -207,11 +212,11 @@ describe('plugin-commands-audit', () => {
})
test('audit endpoint does not exist', async () => {
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(404, {})
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits', method: 'POST' })
.reply(404, {})
await expect(audit.handler({
@@ -228,8 +233,8 @@ describe('plugin-commands-audit', () => {
test('audit: CVEs in ignoreCves do not show up', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -255,8 +260,8 @@ describe('plugin-commands-audit', () => {
test('audit: CVEs in ignoreGhsas do not show up', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({
@@ -282,8 +287,8 @@ describe('plugin-commands-audit', () => {
test('audit: CVEs in ignoreCves do not show up when JSON output is used', async () => {
const tmp = f.prepare('has-vulnerabilities')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(AUDIT_REGISTRY.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { exitCode, output } = await audit.handler({

View File

@@ -2,26 +2,36 @@ import path from 'node:path'
import { audit } from '@pnpm/deps.compliance.commands'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { readProjectManifest } from '@pnpm/workspace.project-manifest-reader'
import nock from 'nock'
import { readYamlFileSync } from 'read-yaml-file'
import { AUDIT_REGISTRY, AUDIT_REGISTRY_OPTS } from './utils/options.js'
import { DEFAULT_OPTS } from './utils/options.js'
import * as responses from './utils/responses/index.js'
const f = fixtures(import.meta.dirname)
const registries = DEFAULT_OPTS.registries
beforeEach(async () => {
await setupMockAgent()
})
afterEach(async () => {
await teardownMockAgent()
})
test('overrides with references (via $) are preserved during audit --fix', async () => {
const tmp = f.prepare('preserve-reference-overrides')
nock(AUDIT_REGISTRY)
.post('/-/npm/v1/security/audits/quick')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/-/npm/v1/security/audits/quick', method: 'POST' })
.reply(200, responses.ALL_VULN_RESP)
const { manifest: initialManifest } = await readProjectManifest(tmp)
const { exitCode, output } = await audit.handler({
...AUDIT_REGISTRY_OPTS,
...DEFAULT_OPTS,
auditLevel: 'moderate',
dir: tmp,
rootProjectManifestDir: tmp,

View File

@@ -48,7 +48,7 @@ export const DEFAULT_OPTS = {
registry: registries.default,
sort: true,
storeDir: '../store',
strictSsl: false,
strictSsl: true,
userAgent: 'pnpm',
userConfig: {},
useRunningStoreServer: false,

View File

@@ -20,7 +20,7 @@ async function writeResponse (lockfileDir: string, filename: string, opts: {
}
// @ts-expect-error
const auditReport = await audit(lockfile!, {
agentOptions: {},
dispatcherOptions: {},
include,
registry: 'https://registry.npmjs.org/',
})

View File

@@ -67,6 +67,9 @@
{
"path": "../../../store/path"
},
{
"path": "../../../testing/mock-agent"
},
{
"path": "../../../workspace/project-manifest-reader"
},

View File

@@ -66,11 +66,11 @@
"@pnpm/logger": "workspace:*",
"@pnpm/prepare": "workspace:*",
"@pnpm/shell.path": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@types/cross-spawn": "catalog:",
"@types/ramda": "catalog:",
"@types/semver": "catalog:",
"cross-spawn": "catalog:",
"nock": "catalog:"
"cross-spawn": "catalog:"
},
"engines": {
"node": ">=22.13"

View File

@@ -5,8 +5,8 @@ import path from 'node:path'
import { jest } from '@jest/globals'
import { prepare as prepareWithPkg, tempDir } from '@pnpm/prepare'
import { prependDirsToPath } from '@pnpm/shell.path'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import spawn from 'cross-spawn'
import nock from 'nock'
const require = createRequire(import.meta.dirname)
const pnpmTarballPath = require.resolve('@pnpm/tgz-fixtures/tgz/pnpm-9.1.0.tgz')
@@ -23,13 +23,13 @@ jest.unstable_mockModule('@pnpm/cli.meta', () => {
})
const { selfUpdate, installPnpm, linkExePlatformBinary } = await import('@pnpm/engine.pm.commands')
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
beforeEach(async () => {
await setupMockAgent()
getMockAgent().enableNetConnect()
})
beforeEach(() => {
nock.enableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
function prepare (manifest: object = {}) {
@@ -112,8 +112,8 @@ function createExeMetadata (version: string, registry: string) {
* This prevents install() from making real HTTP requests for @pnpm/exe.
*/
function mockExeMetadata (registry: string, version: string) {
nock(registry)
.get('/@pnpm%2Fexe') // cspell:disable-line
getMockAgent().get(registry.replace(/\/$/, ''))
.intercept({ path: '/@pnpm%2Fexe', method: 'GET' }) // cspell:disable-line
.reply(200, createExeMetadata(version, registry))
}
@@ -123,14 +123,14 @@ function mockExeMetadata (registry: string, version: string) {
*/
function mockRegistryForUpdate (registry: string, version: string, metadata: object) {
// Use persist for metadata since multiple components request it
nock(registry)
.persist()
.get('/pnpm')
.reply(200, metadata)
getMockAgent().get(registry.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, metadata).persist()
mockExeMetadata(registry, version)
nock(registry)
.get(`/pnpm/-/pnpm-${version}.tgz`)
.replyWithFile(200, pnpmTarballPath)
const tgzData = fs.readFileSync(pnpmTarballPath)
getMockAgent().get(registry.replace(/\/$/, ''))
.intercept({ path: `/pnpm/-/pnpm-${version}.tgz`, method: 'GET' })
.reply(200, tgzData)
}
test('self-update', async () => {
@@ -161,14 +161,15 @@ test('self-update', async () => {
test('self-update by exact version', async () => {
const opts = prepare()
const metadata = createMetadata('9.2.0', opts.registries.default, ['9.1.0'])
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, metadata)
const registry = opts.registries.default.replace(/\/$/, '')
getMockAgent().get(registry)
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, metadata).persist()
mockExeMetadata(opts.registries.default, '9.1.0')
nock(opts.registries.default)
.get('/pnpm/-/pnpm-9.1.0.tgz')
.replyWithFile(200, pnpmTarballPath)
const tgzData = fs.readFileSync(pnpmTarballPath)
getMockAgent().get(registry)
.intercept({ path: '/pnpm/-/pnpm-9.1.0.tgz', method: 'GET' })
.reply(200, tgzData)
await selfUpdate.handler(opts, ['9.1.0'])
@@ -193,8 +194,8 @@ test('self-update by exact version', async () => {
test('self-update does nothing when pnpm is up to date', async () => {
const opts = prepare()
nock(opts.registries.default)
.get('/pnpm')
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default))
const output = await selfUpdate.handler(opts, [])
@@ -208,8 +209,8 @@ test('should update packageManager field when a newer pnpm version is available'
fs.writeFileSync(pkgJsonPath, JSON.stringify({
packageManager: 'pnpm@8.0.0',
}), 'utf8')
nock(opts.registries.default)
.get('/pnpm')
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default))
const output = await selfUpdate.handler({
@@ -231,8 +232,8 @@ test('should not update packageManager field when current version matches latest
fs.writeFileSync(pkgJsonPath, JSON.stringify({
packageManager: 'pnpm@9.0.0',
}), 'utf8')
nock(opts.registries.default)
.get('/pnpm')
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default))
const output = await selfUpdate.handler({
@@ -255,10 +256,9 @@ test('should update devEngines.packageManager version when a newer pnpm version
},
})
const pkgJsonPath = path.join(opts.dir, 'package.json')
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.0.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.0.0')
const output = await selfUpdate.handler({
@@ -286,10 +286,9 @@ test('should update pnpm entry in devEngines.packageManager array', async () =>
},
})
const pkgJsonPath = path.join(opts.dir, 'package.json')
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.0.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.0.0')
const output = await selfUpdate.handler({
@@ -315,10 +314,9 @@ test('should not modify devEngines.packageManager range when resolved version st
},
})
const pkgJsonPath = path.join(opts.dir, 'package.json')
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.0.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.0.0')
const output = await selfUpdate.handler({
@@ -346,10 +344,9 @@ test('should fall back to ^version when complex range cannot accommodate the new
},
})
const pkgJsonPath = path.join(opts.dir, 'package.json')
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.0.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.0.0')
await selfUpdate.handler({
@@ -372,10 +369,9 @@ test('should update devEngines.packageManager range when resolved version no lon
},
})
const pkgJsonPath = path.join(opts.dir, 'package.json')
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.0.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.0.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.0.0')
const output = await selfUpdate.handler({
@@ -408,10 +404,9 @@ console.log('9.2.0')`, 'utf8')
// Create a hash symlink pointing to the install dir (like handleGlobalAdd does)
fs.symlinkSync(installDir, path.join(globalDir, 'fake-hash'))
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.2.0', opts.registries.default))
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.2.0', opts.registries.default)).persist()
mockExeMetadata(opts.registries.default, '9.2.0')
const output = await selfUpdate.handler(opts, [])
@@ -480,8 +475,8 @@ test('self-update updates the packageManager field in package.json', async () =>
version: '9.0.0',
},
}
nock(opts.registries.default)
.get('/pnpm')
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.1.0', opts.registries.default))
const output = await selfUpdate.handler(opts, [])
@@ -494,13 +489,13 @@ test('self-update updates the packageManager field in package.json', async () =>
test('installPnpm without env lockfile uses resolution path', async () => {
const opts = prepare()
nock(opts.registries.default)
.persist()
.get('/pnpm')
.reply(200, createMetadata('9.1.0', opts.registries.default))
nock(opts.registries.default)
.get('/pnpm/-/pnpm-9.1.0.tgz')
.replyWithFile(200, pnpmTarballPath)
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm', method: 'GET' })
.reply(200, createMetadata('9.1.0', opts.registries.default)).persist()
const tgzData = fs.readFileSync(pnpmTarballPath)
getMockAgent().get(opts.registries.default.replace(/\/$/, ''))
.intercept({ path: '/pnpm/-/pnpm-9.1.0.tgz', method: 'GET' })
.reply(200, tgzData)
const result = await installPnpm('9.1.0', opts)

View File

@@ -70,6 +70,9 @@
{
"path": "../../../store/controller"
},
{
"path": "../../../testing/mock-agent"
},
{
"path": "../../../workspace/project-manifest-reader"
}

View File

@@ -45,8 +45,8 @@
"@pnpm/store.create-cafs-store": "workspace:*",
"@pnpm/store.index": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"nock": "catalog:",
"tempy": "catalog:"
"tempy": "catalog:",
"undici": "catalog:"
},
"engines": {
"node": ">=22.13"

View File

@@ -1,3 +1,4 @@
import fs from 'node:fs'
import path from 'node:path'
import { jest } from '@jest/globals'
@@ -5,19 +6,28 @@ import type { Fetchers, FetchFunction, FetchOptions } from '@pnpm/fetching.fetch
import { pickFetcher } from '@pnpm/fetching.pick-fetcher'
import { createTarballFetcher } from '@pnpm/fetching.tarball-fetcher'
import type { CustomFetcher } from '@pnpm/hooks.types'
import { createFetchFromRegistry } from '@pnpm/network.fetch'
import { clearDispatcherCache, createFetchFromRegistry } from '@pnpm/network.fetch'
import type { AtomicResolution } from '@pnpm/resolving.resolver-base'
import type { Cafs } from '@pnpm/store.cafs-types'
import { createCafsStore } from '@pnpm/store.create-cafs-store'
import { StoreIndex } from '@pnpm/store.index'
import { fixtures } from '@pnpm/test-fixtures'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { type Dispatcher, getGlobalDispatcher, MockAgent, setGlobalDispatcher } from 'undici'
const f = fixtures(import.meta.dirname)
const storeIndex = new StoreIndex(temporaryDirectory())
let originalDispatcher: Dispatcher
beforeAll(() => {
originalDispatcher = getGlobalDispatcher()
})
afterAll(() => {
storeIndex.close()
setGlobalDispatcher(originalDispatcher)
})
// Test helpers to reduce type casting
@@ -271,58 +281,67 @@ describe('custom fetcher implementation examples', () => {
const tarballIntegrity = 'sha1-HssnaJydJVE+rbyZFKc/VAi+enY='
test('custom fetcher can delegate to remoteTarball fetcher', async () => {
const scope = nock(registry)
.get('/custom-pkg.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': '1279',
})
clearDispatcherCache()
const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
const filesIndexFile = path.join(storeDir, 'index.json')
// Create standard fetchers to pass to custom fetcher
const fetchFromRegistry = createFetchFromRegistry({})
const tarballFetchers = createTarballFetcher(
fetchFromRegistry,
() => undefined,
{ rawConfig: {}, storeIndex }
)
// Custom fetcher that maps custom URLs to tarballs
const customFetcher = createMockCustomFetcher(
(_pkgId, resolution) => resolution.type === 'custom:url' && Boolean((resolution as any).customUrl), // eslint-disable-line @typescript-eslint/no-explicit-any
async (cafs, resolution, opts, fetchers) => {
// Map custom resolution to tarball resolution
const tarballResolution = {
tarball: (resolution as any).customUrl, // eslint-disable-line @typescript-eslint/no-explicit-any
integrity: tarballIntegrity,
}
// Delegate to standard tarball fetcher (passed via fetchers parameter)
return fetchers.remoteTarball(cafs, tarballResolution, opts)
}
)
const customResolution = createMockResolution({
type: 'custom:url',
customUrl: `${registry}custom-pkg.tgz`,
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get('http://localhost:4873')
mockPool.intercept({ path: '/custom-pkg.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'content-length': String(tarballContent.length) },
})
const fetcher = await pickFetcher(
tarballFetchers as Fetchers,
customResolution,
{ customFetchers: [customFetcher], packageId: 'custom-pkg@1.0.0' }
)
try {
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
const filesIndexFile = path.join(storeDir, 'index.json')
const result = await fetcher(
cafs,
customResolution,
createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
)
// Create standard fetchers to pass to custom fetcher
const fetchFromRegistry = createFetchFromRegistry({})
const tarballFetchers = createTarballFetcher(
fetchFromRegistry,
() => undefined,
{ rawConfig: {}, storeIndex }
)
expect(result.filesMap.get('package.json')).toBeTruthy()
expect(scope.isDone()).toBeTruthy()
// Custom fetcher that maps custom URLs to tarballs
const customFetcher = createMockCustomFetcher(
(_pkgId, resolution) => resolution.type === 'custom:url' && Boolean((resolution as any).customUrl), // eslint-disable-line @typescript-eslint/no-explicit-any
async (cafs, resolution, opts, fetchers) => {
// Map custom resolution to tarball resolution
const tarballResolution = {
tarball: (resolution as any).customUrl, // eslint-disable-line @typescript-eslint/no-explicit-any
integrity: tarballIntegrity,
}
// Delegate to standard tarball fetcher (passed via fetchers parameter)
return fetchers.remoteTarball(cafs, tarballResolution, opts)
}
)
const customResolution = createMockResolution({
type: 'custom:url',
customUrl: `${registry}custom-pkg.tgz`,
})
const fetcher = await pickFetcher(
tarballFetchers as Fetchers,
customResolution,
{ customFetchers: [customFetcher], packageId: 'custom-pkg@1.0.0' }
)
const result = await fetcher(
cafs,
customResolution,
createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
)
expect(result.filesMap.get('package.json')).toBeTruthy()
} finally {
await mockAgent.close()
setGlobalDispatcher(originalDispatcher)
}
})
test('custom fetcher can delegate to localTarball fetcher', async () => {
@@ -371,58 +390,67 @@ describe('custom fetcher implementation examples', () => {
})
test('custom fetcher can transform resolution before delegating to tarball fetcher', async () => {
const scope = nock(registry)
.get('/transformed-pkg.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': '1279',
})
clearDispatcherCache()
const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
const filesIndexFile = path.join(storeDir, 'index.json')
const fetchFromRegistry = createFetchFromRegistry({})
const tarballFetchers = createTarballFetcher(
fetchFromRegistry,
() => undefined,
{ rawConfig: {}, storeIndex }
)
// Custom fetcher that transforms custom resolution to tarball URL
const customFetcher = createMockCustomFetcher(
(_pkgId, resolution) => resolution.type === 'custom:registry',
async (cafs, resolution, opts, fetchers) => {
// Transform custom registry format to standard tarball URL
const tarballUrl = `${registry}${(resolution as any).packageName}.tgz` // eslint-disable-line @typescript-eslint/no-explicit-any
const tarballResolution = {
tarball: tarballUrl,
integrity: tarballIntegrity,
}
return fetchers.remoteTarball(cafs, tarballResolution, opts)
}
)
const customResolution = createMockResolution({
type: 'custom:registry',
packageName: 'transformed-pkg',
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get('http://localhost:4873')
mockPool.intercept({ path: '/transformed-pkg.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'content-length': String(tarballContent.length) },
})
const fetcher = await pickFetcher(
tarballFetchers as Fetchers,
customResolution,
{ customFetchers: [customFetcher], packageId: 'transformed-pkg@1.0.0' }
)
try {
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
const filesIndexFile = path.join(storeDir, 'index.json')
const result = await fetcher(
cafs,
customResolution,
createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
)
const fetchFromRegistry = createFetchFromRegistry({})
const tarballFetchers = createTarballFetcher(
fetchFromRegistry,
() => undefined,
{ rawConfig: {}, storeIndex }
)
expect(result.filesMap.get('package.json')).toBeTruthy()
expect(scope.isDone()).toBeTruthy()
// Custom fetcher that transforms custom resolution to tarball URL
const customFetcher = createMockCustomFetcher(
(_pkgId, resolution) => resolution.type === 'custom:registry',
async (cafs, resolution, opts, fetchers) => {
// Transform custom registry format to standard tarball URL
const tarballUrl = `${registry}${(resolution as any).packageName}.tgz` // eslint-disable-line @typescript-eslint/no-explicit-any
const tarballResolution = {
tarball: tarballUrl,
integrity: tarballIntegrity,
}
return fetchers.remoteTarball(cafs, tarballResolution, opts)
}
)
const customResolution = createMockResolution({
type: 'custom:registry',
packageName: 'transformed-pkg',
})
const fetcher = await pickFetcher(
tarballFetchers as Fetchers,
customResolution,
{ customFetchers: [customFetcher], packageId: 'transformed-pkg@1.0.0' }
)
const result = await fetcher(
cafs,
customResolution,
createMockFetchOptions({ filesIndexFile, lockfileDir: process.cwd() })
)
expect(result.filesMap.get('package.json')).toBeTruthy()
} finally {
await mockAgent.close()
setGlobalDispatcher(originalDispatcher)
}
})
test('custom fetcher can use gitHostedTarball fetcher for custom git URLs', async () => {

View File

@@ -65,9 +65,9 @@
"@types/ramda": "catalog:",
"@types/retry": "catalog:",
"@types/ssri": "catalog:",
"nock": "catalog:",
"ssri": "catalog:",
"tempy": "catalog:"
"tempy": "catalog:",
"undici": "catalog:"
},
"engines": {
"node": ">=22.13"

View File

@@ -1,4 +1,3 @@
import assert from 'node:assert'
import type { IncomingMessage } from 'node:http'
import util from 'node:util'
@@ -88,9 +87,25 @@ export function createDownloader (
reject(op.mainError())
return
}
// Extract error properties into a plain object because Error properties
// are non-enumerable and don't serialize well through the logging system
const errorInfo = {
name: error.name,
message: error.message,
code: error.code,
errno: error.errno,
// For HTTP errors from our ResponseError class
status: error.status,
statusCode: error.statusCode,
// undici wraps the actual network error in a cause property
cause: error.cause ? {
code: error.cause.code,
errno: error.cause.errno,
} : undefined,
}
requestRetryLogger.debug({
attempt,
error,
error: errorInfo,
maxRetries: retryOpts.retries,
method: 'GET',
timeout,
@@ -131,11 +146,10 @@ export function createDownloader (
: undefined
const startTime = Date.now()
let downloaded = 0
const chunks: Buffer[] = []
// This will handle the 'data', 'error', and 'end' events.
const chunks: Uint8Array[] = []
for await (const chunk of res.body!) {
chunks.push(chunk as Buffer)
downloaded += chunk.length
chunks.push(chunk as Uint8Array)
downloaded += (chunk as Uint8Array).byteLength
onProgress?.(downloaded)
}
if (size !== null && size !== downloaded) {
@@ -155,16 +169,16 @@ export function createDownloader (
data = Buffer.from(new SharedArrayBuffer(downloaded))
let offset: number = 0
for (const chunk of chunks) {
chunk.copy(data, offset)
offset += chunk.length
data.set(chunk, offset)
offset += chunk.byteLength
}
} catch (err: unknown) {
assert(util.types.isNativeError(err))
Object.assign(err, {
const error = util.types.isNativeError(err) ? err : new Error(String(err), { cause: err })
Object.assign(error, {
attempts: currentAttempt,
resource: url,
})
throw err
throw error
}
return addFilesFromTarball({
buffer: data,

View File

@@ -9,9 +9,9 @@ import { createCafsStore } from '@pnpm/store.create-cafs-store'
import { StoreIndex } from '@pnpm/store.index'
import { fixtures } from '@pnpm/test-fixtures'
import { lexCompare } from '@pnpm/util.lex-comparator'
import nock from 'nock'
import ssri from 'ssri'
import { temporaryDirectory } from 'tempy'
import { type Dispatcher, getGlobalDispatcher, MockAgent, setGlobalDispatcher } from 'undici'
const originalModule = await import('@pnpm/logger')
@@ -29,8 +29,26 @@ const {
TarballIntegrityError,
} = await import('@pnpm/fetching.tarball-fetcher')
let mockAgent: MockAgent
let originalDispatcher: Dispatcher
beforeAll(() => {
originalDispatcher = getGlobalDispatcher()
})
beforeEach(() => {
jest.mocked(globalWarn).mockClear()
mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)
})
afterEach(async () => {
await mockAgent.close()
})
afterAll(() => {
setGlobalDispatcher(originalDispatcher)
})
const storeDir = temporaryDirectory()
@@ -46,12 +64,12 @@ const f = fixtures(import.meta.dirname)
const tarballPath = f.find('babel-helper-hoist-variables-6.24.1.tgz')
const tarballSize = 1279
const tarballIntegrity = 'sha1-HssnaJydJVE+rbyZFKc/VAi+enY='
const registry = 'http://example.com/'
const registry = 'http://example.com'
const fetchFromRegistry = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
rawConfig: {},
storeIndex,
rawConfig: {},
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -61,12 +79,17 @@ const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
const pkg = {}
test('fail when tarball size does not match content-length', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.times(2)
.replyWithFile(200, tarballPath, {
'Content-Length': (1024 * 1024).toString(),
})
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get(registry)
// First request
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': (1024 * 1024).toString() },
})
// Retry request
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': (1024 * 1024).toString() },
})
process.chdir(temporaryDirectory())
@@ -76,7 +99,7 @@ test('fail when tarball size does not match content-length', async () => {
// Content-Length mismatch,
// which indicates bad network connection. (see https://github.com/pnpm/pnpm/issues/1235)
integrity: 'sha1-HssnaJydJVE+rbzZFKc/VAi+enY=',
tarball: `${registry}foo.tgz`,
tarball: `${registry}/foo.tgz`,
}
await expect(
@@ -92,25 +115,24 @@ test('fail when tarball size does not match content-length', async () => {
tarballUrl: resolution.tarball,
})
)
expect(scope.isDone()).toBeTruthy()
})
test('retry when tarball size does not match content-length', async () => {
nock(registry)
.get('/foo.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': (1024 * 1024).toString(),
})
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get(registry)
nock(registry)
.get('/foo.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': tarballSize.toString(),
})
// First request with wrong content-length
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': (1024 * 1024).toString() },
})
// Retry with correct content-length
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': tarballSize.toString() },
})
process.chdir(temporaryDirectory())
const resolution = { tarball: 'http://example.com/foo.tgz' }
const resolution = { tarball: `${registry}/foo.tgz` }
const result = await fetch.remoteTarball(cafs, resolution, {
filesIndexFile,
@@ -119,22 +141,26 @@ test('retry when tarball size does not match content-length', async () => {
})
expect(result.filesMap).toBeTruthy()
expect(nock.isDone()).toBeTruthy()
})
test('fail when integrity check fails two times in a row', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.times(2)
.replyWithFile(200, f.find('babel-helper-hoist-variables-7.0.0-alpha.10.tgz'), {
'Content-Length': '1194',
})
const wrongTarball = f.find('babel-helper-hoist-variables-7.0.0-alpha.10.tgz')
const wrongTarballContent = fs.readFileSync(wrongTarball)
const mockPool = mockAgent.get(registry)
// Both requests return wrong tarball
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, wrongTarballContent, {
headers: { 'Content-Length': '1194' },
})
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, wrongTarballContent, {
headers: { 'Content-Length': '1194' },
})
process.chdir(temporaryDirectory())
const resolution = {
integrity: tarballIntegrity,
tarball: 'http://example.com/foo.tgz',
tarball: `${registry}/foo.tgz`,
}
await expect(
@@ -152,25 +178,28 @@ test('fail when integrity check fails two times in a row', async () => {
url: resolution.tarball,
})
)
expect(scope.isDone()).toBeTruthy()
})
test('retry when integrity check fails', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.replyWithFile(200, f.find('babel-helper-hoist-variables-7.0.0-alpha.10.tgz'), {
'Content-Length': '1194',
})
.get('/foo.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': tarballSize.toString(),
})
const wrongTarball = f.find('babel-helper-hoist-variables-7.0.0-alpha.10.tgz')
const wrongTarballContent = fs.readFileSync(wrongTarball)
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get(registry)
// First request returns wrong tarball
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, wrongTarballContent, {
headers: { 'Content-Length': '1194' },
})
// Retry returns correct tarball
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': tarballSize.toString() },
})
process.chdir(temporaryDirectory())
const resolution = {
integrity: tarballIntegrity,
tarball: 'http://example.com/foo.tgz',
tarball: `${registry}/foo.tgz`,
}
const params: Array<[number | null, number]> = []
@@ -185,8 +214,6 @@ test('retry when integrity check fails', async () => {
expect(params[0]).toStrictEqual([1194, 1])
expect(params[1]).toStrictEqual([tarballSize, 2])
expect(scope.isDone()).toBeTruthy()
})
test('fail when integrity check of local file fails', async () => {
@@ -245,9 +272,9 @@ test("don't fail when fetching a local tarball in offline mode", async () => {
}
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
offline: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -269,13 +296,13 @@ test('fail when trying to fetch a non-local tarball in offline mode', async () =
const tarballAbsoluteLocation = f.find('babel-helper-hoist-variables-7.0.0-alpha.10.tgz')
const resolution = {
integrity: await getFileIntegrity(tarballAbsoluteLocation),
tarball: `${registry}foo.tgz`,
tarball: `${registry}/foo.tgz`,
}
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
offline: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -296,19 +323,21 @@ The missing package may be downloaded from ${resolution.tarball}.`)
})
test('retry on server error', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.reply(500)
.get('/foo.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': tarballSize.toString(),
})
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get(registry)
// First request returns 500
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(500, 'Internal Server Error')
// Retry returns success
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'Content-Length': tarballSize.toString() },
})
process.chdir(temporaryDirectory())
const resolution = {
integrity: tarballIntegrity,
tarball: 'http://example.com/foo.tgz',
tarball: `${registry}/foo.tgz`,
}
const index = await fetch.remoteTarball(cafs, resolution, {
@@ -318,20 +347,17 @@ test('retry on server error', async () => {
})
expect(index).toBeTruthy()
expect(scope.isDone()).toBeTruthy()
})
test('throw error when accessing private package w/o authorization', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.reply(403)
const mockPool = mockAgent.get(registry)
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(403, 'Forbidden')
process.chdir(temporaryDirectory())
const resolution = {
integrity: tarballIntegrity,
tarball: 'http://example.com/foo.tgz',
tarball: `${registry}/foo.tgz`,
}
await expect(
@@ -347,24 +373,21 @@ test('throw error when accessing private package w/o authorization', async () =>
},
{
status: 403,
// statusText: 'Forbidden',
statusText: '',
statusText: 'Forbidden',
}
)
)
expect(scope.isDone()).toBeTruthy()
})
test('do not retry when package does not exist', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.reply(404)
const mockPool = mockAgent.get(registry)
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(404, 'Not Found')
process.chdir(temporaryDirectory())
const resolution = {
integrity: tarballIntegrity,
tarball: 'http://example.com/foo.tgz',
tarball: `${registry}/foo.tgz`,
}
await expect(
@@ -380,33 +403,32 @@ test('do not retry when package does not exist', async () => {
},
{
status: 404,
statusText: '',
statusText: 'Not Found',
}
)
)
expect(scope.isDone()).toBeTruthy()
})
test('accessing private packages', async () => {
const scope = nock(
registry,
{
reqheaders: {
authorization: 'Bearer ofjergrg349gj3f2',
},
}
)
.get('/foo.tgz')
.replyWithFile(200, tarballPath, {
'Content-Length': tarballSize.toString(),
})
const tarballContent = fs.readFileSync(tarballPath)
const mockPool = mockAgent.get(registry)
mockPool.intercept({
path: '/foo.tgz',
method: 'GET',
headers: {
authorization: 'Bearer ofjergrg349gj3f2',
},
}).reply(200, tarballContent, {
headers: { 'Content-Length': tarballSize.toString() },
})
process.chdir(temporaryDirectory())
const getAuthHeader = () => 'Bearer ofjergrg349gj3f2'
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
rawConfig: {},
storeIndex,
rawConfig: {},
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -416,8 +438,8 @@ test('accessing private packages', async () => {
const resolution = {
integrity: tarballIntegrity,
registry,
tarball: 'http://example.com/foo.tgz',
registry: `${registry}/`,
tarball: `${registry}/foo.tgz`,
}
const index = await fetch.remoteTarball(cafs, resolution, {
@@ -427,8 +449,6 @@ test('accessing private packages', async () => {
})
expect(index).toBeTruthy()
expect(scope.isDone()).toBeTruthy()
})
async function getFileIntegrity (filename: string) {
@@ -437,6 +457,9 @@ async function getFileIntegrity (filename: string) {
// Covers the regression reported in https://github.com/pnpm/pnpm/issues/4064
test('fetch a big repository', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const resolution = { tarball: 'https://codeload.github.com/sveltejs/action-deploy-docs/tar.gz/a65fbf5a90f53c9d72fed4daaca59da50f074355' }
@@ -451,6 +474,9 @@ test('fetch a big repository', async () => {
})
test('fail when preparing a git-hosted package', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const resolution = { tarball: 'https://codeload.github.com/pnpm-e2e/prepare-script-fails/tar.gz/ba58874aae1210a777eb309dd01a9fdacc7e54e7' }
@@ -466,6 +492,9 @@ test('fail when preparing a git-hosted package', async () => {
})
test('take only the files included in the package, when fetching a git-hosted package', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const resolution = { tarball: 'https://codeload.github.com/pnpm-e2e/pkg-with-ignored-files/tar.gz/958d6d487217512bb154d02836e9b5b922a600d8' }
@@ -484,15 +513,16 @@ test('take only the files included in the package, when fetching a git-hosted pa
})
test('fail when extracting a broken tarball', async () => {
const scope = nock(registry)
.get('/foo.tgz')
.times(2)
.reply(200, 'this is not a valid tarball')
const mockPool = mockAgent.get(registry)
// Both requests return invalid tarball content
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, 'this is not a valid tarball')
mockPool.intercept({ path: '/foo.tgz', method: 'GET' }).reply(200, 'this is not a valid tarball')
process.chdir(temporaryDirectory())
const resolution = {
tarball: `${registry}foo.tgz`,
tarball: `${registry}/foo.tgz`,
}
await expect(
@@ -501,21 +531,23 @@ test('fail when extracting a broken tarball', async () => {
lockfileDir: process.cwd(),
pkg,
})
).rejects.toThrow(`Failed to add tarball from "${registry}foo.tgz" to store: Invalid checksum for TAR header at offset 0. Expected 0, got NaN`
).rejects.toThrow(`Failed to add tarball from "${registry}/foo.tgz" to store: Invalid checksum for TAR header at offset 0. Expected 0, got NaN`
)
expect(scope.isDone()).toBeTruthy()
})
test('do not build the package when scripts are ignored', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const tarball = 'https://codeload.github.com/pnpm-e2e/prepare-script-works/tar.gz/55416a9c468806a935636c0ad0371a14a64df8c9'
const resolution = { tarball }
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
ignoreScripts: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -551,6 +583,9 @@ test('when extracting files with the same name, pick the last ones', async () =>
})
test('use the subfolder when path is present', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const resolution = {
@@ -559,9 +594,9 @@ test('use the subfolder when path is present', async () => {
}
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
ignoreScripts: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -579,6 +614,9 @@ test('use the subfolder when path is present', async () => {
})
test('prevent directory traversal attack when path is present', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const tarball = 'https://codeload.github.com/RexSkz/test-git-subfolder-fetch/tar.gz/2b42a57a945f19f8ffab8ecbd2021fdc2c58ee22'
@@ -586,9 +624,9 @@ test('prevent directory traversal attack when path is present', async () => {
const resolution = { tarball, path }
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
ignoreScripts: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,
@@ -604,6 +642,9 @@ test('prevent directory traversal attack when path is present', async () => {
})
test('fail when path is not exists', async () => {
// Enable network for this test
mockAgent.enableNetConnect(/codeload\.github\.com/)
process.chdir(temporaryDirectory())
const tarball = 'https://codeload.github.com/RexSkz/test-git-subfolder-fetch/tar.gz/2b42a57a945f19f8ffab8ecbd2021fdc2c58ee22'
@@ -611,9 +652,9 @@ test('fail when path is not exists', async () => {
const resolution = { tarball, path }
const fetch = createTarballFetcher(fetchFromRegistry, getAuthHeader, {
storeIndex,
ignoreScripts: true,
rawConfig: {},
storeIndex,
retry: {
maxTimeout: 100,
minTimeout: 0,

View File

@@ -31,8 +31,7 @@
"test": "pn compile"
},
"dependencies": {
"@zkochan/retry": "catalog:",
"node-fetch": "catalog:"
"@zkochan/retry": "catalog:"
},
"devDependencies": {
"@pnpm/fetching.types": "workspace:*"

View File

@@ -1,9 +1,8 @@
import type { RetryTimeoutOptions } from '@zkochan/retry'
import type { RequestInit as NodeRequestInit, Response } from 'node-fetch'
export type { Response, RetryTimeoutOptions }
export type { RetryTimeoutOptions }
export interface RequestInit extends NodeRequestInit {
export interface RequestInit extends globalThis.RequestInit {
retry?: RetryTimeoutOptions
timeout?: number
}

View File

@@ -6,7 +6,7 @@ import { createTarballFetcher, type TarballFetchers } from '@pnpm/fetching.tarba
import type { FetchFromRegistry, GetAuthHeader, RetryTimeoutOptions } from '@pnpm/fetching.types'
import type { CustomFetcher, CustomResolver } from '@pnpm/hooks.types'
import { createGetAuthHeaderByURI } from '@pnpm/network.auth-header'
import { type AgentOptions, createFetchFromRegistry } from '@pnpm/network.fetch'
import { createFetchFromRegistry, type DispatcherOptions } from '@pnpm/network.fetch'
import {
createResolver as _createResolver,
type ResolveFunction,
@@ -35,7 +35,7 @@ export type ClientOptions = {
includeOnlyPackageFiles?: boolean
preserveAbsolutePaths?: boolean
fetchMinSpeedKiBps?: number
} & ResolverFactoryOptions & AgentOptions
} & ResolverFactoryOptions & DispatcherOptions
export interface Client {
fetchers: Fetchers

View File

@@ -106,6 +106,7 @@
"@pnpm/store.index": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@pnpm/test-ipc-server": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@pnpm/worker": "workspace:*",
"@pnpm/workspace.projects-filter": "workspace:*",
"@types/normalize-path": "catalog:",
@@ -117,7 +118,6 @@
"ci-info": "catalog:",
"delay": "catalog:",
"jest-diff": "catalog:",
"nock": "catalog:",
"path-name": "catalog:",
"proxyquire": "catalog:",
"read-yaml-file": "catalog:",

View File

@@ -5,8 +5,8 @@ import { add } from '@pnpm/installing.commands'
import type { LockfileFile } from '@pnpm/lockfile.types'
import { prepare, preparePackages } from '@pnpm/prepare'
import { addDistTag } from '@pnpm/registry-mock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { readYamlFileSync } from 'read-yaml-file'
import { DEFAULT_OPTS } from './utils/index.js'
@@ -20,9 +20,8 @@ const createOptions = (saveCatalogName = 'default'): add.AddCommandOptions => ({
storeDir: path.resolve('store'),
})
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
afterEach(async () => {
await teardownMockAgent()
})
test('saveCatalogName creates new workspace manifest with the new catalogs', async () => {
@@ -79,8 +78,10 @@ test('saveCatalogName works with different protocols', async () => {
})
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes.
// Without this, transient network failures cause fallback to git+https:// resolution.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-positive')
await setupMockAgent()
getMockAgent().enableNetConnect()
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-positive', method: 'HEAD' })
.reply(200)
const options = createOptions()
@@ -139,7 +140,6 @@ test('saveCatalogName works with different protocols', async () => {
},
},
} as Partial<LockfileFile>))
githubNock.done()
})
test('saveCatalogName does not work with local dependencies', async () => {

View File

@@ -105,6 +105,9 @@
{
"path": "../../store/index"
},
{
"path": "../../testing/mock-agent"
},
{
"path": "../../worker"
},

View File

@@ -139,6 +139,7 @@
"@pnpm/store.path": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@pnpm/test-ipc-server": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@pnpm/testing.temp-store": "workspace:*",
"@types/fs-extra": "catalog:",
"@types/is-windows": "catalog:",

View File

@@ -1,12 +1,16 @@
import { addDependenciesToPackage } from '@pnpm/installing.deps-installer'
import { prepareEmpty } from '@pnpm/prepare'
import nock from 'nock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { testDefaults } from '../utils/index.js'
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
beforeEach(async () => {
await setupMockAgent()
getMockAgent().enableNetConnect()
})
afterEach(async () => {
await teardownMockAgent()
})
test('blockExoticSubdeps disallows git dependencies in subdependencies', async () => {
@@ -23,8 +27,8 @@ test('blockExoticSubdeps allows git dependencies in direct dependencies', async
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes to check if the repo is public.
// Without this, transient network failures cause the resolver to fall back to git+https:// instead of
// resolving via the codeload tarball URL.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
const project = prepareEmpty()
@@ -41,8 +45,6 @@ test('blockExoticSubdeps allows git dependencies in direct dependencies', async
expect(manifest.dependencies).toStrictEqual({
'is-negative': 'github:kevva/is-negative#1.0.0',
})
githubNock.done()
})
test('blockExoticSubdeps allows registry dependencies in subdependencies', async () => {

View File

@@ -2,15 +2,19 @@ import { LOCKFILE_VERSION, WANTED_LOCKFILE } from '@pnpm/constants'
import { addDependenciesToPackage, install } from '@pnpm/installing.deps-installer'
import { prepareEmpty } from '@pnpm/prepare'
import { getIntegrity } from '@pnpm/registry-mock'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { rimrafSync } from '@zkochan/rimraf'
import nock from 'nock'
import { writeYamlFileSync } from 'write-yaml-file'
import { testDefaults } from '../utils/index.js'
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
beforeEach(async () => {
await setupMockAgent()
getMockAgent().enableNetConnect()
})
afterEach(async () => {
await teardownMockAgent()
})
const RESOLUTIONS = [
@@ -95,8 +99,7 @@ const RESOLUTIONS = [
},
]
// Derive SHA256 hex values from RESOLUTIONS integrity fields
const PLATFORM_HEX_DIGESTS: Record<string, string> = Object.fromEntries(
const PLATFORM_HEX_DIGESTS = Object.fromEntries(
RESOLUTIONS.map(({ resolution }) => {
const platform = resolution.url.match(/deno-(.+)\.zip$/)![1]
const hex = Buffer.from(resolution.integrity.replace('sha256-', ''), 'base64').toString('hex')
@@ -107,19 +110,20 @@ const PLATFORM_HEX_DIGESTS: Record<string, string> = Object.fromEntries(
test('installing Deno runtime', async () => {
// Mock GitHub API to avoid network flakiness
const assetNames = Object.keys(PLATFORM_HEX_DIGESTS).map((platform) => `deno-${platform}`)
const githubApiNock = nock('https://api.github.com', { allowUnmocked: true })
.get('/repos/denoland/deno/releases/tags/v2.4.2')
const githubApiPool = getMockAgent().get('https://api.github.com')
githubApiPool
.intercept({ path: '/repos/denoland/deno/releases/tags/v2.4.2', method: 'GET' })
.reply(200, {
assets: assetNames.map((name) => ({
name: `${name}.zip.sha256sum`,
browser_download_url: `https://github.com/denoland/deno/releases/download/v2.4.2/${name}.zip.sha256sum`,
})),
})
const githubDownloadNock = nock('https://github.com', { allowUnmocked: true })
const githubPool = getMockAgent().get('https://github.com')
for (const [platform, hex] of Object.entries(PLATFORM_HEX_DIGESTS)) {
const name = `deno-${platform}`
githubDownloadNock
.get(`/denoland/deno/releases/download/v2.4.2/${name}.zip.sha256sum`)
githubPool
.intercept({ path: `/denoland/deno/releases/download/v2.4.2/${name}.zip.sha256sum`, method: 'GET' })
.reply(200, `${hex} ${name}.zip`)
}
@@ -207,9 +211,6 @@ test('installing Deno runtime', async () => {
'@pnpm.e2e/dep-of-pkg-with-1-dep@100.1.0': {},
},
})
githubApiNock.done()
githubDownloadNock.done()
})
test('installing Deno runtime fails if offline mode is used and Deno not found locally', async () => {

View File

@@ -1,11 +1,13 @@
import fs from 'node:fs'
import type { PnpmError } from '@pnpm/error'
import { addDependenciesToPackage, mutateModulesInSingleProject } from '@pnpm/installing.deps-installer'
import { prepareEmpty } from '@pnpm/prepare'
import { REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import type { ProjectRootDir } from '@pnpm/types'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { testDefaults } from '../utils/index.js'
@@ -44,27 +46,26 @@ test('fail if none of the available resolvers support a version spec', async ()
test('fail if a package cannot be fetched', async () => {
prepareEmpty()
await setupMockAgent()
const mockPool = getMockAgent().get(`http://localhost:${REGISTRY_MOCK_PORT}`)
/* eslint-disable @typescript-eslint/no-explicit-any */
nock(`http://localhost:${REGISTRY_MOCK_PORT}/`)
.get('/@pnpm.e2e%2Fpkg-with-1-dep') // cspell:disable-line
mockPool.intercept({ path: '/@pnpm.e2e%2Fpkg-with-1-dep', method: 'GET' }) // cspell:disable-line
.reply(200, loadJsonFileSync<any>(f.find('pkg-with-1-dep.json')))
nock(`http://localhost:${REGISTRY_MOCK_PORT}/`)
.get('/@pnpm.e2e%2Fdep-of-pkg-with-1-dep') // cspell:disable-line
mockPool.intercept({ path: '/@pnpm.e2e%2Fdep-of-pkg-with-1-dep', method: 'GET' }) // cspell:disable-line
.reply(200, loadJsonFileSync<any>(f.find('dep-of-pkg-with-1-dep.json')))
/* eslint-enable @typescript-eslint/no-explicit-any */
nock(`http://localhost:${REGISTRY_MOCK_PORT}/`)
.get('/@pnpm.e2e/pkg-with-1-dep/-/@pnpm.e2e/pkg-with-1-dep-100.0.0.tgz')
.replyWithFile(200, f.find('pkg-with-1-dep-100.0.0.tgz'))
nock(`http://localhost:${REGISTRY_MOCK_PORT}/`)
.get('/@pnpm.e2e/dep-of-pkg-with-1-dep/-/@pnpm.e2e/dep-of-pkg-with-1-dep-100.1.0.tgz')
.reply(403)
const tarballContent = fs.readFileSync(f.find('pkg-with-1-dep-100.0.0.tgz'))
mockPool.intercept({ path: '/@pnpm.e2e/pkg-with-1-dep/-/@pnpm.e2e/pkg-with-1-dep-100.0.0.tgz', method: 'GET' })
.reply(200, tarballContent, { headers: { 'content-length': String(tarballContent.length) } })
mockPool.intercept({ path: '/@pnpm.e2e/dep-of-pkg-with-1-dep/-/@pnpm.e2e/dep-of-pkg-with-1-dep-100.1.0.tgz', method: 'GET' })
.reply(403, 'Forbidden', { headers: { 'content-type': 'text/plain' } })
let err!: PnpmError
try {
await addDependenciesToPackage({}, ['@pnpm.e2e/pkg-with-1-dep@100.0.0'], testDefaults({}, {}, { retry: { retries: 0 } }))
throw new Error('should have failed')
} catch (_err: any) { // eslint-disable-line
nock.restore()
await teardownMockAgent()
err = _err
}
expect(err.code).toBe('ERR_PNPM_FETCH_403')

View File

@@ -11,26 +11,30 @@ import {
} from '@pnpm/installing.deps-installer'
import { prepareEmpty } from '@pnpm/prepare'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { rimrafSync } from '@zkochan/rimraf'
import { isCI } from 'ci-info'
import nock from 'nock'
import { testDefaults } from '../utils/index.js'
const f = fixtures(import.meta.dirname)
const withGitProtocolDepFixture = f.find('with-git-protocol-dep')
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
beforeEach(async () => {
await setupMockAgent()
getMockAgent().enableNetConnect()
})
afterEach(async () => {
await teardownMockAgent()
})
test('from a github repo', async () => {
const project = prepareEmpty()
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes.
// Without this, transient network failures cause fallback to git+https:// resolution.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
const { updatedManifest: manifest } = await addDependenciesToPackage({}, ['kevva/is-negative'], testDefaults())
@@ -40,13 +44,12 @@ test('from a github repo', async () => {
expect(manifest.dependencies).toStrictEqual({
'is-negative': 'github:kevva/is-negative',
})
githubNock.done()
})
test('from a github repo through URL', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
const { updatedManifest: manifest } = await addDependenciesToPackage({}, ['https://github.com/kevva/is-negative'], testDefaults())
@@ -54,13 +57,12 @@ test('from a github repo through URL', async () => {
project.has('is-negative')
expect(manifest.dependencies).toStrictEqual({ 'is-negative': 'github:kevva/is-negative' })
githubNock.done()
})
test('from a github repo with different name via named installation', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/zkochan/hi')
getMockAgent().get('https://github.com')
.intercept({ path: '/zkochan/hi', method: 'HEAD' })
.reply(200)
const reporter = jest.fn()
@@ -98,14 +100,13 @@ test('from a github repo with different name via named installation', async () =
project.isExecutable('.bin/hi')
project.isExecutable('.bin/szia')
githubNock.done()
})
// This used to fail. Maybe won't be needed once api/install.ts gets refactored and covered with dedicated unit tests
test('from a github repo with different name', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/zkochan/hi')
getMockAgent().get('https://github.com')
.intercept({ path: '/zkochan/hi', method: 'HEAD' })
.reply(200)
const reporter = jest.fn()
@@ -145,13 +146,12 @@ test('from a github repo with different name', async () => {
project.isExecutable('.bin/hi')
project.isExecutable('.bin/szia')
githubNock.done()
})
test('a subdependency is from a github repo with different name', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/zkochan/hi')
getMockAgent().get('https://github.com')
.intercept({ path: '/zkochan/hi', method: 'HEAD' })
.reply(200)
await addDependenciesToPackage({}, ['@pnpm.e2e/has-aliased-git-dependency'], testDefaults({ fastUnpack: false }))
@@ -170,7 +170,6 @@ test('a subdependency is from a github repo with different name', async () => {
project.isExecutable('@pnpm.e2e/has-aliased-git-dependency/node_modules/.bin/szia')
expect(fs.existsSync(path.resolve(`node_modules/.pnpm/${depPathToFilename('@pnpm.e2e/has-say-hi-peer@1.0.0(hi@https://codeload.github.com/zkochan/hi/tar.gz/4cdebec76b7b9d1f6e219e06c42d92a6b8ea60cd)', 120)}/node_modules/@pnpm.e2e/has-say-hi-peer`))).toBeTruthy()
githubNock.done()
})
test('from a git repo', async () => {
@@ -207,9 +206,11 @@ test.skip('from a non-github git repo', async () => {
test('from a github repo the has no package.json file', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/pnpm/for-testing.no-package-json')
.times(2)
getMockAgent().get('https://github.com')
.intercept({ path: '/pnpm/for-testing.no-package-json', method: 'HEAD' })
.reply(200)
getMockAgent().get('https://github.com')
.intercept({ path: '/pnpm/for-testing.no-package-json', method: 'HEAD' })
.reply(200)
const { updatedManifest: manifest } = await addDependenciesToPackage({}, ['pnpm/for-testing.no-package-json'], testDefaults())
@@ -227,7 +228,6 @@ test('from a github repo the has no package.json file', async () => {
// e.g. thrown: "Exceeded timeout of 240000 ms for a test.
await addDependenciesToPackage({}, ['pnpm/for-testing.no-package-json'], testDefaults())
project.has('for-testing.no-package-json')
githubNock.done()
})
test.skip('from a github repo that needs to be built. isolated node linker is used', async () => {
@@ -276,9 +276,11 @@ test.skip('from a github repo that needs to be built. hoisted node linker is us
test('re-adding a git repo with a different tag', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
.times(2)
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
let { updatedManifest: manifest } = await addDependenciesToPackage({}, ['kevva/is-negative#1.0.0'], testDefaults())
project.has('is-negative')
@@ -317,7 +319,6 @@ test('re-adding a git repo with a different tag', async () => {
},
}
)
githubNock.done()
})
test('should not update when adding unrelated dependency', async () => {
@@ -365,9 +366,11 @@ test('git-hosted repository is not added to the store if it fails to be built',
test('from subdirectories of a git repo', async () => {
const project = prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/RexSkz/test-git-subfolder-fetch')
.times(2)
getMockAgent().get('https://github.com')
.intercept({ path: '/RexSkz/test-git-subfolder-fetch', method: 'HEAD' })
.reply(200)
getMockAgent().get('https://github.com')
.intercept({ path: '/RexSkz/test-git-subfolder-fetch', method: 'HEAD' })
.reply(200)
const { updatedManifest: manifest } = await addDependenciesToPackage({}, [
@@ -382,13 +385,12 @@ test('from subdirectories of a git repo', async () => {
'@my-namespace/simple-express-server': 'github:RexSkz/test-git-subfolder-fetch#path:/packages/simple-express-server',
'@my-namespace/simple-react-app': 'github:RexSkz/test-git-subfolder-fetch#path:/packages/simple-react-app',
})
githubNock.done()
})
test('no hash character for github subdirectory install', async () => {
prepareEmpty()
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/pnpm/only-allow')
getMockAgent().get('https://github.com')
.intercept({ path: '/pnpm/only-allow', method: 'HEAD' })
.reply(200)
await addDependenciesToPackage({}, [
@@ -397,5 +399,4 @@ test('no hash character for github subdirectory install', async () => {
expect(fs.readdirSync('./node_modules/.pnpm'))
.toContain('only-allow@https+++codeload.github.com+pnpm+only-allow+tar.gz+91ab41994c6a1b7319869fa8864163c9954f56ec+path++')
githubNock.done()
})

View File

@@ -19,20 +19,15 @@ import { readPackageJsonFromDir } from '@pnpm/pkg-manifest.reader'
import { prepareEmpty, preparePackages, tempDir } from '@pnpm/prepare'
import { addDistTag, getIntegrity, REGISTRY_MOCK_PORT } from '@pnpm/registry-mock'
import { fixtures } from '@pnpm/test-fixtures'
import { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import type { DepPath, ProjectManifest, ProjectRootDir } from '@pnpm/types'
import { rimrafSync } from '@zkochan/rimraf'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { readYamlFileSync } from 'read-yaml-file'
import { writeYamlFileSync } from 'write-yaml-file'
import { testDefaults } from './utils/index.js'
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
})
const f = fixtures(import.meta.dirname)
const LOCKFILE_WARN_LOG = {
@@ -46,8 +41,10 @@ test('lockfile has correct format', async () => {
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes to check if the repo is public.
// Without this, transient network failures cause the resolver to fall back to git+https:// instead of
// resolving via the codeload tarball URL.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
await setupMockAgent()
getMockAgent().enableNetConnect()
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
const project = prepareEmpty()
@@ -81,7 +78,7 @@ test('lockfile has correct format', async () => {
expect((lockfile.packages[id].resolution as TarballResolution).tarball).toBeFalsy()
expect(lockfile.packages).toHaveProperty(['is-negative@https://codeload.github.com/kevva/is-negative/tar.gz/1d7e288222b53a0cab90a331f1865220ec29560c'])
githubNock.done()
await teardownMockAgent()
})
test('lockfile has dev deps even when installing for prod only', async () => {
@@ -825,8 +822,10 @@ test('lockfile file has correct format when lockfile directory does not equal th
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes to check if the repo is public.
// Without this, transient network failures cause the resolver to fall back to git+https:// instead of
// resolving via the codeload tarball URL.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/kevva/is-negative')
await setupMockAgent()
getMockAgent().enableNetConnect()
getMockAgent().get('https://github.com')
.intercept({ path: '/kevva/is-negative', method: 'HEAD' })
.reply(200)
prepareEmpty()
@@ -902,7 +901,7 @@ test('lockfile file has correct format when lockfile directory does not equal th
expect(lockfile.packages).toHaveProperty(['is-negative@https://codeload.github.com/kevva/is-negative/tar.gz/1d7e288222b53a0cab90a331f1865220ec29560c'])
}
githubNock.done()
await teardownMockAgent()
})
test(`doing named installation when shared ${WANTED_LOCKFILE} exists already`, async () => {
@@ -1089,13 +1088,15 @@ const isPositiveMeta = loadJsonFileSync<any>(path.join(REGISTRY_MIRROR_DIR, 'is-
const tarballPath = f.find('is-positive-3.1.0.tgz')
test('tarball domain differs from registry domain', async () => {
nock('https://registry.example.com', { allowUnmocked: true })
.get('/is-positive')
await setupMockAgent()
getMockAgent().enableNetConnect(/localhost/)
getMockAgent().get('https://registry.example.com')
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
nock('https://registry.npmjs.org', { allowUnmocked: true })
.get('/is-positive/-/is-positive-3.1.0.tgz')
.replyWithFile(200, tarballPath)
const tarballContent = fs.readFileSync(tarballPath)
getMockAgent().get('https://registry.npmjs.org')
.intercept({ path: '/is-positive/-/is-positive-3.1.0.tgz', method: 'GET' })
.reply(200, tarballContent, { headers: { 'content-length': String(tarballContent.length) } })
const project = prepareEmpty()
@@ -1144,15 +1145,18 @@ test('tarball domain differs from registry domain', async () => {
'is-positive@3.1.0': {},
},
})
await teardownMockAgent()
})
test('tarball installed through non-standard URL endpoint from the registry domain', async () => {
nock('https://registry.npmjs.org', { allowUnmocked: true })
.head('/is-positive/download/is-positive-3.1.0.tgz')
.reply(200, '')
nock('https://registry.npmjs.org', { allowUnmocked: true })
.get('/is-positive/download/is-positive-3.1.0.tgz')
.replyWithFile(200, tarballPath)
await setupMockAgent()
getMockAgent().enableNetConnect(/localhost/)
const mockPool = getMockAgent().get('https://registry.npmjs.org')
mockPool.intercept({ path: '/is-positive/download/is-positive-3.1.0.tgz', method: 'HEAD' })
.reply(200, '').persist()
const tarballContent2 = fs.readFileSync(tarballPath)
mockPool.intercept({ path: '/is-positive/download/is-positive-3.1.0.tgz', method: 'GET' })
.reply(200, tarballContent2, { headers: { 'content-length': String(tarballContent2.length) } }).persist()
const project = prepareEmpty()
@@ -1201,6 +1205,7 @@ test('tarball installed through non-standard URL endpoint from the registry doma
'is-positive@https://registry.npmjs.org/is-positive/download/is-positive-3.1.0.tgz': {},
},
})
await teardownMockAgent()
})
// TODO: fix merge conflicts with the new lockfile format (TODOv8)

View File

@@ -162,6 +162,9 @@
{
"path": "../../store/path"
},
{
"path": "../../testing/mock-agent"
},
{
"path": "../../testing/temp-store"
},

View File

@@ -70,12 +70,12 @@
"@pnpm/store.cafs-types": "workspace:*",
"@pnpm/store.create-cafs-store": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@types/normalize-path": "catalog:",
"@types/ramda": "catalog:",
"@types/semver": "catalog:",
"@types/ssri": "catalog:",
"delay": "catalog:",
"nock": "catalog:",
"normalize-path": "catalog:",
"tempy": "catalog:"
},

View File

@@ -13,9 +13,9 @@ import type { PkgRequestFetchResult, PkgResolutionId, RequestPackageOptions } fr
import { createCafsStore } from '@pnpm/store.create-cafs-store'
import { StoreIndex } from '@pnpm/store.index'
import { fixtures } from '@pnpm/test-fixtures'
import { setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
import { restartWorkerPool } from '@pnpm/worker'
import delay from 'delay'
import nock from 'nock'
import normalize from 'normalize-path'
import { temporaryDirectory } from 'tempy'
@@ -57,9 +57,8 @@ function createFetchersForStore (storeDir: string) {
}).fetchers
}
afterEach(() => {
nock.abortPendingRequests()
nock.cleanAll()
afterEach(async () => {
await teardownMockAgent()
})
test('request package', async () => {
@@ -583,14 +582,18 @@ test('fetchPackageToStore() concurrency check', async () => {
})
test('fetchPackageToStore() does not cache errors', async () => {
nock(registry)
.get('/is-positive/-/is-positive-1.0.0.tgz')
.reply(404)
nock(registry)
.get('/is-positive/-/is-positive-1.0.0.tgz')
.replyWithFile(200, IS_POSITIVE_TARBALL)
const agent = await setupMockAgent()
const mockPool = agent.get(registry)
// First request returns 404
mockPool.intercept({ path: '/is-positive/-/is-positive-1.0.0.tgz', method: 'GET' }).reply(404, {})
// Second request returns the tarball
const tarballContent = fs.readFileSync(IS_POSITIVE_TARBALL)
mockPool.intercept({ path: '/is-positive/-/is-positive-1.0.0.tgz', method: 'GET' }).reply(200, tarballContent, {
headers: { 'content-length': String(tarballContent.length) },
})
const noRetryStoreIndex = new StoreIndex('.store')
storeIndexes.push(noRetryStoreIndex)
const noRetry = createClient({
authConfig,
rawConfig: {},
@@ -647,7 +650,7 @@ test('fetchPackageToStore() does not cache errors', async () => {
expect(Array.from(files.filesMap.keys()).sort((a, b) => a.localeCompare(b))).toStrictEqual(['package.json', 'index.js', 'license', 'readme.md'].sort((a, b) => a.localeCompare(b)))
expect(files.resolvedFrom).toBe('remote')
expect(nock.isDone()).toBeTruthy()
await teardownMockAgent()
})
// This test was added to cover the issue described here: https://github.com/pnpm/supi/issues/65
@@ -709,9 +712,13 @@ test('always return a package manifest in the response', async () => {
// Covers https://github.com/pnpm/pnpm/issues/1293
test('fetchPackageToStore() fetch raw manifest of cached package', async () => {
nock(registry)
.get('/is-positive/-/is-positive-1.0.0.tgz')
.replyWithFile(200, IS_POSITIVE_TARBALL)
const agent = await setupMockAgent()
const tarballContent = fs.readFileSync(IS_POSITIVE_TARBALL)
agent.get(registry)
.intercept({ path: '/is-positive/-/is-positive-1.0.0.tgz', method: 'GET' })
.reply(200, tarballContent, {
headers: { 'content-length': String(tarballContent.length) },
})
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
@@ -755,6 +762,7 @@ test('fetchPackageToStore() fetch raw manifest of cached package', async () => {
])
expect((await fetchResults[1].fetching()).bundledManifest).toBeTruthy()
await teardownMockAgent()
})
test('refetch package to store if it has been modified', async () => {
@@ -883,12 +891,6 @@ test('fetch a git package without a package.json', async () => {
const repo = 'denolib/camelcase'
const commit = 'aeb6b15f9c9957c8fa56f9731e914c4d8a6d2f2b'
// Mock the HEAD request that isRepoPublic() in @pnpm/resolving.git-resolver makes to check if the repo is public.
// Without this, transient network failures cause the resolver to fall back to git+https:// instead of
// resolving via the codeload tarball URL.
const githubNock = nock('https://github.com', { allowUnmocked: true })
.head('/denolib/camelcase')
.reply(200)
const storeDir = temporaryDirectory()
const cafs = createCafsStore(storeDir)
const requestPackage = createPackageRequester({
@@ -916,7 +918,6 @@ test('fetch a git package without a package.json', async () => {
expect(pkgResponse.body.isInstallable).toBeFalsy()
expect(pkgResponse.body.id).toBe(`https://codeload.github.com/${repo}/tar.gz/${commit}`)
}
githubNock.done()
})
test('throw exception if the package data in the store differs from the expected data', async () => {

View File

@@ -60,6 +60,9 @@
{
"path": "../../store/index"
},
{
"path": "../../testing/mock-agent"
},
{
"path": "../../worker"
},

View File

@@ -26,7 +26,6 @@ export async function writePnpFile (
dependencyTreeRoots: [],
ignorePattern: undefined,
packageRegistry,
pnpZipBackend: 'libzip',
shebang: undefined,
})
await fs.writeFile(path.join(opts.lockfileDir, '.pnp.cjs'), loaderFile, 'utf8')

View File

@@ -1,7 +1,7 @@
{
"name": "@pnpm/network.fetch",
"version": "1000.2.6",
"description": "node-fetch with retries",
"description": "Native fetch with retries",
"keywords": [
"pnpm",
"pnpm11",
@@ -33,12 +33,15 @@
".test": "cross-env NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules --disable-warning=ExperimentalWarning --disable-warning=DEP0169\" jest"
},
"dependencies": {
"@pnpm/config.nerf-dart": "catalog:",
"@pnpm/core-loggers": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/fetching.types": "workspace:*",
"@pnpm/network.agent": "catalog:",
"@pnpm/types": "workspace:*",
"@zkochan/retry": "catalog:",
"node-fetch": "catalog:"
"lru-cache": "catalog:",
"socks": "catalog:",
"undici": "catalog:"
},
"peerDependencies": {
"@pnpm/logger": "catalog:"
@@ -46,8 +49,7 @@
"devDependencies": {
"@pnpm/logger": "workspace:*",
"@pnpm/network.fetch": "workspace:*",
"https-proxy-server-express": "catalog:",
"nock": "catalog:"
"https-proxy-server-express": "catalog:"
},
"engines": {
"node": ">=22.13"

View File

@@ -0,0 +1,348 @@
import net from 'node:net'
import tls from 'node:tls'
import { URL } from 'node:url'
import { nerfDart } from '@pnpm/config.nerf-dart'
import { PnpmError } from '@pnpm/error'
import type { SslConfig } from '@pnpm/types'
import { LRUCache } from 'lru-cache'
import { SocksClient } from 'socks'
import { Agent, type Dispatcher, ProxyAgent } from 'undici'
// Default cap on concurrent connections per dispatcher when the caller does
// not provide `maxSockets`.
const DEFAULT_MAX_SOCKETS = 50

// Dispatchers own socket pools and are expensive to create, so they are
// cached and shared across requests. Keys are strings derived from every
// connection-relevant option (see getProxyDispatcher / getNonProxyDispatcher).
// When an entry is evicted, its agent is closed so pooled sockets are released.
const DISPATCHER_CACHE = new LRUCache<string, Dispatcher>({
  max: 50,
  dispose: (dispatcher) => {
    // Not every Dispatcher implementation exposes close(); guard before calling.
    if (typeof (dispatcher as Agent).close === 'function') {
      void (dispatcher as Agent).close()
    }
  },
})
/**
 * Options that influence how connections are established. A custom undici
 * dispatcher is only built when at least one of these is set (see
 * needsCustomDispatcher); otherwise the global dispatcher is used.
 */
export interface DispatcherOptions {
  // Certificate authority chain used to verify servers (applied for HTTPS targets).
  ca?: string | string[] | Buffer
  // Client certificate presented to servers (applied for HTTPS targets).
  cert?: string | string[] | Buffer
  // Private key matching `cert`.
  key?: string | Buffer
  // Local interface address to bind outgoing sockets to.
  localAddress?: string
  // Maximum concurrent connections per dispatcher; defaults to DEFAULT_MAX_SOCKETS.
  maxSockets?: number
  // When false, TLS certificate errors are ignored (rejectUnauthorized: false).
  strictSsl?: boolean
  // Request timeout in milliseconds; used to derive the connect timeout.
  timeout?: number
  // Proxy URL for non-HTTPS targets (may also be a socks:// URL).
  httpProxy?: string
  // Proxy URL for HTTPS targets (may also be a socks:// URL).
  httpsProxy?: string
  // `true` bypasses the proxy for every host; a string is a comma-separated
  // list of domain suffixes that bypass the proxy.
  noProxy?: boolean | string
  // Per-registry TLS settings keyed by nerf-dart URL (e.g. //registry.npmjs.org/).
  clientCertificates?: Record<string, SslConfig>
}
/**
 * Clear the dispatcher cache. Useful for testing.
 *
 * Evicted dispatchers are closed by the cache's dispose handler, which
 * releases their pooled sockets.
 */
export function clearDispatcherCache (): void {
  DISPATCHER_CACHE.clear()
}
/**
* Get a dispatcher for the given URI and options.
* Returns undefined if no special configuration is needed (to use global dispatcher).
*/
export function getDispatcher (uri: string, opts: DispatcherOptions): Dispatcher | undefined {
// If no special options are set, use the global dispatcher
if (!needsCustomDispatcher(opts)) {
return undefined
}
const parsedUri = new URL(uri)
if ((opts.httpProxy || opts.httpsProxy) && !checkNoProxy(parsedUri, opts)) {
const proxyDispatcher = getProxyDispatcher(parsedUri, opts)
if (proxyDispatcher) return proxyDispatcher
}
return getNonProxyDispatcher(parsedUri, opts)
}
/**
 * Whether any of the given options require a dedicated dispatcher instead of
 * undici's global one.
 */
function needsCustomDispatcher (opts: DispatcherOptions): boolean {
  if (opts.httpProxy || opts.httpsProxy) return true
  if (opts.ca || opts.cert || opts.key) return true
  if (opts.localAddress || opts.clientCertificates || opts.maxSockets) return true
  // strictSsl defaults to true, so only an explicit `false` is "custom".
  return opts.strictSsl === false
}
/**
 * Parse a proxy setting into a URL, prepending the target's protocol when the
 * setting lacks one (e.g. `proxy.example.com:8080`).
 *
 * @throws {PnpmError} with code INVALID_PROXY when the value cannot be parsed.
 */
function parseProxyUrl (proxy: string, protocol: string): URL {
  const withProtocol = proxy.includes('://') ? proxy : `${protocol}//${proxy}`
  try {
    return new URL(withProtocol)
  } catch {
    throw new PnpmError('INVALID_PROXY', "Couldn't parse proxy URL", {
      hint: 'If your proxy URL contains a username and password, make sure to URL-encode them ' +
        '(you may use the encodeURIComponent function). For instance, ' +
        'https-proxy=https://use%21r:pas%2As@my.proxy:1234/foo. ' +
        'Do not encode the colon (:) between the username and password.',
    })
  }
}
/**
 * Map a proxy URL protocol to the SOCKS protocol version understood by the
 * `socks` package: socks4 / socks4a → 4, anything else (socks, socks5) → 5.
 */
function getSocksProxyType (protocol: string): 4 | 5 {
  const name = protocol.replace(':', '')
  return (name === 'socks4' || name === 'socks4a') ? 4 : 5
}
/**
 * Build (or fetch from cache) a dispatcher that routes requests through the
 * configured HTTP(S) or SOCKS proxy. Returns null when no proxy is set for
 * the target's protocol, letting the caller fall back to a direct dispatcher.
 */
function getProxyDispatcher (parsedUri: URL, opts: DispatcherOptions): Dispatcher | null {
  const isHttps = parsedUri.protocol === 'https:'
  // httpsProxy applies to https: targets; httpProxy to everything else.
  const proxy = isHttps ? opts.httpsProxy : opts.httpProxy
  if (!proxy) return null
  const proxyUrl = parseProxyUrl(proxy, parsedUri.protocol)
  // Per-registry TLS settings (matched via nerf-dart) override the global ones.
  const sslConfig = pickSettingByUrl(opts.clientCertificates, parsedUri.href)
  const { ca, cert, key: certKey } = { ...opts, ...sslConfig }
  // The cache key encodes every input that affects connection behavior, so
  // requests with identical keys can safely share one dispatcher. TLS-related
  // parts collapse to placeholders for plain-HTTP targets where they are unused.
  const key = [
    `proxy:${proxyUrl.protocol}//${proxyUrl.username}:${proxyUrl.password}@${proxyUrl.host}:${proxyUrl.port}`,
    `https:${isHttps.toString()}`,
    `local-address:${opts.localAddress ?? '>no-local-address<'}`,
    `max-sockets:${(opts.maxSockets ?? DEFAULT_MAX_SOCKETS).toString()}`,
    `strict-ssl:${isHttps ? Boolean(opts.strictSsl).toString() : '>no-strict-ssl<'}`,
    `ca:${(isHttps && ca?.toString()) || '-'}`,
    `cert:${(isHttps && cert?.toString()) || '-'}`,
    `key:${(isHttps && certKey?.toString()) || '-'}`,
  ].join(':')
  if (DISPATCHER_CACHE.has(key)) {
    return DISPATCHER_CACHE.get(key)!
  }
  let dispatcher: Dispatcher
  // SOCKS proxies need a custom connect function; HTTP(S) proxies use
  // undici's built-in ProxyAgent.
  if (proxyUrl.protocol.startsWith('socks')) {
    dispatcher = createSocksDispatcher(proxyUrl, parsedUri, opts, { ca, cert, key: certKey })
  } else {
    dispatcher = createHttpProxyDispatcher(proxyUrl, isHttps, opts, { ca, cert, key: certKey })
  }
  DISPATCHER_CACHE.set(key, dispatcher)
  return dispatcher
}
/**
 * Create an undici ProxyAgent for an HTTP or HTTPS proxy.
 *
 * `requestTls` configures TLS for the tunneled connection to the *target*
 * (only relevant for https: targets); `proxyTls` configures TLS for the
 * connection to the *proxy* itself.
 *
 * NOTE(review): proxy credentials are passed both inside `uri` (proxyUrl.href
 * keeps username/password) and as an explicit Basic `token` — confirm undici
 * does not emit a duplicate/conflicting Proxy-Authorization header.
 */
function createHttpProxyDispatcher (
  proxyUrl: URL,
  isHttps: boolean,
  opts: DispatcherOptions,
  tlsConfig: { ca?: string | string[] | Buffer, cert?: string | string[] | Buffer, key?: string | Buffer }
): Dispatcher {
  return new ProxyAgent({
    uri: proxyUrl.href,
    // Credentials arrive URL-encoded in the proxy URL; decode before building
    // the Basic auth header value.
    token: proxyUrl.username
      ? `Basic ${Buffer.from(`${decodeURIComponent(proxyUrl.username)}:${decodeURIComponent(proxyUrl.password)}`).toString('base64')}`
      : undefined,
    connections: opts.maxSockets ?? DEFAULT_MAX_SOCKETS,
    requestTls: isHttps
      ? {
        ca: tlsConfig.ca,
        cert: tlsConfig.cert,
        key: tlsConfig.key,
        rejectUnauthorized: opts.strictSsl ?? true,
        localAddress: opts.localAddress,
      }
      : undefined,
    proxyTls: {
      ca: opts.ca,
      rejectUnauthorized: opts.strictSsl ?? true,
    },
  })
}
/**
 * Create a dispatcher that tunnels connections through a SOCKS4/SOCKS5 proxy.
 *
 * undici has no built-in SOCKS support, so this supplies a custom `connect`
 * function that first establishes the tunnel via the `socks` package and,
 * for HTTPS targets, upgrades the tunneled socket to TLS.
 */
function createSocksDispatcher (
  proxyUrl: URL,
  targetUri: URL,
  opts: DispatcherOptions,
  tlsConfig: { ca?: string | string[] | Buffer, cert?: string | string[] | Buffer, key?: string | Buffer }
): Dispatcher {
  const isHttps = targetUri.protocol === 'https:'
  const socksType = getSocksProxyType(proxyUrl.protocol)
  const proxyHost = proxyUrl.hostname
  // 1080 is the standard SOCKS port for both protocol versions.
  const proxyPort = parseInt(proxyUrl.port, 10) || 1080
  return new Agent({
    connections: opts.maxSockets ?? DEFAULT_MAX_SOCKETS,
    connect: async (connectOpts, callback) => {
      try {
        const { socket } = await SocksClient.createConnection({
          proxy: {
            host: proxyHost,
            port: proxyPort,
            type: socksType,
            // Credentials are URL-encoded in the proxy URL; decode before use.
            userId: proxyUrl.username ? decodeURIComponent(proxyUrl.username) : undefined,
            password: proxyUrl.password ? decodeURIComponent(proxyUrl.password) : undefined,
          },
          command: 'connect',
          destination: {
            host: connectOpts.hostname!,
            port: parseInt(String(connectOpts.port!), 10),
          },
        })
        if (isHttps) {
          // Upgrade the tunneled socket to TLS for the target host.
          const tlsOpts: tls.ConnectionOptions = {
            socket: socket as net.Socket,
            servername: connectOpts.hostname!,
            ca: tlsConfig.ca,
            cert: tlsConfig.cert,
            key: tlsConfig.key,
            rejectUnauthorized: opts.strictSsl ?? true,
          }
          const tlsSocket = tls.connect(tlsOpts)
          // Each listener removes the other so the undici connect callback
          // can never be invoked twice (e.g. an 'error' emitted after the
          // handshake already completed).
          const onSecureConnect = (): void => {
            tlsSocket.removeListener('error', onError)
            callback(null, tlsSocket)
          }
          const onError = (err: Error): void => {
            tlsSocket.removeListener('secureConnect', onSecureConnect)
            callback(err, null)
          }
          tlsSocket.once('secureConnect', onSecureConnect)
          tlsSocket.once('error', onError)
        } else {
          callback(null, socket as net.Socket)
        }
      } catch (err) {
        callback(err as Error, null)
      }
    },
  })
}
/**
 * Build (or fetch from cache) a dispatcher for direct (non-proxied)
 * connections, carrying TLS, local-address and pool-size configuration.
 */
function getNonProxyDispatcher (parsedUri: URL, opts: DispatcherOptions): Dispatcher {
  const isHttps = parsedUri.protocol === 'https:'
  // Per-registry TLS settings (matched via nerf-dart) override the global ones.
  const sslConfig = pickSettingByUrl(opts.clientCertificates, parsedUri.href)
  const { ca, cert, key: certKey } = { ...opts, ...sslConfig }
  // Cache key encodes every connection-affecting input; identical keys share
  // one agent. TLS parts collapse to placeholders for plain-HTTP targets.
  const key = [
    `https:${isHttps.toString()}`,
    `local-address:${opts.localAddress ?? '>no-local-address<'}`,
    `max-sockets:${(opts.maxSockets ?? DEFAULT_MAX_SOCKETS).toString()}`,
    `strict-ssl:${isHttps ? Boolean(opts.strictSsl).toString() : '>no-strict-ssl<'}`,
    `ca:${(isHttps && ca?.toString()) || '-'}`,
    `cert:${(isHttps && cert?.toString()) || '-'}`,
    `key:${(isHttps && certKey?.toString()) || '-'}`,
  ].join(':')
  if (DISPATCHER_CACHE.has(key)) {
    return DISPATCHER_CACHE.get(key)!
  }
  // Give the TCP/TLS connect slightly more time than the request-level
  // timeout so the request timeout fires first; no timeout maps to 0.
  // NOTE(review): confirm that connectTimeout: 0 disables the timeout in
  // undici rather than falling back to its default.
  const connectTimeout = typeof opts.timeout !== 'number' || opts.timeout === 0
    ? 0
    : opts.timeout + 1
  const agent = new Agent({
    connections: opts.maxSockets ?? DEFAULT_MAX_SOCKETS,
    connectTimeout,
    keepAliveTimeout: 4000,
    keepAliveMaxTimeout: 15000,
    connect: isHttps
      ? {
        ca,
        cert,
        key: certKey,
        rejectUnauthorized: opts.strictSsl ?? true,
        localAddress: opts.localAddress,
      }
      : {
        localAddress: opts.localAddress,
      },
  })
  DISPATCHER_CACHE.set(key, agent)
  return agent
}
/**
 * Decide whether the proxy should be bypassed for the given URL.
 *
 * `noProxy` may be `true` (bypass everything) or a comma-separated list of
 * domain suffixes: an entry matches when its dot-separated labels equal the
 * trailing labels of the target hostname (so `npmjs.org` also matches
 * `registry.npmjs.org`, but not `npmjs.org.evil.com`).
 */
function checkNoProxy (parsedUri: URL, opts: { noProxy?: boolean | string }): boolean {
  if (typeof opts.noProxy !== 'string') {
    return opts.noProxy === true
  }
  // Compare labels from the TLD inward so entries act as domain suffixes.
  const hostLabels = parsedUri.hostname.split('.').filter(Boolean).reverse()
  const entries = opts.noProxy.split(',').map((entry) => entry.trim())
  return entries.some((entry) => {
    const entryLabels = entry.split('.').filter(Boolean).reverse()
    if (entryLabels.length === 0) {
      return false
    }
    return entryLabels.every((label, index) => hostLabels[index] === label)
  })
}
/**
 * Pick SSL/TLS configuration by URL using nerf-dart matching.
 * This matches the behavior of @pnpm/network.config's pickSettingByUrl.
 *
 * Match order: exact URI → the URI's nerf-dart key → the URI without its
 * port → progressively shorter nerf-dart path prefixes → finally retry the
 * whole procedure on the port-less URI.
 */
function pickSettingByUrl<T> (
  settings: Record<string, T> | undefined,
  uri: string
): T | undefined {
  if (!settings) return undefined
  // Try exact match first
  if (settings[uri]) return settings[uri]
  // Use nerf-dart format for matching (e.g., //registry.npmjs.org/)
  const nerf = nerfDart(uri)
  if (settings[nerf]) return settings[nerf]
  // Try without port
  const parsedUrl = new URL(uri)
  const withoutPort = removePort(parsedUrl)
  if (settings[withoutPort]) return settings[withoutPort]
  // Try progressively shorter nerf-dart paths
  // maxParts bounds the loop so we never probe deeper than the most specific
  // key actually present in `settings`.
  const maxParts = Object.keys(settings).reduce((max, key) => {
    const parts = key.split('/').length
    return parts > max ? parts : max
  }, 0)
  const parts = nerf.split('/')
  // Lower bound 3 keeps at least `//host/` (['', '', host] + trailing slash).
  for (let i = Math.min(parts.length, maxParts) - 1; i >= 3; i--) {
    const key = `${parts.slice(0, i).join('/')}/`
    if (settings[key]) {
      return settings[key]
    }
  }
  // If the URL had a port, try again without it
  // (removePort returns a different string only when a port was present or
  // normalization changed the href, so this recursion terminates).
  if (withoutPort !== uri) {
    return pickSettingByUrl(settings, withoutPort)
  }
  return undefined
}
/**
 * Return the URL's href with any explicit port stripped.
 *
 * When a port was present, the result is normalized to end with a slash so it
 * stays usable as a nerf-dart-style settings key; URLs without an explicit
 * port are returned unchanged.
 */
function removePort (parsedUrl: URL): string {
  if (parsedUrl.port === '') return parsedUrl.href
  const portless = new URL(parsedUrl.href)
  portless.port = ''
  const href = portless.toString()
  return href.endsWith('/') ? href : `${href}/`
}

View File

@@ -1,13 +1,8 @@
import assert from 'node:assert'
import util from 'node:util'
import { requestRetryLogger } from '@pnpm/core-loggers'
import { operation, type RetryTimeoutOptions } from '@zkochan/retry'
import nodeFetch, { type Request, type RequestInit as NodeRequestInit, Response } from 'node-fetch'
import { type Dispatcher, fetch as undiciFetch } from 'undici'
export { isRedirect } from 'node-fetch'
export { Response, type RetryTimeoutOptions }
export { type RetryTimeoutOptions }
interface URLLike {
href: string
@@ -18,11 +13,18 @@ const NO_RETRY_ERROR_CODES = new Set([
'ERR_OSSL_PEM_NO_START_LINE',
])
export type RequestInfo = string | URLLike | Request
const REDIRECT_CODES = new Set([301, 302, 303, 307, 308])
export interface RequestInit extends NodeRequestInit {
export function isRedirect (statusCode: number): boolean {
return REDIRECT_CODES.has(statusCode)
}
export type RequestInfo = string | URLLike | URL
export interface RequestInit extends globalThis.RequestInit {
retry?: RetryTimeoutOptions
timeout?: number
dispatcher?: Dispatcher
}
export async function fetch (url: RequestInfo, opts: RequestInit = {}): Promise<Response> {
@@ -40,9 +42,13 @@ export async function fetch (url: RequestInfo, opts: RequestInit = {}): Promise<
try {
return await new Promise((resolve, reject) => {
op.attempt(async (attempt) => {
const urlString = typeof url === 'string' ? url : url.href ?? url.toString()
const { retry: _retry, timeout, dispatcher, ...fetchOpts } = opts
const signal = timeout ? AbortSignal.timeout(timeout) : undefined
try {
// this will be retried
const res = await nodeFetch(url as any, opts) // eslint-disable-line
// undici's Response type differs slightly from globalThis.Response (iterator types),
// requiring the double cast. This is a known TypeScript/undici compatibility issue.
const res = await undiciFetch(urlString, { ...fetchOpts, signal, dispatcher } as Parameters<typeof undiciFetch>[1]) as unknown as Response
// A retry on 409 sometimes helps when making requests to the Bit registry.
if ((res.status >= 500 && res.status < 600) || [408, 409, 420, 429].includes(res.status)) {
throw new ResponseError(res)
@@ -50,26 +56,44 @@ export async function fetch (url: RequestInfo, opts: RequestInit = {}): Promise<
resolve(res)
}
} catch (error: unknown) {
assert(util.types.isNativeError(error))
// Undici errors may not pass isNativeError check, so we handle them more carefully
const err = error as Error & { code?: string, cause?: { code?: string } }
// Check error code in both error.code and error.cause.code (undici wraps errors)
const errorCode = err?.code ?? err?.cause?.code
if (
'code' in error &&
typeof error.code === 'string' &&
NO_RETRY_ERROR_CODES.has(error.code)
typeof errorCode === 'string' &&
NO_RETRY_ERROR_CODES.has(errorCode)
) {
throw error
}
const timeout = op.retry(error)
if (timeout === false) {
const retryTimeout = op.retry(err)
if (retryTimeout === false) {
reject(op.mainError())
return
}
// Extract error properties into a plain object because Error properties
// are non-enumerable and don't serialize well through the logging system
const errorInfo = {
name: err.name,
message: err.message,
code: err.code,
errno: (err as Error & { errno?: number }).errno,
// For HTTP errors from ResponseError class
status: (err as Error & { status?: number }).status,
statusCode: (err as Error & { statusCode?: number }).statusCode,
// undici wraps the actual network error in a cause property
cause: err.cause ? {
code: err.cause.code,
errno: (err.cause as { errno?: number }).errno,
} : undefined,
}
requestRetryLogger.debug({
attempt,
error,
error: errorInfo,
maxRetries,
method: opts.method ?? 'GET',
timeout,
url: url.toString(),
timeout: retryTimeout,
url: urlString,
})
}
})

View File

@@ -1,10 +1,10 @@
import { URL } from 'node:url'
import type { FetchFromRegistry } from '@pnpm/fetching.types'
import { type AgentOptions, getAgent } from '@pnpm/network.agent'
import type { SslConfig } from '@pnpm/types'
import { fetch, isRedirect, type RequestInfo, type RequestInit, type Response } from './fetch.js'
import { type DispatcherOptions, getDispatcher } from './dispatcher.js'
import { fetch, isRedirect, type RequestInit } from './fetch.js'
const USER_AGENT = 'pnpm' // or maybe make it `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
@@ -16,34 +16,31 @@ const ACCEPT_ABBREVIATED_DOC = `${ABBREVIATED_DOC}; q=1.0, ${FULL_DOC}; q=0.8, *
const MAX_FOLLOWED_REDIRECTS = 20
export interface FetchWithAgentOptions extends RequestInit {
agentOptions: AgentOptions
export interface FetchWithDispatcherOptions extends RequestInit {
dispatcherOptions: DispatcherOptions
}
export function fetchWithAgent (url: RequestInfo, opts: FetchWithAgentOptions): Promise<Response> {
const agent = getAgent(url.toString(), {
...opts.agentOptions,
strictSsl: opts.agentOptions.strictSsl ?? true,
} as any) as any // eslint-disable-line
const headers = opts.headers ?? {}
// @ts-expect-error
headers['connection'] = agent ? 'keep-alive' : 'close'
export function fetchWithDispatcher (url: string | URL, opts: FetchWithDispatcherOptions): Promise<Response> {
const dispatcher = getDispatcher(url.toString(), {
...opts.dispatcherOptions,
strictSsl: opts.dispatcherOptions.strictSsl ?? true,
})
return fetch(url, {
...opts,
agent,
dispatcher,
})
}
export type { AgentOptions }
export type { DispatcherOptions }
export interface CreateFetchFromRegistryOptions extends AgentOptions {
export interface CreateFetchFromRegistryOptions extends DispatcherOptions {
userAgent?: string
sslConfigs?: Record<string, SslConfig>
}
export function createFetchFromRegistry (defaultOpts: CreateFetchFromRegistryOptions): FetchFromRegistry {
return async (url, opts): Promise<Response> => {
const headers = {
const headers: Record<string, string> = {
'user-agent': USER_AGENT,
...getHeaders({
auth: opts?.authHeaderValue,
@@ -57,22 +54,16 @@ export function createFetchFromRegistry (defaultOpts: CreateFetchFromRegistryOpt
const originalHost = urlObject.host
/* eslint-disable no-await-in-loop */
while (true) {
const agentOptions = {
const dispatcherOptions: DispatcherOptions = {
...defaultOpts,
...opts,
strictSsl: defaultOpts.strictSsl ?? true,
} as any // eslint-disable-line
clientCertificates: defaultOpts.sslConfigs,
}
// We should pass a URL object to node-fetch till this is not resolved:
// https://github.com/bitinn/node-fetch/issues/245
const response = await fetchWithAgent(urlObject, {
agentOptions: {
...agentOptions,
clientCertificates: defaultOpts.sslConfigs,
},
// if verifying integrity, node-fetch must not decompress
compress: opts?.compress ?? false,
method: opts?.method,
const response = await fetchWithDispatcher(urlObject, {
dispatcherOptions,
// if verifying integrity, native fetch must not decompress
headers,
redirect: 'manual',
retry: opts?.retry,

View File

@@ -1,3 +1,4 @@
export { fetch, type RetryTimeoutOptions } from './fetch.js'
export { type AgentOptions, createFetchFromRegistry, type CreateFetchFromRegistryOptions, fetchWithAgent } from './fetchFromRegistry.js'
export { clearDispatcherCache, getDispatcher } from './dispatcher.js'
export { fetch, isRedirect, type RetryTimeoutOptions } from './fetch.js'
export { createFetchFromRegistry, type CreateFetchFromRegistryOptions, type DispatcherOptions, fetchWithDispatcher } from './fetchFromRegistry.js'
export type { FetchFromRegistry } from '@pnpm/fetching.types'

View File

@@ -0,0 +1,271 @@
/// <reference path="../../../__typings__/index.d.ts"/>
import net from 'node:net'
import { clearDispatcherCache, type DispatcherOptions, getDispatcher } from '@pnpm/network.fetch'
import { Agent, ProxyAgent } from 'undici'
// Each test may populate the module-level dispatcher cache; reset it so a
// cached dispatcher from one test cannot leak into the next.
afterEach(() => {
  clearDispatcherCache()
})
// Dispatcher selection: which options trigger a custom dispatcher, and how
// dispatchers are cached/shared by configuration.
describe('getDispatcher', () => {
  test('returns undefined when no special options are set', () => {
    expect(getDispatcher('https://registry.npmjs.org/foo', {})).toBeUndefined()
  })
  test('returns a dispatcher when strictSsl is false', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', { strictSsl: false })
    expect(dispatcher).toBeDefined()
    expect(dispatcher).toBeInstanceOf(Agent)
  })
  test('returns a dispatcher when ca is set', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', { ca: 'test-ca' })
    expect(dispatcher).toBeDefined()
    expect(dispatcher).toBeInstanceOf(Agent)
  })
  test('returns a dispatcher when maxSockets is set', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', { maxSockets: 10 })
    expect(dispatcher).toBeDefined()
  })
  test('returns a dispatcher when localAddress is set', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', { localAddress: '127.0.0.1' })
    expect(dispatcher).toBeDefined()
  })
  test('caches dispatchers by configuration', () => {
    const opts: DispatcherOptions = { strictSsl: false }
    const d1 = getDispatcher('https://registry.npmjs.org/foo', opts)
    const d2 = getDispatcher('https://registry.npmjs.org/bar', opts)
    expect(d1).toBe(d2) // same config → same cached dispatcher
  })
  test('different maxSockets produce different dispatchers', () => {
    // maxSockets is part of the cache key, so the configs must not share.
    const d1 = getDispatcher('https://registry.npmjs.org/foo', { maxSockets: 10 })
    const d2 = getDispatcher('https://registry.npmjs.org/foo', { maxSockets: 20 })
    expect(d1).not.toBe(d2)
  })
  test('clearDispatcherCache clears cached dispatchers', () => {
    const opts: DispatcherOptions = { strictSsl: false }
    const d1 = getDispatcher('https://registry.npmjs.org/foo', opts)
    clearDispatcherCache()
    const d2 = getDispatcher('https://registry.npmjs.org/foo', opts)
    expect(d1).not.toBe(d2)
  })
})
// HTTP(S) proxy handling: ProxyAgent selection, protocol defaulting, and
// proxy URL validation.
describe('HTTP proxy', () => {
  test('returns ProxyAgent for httpProxy with http target', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
    })
    expect(dispatcher).toBeInstanceOf(ProxyAgent)
  })
  test('returns ProxyAgent for httpsProxy with https target', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', {
      httpsProxy: 'https://proxy.example.com:8080',
    })
    expect(dispatcher).toBeInstanceOf(ProxyAgent)
  })
  test('adds protocol prefix when proxy URL has none', () => {
    // Should not throw — the proxy URL should get protocol prepended
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'proxy.example.com:8080',
    })
    expect(dispatcher).toBeInstanceOf(ProxyAgent)
  })
  test('throws PnpmError for invalid proxy URL', () => {
    expect(() => {
      getDispatcher('http://registry.npmjs.org/foo', {
        httpProxy: 'http://[invalid',
      })
    }).toThrow(/Couldn't parse proxy URL/)
  })
  test('proxy with authentication credentials', () => {
    // URL-encoded credentials ("user!" / "pass@") must be accepted.
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://user%21:pass%40@proxy.example.com:8080',
    })
    expect(dispatcher).toBeInstanceOf(ProxyAgent)
  })
})
// SOCKS proxy handling. SOCKS dispatchers are plain undici Agents with a
// custom connect function (not ProxyAgent). The last test exercises a real
// end-to-end request through a hand-rolled in-process SOCKS5 server.
describe('SOCKS proxy', () => {
  test('returns Agent (not ProxyAgent) for socks5 proxy', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'socks5://proxy.example.com:1080',
    })
    expect(dispatcher).toBeDefined()
    // SOCKS dispatcher is an Agent with custom connect, not a ProxyAgent
    expect(dispatcher).toBeInstanceOf(Agent)
    expect(dispatcher).not.toBeInstanceOf(ProxyAgent)
  })
  test('returns Agent for socks4 proxy', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'socks4://proxy.example.com:1080',
    })
    expect(dispatcher).toBeDefined()
    expect(dispatcher).toBeInstanceOf(Agent)
  })
  test('returns Agent for socks proxy with https target', () => {
    const dispatcher = getDispatcher('https://registry.npmjs.org/foo', {
      httpsProxy: 'socks5://proxy.example.com:1080',
    })
    expect(dispatcher).toBeDefined()
    expect(dispatcher).toBeInstanceOf(Agent)
  })
  test('SOCKS proxy dispatchers are cached', () => {
    const opts: DispatcherOptions = { httpProxy: 'socks5://proxy.example.com:1080' }
    const d1 = getDispatcher('http://registry.npmjs.org/foo', opts)
    const d2 = getDispatcher('http://registry.npmjs.org/bar', opts)
    expect(d1).toBe(d2)
  })
  test('SOCKS proxy can connect through a real SOCKS5 server', async () => {
    // Create a minimal SOCKS5 server that accepts connections
    // Target: a bare TCP server that answers any data with a fixed HTTP response.
    const targetServer = net.createServer((socket) => {
      socket.on('data', () => {
        socket.write('HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok')
        socket.end()
      })
    })
    const socksServer = net.createServer((socket) => {
      // SOCKS5 handshake
      socket.once('data', (data) => {
        // Client greeting: version, method count, methods
        if (data[0] === 0x05) {
          // Reply: version 5, no auth required
          socket.write(Buffer.from([0x05, 0x00]))
          socket.once('data', (connectData) => {
            // Connect request: version, cmd=connect, reserved, address type, addr, port
            // The destination port is the final 2 bytes regardless of address type.
            const port = connectData.readUInt16BE(connectData.length - 2)
            // Reply: success
            socket.write(Buffer.from([0x05, 0x00, 0x00, 0x01, 127, 0, 0, 1, (port >> 8) & 0xff, port & 0xff]))
            // Tunnel the connection to the target
            const target = net.connect(port, '127.0.0.1', () => {
              socket.pipe(target)
              target.pipe(socket)
            })
            target.on('error', () => socket.destroy())
          })
        }
      })
    })
    // Listen on port 0 so the OS assigns free ports for both servers.
    await new Promise<void>((resolve) => targetServer.listen(0, resolve))
    await new Promise<void>((resolve) => socksServer.listen(0, resolve))
    const targetPort = (targetServer.address() as net.AddressInfo).port
    const socksPort = (socksServer.address() as net.AddressInfo).port
    try {
      const dispatcher = getDispatcher(`http://127.0.0.1:${targetPort}/test`, {
        httpProxy: `socks5://127.0.0.1:${socksPort}`,
      })
      expect(dispatcher).toBeDefined()
      const { fetch: undiciFetch } = await import('undici')
      const res = await undiciFetch(`http://127.0.0.1:${targetPort}/test`, {
        dispatcher,
      })
      expect(res.status).toBe(200)
      expect(await res.text()).toBe('ok')
    } finally {
      targetServer.close()
      socksServer.close()
    }
  })
})
// noProxy handling: when the proxy is bypassed, getDispatcher still returns a
// custom direct Agent (not undefined), because a proxy option is set.
describe('noProxy', () => {
  test('bypasses proxy when noProxy matches hostname', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
      noProxy: 'registry.npmjs.org',
    })
    // Should return an Agent (direct), not ProxyAgent
    expect(dispatcher).toBeInstanceOf(Agent)
    expect(dispatcher).not.toBeInstanceOf(ProxyAgent)
  })
  test('bypasses proxy when noProxy matches domain suffix', () => {
    const dispatcher = getDispatcher('http://sub.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
      noProxy: 'npmjs.org',
    })
    expect(dispatcher).toBeInstanceOf(Agent)
    expect(dispatcher).not.toBeInstanceOf(ProxyAgent)
  })
  test('does not bypass proxy when noProxy does not match', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
      noProxy: 'other.org',
    })
    expect(dispatcher).toBeInstanceOf(ProxyAgent)
  })
  test('bypasses proxy when noProxy is true', () => {
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
      noProxy: true,
    })
    expect(dispatcher).toBeInstanceOf(Agent)
    expect(dispatcher).not.toBeInstanceOf(ProxyAgent)
  })
  test('handles comma-separated noProxy list', () => {
    // Entries may carry surrounding whitespace; any single match bypasses.
    const dispatcher = getDispatcher('http://registry.npmjs.org/foo', {
      httpProxy: 'http://proxy.example.com:8080',
      noProxy: 'other.org, npmjs.org, example.com',
    })
    expect(dispatcher).toBeInstanceOf(Agent)
    expect(dispatcher).not.toBeInstanceOf(ProxyAgent)
  })
})
// Per-registry client certificates, keyed by nerf-dart URLs. Different cert
// material must yield distinct dispatchers (certs are part of the cache key).
describe('client certificates', () => {
  test('picks client certificate by nerf-dart URL match', () => {
    const d1 = getDispatcher('https://registry.example.com/foo', {
      clientCertificates: {
        '//registry.example.com/': {
          ca: 'test-ca',
          cert: 'test-cert',
          key: 'test-key',
        },
      },
    })
    // Should return a dispatcher (because clientCertificates is set)
    expect(d1).toBeDefined()
  })
  test('different registries get different dispatchers with different certs', () => {
    const opts: DispatcherOptions = {
      clientCertificates: {
        '//registry.example.com/': {
          ca: 'ca-1',
          cert: 'cert-1',
          key: 'key-1',
        },
        '//other.example.com/': {
          ca: 'ca-2',
          cert: 'cert-2',
          key: 'key-2',
        },
      },
    }
    const d1 = getDispatcher('https://registry.example.com/foo', opts)
    const d2 = getDispatcher('https://other.example.com/foo', opts)
    expect(d1).not.toBe(d2) // different certs → different dispatchers
  })
})

View File

@@ -2,17 +2,46 @@
import fs from 'node:fs'
import path from 'node:path'
import { createFetchFromRegistry } from '@pnpm/network.fetch'
import { clearDispatcherCache, createFetchFromRegistry } from '@pnpm/network.fetch'
import { ProxyServer } from 'https-proxy-server-express'
import nock from 'nock'
import { type Dispatcher, getGlobalDispatcher, MockAgent, setGlobalDispatcher } from 'undici'
// The global undici dispatcher in effect before setupMockAgent() ran, saved
// so teardownMockAgent() can restore it.
let originalDispatcher: Dispatcher | null = null
// The currently installed MockAgent, or null when no mock is active.
let currentMockAgent: MockAgent | null = null

// Install a MockAgent as undici's global dispatcher and disable real network
// access. Also clears the dispatcher cache so previously cached (real)
// dispatchers cannot bypass the mock.
function setupMockAgent (): MockAgent {
  if (!originalDispatcher) {
    originalDispatcher = getGlobalDispatcher()
  }
  clearDispatcherCache()
  currentMockAgent = new MockAgent()
  currentMockAgent.disableNetConnect()
  setGlobalDispatcher(currentMockAgent)
  return currentMockAgent
}

// Close the active MockAgent (if any) and restore the original global
// dispatcher. Safe to call even when setupMockAgent() was never called.
async function teardownMockAgent (): Promise<void> {
  if (currentMockAgent) {
    await currentMockAgent.close()
    currentMockAgent = null
  }
  if (originalDispatcher) {
    setGlobalDispatcher(originalDispatcher)
    originalDispatcher = null
  }
}

// Accessor for the active MockAgent; throws when used outside a
// setupMockAgent()/teardownMockAgent() pair.
function getMockAgent (): MockAgent {
  if (!currentMockAgent) {
    throw new Error('MockAgent not initialized. Call setupMockAgent() first.')
  }
  return currentMockAgent
}
const CERTS_DIR = path.join(import.meta.dirname, '__certs__')
afterEach(() => {
nock.cleanAll()
})
test('fetchFromRegistry', async () => {
// This test uses real network - no mock needed
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry('https://registry.npmjs.org/is-positive')
const metadata = await res.json() as any // eslint-disable-line
@@ -21,6 +50,7 @@ test('fetchFromRegistry', async () => {
})
test('fetchFromRegistry fullMetadata', async () => {
// This test uses real network - no mock needed
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry('https://registry.npmjs.org/is-positive', { fullMetadata: true })
const metadata = await res.json() as any // eslint-disable-line
@@ -29,48 +59,63 @@ test('fetchFromRegistry fullMetadata', async () => {
})
test('authorization headers are removed before redirection if the target is on a different host', async () => {
nock('http://registry.pnpm.io/', {
reqheaders: { authorization: 'Bearer 123' },
})
.get('/is-positive')
.reply(302, '', { location: 'http://registry.other.org/is-positive' })
nock('http://registry.other.org/', { badheaders: ['authorization'] })
.get('/is-positive')
.reply(200, { ok: true })
setupMockAgent()
try {
const mockPool1 = getMockAgent().get('http://registry.pnpm.io')
mockPool1.intercept({
path: '/is-positive',
method: 'GET',
headers: { authorization: 'Bearer 123' },
}).reply(302, '', { headers: { location: 'http://registry.other.org/is-positive' } })
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/is-positive',
{ authHeaderValue: 'Bearer 123' }
)
const mockPool2 = getMockAgent().get('http://registry.other.org')
mockPool2.intercept({
path: '/is-positive',
method: 'GET',
}).reply(200, { ok: true }, { headers: { 'content-type': 'application/json' } })
expect(await res.json()).toStrictEqual({ ok: true })
expect(nock.isDone()).toBeTruthy()
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/is-positive',
{ authHeaderValue: 'Bearer 123' }
)
expect(await res.json()).toStrictEqual({ ok: true })
} finally {
await teardownMockAgent()
}
})
test('authorization headers are not removed before redirection if the target is on the same host', async () => {
nock('http://registry.pnpm.io/', {
reqheaders: { authorization: 'Bearer 123' },
})
.get('/is-positive')
.reply(302, '', { location: 'http://registry.pnpm.io/is-positive-new' })
nock('http://registry.pnpm.io/', {
reqheaders: { authorization: 'Bearer 123' },
})
.get('/is-positive-new')
.reply(200, { ok: true })
setupMockAgent()
try {
const mockPool = getMockAgent().get('http://registry.pnpm.io')
mockPool.intercept({
path: '/is-positive',
method: 'GET',
headers: { authorization: 'Bearer 123' },
}).reply(302, '', { headers: { location: 'http://registry.pnpm.io/is-positive-new' } })
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/is-positive',
{ authHeaderValue: 'Bearer 123' }
)
mockPool.intercept({
path: '/is-positive-new',
method: 'GET',
headers: { authorization: 'Bearer 123' },
}).reply(200, { ok: true }, { headers: { 'content-type': 'application/json' } })
expect(await res.json()).toStrictEqual({ ok: true })
expect(nock.isDone()).toBeTruthy()
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/is-positive',
{ authHeaderValue: 'Bearer 123' }
)
expect(await res.json()).toStrictEqual({ ok: true })
} finally {
await teardownMockAgent()
}
})
test('switch to the correct agent for requests on redirect from http: to https:', async () => {
// This test uses real network - no mock needed
const fetchFromRegistry = createFetchFromRegistry({})
// We can test this on any endpoint that redirects from http: to https:
@@ -129,7 +174,7 @@ test('fail if the client certificate is not provided', async () => {
strictSsl: false,
})
let err!: Error & { code: string }
let err!: Error & { code?: string, cause?: { code?: string } }
try {
await fetchFromRegistry(`https://localhost:${randomPort}/is-positive`, {
retry: {
@@ -141,72 +186,106 @@ test('fail if the client certificate is not provided', async () => {
} finally {
await proxyServer.stop()
}
expect(err?.code).toMatch(/ECONNRESET|ERR_SSL_TLSV13_ALERT_CERTIFICATE_REQUIRED/)
// undici errors may have the code in err.cause.code
const errorCode = err?.code ?? err?.cause?.code
expect(errorCode).toMatch(/ECONNRESET|ERR_SSL_TLSV13_ALERT_CERTIFICATE_REQUIRED|UNABLE_TO_VERIFY_LEAF_SIGNATURE|UND_ERR_SOCKET/)
})
test('redirect to protocol-relative URL', async () => {
nock('http://registry.pnpm.io/')
.get('/foo')
.reply(302, '', { location: '//registry.other.org/foo' })
nock('http://registry.other.org/')
.get('/foo')
.reply(200, { ok: true })
setupMockAgent()
try {
const mockPool1 = getMockAgent().get('http://registry.pnpm.io')
mockPool1.intercept({
path: '/foo',
method: 'GET',
}).reply(302, '', { headers: { location: '//registry.other.org/foo' } })
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/foo'
)
const mockPool2 = getMockAgent().get('http://registry.other.org')
mockPool2.intercept({
path: '/foo',
method: 'GET',
}).reply(200, { ok: true }, { headers: { 'content-type': 'application/json' } })
expect(await res.json()).toStrictEqual({ ok: true })
expect(nock.isDone()).toBeTruthy()
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/foo'
)
expect(await res.json()).toStrictEqual({ ok: true })
} finally {
await teardownMockAgent()
}
})
test('redirect to relative URL', async () => {
nock('http://registry.pnpm.io/')
.get('/bar/baz')
.reply(302, '', { location: '../foo' })
nock('http://registry.pnpm.io/')
.get('/foo')
.reply(200, { ok: true })
setupMockAgent()
try {
const mockPool = getMockAgent().get('http://registry.pnpm.io')
mockPool.intercept({
path: '/bar/baz',
method: 'GET',
}).reply(302, '', { headers: { location: '../foo' } })
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/bar/baz'
)
mockPool.intercept({
path: '/foo',
method: 'GET',
}).reply(200, { ok: true }, { headers: { 'content-type': 'application/json' } })
expect(await res.json()).toStrictEqual({ ok: true })
expect(nock.isDone()).toBeTruthy()
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'http://registry.pnpm.io/bar/baz'
)
expect(await res.json()).toStrictEqual({ ok: true })
} finally {
await teardownMockAgent()
}
})
test('redirect to relative URL when request pkg.pr.new link', async () => {
nock('https://pkg.pr.new/')
.get('/vue@14175')
.reply(302, '', { location: '/vuejs/core/vue@14182' })
setupMockAgent()
try {
const mockPool = getMockAgent().get('https://pkg.pr.new')
mockPool.intercept({
path: '/vue@14175',
method: 'GET',
}).reply(302, '', { headers: { location: '/vuejs/core/vue@14182' } })
nock('https://pkg.pr.new/')
.get('/vuejs/core/vue@14182')
.reply(302, '', { location: '/vuejs/core/vue@82a13bb6faaa9f77a06b57e69e0934b9f620f333' })
mockPool.intercept({
path: '/vuejs/core/vue@14182',
method: 'GET',
}).reply(302, '', { headers: { location: '/vuejs/core/vue@82a13bb6faaa9f77a06b57e69e0934b9f620f333' } })
nock('https://pkg.pr.new/')
.get('/vuejs/core/vue@82a13bb6faaa9f77a06b57e69e0934b9f620f333')
.reply(200, { ok: true })
mockPool.intercept({
path: '/vuejs/core/vue@82a13bb6faaa9f77a06b57e69e0934b9f620f333',
method: 'GET',
}).reply(200, { ok: true }, { headers: { 'content-type': 'application/json' } })
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'https://pkg.pr.new/vue@14175'
)
const fetchFromRegistry = createFetchFromRegistry({})
const res = await fetchFromRegistry(
'https://pkg.pr.new/vue@14175'
)
expect(await res.json()).toStrictEqual({ ok: true })
expect(nock.isDone()).toBeTruthy()
expect(await res.json()).toStrictEqual({ ok: true })
} finally {
await teardownMockAgent()
}
})
test('redirect without location header throws error', async () => {
nock('http://registry.pnpm.io/')
.get('/missing-location')
.reply(302, 'found')
setupMockAgent()
try {
const mockPool = getMockAgent().get('http://registry.pnpm.io')
mockPool.intercept({
path: '/missing-location',
method: 'GET',
}).reply(302, 'found')
const fetchFromRegistry = createFetchFromRegistry({})
await expect(fetchFromRegistry(
'http://registry.pnpm.io/missing-location'
)).rejects.toThrow(/Redirect location header missing/)
const fetchFromRegistry = createFetchFromRegistry({})
await expect(fetchFromRegistry(
'http://registry.pnpm.io/missing-location'
)).rejects.toThrow(/Redirect location header missing/)
} finally {
await teardownMockAgent()
}
})

View File

@@ -12,6 +12,9 @@
{
"path": "../../core/core-loggers"
},
{
"path": "../../core/error"
},
{
"path": "../../core/logger"
},

4669
pnpm-lock.yaml generated
View File

File diff suppressed because it is too large Load Diff

View File

@@ -86,7 +86,6 @@ catalog:
'@pnpm/log.group': 3.0.2
'@pnpm/logger': '>=1001.0.0 <1002.0.0'
'@pnpm/meta-updater': 2.0.6
'@pnpm/network.agent': ^2.0.3
'@pnpm/nopt': ^0.3.1
'@pnpm/npm-conf': 3.0.2
'@pnpm/npm-lifecycle': 1100.0.0-1
@@ -234,11 +233,8 @@ catalog:
# msgpackr 1.11.9 has broken type definitions (uses Iterable/Iterator without
# required type arguments), incompatible with TypeScript 5.9.
msgpackr: 1.11.8
nm-prune: ^5.0.0
# nock 14 doesn't properly intercept node-fetch requests, causing tests
# that mock HTTP endpoints (e.g. audit) to hang indefinitely.
nock: 13.3.4
node-fetch: ^3.3.2
nm-prune: ^5.0.0
normalize-newline: 5.0.0
normalize-package-data: ^8.0.0
normalize-path: ^3.0.0
@@ -260,7 +256,7 @@ catalog:
path-temp: ^3.0.0
pidtree: ^0.6.0
preferred-pm: ^5.0.0
pretty-bytes: ^7.0.1
pretty-bytes: ^7.1.0
pretty-ms: ^9.2.0
promise-share: ^2.0.0
proxyquire: ^2.1.3
@@ -288,6 +284,7 @@ catalog:
semver-utils: ^1.1.4
shlex: ^3.0.0
shx: ^0.4.0
socks: ^2.8.1
sort-keys: ^6.0.0
split-cmd: ^1.1.0
split2: ^4.2.0
@@ -310,6 +307,7 @@ catalog:
ts-jest-resolver: 2.0.1
typescript: 5.9.3
typescript-eslint: ^8.57.1
undici: ^7.2.0
unified: ^11.0.5
validate-npm-package-name: 7.0.2
verdaccio: 6.3.2
@@ -423,6 +421,7 @@ overrides:
jws@<3.2.3: '^3.2.3'
lodash@>=4.0.0 <=4.17.22: '^4.17.23'
minimatch@>=7.0.0 <7.4.7: '^7.4.7'
minimatch@>=9.0.0 <10.0.0: '>=10.2.4'
nopt@5: npm:@pnpm/nopt@^0.2.1
on-headers@<1.1.0: '>=1.1.0'
path-to-regexp@<0.1.12: ^0.1.12

View File

@@ -52,8 +52,7 @@
"@pnpm/fetching.tarball-fetcher": "workspace:*",
"@pnpm/network.fetch": "workspace:*",
"@pnpm/resolving.default-resolver": "workspace:*",
"@pnpm/store.cafs-types": "workspace:*",
"node-fetch": "catalog:"
"@pnpm/store.cafs-types": "workspace:*"
},
"engines": {
"node": ">=22.13"

View File

@@ -2,7 +2,6 @@
import { jest } from '@jest/globals'
import type { CustomResolver, WantedDependency } from '@pnpm/hooks.types'
import { createResolver } from '@pnpm/resolving.default-resolver'
import { Response } from 'node-fetch'
test('custom resolver intercepts matching packages', async () => {
const customResolver: CustomResolver = {

View File

@@ -1,6 +1,6 @@
module.exports = jest.createMockFromModule('@pnpm/network.fetch')
// default implementation
module.exports.fetchWithAgent.mockImplementation(async (_url, _opts) => {
module.exports.fetchWithDispatcher.mockImplementation(async (_url, _opts) => {
return { ok: true }
})

View File

@@ -43,7 +43,6 @@
},
"devDependencies": {
"@jest/globals": "catalog:",
"@pnpm/network.agent": "catalog:",
"@pnpm/resolving.git-resolver": "workspace:*",
"@types/hosted-git-info": "catalog:",
"@types/is-windows": "catalog:",

View File

@@ -1,5 +1,5 @@
import { PnpmError } from '@pnpm/error'
import type { AgentOptions } from '@pnpm/network.agent'
import type { DispatcherOptions } from '@pnpm/network.fetch'
import type { GitResolution, PkgResolutionId, ResolveOptions, ResolveResult, TarballResolution } from '@pnpm/resolving.resolver-base'
import { gracefulGit as git } from 'graceful-git'
import semver from 'semver'
@@ -23,7 +23,7 @@ export type GitResolver = (
) => Promise<GitResolveResult | null>
export function createGitResolver (
opts: AgentOptions
opts: DispatcherOptions
): GitResolver {
return async function resolveGit (wantedDependency, resolveOpts?): Promise<GitResolveResult | null> {
const parsedSpecFunc = parseBareSpecifier(wantedDependency.bareSpecifier, opts)

View File

@@ -1,8 +1,7 @@
// cspell:ignore sshurl
import urlLib, { URL } from 'node:url'
import type { AgentOptions } from '@pnpm/network.agent'
import { fetchWithAgent } from '@pnpm/network.fetch'
import { type DispatcherOptions, fetchWithDispatcher } from '@pnpm/network.fetch'
import { gracefulGit as git } from 'graceful-git'
import HostedGit from 'hosted-git-info'
@@ -32,7 +31,7 @@ const gitProtocols = new Set([
'ssh',
])
export function parseBareSpecifier (bareSpecifier: string, opts: AgentOptions): null | (() => Promise<HostedPackageSpec>) {
export function parseBareSpecifier (bareSpecifier: string, opts: DispatcherOptions): null | (() => Promise<HostedPackageSpec>) {
const hosted = HostedGit.fromUrl(bareSpecifier)
if (hosted != null) {
return () => fromHostedGit(hosted, opts)
@@ -68,11 +67,11 @@ function urlToFetchSpec (url: URL): string {
return fetchSpec
}
async function fromHostedGit (hosted: any, agentOptions: AgentOptions): Promise<HostedPackageSpec> { // eslint-disable-line
async function fromHostedGit (hosted: any, dispatcherOptions: DispatcherOptions): Promise<HostedPackageSpec> { // eslint-disable-line
let fetchSpec: string | null = null
// try git/https url before fallback to ssh url
const gitHttpsUrl = hosted.https({ noCommittish: true, noGitPlus: true })
if (gitHttpsUrl && await isRepoPublic(gitHttpsUrl, agentOptions) && await accessRepository(gitHttpsUrl)) {
if (gitHttpsUrl && await isRepoPublic(gitHttpsUrl, dispatcherOptions) && await accessRepository(gitHttpsUrl)) {
fetchSpec = gitHttpsUrl
} else {
const gitSshUrl = hosted.ssh({ noCommittish: true })
@@ -84,7 +83,7 @@ async function fromHostedGit (hosted: any, agentOptions: AgentOptions): Promise<
if (!fetchSpec) {
const httpsUrl: string | null = hosted.https({ noGitPlus: true, noCommittish: true })
if (httpsUrl) {
if ((hosted.auth || !await isRepoPublic(httpsUrl, agentOptions)) && await accessRepository(httpsUrl)) {
if ((hosted.auth || !await isRepoPublic(httpsUrl, dispatcherOptions)) && await accessRepository(httpsUrl)) {
return {
fetchSpec: httpsUrl,
hosted: {
@@ -103,7 +102,7 @@ async function fromHostedGit (hosted: any, agentOptions: AgentOptions): Promise<
// npm instead tries git ls-remote directly which prompts user for login credentials.
// HTTP HEAD on https://domain/user/repo, strip out ".git"
const response = await fetchWithAgent(httpsUrl.replace(/\.git$/, ''), { method: 'HEAD', follow: 0, retry: { retries: 0 }, agentOptions })
const response = await fetchWithDispatcher(httpsUrl.replace(/\.git$/, ''), { method: 'HEAD', redirect: 'manual', retry: { retries: 0 }, dispatcherOptions })
if (response.ok) {
fetchSpec = httpsUrl
}
@@ -131,9 +130,9 @@ async function fromHostedGit (hosted: any, agentOptions: AgentOptions): Promise<
}
}
async function isRepoPublic (httpsUrl: string, agentOptions: AgentOptions): Promise<boolean> {
async function isRepoPublic (httpsUrl: string, dispatcherOptions: DispatcherOptions): Promise<boolean> {
try {
const response = await fetchWithAgent(httpsUrl.replace(/\.git$/, ''), { method: 'HEAD', follow: 0, retry: { retries: 0 }, agentOptions })
const response = await fetchWithDispatcher(httpsUrl.replace(/\.git$/, ''), { method: 'HEAD', redirect: 'manual', retry: { retries: 0 }, dispatcherOptions })
return response.ok
} catch {
return false

View File

@@ -4,15 +4,15 @@ import path from 'node:path'
import { jest } from '@jest/globals'
import isWindows from 'is-windows'
const { fetchWithAgent: fetchWithAgentOriginal } = await import('@pnpm/network.fetch')
const { fetchWithDispatcher: fetchWithDispatcherOriginal } = await import('@pnpm/network.fetch')
jest.unstable_mockModule('@pnpm/network.fetch', () => ({
fetchWithAgent: jest.fn(),
fetchWithDispatcher: jest.fn(),
}))
const { gracefulGit: gitOriginal } = await import('graceful-git')
jest.unstable_mockModule('graceful-git', () => ({
gracefulGit: jest.fn(),
}))
const { fetchWithAgent } = await import('@pnpm/network.fetch')
const { fetchWithDispatcher } = await import('@pnpm/network.fetch')
const { gracefulGit: git } = await import('graceful-git')
const { createGitResolver } = await import('@pnpm/resolving.git-resolver')
@@ -20,11 +20,11 @@ const resolveFromGit = createGitResolver({})
beforeEach(() => {
jest.mocked(git).mockImplementation(gitOriginal)
jest.mocked(fetchWithAgent).mockImplementation(fetchWithAgentOriginal)
jest.mocked(fetchWithDispatcher).mockImplementation(fetchWithDispatcherOriginal)
})
function mockFetchAsPrivate (): void {
jest.mocked(fetchWithAgent).mockImplementation(async (_url, _opts) => {
jest.mocked(fetchWithDispatcher).mockImplementation(async (_url, _opts) => {
return { ok: false } as any // eslint-disable-line @typescript-eslint/no-explicit-any
})
}

View File

@@ -74,12 +74,12 @@
"@pnpm/network.fetch": "workspace:*",
"@pnpm/resolving.npm-resolver": "workspace:*",
"@pnpm/test-fixtures": "workspace:*",
"@pnpm/testing.mock-agent": "workspace:*",
"@types/normalize-path": "catalog:",
"@types/ramda": "catalog:",
"@types/semver": "catalog:",
"@types/ssri": "catalog:",
"load-json-file": "catalog:",
"nock": "catalog:",
"tempy": "catalog:"
},
"engines": {

View File

@@ -117,9 +117,17 @@ export async function fetchMetadataFromFromRegistry (
reject(op.mainError())
return
}
// Extract error properties into a plain object because Error properties
// are non-enumerable and don't serialize well through the logging system
const errorInfo = {
name: error.name,
message: error.message,
code: error.code,
errno: error.errno,
}
requestRetryLogger.debug({
attempt,
error,
error: errorInfo,
maxRetries: fetchOpts.retry.retries!,
method: 'GET',
timeout,

View File

@@ -2,9 +2,10 @@ import { createFetchFromRegistry } from '@pnpm/network.fetch'
import { createNpmResolver } from '@pnpm/resolving.npm-resolver'
import type { PackageMeta } from '@pnpm/resolving.registry.types'
import type { Registries } from '@pnpm/types'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { getMockAgent, setupMockAgent, teardownMockAgent } from './utils/index.js'
const registries: Registries = {
default: 'https://registry.npmjs.org/',
}
@@ -13,14 +14,12 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
beforeEach(() => {
nock.disableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
afterEach(() => {
// https://github.com/nock/nock?tab=readme-ov-file#resetting-netconnect
nock.cleanAll()
nock.enableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
test('metadata is fetched again after calling clearCache()', async () => {
@@ -43,8 +42,8 @@ test('metadata is fetched again after calling clearCache()', async () => {
},
}
nock(registries.default)
.get(`/${name}`)
const mockPool = getMockAgent().get('https://registry.npmjs.org')
mockPool.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()
@@ -66,19 +65,16 @@ test('metadata is fetched again after calling clearCache()', async () => {
}
meta['dist-tags'].latest = '3.1.0'
const scope = nock(registries.default)
.get(`/${name}`)
mockPool.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
// Until the cache is cleared, the resolver will still return 3.0.0.
const res2 = await resolveFromNpm({ alias: name, bareSpecifier: 'latest' }, {})
expect(res2?.id).toBe(`${name}@3.0.0`)
expect(scope.isDone()).toBe(false)
clearCache()
// After clearing cache, the resolver should start returning 3.1.0.
const res3 = await resolveFromNpm({ alias: name, bareSpecifier: 'latest' }, {})
expect(res3?.id).toBe(`${name}@3.1.0`)
expect(scope.isDone()).toBe(true)
})

View File

@@ -1,9 +1,10 @@
import { createFetchFromRegistry } from '@pnpm/network.fetch'
import { createNpmResolver } from '@pnpm/resolving.npm-resolver'
import type { Registries } from '@pnpm/types'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { getMockAgent, setupMockAgent, teardownMockAgent } from './utils/index.js'
const registries: Registries = {
default: 'https://registry.npmjs.org/',
}
@@ -12,13 +13,12 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
beforeEach(() => {
nock.enableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
test('repopulate dist-tag to highest same-major version within the date cutoff', async () => {
@@ -61,8 +61,8 @@ test('repopulate dist-tag to highest same-major version within the date cutoff',
// Cutoff before 3.2.0, so latest must be remapped to 3.1.0 (same major 3)
const cutoff = new Date('2020-04-01T00:00:00.000Z')
nock(registries.default)
.get(`/${name}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()
@@ -121,8 +121,8 @@ test('repopulate dist-tag to highest same-major version within the date cutoff.
// Cutoff before 3.2.0, so latest must be remapped to 3.1.0 (same major 3)
const cutoff = new Date('2020-04-01T00:00:00.000Z')
nock(registries.default)
.get(`/${name}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()
@@ -180,8 +180,8 @@ test('repopulate dist-tag to highest non-prerelease same-major version within th
// Cutoff before 3.2.0, so latest must be remapped to 3.1.0 (same major 3)
const cutoff = new Date('2020-04-01T00:00:00.000Z')
nock(registries.default)
.get(`/${name}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()
@@ -245,8 +245,8 @@ test('repopulate dist-tag to highest prerelease same-major version within the da
// Cutoff before 3.2.0 and 3.0.0-alpha.2, so latest must be remapped to 3.0.0-alpha.1 (the highest prerelease version within the cutoff)
const cutoff = new Date('2020-04-01T00:00:00.000Z')
nock(registries.default)
.get(`/${name}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()
@@ -281,8 +281,8 @@ test('keep dist-tag if original version is within the date cutoff', async () =>
const cutoff = new Date('2020-02-01T00:00:00.000Z')
nock(registries.default)
.get(`/${name}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${name}`, method: 'GET' })
.reply(200, meta)
const cacheDir = temporaryDirectory()

View File

@@ -14,11 +14,10 @@ import {
import { fixtures } from '@pnpm/test-fixtures'
import type { ProjectRootDir, Registries } from '@pnpm/types'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { omit } from 'ramda'
import { temporaryDirectory } from 'tempy'
import { delay, retryLoadJsonFile } from './utils/index.js'
import { delay, getMockAgent, retryLoadJsonFile, setupMockAgent, teardownMockAgent } from './utils/index.js'
const f = fixtures(import.meta.dirname)
/* eslint-disable @typescript-eslint/no-explicit-any */
@@ -40,18 +39,17 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
beforeEach(() => {
nock.enableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
test('resolveFromNpm()', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -83,8 +81,8 @@ test('resolveFromNpm()', async () => {
})
test('resolveFromNpm() strips port 80 from http tarball URLs', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
versions: {
@@ -113,8 +111,8 @@ test('resolveFromNpm() strips port 80 from http tarball URLs', async () => {
})
test('resolveFromNpm() does not save mutated meta to the cache', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -134,8 +132,8 @@ test('resolveFromNpm() does not save mutated meta to the cache', async () => {
})
test('resolveFromNpm() should save metadata to a unique file when the package name has upper case letters', async () => {
nock(registries.default)
.get('/JSON')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/JSON', method: 'GET' })
.reply(200, jsonMeta)
const cacheDir = temporaryDirectory()
@@ -172,8 +170,8 @@ test('relative workspace protocol is skipped', async () => {
})
test('dry run', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -203,8 +201,8 @@ test('dry run', async () => {
})
test('resolve to latest when no bareSpecifier specified', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -217,8 +215,8 @@ test('resolve to latest when no bareSpecifier specified', async () => {
})
test('resolve to defaultTag when no bareSpecifier specified', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -233,8 +231,8 @@ test('resolve to defaultTag when no bareSpecifier specified', async () => {
})
test('resolve to biggest non-deprecated version that satisfies the range', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMetaWithDeprecated)
const { resolveFromNpm } = createResolveFromNpm({
@@ -248,8 +246,8 @@ test('resolve to biggest non-deprecated version that satisfies the range', async
})
test('resolve to a deprecated version if there are no non-deprecated ones that satisfy the range', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMetaWithDeprecated)
const { resolveFromNpm } = createResolveFromNpm({
@@ -262,8 +260,8 @@ test('resolve to a deprecated version if there are no non-deprecated ones that s
})
test('can resolve aliased dependency', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -276,8 +274,8 @@ test('can resolve aliased dependency', async () => {
})
test('can resolve aliased dependency w/o version specifier', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -290,8 +288,8 @@ test('can resolve aliased dependency w/o version specifier', async () => {
})
test('can resolve aliased dependency w/o version specifier to default tag', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -308,8 +306,8 @@ test('can resolve aliased dependency w/o version specifier to default tag', asyn
})
test('can resolve aliased scoped dependency', async () => {
nock(registries.default)
.get('/@sindresorhus%2Fis')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/@sindresorhus%2Fis', method: 'GET' })
.reply(200, sindresorhusIsMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -322,8 +320,8 @@ test('can resolve aliased scoped dependency', async () => {
})
test('can resolve aliased scoped dependency w/o version specifier', async () => {
nock(registries.default)
.get('/@sindresorhus%2Fis')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/@sindresorhus%2Fis', method: 'GET' })
.reply(200, sindresorhusIsMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -336,8 +334,8 @@ test('can resolve aliased scoped dependency w/o version specifier', async () =>
})
test('can resolve package with version prefixed with v', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -350,8 +348,8 @@ test('can resolve package with version prefixed with v', async () => {
})
test('can resolve package version loosely', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -364,8 +362,8 @@ test('can resolve package version loosely', async () => {
})
test("resolves to latest if it's inside the wanted range. Even if there are newer versions available inside the range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.0.0' },
@@ -386,8 +384,8 @@ test("resolves to latest if it's inside the wanted range. Even if there are newe
})
test("resolves to latest if it's inside the preferred range. Even if there are newer versions available inside the preferred range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.0.0' },
@@ -412,8 +410,8 @@ test("resolves to latest if it's inside the preferred range. Even if there are n
})
test("resolve using the wanted range, when it doesn't intersect with the preferred range. Even if the preferred range contains the latest version", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '2.0.0' },
@@ -437,8 +435,8 @@ test("resolve using the wanted range, when it doesn't intersect with the preferr
})
test("use the preferred version if it's inside the wanted range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -463,8 +461,8 @@ test("use the preferred version if it's inside the wanted range", async () => {
})
test("ignore the preferred version if it's not inside the wanted range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -487,8 +485,8 @@ test("ignore the preferred version if it's not inside the wanted range", async (
})
test('use the preferred range if it intersects with the wanted range', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '1.0.0' },
@@ -513,8 +511,8 @@ test('use the preferred range if it intersects with the wanted range', async ()
})
test('use the preferred range if it intersects with the wanted range (an array of preferred versions is passed)', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '1.0.0' },
@@ -542,8 +540,8 @@ test('use the preferred range if it intersects with the wanted range (an array o
})
test("ignore the preferred range if it doesn't intersect with the wanted range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -566,8 +564,8 @@ test("ignore the preferred range if it doesn't intersect with the wanted range",
})
test("use the preferred dist-tag if it's inside the wanted range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': {
@@ -593,8 +591,8 @@ test("use the preferred dist-tag if it's inside the wanted range", async () => {
})
test("ignore the preferred dist-tag if it's not inside the wanted range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': {
@@ -620,8 +618,8 @@ test("ignore the preferred dist-tag if it's not inside the wanted range", async
})
test("prefer a version that is both inside the wanted and preferred ranges. Even if it's not the latest of any of them", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': {
@@ -646,8 +644,8 @@ test("prefer a version that is both inside the wanted and preferred ranges. Even
})
test('prefer the version that is matched by more preferred selectors', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -668,8 +666,8 @@ test('prefer the version that is matched by more preferred selectors', async ()
})
test('prefer the version that has bigger weight in preferred selectors', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -694,8 +692,8 @@ test('prefer the version that has bigger weight in preferred selectors', async (
})
test('versions without selector weights should have higher priority than negatively weighted versions', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -735,8 +733,8 @@ test('offline resolution fails when package meta not found in the store', async
})
test('offline resolution succeeds when package meta is found in the store', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -767,8 +765,8 @@ test('offline resolution succeeds when package meta is found in the store', asyn
})
test('prefer offline resolution does not fail when package meta not found in the store', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -783,8 +781,8 @@ test('prefer offline resolution does not fail when package meta not found in the
})
test('when prefer offline is used, meta from store is used, where latest might be out-of-date', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.0.0' },
@@ -803,8 +801,8 @@ test('when prefer offline is used, meta from store is used, where latest might b
await resolveFromNpm({ alias: 'is-positive', bareSpecifier: '1.0.0' }, {})
}
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -822,14 +820,13 @@ test('when prefer offline is used, meta from store is used, where latest might b
expect(resolveResult!.id).toBe('is-positive@3.0.0')
}
nock.cleanAll()
})
test('error is thrown when package is not found in the registry', async () => {
const notExistingPackage = 'foo'
nock(registries.default)
.get(`/${notExistingPackage}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${notExistingPackage}`, method: 'GET' })
.reply(404, {})
const { resolveFromNpm } = createResolveFromNpm({
@@ -845,8 +842,7 @@ test('error is thrown when package is not found in the registry', async () => {
},
{
status: 404,
// statusText: 'Not Found',
statusText: '',
statusText: 'Not Found',
},
notExistingPackage
)
@@ -857,6 +853,15 @@ test('error is thrown when registry not responding', async () => {
const notExistingPackage = 'foo'
const notExistingRegistry = 'http://not-existing.pnpm.io'
// Mock a network error for the non-existing registry
const dnsError = Object.assign(new Error('getaddrinfo ENOTFOUND not-existing.pnpm.io'), { code: 'ENOTFOUND' })
getMockAgent().get(notExistingRegistry)
.intercept({ path: `/${notExistingPackage}`, method: 'GET' })
.replyWithError(dnsError)
getMockAgent().get(notExistingRegistry)
.intercept({ path: `/${notExistingPackage}`, method: 'GET' })
.replyWithError(dnsError)
const { resolveFromNpm } = createResolveFromNpm({
storeDir: temporaryDirectory(),
cacheDir: temporaryDirectory(),
@@ -875,16 +880,13 @@ test('error is thrown when registry not responding', async () => {
expect(thrown).toBeTruthy()
expect(thrown.code).toBe('ERR_PNPM_META_FETCH_FAIL')
expect(thrown.message).toContain(`GET ${notExistingRegistry}/${notExistingPackage}:`)
expect(thrown.message).toContain('ENOTFOUND')
expect(thrown.cause).toBeTruthy()
expect(thrown.cause.code).toBe('ENOTFOUND')
})
test('extra info is shown if package has valid semver appended', async () => {
const notExistingPackage = 'foo1.0.0'
nock(registries.default)
.get(`/${notExistingPackage}`)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: `/${notExistingPackage}`, method: 'GET' })
.reply(404, {})
const { resolveFromNpm } = createResolveFromNpm({
@@ -900,8 +902,7 @@ test('extra info is shown if package has valid semver appended', async () => {
},
{
status: 404,
// statusText: 'Not Found',
statusText: '',
statusText: 'Not Found',
},
notExistingPackage
)
@@ -909,8 +910,8 @@ test('extra info is shown if package has valid semver appended', async () => {
})
test('error is thrown when there is no package found for the requested version', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -930,9 +931,9 @@ test('error is thrown when there is no package found for the requested version',
})
test('error is thrown when package needs authorization', async () => {
nock(registries.default)
.get('/needs-auth')
.reply(403)
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/needs-auth', method: 'GET' })
.reply(403, {})
const { resolveFromNpm } = createResolveFromNpm({
storeDir: temporaryDirectory(),
@@ -947,8 +948,7 @@ test('error is thrown when package needs authorization', async () => {
},
{
status: 403,
// statusText: 'Forbidden',
statusText: '',
statusText: 'Forbidden',
},
'needs-auth'
)
@@ -956,8 +956,8 @@ test('error is thrown when package needs authorization', async () => {
})
test('error is thrown when registry returns 400 Bad Request', async () => {
nock(registries.default)
.get('/bad-pkg')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/bad-pkg', method: 'GET' })
.reply(400)
const { resolveFromNpm } = createResolveFromNpm({
@@ -973,7 +973,7 @@ test('error is thrown when registry returns 400 Bad Request', async () => {
},
{
status: 400,
statusText: '',
statusText: 'Bad Request',
},
'bad-pkg'
)
@@ -981,8 +981,8 @@ test('error is thrown when registry returns 400 Bad Request', async () => {
})
test('error is thrown when there is no package found for the requested range', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -1002,8 +1002,8 @@ test('error is thrown when there is no package found for the requested range', a
})
test('error is thrown when there is no package found for the requested tag', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -1023,11 +1023,11 @@ test('error is thrown when there is no package found for the requested tag', asy
})
test('resolveFromNpm() loads full metadata even if non-full metadata is already cached in store', async () => {
nock(registries.default)
.get('/is-positive')
.reply(200, isPositiveMeta)
.get('/is-positive')
.reply(200, isPositiveMetaFull)
const mockPool = getMockAgent().get(registries.default.replace(/\/$/, ''))
// First request returns abbreviated metadata
mockPool.intercept({ path: '/is-positive', method: 'GET' }).reply(200, isPositiveMeta)
// Second request returns full metadata
mockPool.intercept({ path: '/is-positive', method: 'GET' }).reply(200, isPositiveMetaFull)
const cacheDir = temporaryDirectory()
@@ -1055,8 +1055,8 @@ test('resolveFromNpm() loads full metadata even if non-full metadata is already
})
test('resolve when tarball URL is requested from the registry', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1093,8 +1093,8 @@ test('resolve when tarball URL is requested from the registry', async () => {
})
test('resolve when tarball URL is requested from the registry and alias is not specified', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1126,8 +1126,8 @@ test('resolve when tarball URL is requested from the registry and alias is not s
})
test('resolve from local directory when it matches the latest version of the package', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1164,8 +1164,8 @@ test('resolve from local directory when it matches the latest version of the pac
})
test('resolve injected dependency from local directory when it matches the latest version of the package', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1205,8 +1205,8 @@ test('resolve injected dependency from local directory when it matches the lates
})
test('do not resolve from local directory when alwaysTryWorkspacePackages is false', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1312,8 +1312,8 @@ test('resolve from local directory when alwaysTryWorkspacePackages is false but
})
test('use version from the registry if it is newer than the local one', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -1355,8 +1355,8 @@ test('use version from the registry if it is newer than the local one', async ()
})
test('preferWorkspacePackages: use version from the workspace even if there is newer version in the registry', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -1396,8 +1396,8 @@ test('preferWorkspacePackages: use version from the workspace even if there is n
})
test('use local version if it is newer than the latest in the registry', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
...isPositiveMeta,
'dist-tags': { latest: '3.1.0' },
@@ -1439,8 +1439,8 @@ test('use local version if it is newer than the latest in the registry', async (
})
test('resolve from local directory when package is not found in the registry', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(404, {})
const cacheDir = temporaryDirectory()
@@ -1491,8 +1491,8 @@ test('resolve from local directory when package is not found in the registry', a
})
test('resolve from local directory when package is not found in the registry and latest installed', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(404, {})
const cacheDir = temporaryDirectory()
@@ -1543,8 +1543,8 @@ test('resolve from local directory when package is not found in the registry and
})
test('resolve from local directory when package is not found in the registry and local prerelease available', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(404, {})
const cacheDir = temporaryDirectory()
@@ -1581,8 +1581,8 @@ test('resolve from local directory when package is not found in the registry and
})
test('resolve from local directory when package is not found in the registry and specific version is requested', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(404, {})
const cacheDir = temporaryDirectory()
@@ -1633,8 +1633,8 @@ test('resolve from local directory when package is not found in the registry and
})
test('resolve from local directory when the requested version is not found in the registry but is available locally', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1705,8 +1705,8 @@ test('workspace protocol: resolve from local directory even when it does not mat
})
test('workspace protocol: resolve from local package that has a pre-release version', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1743,8 +1743,8 @@ test('workspace protocol: resolve from local package that has a pre-release vers
})
test("workspace protocol: don't resolve from local package that has a pre-release version that don't satisfy the range", async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1868,8 +1868,8 @@ test('throws error when package name has "/" but not starts with @scope', async
})
test('resolveFromNpm() should always return the name of the package that is specified in the root of the meta', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveBrokenMeta)
const cacheDir = temporaryDirectory()
@@ -1900,22 +1900,22 @@ test('resolveFromNpm() should always return the name of the package that is spec
})
test('request to metadata is retried if the received JSON is broken', async () => {
const registries: Registries = {
const localRegistries: Registries = {
default: 'https://registry1.com/',
}
nock(registries.default)
.get('/is-positive')
const mockPool = getMockAgent().get(localRegistries.default.replace(/\/$/, ''))
// First request returns broken JSON
mockPool.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, '{')
nock(registries.default)
.get('/is-positive')
// Second request (retry) returns valid meta
mockPool.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
const { resolveFromNpm } = createResolveFromNpm({
retry: { retries: 1 },
storeDir: temporaryDirectory(),
registries,
registries: localRegistries,
cacheDir,
})
const resolveResult = await resolveFromNpm({ alias: 'is-positive', bareSpecifier: '1.0.0' }, {})!
@@ -1924,9 +1924,9 @@ test('request to metadata is retried if the received JSON is broken', async () =
})
test('request to a package with unpublished versions', async () => {
nock(registries.default)
.get('/code-snippet')
.reply(200, loadJsonFileSync(f.find('unpublished.json')))
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/code-snippet', method: 'GET' })
.reply(200, loadJsonFileSync(f.find('unpublished.json')) as object)
const cacheDir = temporaryDirectory()
const { resolveFromNpm } = createResolveFromNpm({
@@ -1942,8 +1942,8 @@ test('request to a package with unpublished versions', async () => {
})
test('request to a package with no versions', async () => {
nock(registries.default)
.get('/code-snippet')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/code-snippet', method: 'GET' })
.reply(200, { name: 'code-snippet' })
const cacheDir = temporaryDirectory()
@@ -1961,8 +1961,8 @@ test('request to a package with no versions', async () => {
test('request to a package with no dist-tags', async () => {
const isPositiveMeta = omit(['dist-tags'], loadJsonFileSync<any>(f.find('is-positive.json'))) // eslint-disable-line
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -1987,8 +1987,8 @@ test('request to a package with no dist-tags', async () => {
})
test('resolveFromNpm() does not fail if the meta file contains no integrity information', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, brokenIntegrity)
const cacheDir = temporaryDirectory()
@@ -2012,8 +2012,8 @@ test('resolveFromNpm() does not fail if the meta file contains no integrity info
})
test('resolveFromNpm() fails if the meta file contains invalid shasum', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, brokenIntegrity)
const cacheDir = temporaryDirectory()
@@ -2028,8 +2028,8 @@ test('resolveFromNpm() fails if the meta file contains invalid shasum', async ()
})
test('resolveFromNpm() should normalize the registry', async () => {
nock('https://reg.com/owner')
.get('/is-positive')
getMockAgent().get('https://reg.com')
.intercept({ path: '/owner/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -2055,8 +2055,8 @@ test('resolveFromNpm() should normalize the registry', async () => {
})
test('pick lowest version by * when there are only prerelease versions', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, {
versions: {
'1.0.0-alpha.1': {
@@ -2089,8 +2089,8 @@ test('pick lowest version by * when there are only prerelease versions', async (
})
test('throws when workspace package version does not match and package is not found in the registry', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(404, {})
const cacheDir = temporaryDirectory()
@@ -2120,8 +2120,8 @@ test('throws when workspace package version does not match and package is not fo
})
test('throws NoMatchingVersionError when workspace package version does not match and registry has no matching version', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()
@@ -2151,8 +2151,8 @@ test('throws NoMatchingVersionError when workspace package version does not matc
})
test('resolve from registry when workspace package version does not match the requested version', async () => {
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, isPositiveMeta)
const cacheDir = temporaryDirectory()

View File

@@ -2,9 +2,10 @@
import { createFetchFromRegistry } from '@pnpm/network.fetch'
import { createNpmResolver } from '@pnpm/resolving.npm-resolver'
import type { Registries } from '@pnpm/types'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { getMockAgent, setupMockAgent, teardownMockAgent } from './utils/index.js'
const registries = {
default: 'https://registry.npmjs.org/',
} satisfies Registries
@@ -13,13 +14,12 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
beforeEach(() => {
nock.enableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
describe('optional dependencies', () => {
@@ -44,10 +44,9 @@ describe('optional dependencies', () => {
},
}
// Verify that full metadata is requested (no abbreviated Accept header)
const scope = nock(registries.default)
.get('/platform-pkg')
.matchHeader('accept', (value) => !value.includes('application/vnd.npm.install-v1+json'))
// Mock the full metadata request for optional dependency
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/platform-pkg', method: 'GET' })
.reply(200, packageMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -68,7 +67,6 @@ describe('optional dependencies', () => {
expect(result!.manifest!.libc).toEqual(['glibc'])
expect(result!.manifest!.os).toEqual(['linux'])
expect(result!.manifest!.cpu).toEqual(['x64'])
expect(scope.isDone()).toBe(true)
})
test('abbreviated and full metadata are cached separately', async () => {
@@ -108,17 +106,19 @@ describe('optional dependencies', () => {
},
}
// First request: abbreviated metadata for regular dependency
const abbreviatedScope = nock(registries.default)
.get('/cache-test')
.matchHeader('accept', /application\/vnd\.npm\.install-v1\+json/)
.reply(200, abbreviatedMeta)
// Second request: full metadata for optional dependency
const fullScope = nock(registries.default)
.get('/cache-test')
.matchHeader('accept', (value) => !value.includes('application/vnd.npm.install-v1+json'))
.reply(200, fullMeta)
const mockPool = getMockAgent().get(registries.default.replace(/\/$/, ''))
// First request: abbreviated metadata for regular dependency (accept header prefers abbreviated)
mockPool.intercept({
path: '/cache-test',
method: 'GET',
headers: { accept: /application\/vnd\.npm\.install-v1\+json/ },
}).reply(200, abbreviatedMeta)
// Second request: full metadata for optional dependency (accept header prefers full JSON)
mockPool.intercept({
path: '/cache-test',
method: 'GET',
headers: { accept: /application\/json/ },
}).reply(200, fullMeta)
const cacheDir = temporaryDirectory()
@@ -141,8 +141,5 @@ describe('optional dependencies', () => {
{}
)
expect(optionalResult!.manifest!.scripts).toEqual({ test: 'jest', build: 'tsc' })
expect(abbreviatedScope.isDone()).toBe(true)
expect(fullScope.isDone()).toBe(true)
})
})

View File

@@ -7,9 +7,10 @@ import { createNpmResolver } from '@pnpm/resolving.npm-resolver'
import { fixtures } from '@pnpm/test-fixtures'
import type { Registries } from '@pnpm/types'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { getMockAgent, setupMockAgent, teardownMockAgent } from './utils/index.js'
const f = fixtures(import.meta.dirname)
const registries: Registries = {
@@ -25,18 +26,17 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
beforeEach(() => {
nock.enableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
test('fall back to a newer version if there is no version published by the given date', async () => {
nock(registries.default)
.get('/bad-dates')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/bad-dates', method: 'GET' })
.reply(200, badDatesMeta)
const cacheDir = temporaryDirectory()
@@ -66,8 +66,8 @@ test('request metadata when the one in cache does not have a version satisfying
fs.mkdirSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org`), { recursive: true })
fs.writeFileSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org/bad-dates.json`), JSON.stringify(cachedMeta), 'utf8')
nock(registries.default)
.get('/bad-dates')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/bad-dates', method: 'GET' })
.reply(200, badDatesMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -106,8 +106,8 @@ test('do not pick version that does not satisfy the date requirement even if it
fs.mkdirSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org`), { recursive: true })
fs.writeFileSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org/foo.json`), JSON.stringify(fooMeta), 'utf8')
nock(registries.default)
.get('/foo')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/foo', method: 'GET' })
.reply(200, fooMeta)
const { resolveFromNpm } = createResolveFromNpm({
@@ -130,8 +130,8 @@ test('should skip time field validation for excluded packages', async () => {
fs.mkdirSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org`), { recursive: true })
fs.writeFileSync(path.join(cacheDir, `${FULL_FILTERED_META_DIR}/registry.npmjs.org/is-positive.json`), JSON.stringify(metaWithoutTime), 'utf8')
nock(registries.default)
.get('/is-positive')
getMockAgent().get(registries.default.replace(/\/$/, ''))
.intercept({ path: '/is-positive', method: 'GET' })
.reply(200, metaWithoutTime)
const { resolveFromNpm } = createResolveFromNpm({

View File

@@ -6,10 +6,9 @@ import { createNpmResolver } from '@pnpm/resolving.npm-resolver'
import { fixtures } from '@pnpm/test-fixtures'
import type { Registries } from '@pnpm/types'
import { loadJsonFileSync } from 'load-json-file'
import nock from 'nock'
import { temporaryDirectory } from 'tempy'
import { retryLoadJsonFile } from './utils/index.js'
import { getMockAgent, retryLoadJsonFile, setupMockAgent, teardownMockAgent } from './utils/index.js'
const f = fixtures(import.meta.dirname)
/* eslint-disable @typescript-eslint/no-explicit-any */
@@ -26,27 +25,22 @@ const fetch = createFetchFromRegistry({})
const getAuthHeader = () => undefined
const createResolveFromNpm = createNpmResolver.bind(null, fetch, getAuthHeader)
afterEach(() => {
nock.cleanAll()
nock.disableNetConnect()
afterEach(async () => {
await teardownMockAgent()
})
beforeEach(() => {
nock.enableNetConnect()
beforeEach(async () => {
await setupMockAgent()
})
test('resolveFromJsr() on jsr', async () => {
const slash = '%2F'
nock(registries.default)
.get(`/@jsr${slash}rus__greet`)
.reply(404)
.get(`/@jsr${slash}luca__cases`)
.reply(404)
nock(registries['@jsr'])
.get(`/@jsr${slash}rus__greet`)
.reply(200, jsrRusGreetMeta)
.get(`/@jsr${slash}luca__cases`)
.reply(200, jsrLucaCasesMeta)
const defaultPool = getMockAgent().get(registries.default.replace(/\/$/, ''))
defaultPool.intercept({ path: `/@jsr${slash}rus__greet`, method: 'GET' }).reply(404, {})
defaultPool.intercept({ path: `/@jsr${slash}luca__cases`, method: 'GET' }).reply(404, {})
const jsrPool = getMockAgent().get(registries['@jsr'].replace(/\/$/, ''))
jsrPool.intercept({ path: `/@jsr${slash}rus__greet`, method: 'GET' }).reply(200, jsrRusGreetMeta)
jsrPool.intercept({ path: `/@jsr${slash}luca__cases`, method: 'GET' }).reply(200, jsrLucaCasesMeta)
const cacheDir = temporaryDirectory()
const { resolveFromJsr } = createResolveFromNpm({
@@ -83,16 +77,12 @@ test('resolveFromJsr() on jsr', async () => {
test('resolveFromJsr() on jsr with alias renaming', async () => {
const slash = '%2F'
nock(registries.default)
.get(`/@jsr${slash}rus__greet`)
.reply(404)
.get(`/@jsr${slash}luca__cases`)
.reply(404)
nock(registries['@jsr'])
.get(`/@jsr${slash}rus__greet`)
.reply(200, jsrRusGreetMeta)
.get(`/@jsr${slash}luca__cases`)
.reply(200, jsrLucaCasesMeta)
const defaultPool = getMockAgent().get(registries.default.replace(/\/$/, ''))
defaultPool.intercept({ path: `/@jsr${slash}rus__greet`, method: 'GET' }).reply(404, {})
defaultPool.intercept({ path: `/@jsr${slash}luca__cases`, method: 'GET' }).reply(404, {})
const jsrPool = getMockAgent().get(registries['@jsr'].replace(/\/$/, ''))
jsrPool.intercept({ path: `/@jsr${slash}rus__greet`, method: 'GET' }).reply(200, jsrRusGreetMeta)
jsrPool.intercept({ path: `/@jsr${slash}luca__cases`, method: 'GET' }).reply(200, jsrLucaCasesMeta)
const cacheDir = temporaryDirectory()
const { resolveFromJsr } = createResolveFromNpm({

View File

@@ -1,5 +1,7 @@
import fs from 'node:fs'
export { getMockAgent, setupMockAgent, teardownMockAgent } from '@pnpm/testing.mock-agent'
export async function retryLoadJsonFile<T> (filePath: string): Promise<T> {
let retry = 0
/* eslint-disable no-await-in-loop */

View File

@@ -51,6 +51,9 @@
{
"path": "../../store/index"
},
{
"path": "../../testing/mock-agent"
},
{
"path": "../../worker"
},

View File

@@ -0,0 +1,46 @@
{
"name": "@pnpm/testing.mock-agent",
"version": "0.0.0",
"private": true,
"description": "Shared undici MockAgent helpers for pnpm tests",
"keywords": [
"pnpm",
"pnpm11",
"testing"
],
"license": "MIT",
"funding": "https://opencollective.com/pnpm",
"repository": "https://github.com/pnpm/pnpm/tree/main/testing/mock-agent",
"homepage": "https://github.com/pnpm/pnpm/tree/main/testing/mock-agent#readme",
"bugs": {
"url": "https://github.com/pnpm/pnpm/issues"
},
"type": "module",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"exports": {
".": "./lib/index.js"
},
"files": [
"lib",
"!*.map"
],
"scripts": {
"prepublishOnly": "pn compile",
"lint": "eslint \"src/**/*.ts\"",
"test": "pn compile",
"compile": "tsgo --build && pn lint --fix"
},
"dependencies": {
"undici": "catalog:"
},
"peerDependencies": {
"@pnpm/network.fetch": "workspace:*"
},
"engines": {
"node": ">=22.13"
},
"devDependencies": {
"@pnpm/testing.mock-agent": "workspace:*"
}
}

View File

@@ -0,0 +1,35 @@
import { type Dispatcher, getGlobalDispatcher, MockAgent, setGlobalDispatcher } from 'undici'
// The dispatcher that was globally active before setupMockAgent() installed a
// MockAgent; teardownMockAgent() restores it. Captured once, on first setup.
let originalDispatcher: Dispatcher | null = null
// The MockAgent currently installed as undici's global dispatcher, or null
// when no mock agent is active (i.e. outside a setup/teardown pair).
let currentMockAgent: MockAgent | null = null
/**
 * Installs a fresh undici MockAgent as the global dispatcher and disables
 * real network connections, so tests can intercept all HTTP traffic.
 *
 * The dispatcher that was active before the very first call is remembered so
 * that teardownMockAgent() can put it back.
 *
 * @returns the newly installed MockAgent, ready for `.get(...).intercept(...)` setup.
 */
export async function setupMockAgent (): Promise<MockAgent> {
  // Capture the real dispatcher only once; repeated setups must not
  // overwrite it with a previous MockAgent.
  originalDispatcher ??= getGlobalDispatcher()
  // Dynamic import to avoid circular tsconfig reference with @pnpm/network.fetch
  const networkFetch = await import('@pnpm/network.fetch')
  // Drop any cached dispatchers so requests go through the new mock agent.
  networkFetch.clearDispatcherCache()
  const agent = new MockAgent()
  agent.disableNetConnect()
  setGlobalDispatcher(agent)
  currentMockAgent = agent
  return agent
}
/**
 * Closes the active MockAgent (if any) and restores the original global
 * dispatcher that setupMockAgent() replaced. Safe to call when no mock
 * agent is installed — both steps are no-ops in that case.
 */
export async function teardownMockAgent (): Promise<void> {
  const agent = currentMockAgent
  if (agent != null) {
    await agent.close()
    currentMockAgent = null
  }
  const previous = originalDispatcher
  if (previous != null) {
    setGlobalDispatcher(previous)
    originalDispatcher = null
  }
}
/**
 * Returns the MockAgent installed by setupMockAgent().
 *
 * @throws Error when called outside a setup/teardown pair — callers must
 *   run setupMockAgent() (typically in beforeEach) first.
 */
export function getMockAgent (): MockAgent {
  const agent = currentMockAgent
  if (agent == null) {
    throw new Error('MockAgent not initialized. Call setupMockAgent() first.')
  }
  return agent
}

View File

@@ -0,0 +1,16 @@
{
"extends": "@pnpm/tsconfig",
"compilerOptions": {
"outDir": "lib",
"rootDir": "src"
},
"include": [
"src/**/*.ts",
"../../__typings__/**/*.d.ts"
],
"references": [
{
"path": "../../network/fetch"
}
]
}

View File

@@ -0,0 +1,8 @@
{
"extends": "./tsconfig.json",
"include": [
"src/**/*.ts",
"test/**/*.ts",
"../../__typings__/**/*.d.ts"
]
}