refactor!: replace npm publish with libnpmpublish (#10591)

* chore(deps): add `libnpmpublish` to catalog

* chore(deps): install `libnpmpublish`

* feat: publishableManifest (wip)

* feat: publishableManifest (wip)

* chore(cspell): libnpmpublish

* test: fix

* feat: validate field and version

* chore: @npm/types

* chore: todo

* refactor: reorganize

* feat: transformRequiredFields

* chore(deps): patch `libnpmpublish`

* fix: `BaseManifest.config`

* fix: eslint

* chore(git): revert a patch that doesn't work

This reverts commit 45f2c6a6c2.

We will use type casting

* feat: `engines.runtime`

* feat: normalize bin

* fix: `bin === ''`

* test: fix

* refactor: inference friendly

* feat: `peerDependenciesMeta`

* refactor: group into a directory

* refactor: use `ramda.pipe`

* refactor: less intrusive type assertion

* feat!: returning `ExportedManifest`

* refactor: remove unnecessary file

* docs: add a todo

* refactor: getNetworkConfigs (#10458)

Some tests are added as a bonus

* feat: `publishPackedPkg` (wip)

* feat: replace `\t` with 4 spaces

* fix: newline

* fix: newline

* refactor: extract `FailedToPublishError`

* test: FailedToPublishError

* feat: registryConfigKeys

* feat: `publishPackedPkg` (wip)

* feat(config/getNetworkConfigs): load auth info

* feat(config/getNetworkConfigs): load auth info (#10491)

* feat: `publishPackedPkg` (wip)

* refactor: extract a `static` function

* fix: inheritance, override, and merge

* feat: `executeTokenHelper`

* fix: use the visible `globalWarn`

* feat: add options

* feat: add more options

* docs: more links

* fix: private packages

* fix: --dry-run

* feat: log more things

* fix: name

* fix: tag

* refactor: remove extraneous `assertPublicPackage`

* feat: use `publishPackedPkg` for directories

* refactor: require only necessary fields

* refactor: extractManifestFromPacked

* fix: extractManifestFromPacked

* test: extractManifestFromPacked

* feat: isTarballPath

* feat: use `publishPackedPkg` for tarballs

* style: add an empty line for clarity

* refactor: remove unnecessary work

* feat: --otp

* feat: PNPM_CONFIG_OTP

* feat: oidc

* test: fix name collision

* fix: eslint

* test: disable a false test

* feat: set `provenance`

* docs(todo): auto provenance

* refactor: run oidc in `createPublishOptions`

* fix: correct auth keys for `libnpmpublish`

* docs: changeset

* fix: incorrect `password` field

* fix: typo, grammar

* chore(git): resolve merge conflict ahead of time

In preparation for https://github.com/pnpm/pnpm/pull/10385

* fix: field name

* fix(config): decoding `_password`

* fix: edge case of partial `cert`/`key`

* fix: ensure `registry` always matches its config key

* fix: `_password`

* test: correct a name

* test: more specific assertions

* fix: grammar

* docs(changeset): fix grammar

* docs: fix grammar

* fix: clean up after failure

* test: fix windows

* feat(provenance): auto detect

* refactor: consistent name

* fix: correct error names

* refactor: extract the `provenance` code

* feat: show code and body of an error

* refactor: use `encodeURIComponent`

* refactor: rename a type

* refactor: use the try-catch model

* refactor: move `normalizeBinObject`

* refactor: split `oidc` into `idToken` and `authToken`

* refactor: run `next` on `stream`'s `'end'`

* fix: use the correct encoding

* feat: guard against weird names

* test: `transform/engines`

Closes https://github.com/pnpm/pnpm/pull/10599

* test: `transformPeerDependenciesMeta`

Closes https://github.com/pnpm/pnpm/pull/10600

* refactor: dependency inject the `Date` too

* refactor: export an interface

* test: oidc

Closes https://github.com/pnpm/pnpm/pull/10598

* refactor: re-arrange imports

* refactor: remove unnecessary type casts

* refactor: improve test
This commit is contained in:
Khải
2026-02-13 03:10:54 +07:00
committed by GitHub
parent 5bf7768ca4
commit cc7c0d22df
51 changed files with 3702 additions and 208 deletions

View File

@@ -0,0 +1,5 @@
---
"@pnpm/config": patch
---
Fix `_password` decoding.

View File

@@ -0,0 +1,19 @@
---
"@pnpm/plugin-commands-publishing": major
"pnpm": major
"@pnpm/make-dedicated-lockfile": minor
"@pnpm/exportable-manifest": minor
"@pnpm/types": minor
"@pnpm/config": minor
---
`pnpm publish` now works without the `npm` CLI.
The One-time Password feature now reads from `PNPM_CONFIG_OTP` instead of `NPM_CONFIG_OTP`:
```sh
export PNPM_CONFIG_OTP='<your OTP here>'
pnpm publish --no-git-checks
```
Since the new `pnpm publish` no longer calls `npm publish`, some undocumented features may have been unknowingly dropped. If you rely on a feature that is now gone, please open an issue at <https://github.com/pnpm/pnpm/issues>. In the meantime, you can use `pnpm pack && npm publish *.tgz` as a workaround.

View File

@@ -82,7 +82,7 @@ function getAuthUserPass ({
}
if (authUsername && authPassword) {
return { username: authUsername, password: authPassword }
return { username: authUsername, password: atob(authPassword) }
}
return undefined

View File

@@ -121,7 +121,7 @@ test('auth infos', () => {
expect(getNetworkConfigs({
'@foo:registry': 'https://example.com/foo',
'//example.com/foo:username': 'foo',
'//example.com/foo:_password': 'bar',
'//example.com/foo:_password': btoa('bar'),
})).toStrictEqual({
registries: {
'@foo': 'https://example.com/foo',

View File

@@ -47,7 +47,7 @@ describe('parseAuthInfo', () => {
test('authUsername and authPassword', () => {
expect(parseAuthInfo({
authUsername: 'foo',
authPassword: 'bar',
authPassword: btoa('bar'),
})).toStrictEqual({
authUserPass: {
username: 'foo',

View File

@@ -128,6 +128,7 @@
"ldni",
"leniolabs",
"libc",
"libnpmpublish",
"libnpx",
"libzip",
"licence",
@@ -157,6 +158,7 @@
"mycompany",
"myorg",
"mypackage",
"mytoken",
"ndjson",
"nerfed",
"nodetouch",
@@ -259,6 +261,7 @@
"shasums",
"sheetjs",
"shlex",
"sigstore",
"sindresorhus",
"sirv",
"soporan",

View File

@@ -9,7 +9,7 @@ import {
} from '@pnpm/lockfile.fs'
import { pruneSharedLockfile } from '@pnpm/lockfile.pruner'
import { readProjectManifest } from '@pnpm/read-project-manifest'
import { DEPENDENCIES_FIELDS, type ProjectId } from '@pnpm/types'
import { DEPENDENCIES_FIELDS, type ProjectId, type ProjectManifest } from '@pnpm/types'
import { pickBy } from 'ramda'
import renameOverwrite from 'rename-overwrite'
@@ -42,7 +42,7 @@ export async function makeDedicatedLockfile (lockfileDir: string, projectDir: st
// intentionally.
catalogs: {},
})
await writeProjectManifest(publishManifest)
await writeProjectManifest(publishManifest as ProjectManifest)
const modulesDir = path.join(projectDir, 'node_modules')
const tmp = path.join(projectDir, 'tmp_node_modules')

View File

@@ -102,7 +102,7 @@ export interface BaseManifest {
email?: string
}
scripts?: PackageScripts
config?: object
config?: Record<string, unknown>
engines?: {
node?: string
npm?: string
@@ -173,7 +173,7 @@ export interface PnpmSettings {
export interface ProjectManifest extends BaseManifest {
packageManager?: string
workspaces?: string[]
workspaces?: string[] // TODO: add Record<string, string> to represent npm (to be compatible with @npm/types)
pnpm?: PnpmSettings
private?: boolean
resolutions?: Record<string, string>

View File

@@ -31,8 +31,10 @@
"_test": "cross-env NODE_OPTIONS=\"$NODE_OPTIONS --experimental-vm-modules\" jest"
},
"dependencies": {
"@npm/types": "catalog:",
"@pnpm/catalogs.resolver": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/package-bins": "workspace:*",
"@pnpm/read-project-manifest": "workspace:*",
"@pnpm/resolving.jsr-specifier-parser": "workspace:*",
"@pnpm/types": "workspace:*",

View File

@@ -9,6 +9,9 @@ import { type Dependencies, type ProjectManifest } from '@pnpm/types'
import { omit } from 'ramda'
import pMapValues from 'p-map-values'
import { overridePublishConfig } from './overridePublishConfig.js'
import { type ExportedManifest, transform } from './transform/index.js'
export { type ExportedManifest }
const PREPUBLISH_SCRIPTS = [
'prepublishOnly',
@@ -30,7 +33,7 @@ export async function createExportableManifest (
dir: string,
originalManifest: ProjectManifest,
opts: MakePublishManifestOptions
): Promise<ProjectManifest> {
): Promise<ExportedManifest> {
let publishManifest: ProjectManifest = omit(['pnpm', 'scripts', 'packageManager'], originalManifest)
if (originalManifest.scripts != null) {
publishManifest.scripts = omit(PREPUBLISH_SCRIPTS, originalManifest.scripts)
@@ -70,7 +73,7 @@ export async function createExportableManifest (
publishManifest = await hook(publishManifest, dir) ?? publishManifest
}
return publishManifest
return transform(publishManifest)
}
export type PublishDependencyConverter = (

View File

@@ -0,0 +1,42 @@
import { PnpmError } from '@pnpm/error'
import { type ProjectManifest } from '@pnpm/types'
import { type ExportedManifest } from './index.js'
type Input = Pick<ProjectManifest, 'bin'> & Pick<ExportedManifest, 'name'>
type Output<Manifest extends Input> = Omit<Manifest, 'bin'> & Pick<ExportedManifest, 'bin'>
export function transformBin<Manifest extends Input> (manifest: Manifest): Output<Manifest> {
if (manifest.bin == null || typeof manifest.bin === 'object') return manifest as Output<Manifest>
const { bin, ...rest } = manifest
return {
...rest,
bin: normalizeBinObject(manifest.name, bin),
}
}
/**
* The property `"bin"` of a `package.json` could be either an object or a string.
* This function normalizes either forms into an object.
*/
export function normalizeBinObject (pkgName: string, bin: string | Record<string, string>): Record<string, string> {
if (typeof bin === 'object') return bin
const binName = normalizeBinName(pkgName)
return { [binName]: bin }
}
function normalizeBinName (name: string): string {
if (name[0] !== '@') return name
const slashIndex = name.indexOf('/')
if (slashIndex < 0) {
throw new InvalidScopedPackageNameError(name)
}
return name.slice(slashIndex + 1)
}
export class InvalidScopedPackageNameError extends PnpmError {
readonly invalidName: string
constructor (invalidName: string) {
super('INVALID_SCOPED_PACKAGE_NAME', `The name ${JSON.stringify(invalidName)} is not a valid scoped package name`)
this.invalidName = invalidName
}
}

View File

@@ -0,0 +1,36 @@
import { PnpmError } from '@pnpm/error'
import { type ProjectManifest } from '@pnpm/types'
import { type ExportedManifest } from './index.js'
type EnginesField = 'engines' | 'devEngines'
type Input = Pick<ProjectManifest, EnginesField>
type Omitted<Manifest extends Input> = Omit<Manifest, EnginesField>
type Output<Manifest extends Input> = Omitted<Manifest> & Pick<ExportedManifest, EnginesField>
export function transformEngines<Manifest extends Input> (manifest: Manifest): Output<Manifest> {
if (!manifest.engines?.runtime) return manifest as Output<Manifest>
if (manifest.engines.runtime && manifest.devEngines?.runtime) {
throw new DevEnginesRuntimeConflictError()
}
const {
engines: { runtime, ...engines },
...rest
} = manifest
return {
...rest as Omitted<Manifest>,
engines,
devEngines: {
...rest.devEngines,
runtime,
},
}
}
export class DevEnginesRuntimeConflictError extends PnpmError {
constructor () {
super('DEV_ENGINES_RUNTIME_CONFLICT', '.devEngines.runtime and .engines.runtime were both defined')
}
}

View File

@@ -0,0 +1,17 @@
import { type PackageJSON as ExportedManifest } from '@npm/types'
import { type ProjectManifest } from '@pnpm/types'
import { pipe } from 'ramda'
import { transformBin } from './bin.js'
import { transformEngines } from './engines.js'
import { transformRequiredFields } from './requiredFields.js'
import { transformPeerDependenciesMeta } from './peerDependenciesMeta.js'
export { type ExportedManifest }
export type Transform = (manifest: ProjectManifest) => ExportedManifest
export const transform: Transform = pipe(
transformRequiredFields,
transformBin,
transformEngines,
transformPeerDependenciesMeta
)

View File

@@ -0,0 +1,25 @@
import { type ProjectManifest } from '@pnpm/types'
import { type ExportedManifest } from './index.js'
type Input = Pick<ProjectManifest, 'peerDependenciesMeta'>
type Omitted<Manifest extends Input> = Omit<Manifest, 'peerDependenciesMeta'>
type Output<Manifest extends Input> = Omitted<Manifest> & Pick<ExportedManifest, 'peerDependenciesMeta'>
export function transformPeerDependenciesMeta<Manifest extends Input> (manifest: Manifest): Output<Manifest> {
if (!manifest.peerDependenciesMeta) return manifest as Omitted<Manifest>
const inputPeerDepsMeta = manifest.peerDependenciesMeta
const outputPeerDepsMeta: Required<ExportedManifest>['peerDependenciesMeta'] = {}
for (const key in inputPeerDepsMeta) {
const { optional, ...rest } = inputPeerDepsMeta[key]
outputPeerDepsMeta[key] = {
...rest,
optional: optional ?? false,
}
}
return {
...manifest as Omitted<Manifest>,
peerDependenciesMeta: outputPeerDepsMeta,
}
}

View File

@@ -0,0 +1,21 @@
import { PnpmError } from '@pnpm/error'
import { type ProjectManifest } from '@pnpm/types'
import { type ExportedManifest } from './index.js'
type RequiredField = 'name' | 'version'
type Input = Pick<ProjectManifest, RequiredField>
type Output<Manifest extends Input> = Omit<Manifest, RequiredField> & Pick<ExportedManifest, RequiredField>
export function transformRequiredFields<Manifest extends Input> (manifest: Manifest): Output<Manifest> {
if (!manifest.name) throw new MissingRequiredFieldError('name')
if (!manifest.version) throw new MissingRequiredFieldError('version')
return manifest as Output<Manifest>
}
export class MissingRequiredFieldError<Field extends RequiredField> extends PnpmError {
readonly field: Field
constructor (field: Field) {
super('MISSING_REQUIRED_FIELD', `Missing required field ${JSON.stringify(field)}`)
this.field = field
}
}

View File

@@ -46,7 +46,7 @@ test('hook returns new manifest', async () => {
module.exports = {
hooks: {
beforePacking: (pkg) => {
return { type: 'module' }
return { type: 'module', ...pkg }
},
},
}`, 'utf8')
@@ -57,6 +57,8 @@ module.exports = {
version: '1.0.0',
}, { ...defaultOpts, hooks })).toStrictEqual({
type: 'module',
name: 'foo',
version: '1.0.0',
})
})

View File

@@ -276,3 +276,37 @@ test('jsr deps are replaced', async () => {
},
} as Partial<typeof manifest>)
})
test('checks for name', async () => {
const location = 'package-to-export'
const manifest = { version: '0.0.0' } satisfies ProjectManifest
preparePackages([{
location,
package: manifest,
}])
process.chdir(location)
await expect(createExportableManifest(process.cwd(), manifest, { catalogs: {} })).rejects.toMatchObject({
code: 'ERR_PNPM_MISSING_REQUIRED_FIELD',
field: 'name',
})
})
test('checks for version', async () => {
const location = 'package-to-export'
const manifest = { name: 'example' } satisfies ProjectManifest
preparePackages([{
location,
package: manifest,
}])
process.chdir(location)
await expect(createExportableManifest(process.cwd(), manifest, { catalogs: {} })).rejects.toMatchObject({
code: 'ERR_PNPM_MISSING_REQUIRED_FIELD',
field: 'version',
})
})

View File

@@ -0,0 +1,22 @@
import { normalizeBinObject } from '../lib/transform/bin.js'
test('string', () => {
expect(normalizeBinObject('foo', 'bin.js')).toStrictEqual({ foo: 'bin.js' })
expect(normalizeBinObject('@bar/foo', 'bin.js')).toStrictEqual({ foo: 'bin.js' })
})
test('object', () => {
expect(normalizeBinObject('foo', {})).toStrictEqual({})
expect(normalizeBinObject('foo', {
foo: 'foo.js',
})).toStrictEqual({
foo: 'foo.js',
})
expect(normalizeBinObject('foo', {
foo: 'foo.js',
bar: 'bar.js',
})).toStrictEqual({
foo: 'foo.js',
bar: 'bar.js',
})
})

View File

@@ -0,0 +1,168 @@
import { transformEngines, DevEnginesRuntimeConflictError } from '../lib/transform/engines.js'
describe('transformEngines', () => {
test('moves engines.runtime to devEngines.runtime', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
runtime: { name: 'bun', version: '1.0.0' },
},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
},
devEngines: {
runtime: { name: 'bun', version: '1.0.0' },
},
})
})
test('preserves existing devEngines when moving engines.runtime', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
runtime: { name: 'bun', version: '1.0.0' },
},
devEngines: {
cpu: [{ name: 'x64' }, { name: 'arm64' }],
},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
},
devEngines: {
cpu: [{ name: 'x64' }, { name: 'arm64' }],
runtime: { name: 'bun', version: '1.0.0' },
},
})
})
test('does not modify manifest when engines.runtime is not present', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
},
})
})
test('does not modify manifest when engines field is empty', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
engines: {},
})
})
test('throws error when both engines.runtime and devEngines.runtime are defined', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {
node: '>=18',
runtime: { name: 'bun', version: '1.0.0' },
},
devEngines: {
runtime: { name: 'deno', version: '2.0.0' },
},
}
expect(() => transformEngines(manifest)).toThrow(DevEnginesRuntimeConflictError)
})
test('removes engines field when only runtime was present', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
engines: {
runtime: { name: 'bun', version: '1.0.0' },
},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
engines: {},
devEngines: {
runtime: { name: 'bun', version: '1.0.0' },
},
})
})
test('handles manifest with other fields', () => {
const manifest = {
name: 'test-package',
version: '1.0.0',
description: 'A test package',
dependencies: {
foo: '1.0.0',
},
engines: {
node: '>=18',
npm: '>=8',
runtime: { name: 'bun', version: '1.0.0' },
},
scripts: {
test: 'echo test',
},
}
const result = transformEngines(manifest)
expect(result).toStrictEqual({
name: 'test-package',
version: '1.0.0',
description: 'A test package',
dependencies: {
foo: '1.0.0',
},
engines: {
node: '>=18',
npm: '>=8',
},
devEngines: {
runtime: { name: 'bun', version: '1.0.0' },
},
scripts: {
test: 'echo test',
},
})
})
})

View File

@@ -0,0 +1,168 @@
import { type ProjectManifest } from '@pnpm/types'
import { transformPeerDependenciesMeta } from '../lib/transform/peerDependenciesMeta.js'
test('returns manifest as-is when peerDependenciesMeta is absent', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual(manifest)
})
test('returns manifest as-is when peerDependenciesMeta is undefined', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: undefined,
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: undefined,
})
})
test('defaults optional to false when not specified', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: false,
},
},
})
})
test('preserves optional when explicitly set to false', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: false,
},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: false,
},
},
})
})
test('preserves optional when explicitly set to true', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: true,
},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: true,
},
},
})
})
test('handles multiple peerDependenciesMeta entries with different values', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: true,
},
baz: {
optional: false,
},
qux: {},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: true,
},
baz: {
optional: false,
},
qux: {
optional: false,
},
},
})
})
test('preserves additional properties in peerDependenciesMeta', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
optional: true,
// @ts-expect-error - testing non-standard properties
customProp: 'value',
},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
peerDependenciesMeta: {
bar: {
customProp: 'value',
optional: true,
},
},
})
})
test('preserves other manifest properties', () => {
const manifest: ProjectManifest = {
name: 'foo',
version: '1.0.0',
description: 'A test package',
dependencies: {
lodash: '^4.0.0',
},
peerDependenciesMeta: {
react: {
optional: true,
},
},
}
expect(transformPeerDependenciesMeta(manifest)).toStrictEqual({
name: 'foo',
version: '1.0.0',
description: 'A test package',
dependencies: {
lodash: '^4.0.0',
},
peerDependenciesMeta: {
react: {
optional: true,
},
},
})
})

View File

@@ -30,6 +30,9 @@
{
"path": "../../packages/types"
},
{
"path": "../../pkg-manager/package-bins"
},
{
"path": "../../resolving/jsr-specifier-parser"
},

430
pnpm-lock.yaml generated
View File

@@ -30,6 +30,9 @@ catalogs:
'@jest/globals':
specifier: 30.0.5
version: 30.0.5
'@npm/types':
specifier: ^2.1.0
version: 2.1.0
'@pnpm/byline':
specifier: ^1.0.0
version: 1.0.0
@@ -141,6 +144,9 @@ catalogs:
'@types/js-yaml':
specifier: ^4.0.9
version: 4.0.9
'@types/libnpmpublish':
specifier: ^9.0.1
version: 9.0.1
'@types/lodash.kebabcase':
specifier: 4.1.9
version: 4.1.9
@@ -459,6 +465,9 @@ catalogs:
lcov-result-merger:
specifier: ^3.3.0
version: 3.3.0
libnpmpublish:
specifier: ^11.1.3
version: 11.1.3
load-json-file:
specifier: ^7.0.1
version: 7.0.1
@@ -6366,12 +6375,18 @@ importers:
pkg-manifest/exportable-manifest:
dependencies:
'@npm/types':
specifier: 'catalog:'
version: 2.1.0
'@pnpm/catalogs.resolver':
specifier: workspace:*
version: link:../../catalogs/resolver
'@pnpm/error':
specifier: workspace:*
version: link:../../packages/error
'@pnpm/package-bins':
specifier: workspace:*
version: link:../../pkg-manager/package-bins
'@pnpm/read-project-manifest':
specifier: workspace:*
version: link:../read-project-manifest
@@ -7098,6 +7113,9 @@ importers:
'@pnpm/exportable-manifest':
specifier: workspace:*
version: link:../../pkg-manifest/exportable-manifest
'@pnpm/fetch':
specifier: workspace:*
version: link:../../network/fetch
'@pnpm/fs.packlist':
specifier: workspace:*
version: link:../../fs/packlist
@@ -7107,9 +7125,6 @@ importers:
'@pnpm/lifecycle':
specifier: workspace:*
version: link:../../exec/lifecycle
'@pnpm/network.auth-header':
specifier: workspace:*
version: link:../../network/auth-header
'@pnpm/package-bins':
specifier: workspace:*
version: link:../../pkg-manager/package-bins
@@ -7122,9 +7137,6 @@ importers:
'@pnpm/resolver-base':
specifier: workspace:*
version: link:../../resolving/resolver-base
'@pnpm/run-npm':
specifier: workspace:*
version: link:../../exec/run-npm
'@pnpm/sort-packages':
specifier: workspace:*
version: link:../../workspace/sort-packages
@@ -7137,12 +7149,21 @@ importers:
chalk:
specifier: 'catalog:'
version: 5.6.2
ci-info:
specifier: 'catalog:'
version: 4.4.0
enquirer:
specifier: 'catalog:'
version: 2.4.1
execa:
specifier: 'catalog:'
version: safe-execa@0.2.0
libnpmpublish:
specifier: 'catalog:'
version: 11.1.3
normalize-registry-url:
specifier: 'catalog:'
version: 2.0.1
p-filter:
specifier: 'catalog:'
version: 4.1.0
@@ -7207,6 +7228,9 @@ importers:
'@types/is-windows':
specifier: 'catalog:'
version: 1.0.2
'@types/libnpmpublish':
specifier: 'catalog:'
version: 9.0.1
'@types/proxyquire':
specifier: 'catalog:'
version: 1.3.31
@@ -7225,9 +7249,6 @@ importers:
'@types/validate-npm-package-name':
specifier: 'catalog:'
version: 4.0.2
ci-info:
specifier: 'catalog:'
version: 4.4.0
cross-spawn:
specifier: 'catalog:'
version: 7.0.6
@@ -10133,14 +10154,45 @@ packages:
resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
engines: {node: '>= 8'}
'@npm/types@1.0.2':
resolution: {integrity: sha512-KXZccTDEnWqNrrx6JjpJKU/wJvNeg9BDgjS0XhmlZab7br921HtyVbsYzJr4L+xIvjdJ20Wh9dgxgCI2a5CEQw==}
'@npm/types@2.1.0':
resolution: {integrity: sha512-humQVe2BrWR7Yum5hGDYBnIPnnZJvKSOH/I4QN1ZL2bdb4c4zQHaHupEJ3cOkSJ07G3YfN793ptbNh196BWLgA==}
engines: {node: '>=18.6.0'}
'@npmcli/agent@3.0.0':
resolution: {integrity: sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==}
engines: {node: ^18.17.0 || >=20.5.0}
'@npmcli/agent@4.0.0':
resolution: {integrity: sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/fs@4.0.0':
resolution: {integrity: sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q==}
engines: {node: ^18.17.0 || >=20.5.0}
'@npmcli/fs@5.0.0':
resolution: {integrity: sha512-7OsC1gNORBEawOa5+j2pXN9vsicaIOH5cPXxoR6fJOmH6/EXpJB2CajXOu1fPRFun2m1lktEFX11+P89hqO/og==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/git@7.0.1':
resolution: {integrity: sha512-+XTFxK2jJF/EJJ5SoAzXk3qwIDfvFc5/g+bD274LZ7uY7LE8sTfG6Z8rOanPl2ZEvZWqNvmEdtXC25cE54VcoA==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/package-json@7.0.4':
resolution: {integrity: sha512-0wInJG3j/K40OJt/33ax47WfWMzZTm6OQxB9cDhTt5huCP2a9g2GnlsxmfN+PulItNPIpPrZ+kfwwUil7eHcZQ==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/promise-spawn@9.0.1':
resolution: {integrity: sha512-OLUaoqBuyxeTqUvjA3FZFiXUfYC1alp3Sa99gW3EUDz3tZ3CbXDdcZ7qWKBzicrJleIgucoWamWH1saAmH/l2Q==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/redact@4.0.0':
resolution: {integrity: sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==}
engines: {node: ^20.17.0 || >=22.9.0}
'@pkgr/core@0.2.9':
resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==}
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
@@ -10754,6 +10806,30 @@ packages:
'@types/node':
optional: true
'@sigstore/bundle@4.0.0':
resolution: {integrity: sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/core@3.1.0':
resolution: {integrity: sha512-o5cw1QYhNQ9IroioJxpzexmPjfCe7gzafd2RY3qnMpxr4ZEja+Jad/U8sgFpaue6bOaF+z7RVkyKVV44FN+N8A==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/protobuf-specs@0.5.0':
resolution: {integrity: sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==}
engines: {node: ^18.17.0 || >=20.5.0}
'@sigstore/sign@4.1.0':
resolution: {integrity: sha512-Vx1RmLxLGnSUqx/o5/VsCjkuN5L7y+vxEEwawvc7u+6WtX2W4GNa7b9HEjmcRWohw/d6BpATXmvOwc78m+Swdg==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/tuf@4.0.1':
resolution: {integrity: sha512-OPZBg8y5Vc9yZjmWCHrlWPMBqW5yd8+wFNl+thMdtcWz3vjVSoJQutF8YkrzI0SLGnkuFof4HSsWUhXrf219Lw==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/verify@3.1.0':
resolution: {integrity: sha512-mNe0Iigql08YupSOGv197YdHpPPr+EzDZmfCgMc7RPNaZTw5aLN01nBl6CHJOh3BGtnMIj83EeN4butBchc8Ag==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sinclair/typebox@0.27.10':
resolution: {integrity: sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==}
@@ -10792,6 +10868,14 @@ packages:
resolution: {integrity: sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==}
engines: {node: '>=10'}
'@tufjs/canonical-json@2.0.0':
resolution: {integrity: sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==}
engines: {node: ^16.14.0 || >=18.0.0}
'@tufjs/models@4.1.0':
resolution: {integrity: sha512-Y8cK9aggNRsqJVaKUlEYs4s7CvQ1b1ta2DVPyAimb0I2qhzjNk+A+mxvll/klL0RlfuIUei8BF7YWiua4kQqww==}
engines: {node: ^20.17.0 || >=22.9.0}
'@tybys/wasm-util@0.10.1':
resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==}
@@ -10885,6 +10969,9 @@ packages:
'@types/keyv@3.1.4':
resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==}
'@types/libnpmpublish@9.0.1':
resolution: {integrity: sha512-9XtssAlenc4sYO1Ftn8KfKIVRWivBYIjHJBdJeTLLVnDzVJ2mzWeR0i3eNak7ECK2tppMW/SonzOXnk/lEU03w==}
'@types/lodash.kebabcase@4.1.9':
resolution: {integrity: sha512-kPrrmcVOhSsjAVRovN0lRfrbuidfg0wYsrQa5IYuoQO1fpHHGSme66oyiYA/5eQPVl8Z95OA3HG0+d2SvYC85w==}
@@ -10903,6 +10990,9 @@ packages:
'@types/minimist@1.2.5':
resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==}
'@types/node-fetch@2.6.13':
resolution: {integrity: sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==}
'@types/node@12.20.55':
resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==}
@@ -10921,6 +11011,15 @@ packages:
'@types/normalize-path@3.0.2':
resolution: {integrity: sha512-DO++toKYPaFn0Z8hQ7Tx+3iT9t77IJo/nDiqTXilgEP+kPNIYdpS9kh3fXuc53ugqwp9pxC1PVjCpV1tQDyqMA==}
'@types/npm-package-arg@6.1.4':
resolution: {integrity: sha512-vDgdbMy2QXHnAruzlv68pUtXCjmqUk3WrBAsRboRovsOmxbfn/WiYCjmecyKjGztnMps5dWp4Uq2prp+Ilo17Q==}
'@types/npm-registry-fetch@8.0.9':
resolution: {integrity: sha512-7NxvodR5Yrop3pb6+n8jhJNyzwOX0+6F+iagNEoi9u1CGxruYAwZD8pvGc9prIkL0+FdX5Xp0p80J9QPrGUp/g==}
'@types/npmlog@7.0.0':
resolution: {integrity: sha512-hJWbrKFvxKyWwSUXjZMYTINsSOY6IclhvGOZ97M8ac2tmR9hMwmTnYaMdpGhvju9ctWLTPhCS+eLfQNluiEjQQ==}
'@types/object-hash@3.0.6':
resolution: {integrity: sha512-fOBV8C1FIu2ELinoILQ+ApxcUKz4ngq+IWUYrxSGjXzzjUALijilampwkMgEtJ+h2njAW3pi853QpzNVCHB73w==}
@@ -11739,6 +11838,10 @@ packages:
resolution: {integrity: sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==}
engines: {node: ^18.17.0 || >=20.5.0}
cacache@20.0.3:
resolution: {integrity: sha512-3pUp4e8hv07k1QlijZu6Kn7c9+ZpWWk4j3F8N3xPuCExULobqJydKYOTj1FTq58srkJsXvO7LbGAH4C0ZU3WGw==}
engines: {node: ^20.17.0 || >=22.9.0}
cacheable-lookup@5.0.4:
resolution: {integrity: sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==}
engines: {node: '>=10.6.0'}
@@ -13824,6 +13927,10 @@ packages:
json-parse-even-better-errors@2.3.1:
resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
json-parse-even-better-errors@5.0.0:
resolution: {integrity: sha512-ZF1nxZ28VhQouRWhUcVlUIN3qwSgPuswK05s/HIaoetAoE/9tngVmCHjSxmSQPav1nd+lPtTL0YZ/2AFdR/iYQ==}
engines: {node: ^20.17.0 || >=22.9.0}
json-schema-traverse@0.4.1:
resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
@@ -13902,6 +14009,10 @@ packages:
resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
engines: {node: '>= 0.8.0'}
libnpmpublish@11.1.3:
resolution: {integrity: sha512-NVPTth/71cfbdYHqypcO9Lt5WFGTzFEcx81lWd7GDJIgZ95ERdYHGUfCtFejHCyqodKsQkNEx2JCkMpreDty/A==}
engines: {node: ^20.17.0 || >=22.9.0}
lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
@@ -14066,6 +14177,10 @@ packages:
resolution: {integrity: sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==}
engines: {node: ^18.17.0 || >=20.5.0}
make-fetch-happen@15.0.3:
resolution: {integrity: sha512-iyyEpDty1mwW3dGlYXAJqC/azFn5PPvgKVwXayOGBSmKLxhKZ9fg4qIan2ePpp1vJIwfFiO34LAPZgq9SZW9Aw==}
engines: {node: ^20.17.0 || >=22.9.0}
makeerror@1.0.12:
resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==}
@@ -14247,6 +14362,10 @@ packages:
resolution: {integrity: sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ==}
engines: {node: ^18.17.0 || >=20.5.0}
minipass-fetch@5.0.1:
resolution: {integrity: sha512-yHK8pb0iCGat0lDrs/D6RZmCdaBT64tULXjdxjSMAqoDi18Q3qKEUTHypHQZQd9+FYpIS+lkvpq6C/R6SbUeRw==}
engines: {node: ^20.17.0 || >=22.9.0}
minipass-flush@1.0.5:
resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==}
engines: {node: '>= 8'}
@@ -14259,6 +14378,10 @@ packages:
resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==}
engines: {node: '>=8'}
minipass-sized@2.0.0:
resolution: {integrity: sha512-zSsHhto5BcUVM2m1LurnXY6M//cGhVaegT71OfOXoprxT6o780GZd792ea6FfrQkuU4usHZIUczAQMRUE2plzA==}
engines: {node: '>=8'}
minipass@3.3.6:
resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
engines: {node: '>=8'}
@@ -14467,6 +14590,10 @@ packages:
resolution: {integrity: sha512-gZLxXdjEzE/+mOstGDqR6b0EkhJ+kM6fxM6vUuckuctuVPh80Q6pw/rSZj9s4Gex9GxWtIicO1pc8DB9KZWudw==}
engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
npm-install-checks@8.0.0:
resolution: {integrity: sha512-ScAUdMpyzkbpxoNekQ3tNRdFI8SJ86wgKZSQZdUxT+bj0wVFpsEMWnkXP0twVe1gJyNF5apBWDJhhIbgrIViRA==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-normalize-package-bin@2.0.0:
resolution: {integrity: sha512-awzfKUO7v0FscrSpRoogyNm0sajikhBWpU0QMrW09AMi9n1PoKU6WaIqUzuJSQnpciZZmJ/jMZ2Egfmb/9LiWQ==}
engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
@@ -14479,6 +14606,14 @@ packages:
resolution: {integrity: sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==}
engines: {node: ^18.17.0 || >=20.5.0}
npm-normalize-package-bin@5.0.0:
resolution: {integrity: sha512-CJi3OS4JLsNMmr2u07OJlhcrPxCeOeP/4xq67aWNai6TNWWbTrlNDgl8NcFKVlcBKp18GPj+EzbNIgrBfZhsag==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-package-arg@13.0.2:
resolution: {integrity: sha512-IciCE3SY3uE84Ld8WZU23gAPPV9rIYod4F+rc+vJ7h7cwAJt9Vk6TVsK60ry7Uj3SRS3bqRRIGuTp9YVlk6WNA==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-package-arg@8.1.5:
resolution: {integrity: sha512-LhgZrg0n0VgvzVdSm1oiZworPbTxYHUJCgtsJW8mGvlDpxTM1vSJc3m5QZeUkhAHIzbz3VCHd/R4osi1L1Tg/Q==}
engines: {node: '>=10'}
@@ -14488,6 +14623,14 @@ packages:
engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
hasBin: true
npm-pick-manifest@11.0.3:
resolution: {integrity: sha512-buzyCfeoGY/PxKqmBqn1IUJrZnUi1VVJTdSSRPGI60tJdUhUoSQFhs0zycJokDdOznQentgrpf8LayEHyyYlqQ==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-registry-fetch@19.1.1:
resolution: {integrity: sha512-TakBap6OM1w0H73VZVDf44iFXsOS3h+L4wVMXmbWOQroZgFhMch0juN6XSzBNlD965yIKvWg2dfu7NSiaYLxtw==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-run-path@2.0.2:
resolution: {integrity: sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==}
engines: {node: '>=4'}
@@ -14912,6 +15055,10 @@ packages:
resolution: {integrity: sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==}
engines: {node: ^18.17.0 || >=20.5.0}
proc-log@6.1.0:
resolution: {integrity: sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==}
engines: {node: ^20.17.0 || >=22.9.0}
proc-output@1.0.9:
resolution: {integrity: sha512-XARWwM2pPNU/U8V4OuQNQLyjFqvHk1FRB5sFd1CCyT2vLLfDlLRLE4f6njcvm4Kyek1VzvF8MQRAYK1uLOlZmw==}
@@ -15422,6 +15569,10 @@ packages:
signed-varint@2.0.1:
resolution: {integrity: sha512-abgDPg1106vuZZOvw7cFwdCABddfJRz5akcCcchzTbhyhYnsG31y4AlZEgp315T7W3nQq5P4xeOm186ZiPVFzw==}
sigstore@4.1.0:
resolution: {integrity: sha512-/fUgUhYghuLzVT/gaJoeVehLCgZiUxPCPMcyVNY0lIf/cTCz58K/WTI7PefDarXxp9nUKpEwg1yyz3eSBMTtgA==}
engines: {node: ^20.17.0 || >=22.9.0}
simple-concat@1.0.1:
resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
@@ -15878,6 +16029,10 @@ packages:
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
tuf-js@4.1.0:
resolution: {integrity: sha512-50QV99kCKH5P/Vs4E2Gzp7BopNV+KzTXqWeaxrfu5IQJBOULRsTIS9seSsOVT8ZnGXzCyx55nYWAi4qJzpZKEQ==}
engines: {node: ^20.17.0 || >=22.9.0}
tunnel-agent@0.6.0:
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
@@ -16036,10 +16191,18 @@ packages:
resolution: {integrity: sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ==}
engines: {node: ^18.17.0 || >=20.5.0}
unique-filename@5.0.0:
resolution: {integrity: sha512-2RaJTAvAb4owyjllTfXzFClJ7WsGxlykkPvCr9pA//LD9goVq+m4PPAeBgNodGZ7nSrntT/auWpJ6Y5IFXcfjg==}
engines: {node: ^20.17.0 || >=22.9.0}
unique-slug@5.0.0:
resolution: {integrity: sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg==}
engines: {node: ^18.17.0 || >=20.5.0}
unique-slug@6.0.0:
resolution: {integrity: sha512-4Lup7Ezn8W3d52/xBhZBVdx323ckxa7DEvd9kPQHppTkLoJXw6ltrBCyj5pnrxj0qKDxYMJ56CoxNuFCscdTiw==}
engines: {node: ^20.17.0 || >=22.9.0}
unique-stream@2.4.0:
resolution: {integrity: sha512-V6QarSfeSgDipGA9EZdoIzu03ZDlOFkk+FbEP5cwgrZXN3iIkYR91IjU2EnM6rB835kGQsqHX8qncObTXV+6KA==}
@@ -16252,6 +16415,11 @@ packages:
engines: {node: ^18.17.0 || >=20.5.0}
hasBin: true
which@6.0.0:
resolution: {integrity: sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
wide-align@1.1.5:
resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==}
@@ -17363,7 +17531,7 @@ snapshots:
dependencies:
'@jest/fake-timers': 30.2.0
'@jest/types': 30.2.0
'@types/node': 22.19.9
'@types/node': 25.2.1
jest-mock: 30.2.0
'@jest/expect-utils@30.0.5':
@@ -17401,7 +17569,7 @@ snapshots:
dependencies:
'@jest/types': 30.2.0
'@sinonjs/fake-timers': 13.0.5
'@types/node': 22.19.9
'@types/node': 25.2.1
jest-message-util: 30.2.0
jest-mock: 30.2.0
jest-util: 30.2.0
@@ -17647,6 +17815,10 @@ snapshots:
'@nodelib/fs.scandir': 2.1.5
fastq: 1.20.1
'@npm/types@1.0.2': {}
'@npm/types@2.1.0': {}
'@npmcli/agent@3.0.0':
dependencies:
agent-base: 7.1.4
@@ -17657,10 +17829,51 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@npmcli/agent@4.0.0':
dependencies:
agent-base: 7.1.4
http-proxy-agent: 7.0.2
https-proxy-agent: 7.0.6
lru-cache: 11.2.5
socks-proxy-agent: 8.0.5
transitivePeerDependencies:
- supports-color
'@npmcli/fs@4.0.0':
dependencies:
semver: 7.7.4
'@npmcli/fs@5.0.0':
dependencies:
semver: 7.7.4
'@npmcli/git@7.0.1':
dependencies:
'@npmcli/promise-spawn': 9.0.1
ini: 6.0.0
lru-cache: 11.2.5
npm-pick-manifest: 11.0.3
proc-log: 6.1.0
promise-retry: 2.0.1
semver: 7.7.4
which: 6.0.0
'@npmcli/package-json@7.0.4':
dependencies:
'@npmcli/git': 7.0.1
glob: 13.0.1
hosted-git-info: 9.0.2
json-parse-even-better-errors: 5.0.0
proc-log: 6.1.0
semver: 7.7.4
validate-npm-package-license: 3.0.4
'@npmcli/promise-spawn@9.0.1':
dependencies:
which: 6.0.0
'@npmcli/redact@4.0.0': {}
'@pkgr/core@0.2.9': {}
'@pnpm/byline@1.0.0': {}
@@ -19069,6 +19282,38 @@ snapshots:
optionalDependencies:
'@types/node': 25.2.1
'@sigstore/bundle@4.0.0':
dependencies:
'@sigstore/protobuf-specs': 0.5.0
'@sigstore/core@3.1.0': {}
'@sigstore/protobuf-specs@0.5.0': {}
'@sigstore/sign@4.1.0':
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
make-fetch-happen: 15.0.3
proc-log: 6.1.0
promise-retry: 2.0.1
transitivePeerDependencies:
- supports-color
'@sigstore/tuf@4.0.1':
dependencies:
'@sigstore/protobuf-specs': 0.5.0
tuf-js: 4.1.0
transitivePeerDependencies:
- supports-color
'@sigstore/verify@3.1.0':
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
'@sinclair/typebox@0.27.10': {}
'@sinclair/typebox@0.34.48': {}
@@ -19114,6 +19359,13 @@ snapshots:
dependencies:
defer-to-connect: 2.0.1
'@tufjs/canonical-json@2.0.0': {}
'@tufjs/models@4.1.0':
dependencies:
'@tufjs/canonical-json': 2.0.0
minimatch: 10.1.2
'@tybys/wasm-util@0.10.1':
dependencies:
tslib: 2.8.1
@@ -19222,6 +19474,12 @@ snapshots:
dependencies:
'@types/node': 25.2.1
'@types/libnpmpublish@9.0.1':
dependencies:
'@npm/types': 1.0.2
'@types/node-fetch': 2.6.13
'@types/npm-registry-fetch': 8.0.9
'@types/lodash.kebabcase@4.1.9':
dependencies:
'@types/lodash': 4.17.23
@@ -19242,6 +19500,11 @@ snapshots:
'@types/minimist@1.2.5': {}
'@types/node-fetch@2.6.13':
dependencies:
'@types/node': 25.2.1
form-data: 4.0.5
'@types/node@12.20.55': {}
'@types/node@18.19.130':
@@ -19260,6 +19523,20 @@ snapshots:
'@types/normalize-path@3.0.2': {}
'@types/npm-package-arg@6.1.4': {}
'@types/npm-registry-fetch@8.0.9':
dependencies:
'@types/node': 25.2.1
'@types/node-fetch': 2.6.13
'@types/npm-package-arg': 6.1.4
'@types/npmlog': 7.0.0
'@types/ssri': 7.1.5
'@types/npmlog@7.0.0':
dependencies:
'@types/node': 25.2.1
'@types/object-hash@3.0.6': {}
'@types/parse-json@4.0.2': {}
@@ -20279,6 +20556,20 @@ snapshots:
tar: 7.5.7
unique-filename: 4.0.0
cacache@20.0.3:
dependencies:
'@npmcli/fs': 5.0.0
fs-minipass: 3.0.3
glob: 13.0.1
lru-cache: 11.2.5
minipass: 7.1.2
minipass-collect: 2.0.1
minipass-flush: 1.0.5
minipass-pipeline: 1.2.4
p-map: 7.0.4
ssri: 13.0.0
unique-filename: 5.0.0
cacheable-lookup@5.0.4: {}
cacheable-request@7.0.4:
@@ -22312,7 +22603,7 @@ snapshots:
'@jest/expect': 30.2.0
'@jest/test-result': 30.2.0
'@jest/types': 30.2.0
'@types/node': 22.19.9
'@types/node': 25.2.1
chalk: 4.1.2
co: 4.6.0
dedent: 1.7.1
@@ -22417,7 +22708,7 @@ snapshots:
'@jest/environment': 30.2.0
'@jest/fake-timers': 30.2.0
'@jest/types': 30.2.0
'@types/node': 22.19.9
'@types/node': 25.2.1
jest-mock: 30.2.0
jest-util: 30.2.0
jest-validate: 30.2.0
@@ -22748,7 +23039,7 @@ snapshots:
jest-worker@30.2.0:
dependencies:
'@types/node': 22.19.9
'@types/node': 25.2.1
'@ungap/structured-clone': 1.3.0
jest-util: 30.2.0
merge-stream: 2.0.0
@@ -22787,6 +23078,8 @@ snapshots:
json-parse-even-better-errors@2.3.1: {}
json-parse-even-better-errors@5.0.0: {}
json-schema-traverse@0.4.1: {}
json-schema-traverse@1.0.0: {}
@@ -22822,7 +23115,7 @@ snapshots:
lodash.isstring: 4.0.1
lodash.once: 4.1.1
ms: 2.1.3
semver: 7.7.2
semver: 7.7.4
jsprim@2.0.2:
dependencies:
@@ -22876,6 +23169,19 @@ snapshots:
prelude-ls: 1.2.1
type-check: 0.4.0
libnpmpublish@11.1.3:
dependencies:
'@npmcli/package-json': 7.0.4
ci-info: 4.4.0
npm-package-arg: 13.0.2
npm-registry-fetch: 19.1.1
proc-log: 6.1.0
semver: 7.7.4
sigstore: 4.1.0
ssri: 13.0.0
transitivePeerDependencies:
- supports-color
lines-and-columns@1.2.4: {}
load-json-file@6.2.0:
@@ -23031,6 +23337,22 @@ snapshots:
transitivePeerDependencies:
- supports-color
make-fetch-happen@15.0.3:
dependencies:
'@npmcli/agent': 4.0.0
cacache: 20.0.3
http-cache-semantics: 4.2.0
minipass: 7.1.2
minipass-fetch: 5.0.1
minipass-flush: 1.0.5
minipass-pipeline: 1.2.4
negotiator: 1.0.0
proc-log: 6.1.0
promise-retry: 2.0.1
ssri: 13.0.0
transitivePeerDependencies:
- supports-color
makeerror@1.0.12:
dependencies:
tmpl: 1.0.5
@@ -23214,6 +23536,14 @@ snapshots:
optionalDependencies:
encoding: 0.1.13
minipass-fetch@5.0.1:
dependencies:
minipass: 7.1.2
minipass-sized: 2.0.0
minizlib: 3.1.0
optionalDependencies:
encoding: 0.1.13
minipass-flush@1.0.5:
dependencies:
minipass: 3.3.6
@@ -23226,6 +23556,10 @@ snapshots:
dependencies:
minipass: 3.3.6
minipass-sized@2.0.0:
dependencies:
minipass: 7.1.2
minipass@3.3.6:
dependencies:
yallist: 4.0.0
@@ -23443,12 +23777,25 @@ snapshots:
dependencies:
npm-normalize-package-bin: 2.0.0
npm-install-checks@8.0.0:
dependencies:
semver: 7.7.4
npm-normalize-package-bin@2.0.0: {}
npm-normalize-package-bin@3.0.1: {}
npm-normalize-package-bin@4.0.0: {}
npm-normalize-package-bin@5.0.0: {}
npm-package-arg@13.0.2:
dependencies:
hosted-git-info: 9.0.2
proc-log: 6.1.0
semver: 7.7.4
validate-npm-package-name: 7.0.2
npm-package-arg@8.1.5:
dependencies:
hosted-git-info: 4.1.0
@@ -23462,6 +23809,26 @@ snapshots:
npm-bundled: 2.0.1
npm-normalize-package-bin: 2.0.0
npm-pick-manifest@11.0.3:
dependencies:
npm-install-checks: 8.0.0
npm-normalize-package-bin: 5.0.0
npm-package-arg: 13.0.2
semver: 7.7.4
npm-registry-fetch@19.1.1:
dependencies:
'@npmcli/redact': 4.0.0
jsonparse: 1.3.1
make-fetch-happen: 15.0.3
minipass: 7.1.2
minipass-fetch: 5.0.1
minizlib: 3.1.0
npm-package-arg: 13.0.2
proc-log: 6.1.0
transitivePeerDependencies:
- supports-color
npm-run-path@2.0.2:
dependencies:
path-key: 2.0.1
@@ -23888,6 +24255,8 @@ snapshots:
proc-log@5.0.0: {}
proc-log@6.1.0: {}
proc-output@1.0.9: {}
process-nextick-args@2.0.1: {}
@@ -24456,6 +24825,17 @@ snapshots:
dependencies:
varint: 5.0.0
sigstore@4.1.0:
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
'@sigstore/sign': 4.1.0
'@sigstore/tuf': 4.0.1
'@sigstore/verify': 3.1.0
transitivePeerDependencies:
- supports-color
simple-concat@1.0.1: {}
simple-get@4.0.1:
@@ -24960,6 +25340,14 @@ snapshots:
tslib@2.8.1: {}
tuf-js@4.1.0:
dependencies:
'@tufjs/models': 4.1.0
debug: 4.4.3
make-fetch-happen: 15.0.3
transitivePeerDependencies:
- supports-color
tunnel-agent@0.6.0:
dependencies:
safe-buffer: 5.2.1
@@ -25109,10 +25497,18 @@ snapshots:
dependencies:
unique-slug: 5.0.0
unique-filename@5.0.0:
dependencies:
unique-slug: 6.0.0
unique-slug@5.0.0:
dependencies:
imurmurhash: 0.1.4
unique-slug@6.0.0:
dependencies:
imurmurhash: 0.1.4
unique-stream@2.4.0:
dependencies:
json-stable-stringify-without-jsonify: 1.0.1
@@ -25446,6 +25842,10 @@ snapshots:
dependencies:
isexe: 3.1.2
which@6.0.0:
dependencies:
isexe: 3.1.2
wide-align@1.1.5:
dependencies:
string-width: 1.0.2

View File

@@ -70,6 +70,7 @@ catalog:
'@commitlint/prompt-cli': ^19.8.1
'@eslint/js': ^9.18.0
'@jest/globals': 30.0.5
'@npm/types': ^2.1.0
'@pnpm/byline': ^1.0.0
'@pnpm/colorize-semver-diff': ^1.0.1
'@pnpm/config.env-replace': ^3.0.2
@@ -107,6 +108,7 @@ catalog:
'@types/isexe': 2.0.2
'@types/jest': ^30.0.0
'@types/js-yaml': ^4.0.9
'@types/libnpmpublish': ^9.0.1
'@types/lodash.kebabcase': 4.1.9
'@types/lodash.throttle': 4.1.7
'@types/micromatch': ^4.0.9
@@ -213,6 +215,7 @@ catalog:
json5: ^2.2.3
keyv: 4.5.4
lcov-result-merger: ^3.3.0
libnpmpublish: ^11.1.3
load-json-file: ^7.0.1
lodash.kebabcase: ^4.1.1
lodash.throttle: 4.1.1

View File

@@ -41,21 +41,23 @@
"@pnpm/config": "workspace:*",
"@pnpm/error": "workspace:*",
"@pnpm/exportable-manifest": "workspace:*",
"@pnpm/fetch": "workspace:*",
"@pnpm/fs.packlist": "workspace:*",
"@pnpm/git-utils": "workspace:*",
"@pnpm/lifecycle": "workspace:*",
"@pnpm/network.auth-header": "workspace:*",
"@pnpm/package-bins": "workspace:*",
"@pnpm/pick-registry-for-package": "workspace:*",
"@pnpm/plugin-commands-env": "workspace:*",
"@pnpm/resolver-base": "workspace:*",
"@pnpm/run-npm": "workspace:*",
"@pnpm/sort-packages": "workspace:*",
"@pnpm/types": "workspace:*",
"@zkochan/rimraf": "catalog:",
"chalk": "catalog:",
"ci-info": "catalog:",
"enquirer": "catalog:",
"execa": "catalog:",
"libnpmpublish": "catalog:",
"normalize-registry-url": "catalog:",
"p-filter": "catalog:",
"p-limit": "catalog:",
"ramda": "catalog:",
@@ -82,6 +84,7 @@
"@pnpm/workspace.filter-packages-from-dir": "workspace:*",
"@types/cross-spawn": "catalog:",
"@types/is-windows": "catalog:",
"@types/libnpmpublish": "catalog:",
"@types/proxyquire": "catalog:",
"@types/ramda": "catalog:",
"@types/sinon": "catalog:",

View File

@@ -0,0 +1,63 @@
import { PnpmError } from '@pnpm/error'
import { type PackResult } from './pack.js'
/**
 * Data captured about a failed publish attempt: the pack result plus the
 * HTTP response details of the registry's rejection.
 */
interface PublishErrorProperties<Pack> {
  // Result of packing the package (must expose `publishedManifest`).
  readonly pack: Pack
  // HTTP status code of the failed registry response.
  readonly status: number
  // HTTP status text; may be an empty string.
  readonly statusText: string
  // Response body already read into a string (empty when unreadable).
  readonly text: string
}
/**
 * Error thrown when the registry rejects a publish request.
 *
 * The message embeds the package name/version, the HTTP status line, and the
 * response body: a multi-line body is rendered as an indented "Details"
 * section, a single-line body is appended inline after the status.
 */
export class FailedToPublishError<Pack extends Pick<PackResult, 'publishedManifest'>> extends PnpmError implements PublishErrorProperties<Pack> {
  readonly pack: Pack
  readonly status: number
  readonly statusText: string
  readonly text: string
  constructor (opts: PublishErrorProperties<Pack>) {
    const { pack, status, statusText, text } = opts
    const { name, version } = pack.publishedManifest
    // Fall back to the bare status code when the response has no status text.
    const statusDisplay = statusText ? `${status} ${statusText}` : status
    const trimmedText = text.trim()
    let message = `Failed to publish package ${name}@${version} (status ${statusDisplay})`
    if (trimmedText.includes('\n')) {
      // Multi-line body: list every line, indented, under a "Details" header.
      message += '\nDetails:\n'
      for (const line of text.trimEnd().split('\n')) {
        message += ` ${line}\n`
      }
    } else if (trimmedText) {
      // Single-line non-empty body: append it inline.
      message += `: ${trimmedText}`
    }
    // The message must be fully built before super() — `this` is unusable earlier.
    super('FAILED_TO_PUBLISH', message)
    this.pack = pack
    this.status = status
    this.statusText = statusText
    this.text = text
  }
}
/**
 * Build a {@link FailedToPublishError} from a fetch-style response.
 *
 * The response body is read best-effort: if reading it fails for any reason,
 * the error is created with an empty body text instead of propagating the
 * read failure.
 */
export async function createFailedToPublishError<Pack extends Pick<PackResult, 'publishedManifest'>> (
  pack: Pack,
  fetchResponse: FetchResponse
): Promise<FailedToPublishError<Pack>> {
  const text = await Promise.resolve()
    .then(() => fetchResponse.text())
    .catch(() => '')
  return new FailedToPublishError({
    pack,
    status: fetchResponse.status,
    statusText: fetchResponse.statusText,
    text,
  })
}
/**
 * Minimal fetch-Response-like surface consumed by {@link createFailedToPublishError}:
 * the status line plus a `text()` body reader that may be sync or async.
 */
interface FetchResponse {
  readonly status: number
  readonly statusText: string
  readonly text: (this: FetchResponse) => string | Promise<string>
}

View File

@@ -0,0 +1,22 @@
/**
 * Render an unknown thrown value as a short human-readable string.
 *
 * For object-like errors, prefers `code` (falling back to `name`) combined
 * with `message`; anything else is serialized as JSON.
 *
 * @param error - Any thrown value.
 * @returns A description such as `ERR_FOO: something broke`; never `undefined`
 *   and never throws, even for circular structures.
 */
export function displayError (error: unknown): string {
  if (typeof error !== 'object' || !error) return safeStringify(error)
  let code: string | undefined
  let body: string | undefined
  if ('code' in error && typeof error.code === 'string') {
    code = error.code
  } else if ('name' in error && typeof error.name === 'string') {
    code = error.name
  }
  if ('message' in error && typeof error.message === 'string') {
    body = error.message
  }
  if (code && body) return `${code}: ${body}`
  if (code) return code
  if (body) return body
  return safeStringify(error)
}

// JSON.stringify returns undefined for functions/symbols/undefined and throws
// on circular references or BigInt; fall back to String() in both cases so the
// declared `string` return type of displayError actually holds.
function safeStringify (value: unknown): string {
  try {
    return JSON.stringify(value) ?? String(value)
  } catch {
    return String(value)
  }
}

View File

@@ -0,0 +1,19 @@
import { sync as execa } from 'execa'
/** Options for {@link executeTokenHelper}. */
export interface ExecuteTokenHelperOptions {
  // Sink for warning messages; the helper's stderr lines are forwarded here.
  globalWarn: (message: string) => void
}
/**
 * Run a token-helper command synchronously and return its trimmed stdout.
 * Any stderr output is forwarded line by line through `opts.globalWarn`.
 */
export function executeTokenHelper ([cmd, ...args]: [string, ...string[]], opts: ExecuteTokenHelperOptions): string {
  const { stdout, stderr } = execa(cmd, args, {
    stdio: 'pipe',
  })
  if (stderr.trim() !== '') {
    stderr.trimEnd().split('\n').forEach(line => {
      opts.globalWarn(`(tokenHelper stderr) ${line}`)
    })
  }
  return stdout.trim()
}

View File

@@ -0,0 +1,94 @@
import fs from 'fs'
import { createGunzip } from 'zlib'
import path from 'path'
import tar from 'tar-stream'
import { PnpmError } from '@pnpm/error'
import { type ExportedManifest } from '@pnpm/exportable-manifest'
// Archive filename suffixes recognized as packed npm tarballs.
const TARBALL_SUFFIXES = ['.tar.gz', '.tgz'] as const
export type TarballSuffix = typeof TARBALL_SUFFIXES[number]
export type TarballPath = `${string}${TarballSuffix}`
/** Whether the given path looks like a packed tarball (ends in `.tar.gz` or `.tgz`). */
export const isTarballPath = (filePath: string): filePath is TarballPath => {
  for (const suffix of TARBALL_SUFFIXES) {
    if (filePath.endsWith(suffix)) return true
  }
  return false
}
/**
 * Read `package/package.json` out of a gzipped npm tarball and return it parsed.
 *
 * The archive is streamed (read stream → gunzip → tar extractor) so it is never
 * held in memory in full; all three streams are destroyed as soon as the
 * manifest entry has been read or any error occurs.
 *
 * @param tarballPath - Path to a `.tgz` / `.tar.gz` archive.
 * @returns The parsed manifest, typed as `Output` (default {@link ExportedManifest}).
 * @throws {PublishArchiveMissingManifestError} when the archive contains no
 *   `package/package.json` entry.
 * @throws Whatever `JSON.parse` throws for invalid manifest JSON, plus any
 *   read/gunzip/untar stream error.
 */
export async function extractManifestFromPacked<Output = ExportedManifest> (tarballPath: TarballPath): Promise<Output> {
  const extract = tar.extract()
  const gunzip = createGunzip()
  const tarballStream = fs.createReadStream(tarballPath)
  // Destroy all three streams exactly once, whether we succeed or fail.
  let cleanedUp = false
  function cleanup (): void {
    if (cleanedUp) return
    cleanedUp = true
    extract.destroy()
    gunzip.destroy()
    tarballStream.destroy()
  }
  const promise = new Promise<string>((resolve, reject) => {
    function handleError (error: unknown): void {
      cleanup()
      reject(error)
    }
    tarballStream.once('error', handleError)
    gunzip.once('error', handleError)
    let manifestFound = false
    extract.on('entry', (header, stream, next) => {
      // Normalize separators so Windows-style entry names still compare equal.
      const normalizedPath = path.normalize(header.name).replaceAll('\\', '/')
      if (normalizedPath !== 'package/package.json') {
        // Not the manifest: drain this entry, then ask for the next one.
        stream.once('end', next)
        stream.resume()
        return
      }
      manifestFound = true
      const chunks: Buffer[] = []
      stream.on('data', (chunk: Buffer) => {
        chunks.push(chunk)
      })
      stream.once('end', () => {
        try {
          const text = Buffer.concat(chunks).toString()
          // Manifest fully read — stop processing the rest of the archive.
          cleanup()
          resolve(text)
        } catch (error) {
          handleError(error)
        }
      })
      stream.once('error', handleError)
    })
    extract.once('finish', () => {
      // Whole archive consumed without finding the manifest.
      cleanup()
      if (!manifestFound) {
        reject(new PublishArchiveMissingManifestError(tarballPath))
      }
    })
    extract.once('error', handleError)
  })
  tarballStream.pipe(gunzip).pipe(extract)
  return JSON.parse(await promise)
}
/** Thrown when a packed archive lacks the mandatory `package/package.json` entry. */
export class PublishArchiveMissingManifestError extends PnpmError {
  // Path of the offending archive, kept for programmatic inspection.
  readonly tarballPath: string
  constructor (tarballPath: string) {
    super('PUBLISH_ARCHIVE_MISSING_MANIFEST', `The archive ${tarballPath} does not contain package/package.json`)
    this.tarballPath = tarballPath
  }
}

View File

@@ -0,0 +1,158 @@
import { PnpmError } from '@pnpm/error'
import { displayError } from '../displayError.js'
import { type PublishPackedPkgOptions } from '../publishPackedPkg.js'
import { SHARED_CONTEXT } from './utils/shared-context.js'
/** Shape of the fetch options used for the OIDC token-exchange POST request. */
export interface AuthTokenFetchOptions {
  body?: ''
  headers: {
    Accept: 'application/json'
    Authorization: `Bearer ${string}`
    'Content-Length': '0'
  }
  method?: 'POST'
  // Retry policy forwarded to the underlying fetch implementation.
  retry?: {
    factor?: number
    maxTimeout?: number
    minTimeout?: number
    randomize?: boolean
    retries?: number
  }
  timeout?: number
}
/** Minimal response surface consumed by {@link fetchAuthToken}. */
export interface AuthTokenFetchResponse {
  readonly json: (this: this) => Promise<unknown>
  readonly ok: boolean
  readonly status: number
}
/** Injectable dependencies; defaults to `SHARED_CONTEXT` (overridable in tests). */
export interface AuthTokenContext {
  fetch: (url: string, options: AuthTokenFetchOptions) => Promise<AuthTokenFetchResponse>
}
/** Fetch retry/timeout settings taken from the publish command options. */
export type AuthTokenOptions = Pick<PublishPackedPkgOptions,
| 'fetchRetries'
| 'fetchRetryFactor'
| 'fetchRetryMaxtimeout'
| 'fetchRetryMintimeout'
| 'fetchTimeout'
>
/** Parameters for {@link fetchAuthToken}. */
export interface AuthTokenParams {
  context?: AuthTokenContext
  // OIDC idToken obtained from the CI provider (see getIdToken).
  idToken: string
  options?: AuthTokenOptions
  packageName: string
  registry: string
}
/**
* Retrieve an `authToken` from the registry.
*
* @throws instances of subclasses of {@link AuthTokenError} which can be converted into warnings and skipped.
*
* @see https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect for GitHub Actions OIDC.
* @see https://api-docs.npmjs.com/#tag/OIDC/operation/exchangeOidcToken for NPM Registry OIDC.
* @see https://github.com/npm/cli/blob/7d900c46/lib/utils/oidc.js#L112-L142 for npm's implementation.
* @see https://github.com/yarnpkg/berry/blob/bafbef55/packages/plugin-npm/sources/npmHttpUtils.ts#L626-L641 for yarn's implementation.
*/
export async function fetchAuthToken ({
context: {
fetch,
} = SHARED_CONTEXT,
options,
idToken,
packageName,
registry,
}: AuthTokenParams): Promise<string> {
const escapedPackageName = encodeURIComponent(packageName)
let response: AuthTokenFetchResponse
try {
response = await fetch(
new URL(`/-/npm/v1/oidc/token/exchange/package/${escapedPackageName}`, registry).href,
{
body: '',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${idToken}`,
'Content-Length': '0',
},
method: 'POST',
retry: {
factor: options?.fetchRetryFactor,
maxTimeout: options?.fetchRetryMaxtimeout,
minTimeout: options?.fetchRetryMintimeout,
retries: options?.fetchRetries,
},
timeout: options?.fetchTimeout,
}
)
} catch (error) {
throw new AuthTokenFetchError(error, packageName, registry)
}
if (!response.ok) {
const error = await response.json().catch(() => undefined)
throw new AuthTokenExchangeError(error as AuthTokenExchangeError['errorResponse'], response.status)
}
let json: unknown
try {
json = await response.json()
} catch (error) {
throw new AuthTokenJsonInterruptedError(error)
}
if (!json || typeof json !== 'object' || !('token' in json) || typeof json.token !== 'string') {
throw new AuthTokenMalformedJsonError(json, packageName, registry)
}
return json.token
}
/** Base class for all errors raised while exchanging an idToken for an authToken. */
export abstract class AuthTokenError extends PnpmError {}
/** The token-exchange HTTP request itself failed (network / fetch rejection). */
export class AuthTokenFetchError extends AuthTokenError {
  // The underlying error thrown by fetch.
  readonly errorSource: unknown
  readonly packageName: string
  readonly registry: string
  constructor (error: unknown, packageName: string, registry: string) {
    super('AUTH_TOKEN_FETCH', `Failed to fetch authToken for package ${packageName} from registry ${registry}: ${displayError(error)}`)
    this.errorSource = error
    this.packageName = packageName
    this.registry = registry
  }
}
/** The registry answered the exchange request with a non-OK status. */
export class AuthTokenExchangeError extends AuthTokenError {
  // Parsed error payload from the registry, when it could be read.
  readonly errorResponse?: { body?: { message?: string } }
  readonly httpStatus: number
  constructor (errorResponse: AuthTokenExchangeError['errorResponse'], httpStatus: number) {
    const message = errorResponse?.body?.message ?? 'Unknown error'
    super('AUTH_TOKEN_EXCHANGE', `Failed token exchange request with body message: ${message} (status code ${httpStatus})`)
    this.errorResponse = errorResponse
    this.httpStatus = httpStatus
  }
}
/** Reading/parsing the JSON body of an otherwise-OK response failed midway. */
export class AuthTokenJsonInterruptedError extends AuthTokenError {
  // The underlying error raised while reading the JSON body.
  readonly errorSource: unknown
  constructor (error: unknown) {
    super('AUTH_TOKEN_JSON_INTERRUPTED', `Fetching of authToken JSON interrupted: ${displayError(error)}`)
    this.errorSource = error
  }
}
/** The response parsed as JSON but did not contain a string `token` field. */
export class AuthTokenMalformedJsonError extends AuthTokenError {
  // The JSON value that failed validation, kept for diagnostics.
  readonly malformedJsonResponse: unknown
  readonly packageName: string
  readonly registry: string
  constructor (malformedJsonResponse: unknown, packageName: string, registry: string) {
    super('AUTH_TOKEN_MALFORMED_JSON', `Failed to fetch authToken for package ${packageName} from registry ${registry} due to malformed JSON response`)
    this.malformedJsonResponse = malformedJsonResponse
    this.packageName = packageName
    this.registry = registry
  }
}

View File

@@ -0,0 +1,165 @@
import { PnpmError } from '@pnpm/error'
import { displayError } from '../displayError.js'
import { type PublishPackedPkgOptions } from '../publishPackedPkg.js'
import { SHARED_CONTEXT } from './utils/shared-context.js'
/** Clock abstraction (injectable for tests); only `now()` is needed. */
export interface IdTokenDate {
  now: (this: this) => number
}
/** Subset of ci-info flags consulted when deciding where to get an idToken. */
export interface IdTokenCIInfo {
  GITHUB_ACTIONS?: boolean
  GITLAB?: boolean
}
/** Environment variables read while resolving the idToken. */
export interface IdTokenEnv extends NodeJS.ProcessEnv {
  // GitHub Actions OIDC endpoint credentials (set when `id-token: write` is granted).
  ACTIONS_ID_TOKEN_REQUEST_TOKEN?: string
  ACTIONS_ID_TOKEN_REQUEST_URL?: string
  // Pre-supplied idToken; short-circuits the GitHub OIDC request when present.
  NPM_ID_TOKEN?: string
}
/** Shape of the fetch options used for the GitHub OIDC GET request. */
export interface IdTokenFetchOptions {
  body?: null
  headers: {
    Accept: 'application/json'
    Authorization: `Bearer ${string}`
  }
  method?: 'GET'
  // Retry policy forwarded to the underlying fetch implementation.
  retry?: {
    factor?: number
    maxTimeout?: number
    minTimeout?: number
    randomize?: boolean
    retries?: number
  }
  timeout?: number
}
/** Minimal response surface consumed by {@link getIdToken}. */
export interface IdTokenFetchResponse {
  readonly json: (this: this) => Promise<unknown>
  readonly ok: boolean
  readonly status: number
}
/** Injectable dependencies; defaults to `SHARED_CONTEXT` (overridable in tests). */
export interface IdTokenContext {
  Date: IdTokenDate
  ciInfo: IdTokenCIInfo
  fetch: (url: string, options: IdTokenFetchOptions) => Promise<IdTokenFetchResponse>
  globalInfo: (message: string) => void
  process: { env?: IdTokenEnv }
}
/** Fetch retry/timeout settings taken from the publish command options. */
export type IdTokenOptions = Pick<PublishPackedPkgOptions,
| 'fetchRetries'
| 'fetchRetryFactor'
| 'fetchRetryMaxtimeout'
| 'fetchRetryMintimeout'
| 'fetchTimeout'
>
/** Parameters for {@link getIdToken}. */
export interface IdTokenParams {
  context?: IdTokenContext
  options?: IdTokenOptions
  registry: string
}
/**
 * Retrieve an `idToken` from the CI environment.
 *
 * Resolution order:
 *  1. Outside GitHub Actions / GitLab CI → `undefined` (no OIDC available).
 *  2. `NPM_ID_TOKEN` env var, if set, is returned directly.
 *  3. On GitHub Actions, the token is fetched from the runner's OIDC endpoint
 *     (requires the `id-token: write` workflow permission); GitLab without
 *     `NPM_ID_TOKEN` yields `undefined`.
 *
 * @throws instances of subclasses of {@link IdTokenError} which can be converted into warnings and skipped.
 *
 * @see https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect for GitHub Actions OIDC.
 * @see https://github.com/npm/cli/blob/7d900c46/lib/utils/oidc.js#L37-L110 for npm's implementation
 * @see https://github.com/yarnpkg/berry/blob/bafbef55/packages/plugin-npm/sources/npmHttpUtils.ts#L594-L624 for yarn's implementation
 */
export async function getIdToken ({
  context: {
    Date,
    ciInfo: { GITHUB_ACTIONS, GITLAB },
    fetch,
    globalInfo,
    process: { env },
  } = SHARED_CONTEXT,
  options,
  registry,
}: IdTokenParams): Promise<string | undefined> {
  if (!GITHUB_ACTIONS && !GITLAB) return undefined
  if (env?.NPM_ID_TOKEN) return env.NPM_ID_TOKEN
  // GitLab has no OIDC-endpoint fallback here; only GitHub Actions continues.
  if (!GITHUB_ACTIONS) return undefined
  if (!env?.ACTIONS_ID_TOKEN_REQUEST_TOKEN || !env?.ACTIONS_ID_TOKEN_REQUEST_URL) {
    throw new IdTokenGitHubWorkflowIncorrectPermissionsError()
  }
  // The audience binds the issued token to the target registry's host.
  const parsedRegistry = new URL(registry)
  const audience = `npm:${parsedRegistry.hostname}` as const
  const url = new URL(env.ACTIONS_ID_TOKEN_REQUEST_URL)
  url.searchParams.append('audience', audience)
  // Timed so the request can be logged with its duration below.
  const startTime = Date.now()
  // NOTE(review): unlike fetchAuthToken, a rejected fetch here is not wrapped
  // in an IdTokenError subclass — confirm whether raw fetch errors should
  // propagate to the caller.
  const response = await fetch(url.href, {
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
    },
    method: 'GET',
    retry: {
      factor: options?.fetchRetryFactor,
      maxTimeout: options?.fetchRetryMaxtimeout,
      minTimeout: options?.fetchRetryMintimeout,
      retries: options?.fetchRetries,
    },
    timeout: options?.fetchTimeout,
  })
  const elapsedTime = Date.now() - startTime
  globalInfo(`GET ${url.href} ${response.status} ${elapsedTime}ms`)
  if (!response.ok) {
    throw new IdTokenGitHubInvalidResponseError()
  }
  let json: unknown
  try {
    json = await response.json()
  } catch (error) {
    throw new IdTokenGitHubJsonInterruptedError(error)
  }
  if (!json || typeof json !== 'object' || !('value' in json) || typeof json.value !== 'string') {
    throw new IdTokenGitHubJsonInvalidValueError(json)
  }
  return json.value
}
/** Base class for all errors raised while resolving a CI OIDC idToken. */
export abstract class IdTokenError extends PnpmError {}
/** The GitHub workflow lacks the `id-token: write` permission (endpoint env vars missing). */
export class IdTokenGitHubWorkflowIncorrectPermissionsError extends IdTokenError {
  constructor () {
    super('ID_TOKEN_GITHUB_WORKFLOW_INCORRECT_PERMISSIONS', 'Incorrect permissions for idToken within GitHub Workflows')
  }
}
/** GitHub's OIDC endpoint answered with a non-OK status. */
export class IdTokenGitHubInvalidResponseError extends IdTokenError {
  constructor () {
    super('ID_TOKEN_GITHUB_INVALID_RESPONSE', 'Failed to fetch idToken from GitHub: received an invalid response')
  }
}
/** Reading/parsing the idToken JSON body from GitHub's OIDC endpoint failed midway. */
export class IdTokenGitHubJsonInterruptedError extends IdTokenError {
  // The underlying error raised while reading the JSON body.
  readonly errorSource: unknown
  constructor (error: unknown) {
    // Code aligned with sibling codes (no redundant `_ERROR` suffix) and with
    // the analogous AUTH_TOKEN_JSON_INTERRUPTED code in fetchAuthToken.
    super('ID_TOKEN_GITHUB_JSON_INTERRUPTED', `Fetching of idToken JSON interrupted: ${displayError(error)}`)
    this.errorSource = error
  }
}
/** GitHub's OIDC response parsed as JSON but lacked a string `value` field. */
export class IdTokenGitHubJsonInvalidValueError extends IdTokenError {
  // The JSON value that failed validation, kept for diagnostics.
  readonly jsonResponse: unknown
  constructor (jsonResponse: unknown) {
    super('ID_TOKEN_GITHUB_JSON_INVALID_VALUE', 'Failed to fetch idToken from GitHub: missing or invalid value')
    this.jsonResponse = jsonResponse
  }
}

View File

@@ -0,0 +1,179 @@
import { PnpmError } from '@pnpm/error'
import { type PublishPackedPkgOptions } from '../publishPackedPkg.js'
import { SHARED_CONTEXT } from './utils/shared-context.js'
/** Subset of ci-info flags consulted when determining provenance eligibility. */
export interface ProvenanceCIInfo {
  GITHUB_ACTIONS?: boolean
  GITLAB?: boolean
}
/** Environment variables read while determining provenance. */
export interface ProvenanceEnv extends NodeJS.ProcessEnv {
  // Required on GitLab for sigstore-based provenance.
  SIGSTORE_ID_TOKEN?: string
}
/** Shape of the fetch options used for the package-visibility GET request. */
export interface ProvenanceFetchOptions {
  headers: {
    Accept: 'application/json'
    Authorization: `Bearer ${string}`
  }
  method: 'GET'
  // Retry policy forwarded to the underlying fetch implementation.
  retry?: {
    factor?: number
    maxTimeout?: number
    minTimeout?: number
    randomize?: boolean
    retries?: number
  }
  timeout?: number
}
/** Minimal response surface consumed by {@link determineProvenance}. */
export interface ProvenanceFetchResponse {
  readonly json: (this: this) => Promise<unknown>
  readonly ok: boolean
  readonly status: number
}
/** Injectable dependencies; defaults to `SHARED_CONTEXT` (overridable in tests). */
export interface ProvenanceContext {
  ciInfo: ProvenanceCIInfo
  fetch: (url: URL, options: ProvenanceFetchOptions) => Promise<ProvenanceFetchResponse>
  process: { env?: ProvenanceEnv }
}
/** Fetch retry/timeout settings taken from the publish command options. */
export type ProvenanceOptions = Pick<PublishPackedPkgOptions,
| 'fetchRetries'
| 'fetchRetryFactor'
| 'fetchRetryMaxtimeout'
| 'fetchRetryMintimeout'
| 'fetchTimeout'
>
/** Parameters for {@link determineProvenance}. */
export interface ProvenanceParams {
  // Registry authToken used for the visibility lookup.
  authToken: string
  context?: ProvenanceContext
  // CI OIDC idToken; its JWT payload carries the repo/project visibility claims.
  idToken: string
  options?: ProvenanceOptions,
  packageName: string
  registry: string
}
/**
 * Determine `provenance` for a package from the CI context and the visibility of the package.
 *
 * @throws instances of subclasses of {@link ProvenanceError} which can be converted into warnings and skipped.
 *
 * @see https://github.com/npm/cli/blob/7d900c46/lib/utils/oidc.js#L145-L164 for npm's implementation.
 */
export async function determineProvenance ({
  authToken,
  idToken,
  options,
  packageName,
  registry,
  context: {
    ciInfo: { GITHUB_ACTIONS, GITLAB },
    fetch,
    process: { env },
  } = SHARED_CONTEXT,
}: ProvenanceParams): Promise<boolean | undefined> {
  // A JWT is three base64url segments separated by dots; only the payload is needed.
  const [headerB64, payloadB64] = idToken.split('.')
  if (!headerB64 || !payloadB64) {
    throw new ProvenanceMalformedIdTokenError(idToken)
  }
  interface Payload {
    repository_visibility?: unknown
    project_visibility?: unknown
  }
  const payloadJson = Buffer.from(payloadB64, 'base64url').toString('utf8')
  // A payload that is not a JSON object means the token is not a usable JWT —
  // surface that as the documented ProvenanceMalformedIdTokenError rather than
  // letting a raw SyntaxError/TypeError escape.
  let parsedPayload: unknown
  try {
    parsedPayload = JSON.parse(payloadJson)
  } catch {
    throw new ProvenanceMalformedIdTokenError(idToken)
  }
  if (parsedPayload === null || typeof parsedPayload !== 'object') {
    throw new ProvenanceMalformedIdTokenError(idToken)
  }
  const payload = parsedPayload as Payload
  // Provenance is only attempted for public repos/projects on supported CI providers.
  if (
    (!GITHUB_ACTIONS || payload.repository_visibility !== 'public') &&
    (!GITLAB || payload.project_visibility !== 'public' || !env?.SIGSTORE_ID_TOKEN)
  ) {
    throw new ProvenanceInsufficientInformationError()
  }
  // Scoped package names contain a slash, so the name must be URL-escaped.
  const escapedPackageName = encodeURIComponent(packageName)
  const visibilityUrl = new URL(`/-/package/${escapedPackageName}/visibility`, registry)
  const response = await fetch(visibilityUrl, {
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${authToken}`,
    },
    method: 'GET',
    retry: {
      factor: options?.fetchRetryFactor,
      maxTimeout: options?.fetchRetryMaxtimeout,
      minTimeout: options?.fetchRetryMintimeout,
      retries: options?.fetchRetries,
    },
    timeout: options?.fetchTimeout,
  })
  if (!response.ok) {
    throw await ProvenanceFailedToFetchVisibilityError.createErrorFromFetchResponse(response, packageName, registry)
  }
  // Only packages the registry reports as public get provenance enabled.
  const visibility = await response.json() as { public?: boolean } | undefined
  if (visibility?.public) return true
  return undefined
}
/** Base class for provenance-determination failures; callers catch this to downgrade to a warning. */
export abstract class ProvenanceError extends PnpmError {}
/** Thrown when the received idToken does not look like a three-part JWT. */
export class ProvenanceMalformedIdTokenError extends ProvenanceError {
  // The offending token, kept for diagnostics.
  readonly idToken: string
  constructor (idToken: string) {
    super('PROVENANCE_MALFORMED_ID_TOKEN', 'The received idToken is not a valid JWT')
    this.idToken = idToken
  }
}
/** Thrown when neither the GitHub nor the GitLab conditions for provenance are met. */
export class ProvenanceInsufficientInformationError extends ProvenanceError {
  constructor () {
    super('PROVENANCE_INSUFFICIENT_INFORMATION', 'The environment does not provide enough information to determine visibility')
  }
}
/**
 * Thrown when the registry's package-visibility endpoint responds with a
 * non-OK status while determining provenance.
 */
export class ProvenanceFailedToFetchVisibilityError extends ProvenanceError {
  // Parsed JSON error body, when the registry returned one.
  readonly errorResponse?: { code?: string, message?: string }
  readonly packageName: string
  readonly registry: string
  readonly status: number
  constructor (
    errorResponse: ProvenanceFailedToFetchVisibilityError['errorResponse'],
    status: number,
    packageName: string,
    registry: string
  ) {
    // Compose "code: message", whichever half is available, or a generic fallback.
    const reason = [errorResponse?.code, errorResponse?.message].filter(Boolean).join(': ') || 'an unknown error'
    super(
      'PROVENANCE_FAILED_TO_FETCH_VISIBILITY',
      `Failed to fetch visibility for package ${packageName} from registry ${registry} due to ${reason} (status code ${status})`
    )
    this.errorResponse = errorResponse
    this.status = status
    this.packageName = packageName
    this.registry = registry
  }

  /** Build the error from a fetch response, tolerating an unparsable body. */
  static async createErrorFromFetchResponse (response: ProvenanceFetchResponse, packageName: string, registry: string): Promise<ProvenanceFailedToFetchVisibilityError> {
    let errorResponse: ProvenanceFailedToFetchVisibilityError['errorResponse']
    try {
      errorResponse = await response.json() as typeof errorResponse
    } catch {}
    return new ProvenanceFailedToFetchVisibilityError(errorResponse, response.status, packageName, registry)
  }
}

View File

@@ -0,0 +1,19 @@
import ciInfo from 'ci-info'
import { fetch } from '@pnpm/fetch'
import { globalInfo } from '@pnpm/logger'
import { type AuthTokenContext } from '../authToken.js'
import { type IdTokenContext } from '../idToken.js'
import { type ProvenanceContext } from '../provenance.js'
// Union of every dependency the OIDC helpers may inject; tests substitute partial contexts.
type SharedContext =
  & AuthTokenContext
  & IdTokenContext
  & ProvenanceContext
/** Default production context: real clock, CI detection, fetch, logger, and process. */
export const SHARED_CONTEXT: SharedContext = {
  Date,
  ciInfo,
  fetch,
  globalInfo,
  process,
}

View File

@@ -0,0 +1,17 @@
// Environment variable that may carry a one-time password for publishing.
const ENV_KEY = 'PNPM_CONFIG_OTP'
type EnvBase =
  & Partial<Readonly<Record<string, string>>>
  & Partial<Readonly<Record<typeof ENV_KEY, string>>>
interface OptionsBase {
  readonly otp?: string
}
/**
 * Return {@link opts} with `otp` filled in from the `PNPM_CONFIG_OTP`
 * environment variable when the options did not already provide one.
 * An empty environment value is considered "not defined".
 */
export function optionsWithOtpEnv<Options extends OptionsBase> (
  opts: Options,
  env: EnvBase
): Options {
  const envOtp = env[ENV_KEY]
  if (opts.otp || !envOtp) {
    return opts
  }
  return { ...opts, otp: envOtp }
}

View File

@@ -5,7 +5,7 @@ import { type Catalogs } from '@pnpm/catalogs.types'
import { PnpmError } from '@pnpm/error'
import { types as allTypes, type UniversalOptions, type Config, getWorkspaceConcurrency, getDefaultWorkspaceConcurrency } from '@pnpm/config'
import { readProjectManifest } from '@pnpm/cli-utils'
import { createExportableManifest } from '@pnpm/exportable-manifest'
import { type ExportedManifest, createExportableManifest } from '@pnpm/exportable-manifest'
import { packlist } from '@pnpm/fs.packlist'
import { getBinsFromPackageManifest } from '@pnpm/package-bins'
import { type Hooks } from '@pnpm/pnpmfile'
@@ -292,7 +292,7 @@ export async function api (opts: PackOptions): Promise<PackResult> {
}
export interface PackResult {
publishedManifest: ProjectManifest
publishedManifest: ExportedManifest
contents: string[]
tarballPath: string
}
@@ -323,7 +323,7 @@ async function packPkg (opts: {
modulesDir: string
packGzipLevel?: number
bins: string[]
manifest: ProjectManifest
manifest: ExportedManifest
}): Promise<void> {
const {
destFile,
@@ -359,7 +359,7 @@ async function createPublishManifest (opts: {
manifest: ProjectManifest
catalogs: Catalogs
hooks?: Hooks
}): Promise<ProjectManifest> {
}): Promise<ExportedManifest> {
const { projectDir, embedReadme, modulesDir, manifest, catalogs, hooks } = opts
const readmeFile = embedReadme ? await readReadmeFile(projectDir) : undefined
return createExportableManifest(projectDir, manifest, {

View File

@@ -1,21 +1,21 @@
import { promises as fs, existsSync } from 'fs'
import path from 'path'
import { docsUrl, readProjectManifest } from '@pnpm/cli-utils'
import { FILTERING } from '@pnpm/common-cli-options-help'
import { type Config, types as allTypes } from '@pnpm/config'
import { PnpmError } from '@pnpm/error'
import { runLifecycleHook, type RunLifecycleHookOptions } from '@pnpm/lifecycle'
import { runNpm } from '@pnpm/run-npm'
import { type ProjectManifest } from '@pnpm/types'
import { getCurrentBranch, isGitRepo, isRemoteHistoryClean, isWorkingTreeClean } from '@pnpm/git-utils'
import { loadToken } from '@pnpm/network.auth-header'
import enquirer from 'enquirer'
import rimraf from '@zkochan/rimraf'
import { pick } from 'ramda'
import realpathMissing from 'realpath-missing'
import renderHelp from 'render-help'
import { temporaryDirectory } from 'tempy'
import { extractManifestFromPacked, isTarballPath } from './extractManifestFromPacked.js'
import { optionsWithOtpEnv } from './otpEnv.js'
import * as pack from './pack.js'
import { publishPackedPkg } from './publishPackedPkg.js'
import { recursivePublish, type PublishRecursiveOpts } from './recursivePublish.js'
export function rcOptionsTypes (): Record<string, unknown> {
@@ -40,6 +40,7 @@ export function cliOptionsTypes (): Record<string, unknown> {
'dry-run': Boolean,
force: Boolean,
json: Boolean,
otp: String,
recursive: Boolean,
'report-summary': Boolean,
}
@@ -111,46 +112,6 @@ export function help (): string {
const GIT_CHECKS_HINT = 'If you want to disable Git checks on publish, set the "git-checks" setting to "false", or run again with "--no-git-checks".'
/**
 * Remove pnpm-specific CLI options that npm doesn't recognize.
 */
export function removePnpmSpecificOptions (args: string[]): string[] {
  // Flags that take no value: drop just the flag itself.
  const flagsToDrop = new Set([
    '--no-git-checks',
    '--embed-readme',
    '--no-embed-readme',
  ])
  // Options followed by a value: drop the option and (when present) its value.
  const valueOptionsToDrop = new Set([
    '--publish-branch',
    '--npm-path',
  ])
  const kept: string[] = []
  for (let index = 0; index < args.length; index++) {
    const current = args[index]
    if (flagsToDrop.has(current)) continue
    if (valueOptionsToDrop.has(current)) {
      const next = args[index + 1]
      // Only consume the value when it doesn't look like another option.
      if (next !== undefined && next[0] !== '-') index++
      continue
    }
    kept.push(current)
  }
  return kept
}
export async function handler (
opts: Omit<PublishRecursiveOpts, 'workspaceDir'> & {
argv: {
@@ -229,17 +190,20 @@ Do you want to continue?`,
return { exitCode }
}
let args = opts.argv.original.slice(1)
const dirInParams = (params.length > 0) ? params[0] : undefined
if (dirInParams) {
args = args.filter(arg => arg !== params[0])
}
args = removePnpmSpecificOptions(args)
opts = optionsWithOtpEnv(opts, process.env)
if (dirInParams != null && (dirInParams.endsWith('.tgz') || dirInParams?.endsWith('.tar.gz'))) {
const { status } = runNpm(opts.npmPath, ['publish', dirInParams, ...args])
return { exitCode: status ?? 0 }
const dirInParams = (params.length > 0) ? params[0] : undefined
if (dirInParams != null && isTarballPath(dirInParams)) {
const tarballPath = dirInParams
const publishedManifest = await extractManifestFromPacked(tarballPath)
await publishPackedPkg({
tarballPath,
publishedManifest,
}, opts)
return { exitCode: 0 }
}
const dir = dirInParams ?? opts.dir ?? process.cwd()
const _runScriptsIfPresent = runScriptsIfPresent.bind(null, {
@@ -266,22 +230,18 @@ Do you want to continue?`,
// from the current working directory, ignoring the package.json file
// that was generated and packed to the tarball.
const packDestination = temporaryDirectory()
const { tarballPath } = await pack.api({
...opts,
dir,
packDestination,
dryRun: false,
})
await copyNpmrc({ dir, workspaceDir: opts.workspaceDir, packDestination })
const { status } = runNpm(opts.npmPath, ['publish', '--ignore-scripts', path.basename(tarballPath), ...args], {
cwd: packDestination,
env: getEnvWithTokens(opts),
})
await rimraf(packDestination)
if (status != null && status !== 0) {
return { exitCode: status }
try {
const packResult = await pack.api({
...opts,
dir,
packDestination,
dryRun: false,
})
await publishPackedPkg(packResult, opts)
} finally {
await rimraf(packDestination)
}
if (!opts.ignoreScripts) {
await _runScriptsIfPresent([
'publish',
@@ -291,50 +251,6 @@ Do you want to continue?`,
return { manifest }
}
/**
 * The npm CLI doesn't support token helpers, so we transform the token helper settings
 * to regular auth token settings that the npm CLI can understand.
 */
function getEnvWithTokens (opts: Pick<PublishRecursiveOpts, 'rawConfig' | 'argv'>): Record<string, string> {
  // Token helpers declared in the resolved config…
  const helpersFromConfig = Object.entries(opts.rawConfig).filter(([key]) => key.endsWith(':tokenHelper'))
  // …plus the ones passed directly on the command line as `<registry>:tokenHelper=<path>`.
  const helpersFromArgs = opts.argv.original
    .filter(arg => arg.includes(':tokenHelper='))
    .map(arg => arg.split('=', 2) as [string, string])
  const env: Record<string, string> = {}
  for (const [key, helperPath] of [...helpersFromConfig, ...helpersFromArgs]) {
    const authHeader = loadToken(helperPath, key)
    const isBearer = authHeader.startsWith('Bearer')
    const authType = isBearer ? '_authToken' : '_auth'
    const registry = key.replace(/:tokenHelper$/, '')
    env[`NPM_CONFIG_${registry}:${authType}`] = isBearer
      ? authHeader.slice('Bearer '.length)
      : authHeader.replace(/Basic /i, '')
  }
  return env
}
/**
 * Copy the project's `.npmrc` (or, failing that, the workspace's) next to the
 * packed tarball so publishing picks up the local registry configuration.
 */
async function copyNpmrc (
  { dir, workspaceDir, packDestination }: {
    dir: string
    workspaceDir?: string
    packDestination: string
  }
): Promise<void> {
  // The project-local file wins; the workspace file is only a fallback.
  const candidateDirs = workspaceDir ? [dir, workspaceDir] : [dir]
  for (const candidateDir of candidateDirs) {
    const npmrcPath = path.join(candidateDir, '.npmrc')
    if (!existsSync(npmrcPath)) continue
    await fs.copyFile(npmrcPath, path.join(packDestination, '.npmrc'))
    return
  }
}
export async function runScriptsIfPresent (
opts: RunLifecycleHookOptions,
scriptNames: string[],

View File

@@ -0,0 +1,344 @@
import fs from 'fs/promises'
import { type PublishOptions, publish } from 'libnpmpublish'
import { type Config } from '@pnpm/config'
import { PnpmError } from '@pnpm/error'
import { type ExportedManifest } from '@pnpm/exportable-manifest'
import { globalInfo, globalWarn } from '@pnpm/logger'
import { displayError } from './displayError.js'
import { executeTokenHelper } from './executeTokenHelper.js'
import { createFailedToPublishError } from './FailedToPublishError.js'
import { AuthTokenError, fetchAuthToken } from './oidc/authToken.js'
import { IdTokenError, getIdToken } from './oidc/idToken.js'
import { ProvenanceError, determineProvenance } from './oidc/provenance.js'
import { type PackResult } from './pack.js'
import { type NormalizedRegistryUrl, allRegistryConfigKeys, parseSupportedRegistryUrl } from './registryConfigKeys.js'
// Per-registry authentication settings.
type AuthConfigKey =
  | 'authToken'
  | 'authUserPass'
  | 'tokenHelper'
// Per-registry TLS settings.
type SslConfigKey =
  | 'ca'
  | 'cert'
  | 'key'
type AuthSslConfigKey =
  // default registry
  | AuthConfigKey
  | SslConfigKey
  // other registries
  | 'authInfos'
  | 'sslConfigs'
/** Subset of the pnpm config (plus publish-only extras) needed to publish a packed tarball. */
export type PublishPackedPkgOptions = Pick<Config,
  | AuthSslConfigKey
  | 'dryRun'
  | 'fetchRetries'
  | 'fetchRetryFactor'
  | 'fetchRetryMaxtimeout'
  | 'fetchRetryMintimeout'
  | 'fetchTimeout'
  | 'registries'
  | 'tag'
  | 'userAgent'
> & {
  access?: 'public' | 'restricted'
  ci?: boolean
  otp?: string // NOTE: There is no existing test for the One-time Password feature
  provenance?: boolean
  provenanceFile?: string // NOTE: This field is currently not supported
}
// @types/libnpmpublish unfortunately uses an outdated type definition of package.json,
// so infer the manifest parameter type straight from `publish`'s own signature instead.
type ManifestFromOutdatedDefinition = typeof publish extends (_a: infer Manifest, ..._: never) => unknown ? Manifest : never
/**
 * Publish an already-packed tarball with `libnpmpublish`.
 *
 * Honors `--dry-run` (logs and returns without contacting the registry).
 *
 * @throws the error produced by `createFailedToPublishError` when the registry
 * responds with a non-OK status.
 */
export async function publishPackedPkg (
  packResult: Pick<PackResult, 'publishedManifest' | 'tarballPath'>,
  opts: PublishPackedPkgOptions
): Promise<void> {
  const { publishedManifest, tarballPath } = packResult
  const tarballData = await fs.readFile(tarballPath)
  const publishOptions = await createPublishOptions(publishedManifest, opts)
  const { name, version } = publishedManifest
  const { registry } = publishOptions
  // BUG FIX: the registry used to be interpolated right after the version with
  // no separator, producing e.g. "📦 foo@1.0.0https://registry…" or
  // "📦 foo@1.0.0the default registry".
  globalInfo(`📦 ${name}@${version} to ${registry ?? 'the default registry'}`)
  if (opts.dryRun) {
    globalWarn(`Skip publishing ${name}@${version} (dry run)`)
    return
  }
  const response = await publish(publishedManifest as ManifestFromOutdatedDefinition, tarballData, publishOptions)
  if (response.ok) {
    globalInfo(`✅ Published package ${name}@${version}`)
    return
  }
  throw await createFailedToPublishError(packResult, response)
}
/**
 * Build the `libnpmpublish` options for a manifest: map pnpm config names onto
 * libnpmpublish's names, attach the auth/TLS settings of the target registry,
 * and fall back to OIDC-issued credentials when none are configured.
 */
async function createPublishOptions (manifest: ExportedManifest, options: PublishPackedPkgOptions): Promise<PublishOptions> {
  const { registry, auth, ssl } = findAuthSslInfo(manifest, options)
  const {
    access,
    ci: isFromCI,
    fetchRetries,
    fetchRetryFactor,
    fetchRetryMaxtimeout,
    fetchRetryMintimeout,
    fetchTimeout: timeout,
    otp,
    provenance,
    provenanceFile,
    tag: defaultTag,
    userAgent,
  } = options
  const publishOptions: PublishOptions = {
    access,
    defaultTag,
    fetchRetries,
    fetchRetryFactor,
    fetchRetryMaxtimeout,
    fetchRetryMintimeout,
    isFromCI,
    otp,
    timeout,
    provenance,
    provenanceFile,
    registry,
    userAgent,
    ca: ssl?.ca,
    // libnpmpublish expects a single PEM string; join multiple certs with newlines.
    cert: Array.isArray(ssl?.cert) ? ssl.cert.join('\n') : ssl?.cert,
    key: ssl?.key,
    token: auth && extractToken(auth),
    username: auth?.authUserPass?.username,
    password: auth?.authUserPass?.password,
  }
  // This is necessary because getNetworkConfigs initialized them as { cert: '', key: '' }
  // which may be a problem.
  // The real fix is to change the type `SslConfig` into that of partial properties, but that
  // is out of scope for now.
  removeEmptyStringProperty(publishOptions, 'cert')
  removeEmptyStringProperty(publishOptions, 'key')
  if (registry) {
    // Only fall back to OIDC when no credentials are configured (the helper checks this).
    const oidcTokenProvenance = await fetchTokenAndProvenanceByOidcIfApplicable(publishOptions, manifest.name, registry, options)
    publishOptions.token ??= oidcTokenProvenance?.authToken
    publishOptions.provenance ??= oidcTokenProvenance?.provenance
    appendAuthOptionsForRegistry(publishOptions, registry)
  }
  // Drop explicit `undefined` values so they don't clobber libnpmpublish's own defaults.
  pruneUndefined(publishOptions)
  return publishOptions
}
/** Registry URL together with the auth and TLS settings that apply to it. */
interface AuthSslInfo {
  registry: NormalizedRegistryUrl
  auth: Pick<Config, AuthConfigKey>
  ssl: Pick<Config, SslConfigKey>
}
/**
 * Find auth and ssl information according to {@link https://docs.npmjs.com/cli/v10/configuring-npm/npmrc#auth-related-configuration}.
 *
 * The example `.npmrc` demonstrated inheritance: settings on a longer
 * `//host/path/` key take precedence over shorter keys, which in turn take
 * precedence over the defaults — auth collectively, ssl per property.
 */
function findAuthSslInfo (
  { name }: ExportedManifest,
  {
    authInfos,
    sslConfigs,
    registries,
    ...defaultInfos
  }: Pick<Config, AuthSslConfigKey | 'registries'>
): Partial<AuthSslInfo> {
  // Scoped packages ("@scope/slug") may have a scope-specific registry configured.
  // eslint-disable-next-line regexp/no-unused-capturing-group
  const scopedMatches = /@(?<scope>[^/]+)\/(?<slug>[^/]+)/.exec(name)
  const registryName = scopedMatches?.groups ? `@${scopedMatches.groups.scope}` : 'default'
  const nonNormalizedRegistry = registries[registryName] ?? registries.default
  const supportedRegistryInfo = parseSupportedRegistryUrl(nonNormalizedRegistry)
  if (!supportedRegistryInfo) {
    throw new PublishUnsupportedRegistryProtocolError(nonNormalizedRegistry)
  }
  const {
    normalizedUrl: registry,
    longestConfigKey: initialRegistryConfigKey,
  } = supportedRegistryInfo
  const result: Partial<AuthSslInfo> = { registry }
  // Walk config keys from the most specific path to the bare hostname.
  for (const registryConfigKey of allRegistryConfigKeys(initialRegistryConfigKey)) {
    const auth: Pick<Config, AuthConfigKey> | undefined = authInfos[registryConfigKey]
    const ssl: Pick<Config, SslConfigKey> | undefined = sslConfigs[registryConfigKey]
    result.auth ??= auth // old auth from longer path collectively overrides new auth from shorter path
    result.ssl = {
      ...ssl,
      ...result.ssl, // old ssl from longer path individually overrides new ssl from shorter path
    }
  }
  // Only a registry that is (some spelling of) the default registry inherits the default auth/ssl.
  if (
    nonNormalizedRegistry !== registries.default &&
    registry !== registries.default &&
    registry !== parseSupportedRegistryUrl(registries.default)?.normalizedUrl
  ) {
    return result
  }
  return {
    registry,
    auth: result.auth ?? defaultInfos, // old auth from longer path collectively overrides default auth
    ssl: {
      ...defaultInfos,
      ...result.ssl, // old ssl from longer path individually overrides default ssl
    },
  }
}
/**
 * Resolve a raw auth token: prefer a configured `authToken`, otherwise run the
 * configured token helper command (if any).
 */
function extractToken ({
  authToken,
  tokenHelper,
}: {
  authToken?: string
  tokenHelper?: [string, ...string[]]
}): string | undefined {
  if (authToken) return authToken
  return tokenHelper ? executeTokenHelper(tokenHelper, { globalWarn }) : undefined
}
/** Thrown when the target registry URL is neither HTTP nor HTTPS. */
export class PublishUnsupportedRegistryProtocolError extends PnpmError {
  // The offending registry URL, kept for diagnostics.
  readonly registryUrl: string
  constructor (registryUrl: string) {
    super('PUBLISH_UNSUPPORTED_REGISTRY_PROTOCOL', `Registry ${registryUrl} has an unsupported protocol`, {
      hint: '`pnpm publish` only supports HTTP and HTTPS registries',
    })
    this.registryUrl = registryUrl
  }
}
/** Result of a successful OIDC exchange. */
interface OidcTokenProvenanceResult {
  authToken: string
  provenance?: boolean
}
/**
 * If authentication information isn't already set in {@link targetPublishOptions},
 * try fetching an authentication token and provenance by OpenID Connect and return it.
 *
 * Every OIDC-specific failure is downgraded to a warning (returning `undefined`);
 * unexpected errors are rethrown.
 */
async function fetchTokenAndProvenanceByOidcIfApplicable (
  targetPublishOptions: PublishOptions,
  packageName: string,
  registry: string,
  options: PublishPackedPkgOptions
): Promise<OidcTokenProvenanceResult | undefined> {
  // Configured credentials always win over OIDC.
  if (
    targetPublishOptions.token != null ||
    (targetPublishOptions.username && targetPublishOptions.password)
  ) return undefined
  let idToken: string | undefined
  try {
    idToken = await getIdToken({
      options,
      registry,
    })
  } catch (error) {
    if (error instanceof IdTokenError) {
      globalWarn(`Skipped OIDC: ${displayError(error)}`)
      return undefined
    }
    throw error
  }
  if (!idToken) {
    globalWarn('Skipped OIDC: idToken is not available')
    return undefined
  }
  // Exchange the CI-issued idToken for a registry auth token.
  let authToken: string
  try {
    authToken = await fetchAuthToken({
      idToken,
      options,
      packageName,
      registry,
    })
  } catch (error) {
    if (error instanceof AuthTokenError) {
      globalWarn(`Skipped OIDC: ${displayError(error)}`)
      return undefined
    }
    throw error
  }
  // An explicit --provenance / --no-provenance setting skips auto-detection.
  if (options.provenance != null) {
    return {
      authToken,
      provenance: options.provenance,
    }
  }
  let provenance: boolean | undefined
  try {
    provenance = await determineProvenance({
      authToken,
      idToken,
      options,
      packageName,
      registry,
    })
  } catch (error) {
    if (error instanceof ProvenanceError) {
      globalWarn(`Skipped setting provenance: ${displayError(error)}`)
      return undefined
    }
    throw error
  }
  return { authToken, provenance }
}
/**
 * Appends authentication information to {@link targetPublishOptions} to explicitly target {@link registry}.
 *
 * `libnpmpublish` has a quirk in which it only reads the authentication information from `//<registry>:_authToken`
 * instead of `token`.
 * This function fixes that by making sure the registry specific authentication information exists.
 */
function appendAuthOptionsForRegistry (targetPublishOptions: PublishOptions, registry: NormalizedRegistryUrl): void {
  const registryInfo = parseSupportedRegistryUrl(registry)
  if (!registryInfo) {
    globalWarn(`The registry ${registry} cannot be converted into a config key. Supplement is skipped. Subsequent libnpmpublish call may fail.`)
    return
  }
  const registryConfigKey = registryInfo.longestConfigKey
  targetPublishOptions[`${registryConfigKey}:_authToken`] ??= targetPublishOptions.token
  targetPublishOptions[`${registryConfigKey}:username`] ??= targetPublishOptions.username
  // BUG FIX: use Buffer instead of btoa() — btoa() throws on code points above
  // U+00FF and base64-encodes latin1 bytes, whereas npm-registry-fetch decodes
  // `_password` as base64-encoded UTF-8. Buffer round-trips all passwords.
  targetPublishOptions[`${registryConfigKey}:_password`] ??= targetPublishOptions.password &&
    Buffer.from(targetPublishOptions.password, 'utf8').toString('base64')
}
/**
 * Delete `key` from `object` (in place) when its value is an empty string —
 * or any other falsy value, matching how getNetworkConfigs fills SSL configs.
 */
function removeEmptyStringProperty<Key extends string> (object: Partial<Record<Key, string>>, key: Key): void {
  const value = object[key]
  if (value) return
  delete object[key]
}
/** Strip, in place, every enumerable property whose value is exactly `undefined`. */
function pruneUndefined (object: Record<string, unknown>): void {
  for (const key in object) {
    if (object[key] !== undefined) continue
    delete object[key]
  }
}

View File

@@ -10,6 +10,7 @@ import pFilter from 'p-filter'
import { pick } from 'ramda'
import { writeJsonFile } from 'write-json-file'
import { publish } from './publish.js'
import { type PublishPackedPkgOptions } from './publishPackedPkg.js'
export type PublishRecursiveOpts = Required<Pick<Config,
| 'bin'
@@ -45,7 +46,6 @@ Partial<Pick<Config,
| 'offline'
| 'selectedProjectsGraph'
| 'strictSsl'
| 'sslConfigs'
| 'unsafePerm'
| 'userAgent'
| 'userConfig'
@@ -56,7 +56,7 @@ Partial<Pick<Config,
original: string[]
}
reportSummary?: boolean
}
} & PublishPackedPkgOptions
export async function recursivePublish (
opts: PublishRecursiveOpts & Required<Pick<Config, 'selectedProjectsGraph'>>

View File

@@ -0,0 +1,83 @@
import normalizeRegistryUrl from 'normalize-registry-url'
/**
 * If {@link text} starts with {@link oldPrefix}, swap that prefix for {@link newPrefix}.
 * Otherwise, return `undefined`.
 */
const replacePrefix = <NewPrefix extends string> (
  text: string,
  oldPrefix: string,
  newPrefix: NewPrefix
): `${NewPrefix}${string}` | undefined => {
  if (!text.startsWith(oldPrefix)) return undefined
  return `${newPrefix}${text.slice(oldPrefix.length)}` as `${NewPrefix}${string}`
}
/**
 * Append {@link suffix} to {@link text} unless it already ends with it.
 */
const ensureSuffix = <
  Text extends string,
  Suffix extends string
> (text: Text, suffix: Suffix): `${Text}${Suffix}` => {
  if (text.endsWith(suffix)) return text as `${Text}${Suffix}`
  return `${text}${suffix}`
}
/**
 * Protocols currently supported.
 */
type SupportedRegistryScheme = 'http' | 'https'
/**
 * A registry URL that has been normalized to match its corresponding {@link RegistryConfigKey}.
 */
export type NormalizedRegistryUrl = `${SupportedRegistryScheme}://${string}/`
/**
 * A config key of a registry url is a key on the `.npmrc` file. This key starts with
 * a "//" prefix followed by a hostname and the rest of the URI and ends with a "/".
 * They usually specify authentication information.
 */
export type RegistryConfigKey = `//${string}/`
/** Result of {@link parseSupportedRegistryUrl}. */
export interface SupportedRegistryUrlInfo {
  // The registry URL with its protocol, normalized to end with "/".
  normalizedUrl: NormalizedRegistryUrl
  // The most specific (longest-path) config key for that URL.
  longestConfigKey: RegistryConfigKey
}
/**
 * If the {@link registryUrl} is an HTTP or an HTTPS registry url, return the longest
 * {@link RegistryConfigKey} that corresponds to the registry url and a {@link NormalizedRegistryUrl}
 * that matches it.
 */
export function parseSupportedRegistryUrl (registryUrl: string): SupportedRegistryUrlInfo | undefined {
  const normalized = normalizeRegistryUrl(registryUrl)
  // Strip the scheme down to the "//" that config keys start with.
  let keyPrefix = replacePrefix(normalized, 'http://', '//')
  keyPrefix ??= replacePrefix(normalized, 'https://', '//')
  if (keyPrefix === undefined) return undefined
  return {
    normalizedUrl: ensureSuffix(normalized, '/') as NormalizedRegistryUrl,
    longestConfigKey: ensureSuffix(keyPrefix, '/'),
  }
}
/**
 * This value is used for termination check in {@link allRegistryConfigKeys} only.
 * It is not actually a valid {@link RegistryConfigKey}.
 */
const EMPTY_REGISTRY_CONFIG_KEY: RegistryConfigKey = '///'
/**
 * Generate all {@link RegistryConfigKey} of the same hostname from the longest to the shortest,
 * including {@link longest} itself.
 *
 * @throws {RangeError} when {@link longest} doesn't start with "//" or has no hostname.
 */
export function * allRegistryConfigKeys (longest: RegistryConfigKey): Generator<RegistryConfigKey, void, void> {
  if (!longest.startsWith('//')) {
    throw new RangeError(`The string ${JSON.stringify(longest)} is not a valid registry config key`)
  }
  if (longest === EMPTY_REGISTRY_CONFIG_KEY) {
    throw new RangeError('Registry config key cannot be without hostname')
  }
  // BUG FIX: the old guard (`length <= '///'.length`) silently dropped keys of
  // single-character hostnames such as "//a/". Stop only once the hostname has
  // been fully consumed, i.e. when we are down to "//".
  if (longest.length <= '//'.length) return
  yield longest
  // Strip the last path segment (or, at the end, the hostname) for the next shorter key.
  const next = longest.replace(/[^/]*\/$/, '') as RegistryConfigKey
  if (next === EMPTY_REGISTRY_CONFIG_KEY) return // degenerate "...//" input
  yield * allRegistryConfigKeys(next)
}

View File

@@ -0,0 +1,101 @@
import { type PackResult } from '../src/pack.js'
import { type FailedToPublishError, createFailedToPublishError } from '../src/FailedToPublishError.js'
// Minimal PackResult fixture; returns a fresh object per call so tests cannot
// accidentally share mutable state.
const pack = (): PackResult => ({
  contents: ['index.js', 'bin.js'],
  publishedManifest: {
    name: 'example-pack',
    version: '0.1.2',
  },
  tarballPath: 'example-pack.tgz',
})
describe('createFailedToPublishError', () => {
  // Response body is empty: message carries only status and statusText.
  test('without details', async () => {
    expect(await createFailedToPublishError(pack(), {
      status: 401,
      statusText: 'Unauthorized',
      text: () => '',
    })).toMatchObject({
      code: 'ERR_PNPM_FAILED_TO_PUBLISH',
      message: 'Failed to publish package example-pack@0.1.2 (status 401 Unauthorized)',
      status: 401,
      statusText: 'Unauthorized',
      text: '',
      pack: pack(),
    } as Partial<FailedToPublishError<PackResult>>)
  })
  // Reading the response body throws: the error must still be produced with empty text.
  test('failed to get details text', async () => {
    expect(await createFailedToPublishError(pack(), {
      status: 401,
      statusText: 'Unauthorized',
      text () {
        throw new Error('No details')
      },
    })).toMatchObject({
      code: 'ERR_PNPM_FAILED_TO_PUBLISH',
      message: 'Failed to publish package example-pack@0.1.2 (status 401 Unauthorized)',
      status: 401,
      statusText: 'Unauthorized',
      text: '',
      pack: pack(),
    } as Partial<FailedToPublishError<PackResult>>)
  })
  // A one-line body is appended inline after a colon.
  test('with single-line details', async () => {
    const text = 'Failed to authenticate'
    expect(await createFailedToPublishError(pack(), {
      status: 401,
      statusText: 'Unauthorized',
      text: () => text,
    })).toMatchObject({
      code: 'ERR_PNPM_FAILED_TO_PUBLISH',
      message: 'Failed to publish package example-pack@0.1.2 (status 401 Unauthorized): Failed to authenticate',
      status: 401,
      statusText: 'Unauthorized',
      text,
      pack: pack(),
    } as Partial<FailedToPublishError<PackResult>>)
  })
  // A multi-line body becomes an indented "Details:" section.
  test('with multi-line details', async () => {
    const text = [
      'Failed to authenticate',
      'No token provided',
    ].join('\n')
    expect(await createFailedToPublishError(pack(), {
      status: 401,
      statusText: 'Unauthorized',
      text: () => text,
    })).toMatchObject({
      code: 'ERR_PNPM_FAILED_TO_PUBLISH',
      message: [
        'Failed to publish package example-pack@0.1.2 (status 401 Unauthorized)',
        'Details:',
        '    Failed to authenticate',
        '    No token provided',
        '',
      ].join('\n'),
      status: 401,
      statusText: 'Unauthorized',
      text,
      pack: pack(),
    } as Partial<FailedToPublishError<PackResult>>)
  })
  // Without a statusText, the parenthetical mentions only the numeric status.
  test('with an empty statusText', async () => {
    expect(await createFailedToPublishError(pack(), {
      status: 499,
      statusText: '',
      text: () => '',
    })).toMatchObject({
      code: 'ERR_PNPM_FAILED_TO_PUBLISH',
      message: 'Failed to publish package example-pack@0.1.2 (status 499)',
      status: 499,
      statusText: '',
      text: '',
      pack: pack(),
    } as Partial<FailedToPublishError<PackResult>>)
  })
})

View File

@@ -0,0 +1,45 @@
import { jest } from '@jest/globals'
import { executeTokenHelper } from '../src/executeTokenHelper.js'
// Each test runs the current Node binary (process.execPath) as the helper
// command so no external executables are required.
test('executeTokenHelper returns stdout of the tokenHelper command', () => {
  const globalWarn = jest.fn<(message: string) => void>()
  expect(executeTokenHelper([process.execPath, '--print', '"hello world"'], { globalWarn })).toBe('hello world')
  expect(globalWarn).not.toHaveBeenCalled()
})
test('executeTokenHelper trims the output', () => {
  const globalWarn = jest.fn<(message: string) => void>()
  expect(executeTokenHelper([process.execPath, '--print', '" hello world \\n"'], { globalWarn })).toBe('hello world')
  expect(globalWarn).not.toHaveBeenCalled()
})
// stderr lines are surfaced as warnings, interleaved stdout is preserved in order.
test('executeTokenHelper logs line of stderr via warnings', () => {
  const globalWarn = jest.fn<(message: string) => void>()
  expect(executeTokenHelper([process.execPath, '--eval', [
    'console.log("foo")',
    'console.error("hello")',
    'console.log("bar")',
    'console.error("world")',
  ].join('\n')], { globalWarn })).toBe('foo\nbar')
  expect(globalWarn.mock.calls).toStrictEqual([
    ['(tokenHelper stderr) hello'],
    ['(tokenHelper stderr) world'],
  ])
})
// Whitespace-only stderr output must not produce warnings.
test('executeTokenHelper does not log empty stderr', () => {
  const globalWarn = jest.fn<(message: string) => void>()
  expect(executeTokenHelper([process.execPath, '--eval', [
    'console.log("foo")',
    'console.error(" ")',
    'console.log("bar")',
    'console.error()',
  ].join('\n')], { globalWarn })).toBe('foo\nbar')
  expect(globalWarn).not.toHaveBeenCalled()
})
test('executeTokenHelper rejects non-zero exit codes', () => {
  const globalWarn = jest.fn<(message: string) => void>()
  expect(() => executeTokenHelper([process.execPath, '--eval', 'process.exit(12)'], { globalWarn })).toThrow()
  expect(globalWarn).not.toHaveBeenCalled()
})

View File

@@ -0,0 +1,118 @@
import fs from 'fs'
import { createGzip } from 'zlib'
import tar from 'tar-stream'
import { type ExportedManifest } from '@pnpm/exportable-manifest'
import { prepareEmpty } from '@pnpm/prepare'
import {
type TarballPath,
PublishArchiveMissingManifestError,
isTarballPath,
extractManifestFromPacked,
} from '../src/extractManifestFromPacked.js'
// Write a gzipped tarball at `tarballPath` with the given entries; manifest
// objects are serialized as pretty-printed JSON, strings are written verbatim.
async function createTarball (tarballPath: string, contents: Record<string, string | ExportedManifest>): Promise<void> {
  const pack = tar.pack()
  for (const name in contents) {
    const content = contents[name]
    const textContent = typeof content === 'string' ? content : JSON.stringify(content, undefined, 2)
    pack.entry({ name }, textContent)
  }
  const tarball = fs.createWriteStream(tarballPath)
  const gzip = createGzip()
  pack.pipe(gzip).pipe(tarball)
  pack.finalize()
  return new Promise((resolve, reject) => {
    tarball.on('close', resolve)
    tarball.on('error', reject)
    // BUG FIX: pipe() does not forward upstream errors, so without these
    // handlers a failure in the tar packer or the gzip stream would leave
    // the returned promise pending forever (hanging the test).
    pack.on('error', reject)
    gzip.on('error', reject)
  })
}
describe('extractManifestFromPacked', () => {
  // Happy path: the manifest lives at the conventional "package/package.json".
  test('extracts manifest from a packed package', async () => {
    prepareEmpty()
    const tarballPath: TarballPath = 'my-package.tgz'
    const manifest: ExportedManifest = {
      name: 'hello-world',
      version: '0.0.0',
    }
    await createTarball(tarballPath, {
      'package/lib/foo.js': 'hello',
      'package/lib/bar.js': 'world',
      'package/package.json': manifest,
      'package/README.md': 'example',
    })
    expect(await extractManifestFromPacked(tarballPath)).toStrictEqual(manifest)
  })
  test('errors when manifest does not exist', async () => {
    prepareEmpty()
    const tarballPath: TarballPath = 'my-package.tgz'
    await createTarball(tarballPath, {
      'package/lib/foo.js': 'hello',
      'package/lib/bar.js': 'world',
      'package/README.md': 'example',
    })
    const promise = extractManifestFromPacked(tarballPath)
    await expect(promise).rejects.toBeInstanceOf(PublishArchiveMissingManifestError)
    await expect(promise).rejects.toStrictEqual(new PublishArchiveMissingManifestError(tarballPath))
    await expect(promise).rejects.toMatchObject({
      code: 'ERR_PNPM_PUBLISH_ARCHIVE_MISSING_MANIFEST',
      tarballPath,
    })
  })
  // A package.json outside the "package/" prefix must NOT be accepted.
  test('errors when the manifest is not placed in the correct location', async () => {
    prepareEmpty()
    const tarballPath: TarballPath = 'my-package.tgz'
    const manifest: ExportedManifest = {
      name: 'hello-world',
      version: '0.0.0',
    }
    await createTarball(tarballPath, {
      'lib/foo.js': 'hello',
      'lib/bar.js': 'world',
      'package.json': manifest,
      'README.md': 'example',
    })
    const promise = extractManifestFromPacked(tarballPath)
    await expect(promise).rejects.toBeInstanceOf(PublishArchiveMissingManifestError)
    await expect(promise).rejects.toStrictEqual(new PublishArchiveMissingManifestError(tarballPath))
    await expect(promise).rejects.toMatchObject({
      code: 'ERR_PNPM_PUBLISH_ARCHIVE_MISSING_MANIFEST',
      tarballPath,
    })
  })
})
describe('isTarballPath', () => {
  // Paths ending in `.tgz` are recognized as tarballs.
  test('returns true for .tgz', () => {
    for (const candidate of ['foo/bar.tgz', 'foo.tgz']) {
      expect(isTarballPath(candidate)).toBe(true)
    }
  })
  // Paths ending in `.tar.gz` are recognized as tarballs.
  test('returns true for .tar.gz', () => {
    for (const candidate of ['foo/bar.tar.gz', 'foo.tar.gz']) {
      expect(isTarballPath(candidate)).toBe(true)
    }
  })
  // Anything else — including bare `.tar`, bare `.gz`, and extension-less
  // names that merely equal the extension — is not a tarball path.
  test('returns false for non tarball extensions', () => {
    for (const candidate of ['foo/bar', 'foo/bar.tar', 'foo/bar.gz', 'tgz', 'tar.gz']) {
      expect(isTarballPath(candidate)).toBe(false)
    }
  })
})

View File

@@ -0,0 +1,268 @@
import { jest } from '@jest/globals'
import {
type AuthTokenContext,
type AuthTokenFetchOptions,
AuthTokenFetchError,
AuthTokenExchangeError,
AuthTokenJsonInterruptedError,
AuthTokenMalformedJsonError,
fetchAuthToken,
} from '../src/oidc/authToken.js'
describe('fetchAuthToken', () => {
  // Shared fixtures: the default registry, a scoped package (exercises URL
  // encoding), and the OIDC ID token presented to the exchange endpoint.
  const registry = 'https://registry.npmjs.org'
  const packageName = '@pnpm/test-package'
  const idToken = 'test-id-token'
  // Happy path: a 200 response with a `token` field resolves to that token,
  // and the request targets the npm OIDC exchange endpoint with the expected
  // method, headers, and empty body.
  test('successfully fetches auth token', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ token: 'fetched-auth-token' }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const result = await fetchAuthToken({ context, idToken, packageName, registry })
    expect(result).toBe('fetched-auth-token')
    expect(mockFetch).toHaveBeenCalledTimes(1)
    expect(mockFetch).toHaveBeenCalledWith(
      'https://registry.npmjs.org/-/npm/v1/oidc/token/exchange/package/%40pnpm%2Ftest-package',
      expect.objectContaining({
        headers: {
          Accept: 'application/json',
          Authorization: `Bearer ${idToken}`,
          'Content-Length': '0',
        },
        body: '',
        method: 'POST',
      } as AuthTokenFetchOptions)
    )
  })
  // Scoped package names must be percent-encoded as a single path segment.
  test('encodes package name in URL', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ token: 'token' }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const packageName = '@scope/package'
    await fetchAuthToken({ context, idToken, packageName, registry })
    expect(mockFetch).toHaveBeenCalledWith(
      `${registry}/-/npm/v1/oidc/token/exchange/package/${encodeURIComponent(packageName)}`,
      expect.anything()
    )
  })
  // fetchRetry*/fetchTimeout options map onto the fetch client's
  // `retry`/`timeout` shape (exact object, not just a subset).
  test('passes fetch options correctly', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ token: 'token' }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const options = {
      fetchRetries: 5,
      fetchRetryFactor: 3,
      fetchRetryMaxtimeout: 120000,
      fetchRetryMintimeout: 2000,
      fetchTimeout: 45000,
    }
    await fetchAuthToken({ context, idToken, packageName, registry, options })
    expect(mockFetch).toHaveBeenCalledWith(
      expect.any(String),
      {
        body: '',
        headers: {
          Accept: 'application/json',
          Authorization: `Bearer ${idToken}`,
          'Content-Length': '0',
        },
        method: 'POST',
        retry: {
          factor: 3,
          maxTimeout: 120000,
          minTimeout: 2000,
          retries: 5,
        },
        timeout: 45000,
      }
    )
  })
  // Network-level failure (fetch itself throws) is wrapped in
  // AuthTokenFetchError, preserving the original error and call context.
  test('throws AuthTokenFetchError when fetch fails', async () => {
    const fetchError = new Error('Network error')
    const mockFetch = jest.fn(async () => {
      throw fetchError
    })
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenFetchError)
    await expect(promise).rejects.toHaveProperty(['errorSource'], fetchError)
    await expect(promise).rejects.toHaveProperty(['packageName'], packageName)
    await expect(promise).rejects.toHaveProperty(['registry'], registry)
    await expect(promise).rejects.toHaveProperty(['code'], 'ERR_PNPM_AUTH_TOKEN_FETCH')
  })
  // Non-2xx HTTP response → AuthTokenExchangeError with the status and the
  // registry's error payload attached.
  test('throws AuthTokenExchangeError when response is not ok', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 401,
      json: async () => ({ body: { message: 'Unauthorized' } }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenExchangeError)
    await expect(promise).rejects.toHaveProperty(['httpStatus'], 401)
    await expect(promise).rejects.toHaveProperty(['errorResponse', 'body', 'message'], 'Unauthorized')
    await expect(promise).rejects.toHaveProperty(['code'], 'ERR_PNPM_AUTH_TOKEN_EXCHANGE')
  })
  // When the error payload has no body.message, the error message falls back
  // to an "Unknown error" description.
  test('handles exchange error with missing body message', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 403,
      json: async () => ({}),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenExchangeError)
    await expect(promise).rejects.toHaveProperty(['httpStatus'], 403)
    await expect(promise).rejects.toMatchObject({ message: expect.stringContaining('Unknown error') })
  })
  // A 5xx with a well-formed error payload still surfaces the payload message.
  test('handles exchange error when json response is valid', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 500,
      json: async () => ({ body: { message: 'Internal Server Error' } }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenExchangeError)
    await expect(promise).rejects.toHaveProperty(['httpStatus'], 500)
    await expect(promise).rejects.toHaveProperty(['errorResponse', 'body', 'message'], 'Internal Server Error')
  })
  // A 200 whose body cannot be parsed as JSON → dedicated "interrupted" error
  // carrying the underlying parse error.
  test('throws AuthTokenJsonInterruptedError when JSON parsing fails on success response', async () => {
    const jsonError = new Error('JSON parse error')
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => {
        throw jsonError
      },
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenJsonInterruptedError)
    await expect(promise).rejects.toHaveProperty(['errorSource'], jsonError)
    await expect(promise).rejects.toHaveProperty(['code'], 'ERR_PNPM_AUTH_TOKEN_JSON_INTERRUPTED')
  })
  // Valid JSON but no `token` field → malformed-JSON error exposing the
  // offending response for diagnostics.
  test('throws AuthTokenMalformedJsonError when JSON response is missing token', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({}),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    const promise = fetchAuthToken({ context, idToken, packageName, registry })
    await expect(promise).rejects.toBeInstanceOf(AuthTokenMalformedJsonError)
    await expect(promise).rejects.toHaveProperty(['malformedJsonResponse'], {})
    await expect(promise).rejects.toHaveProperty(['packageName'], packageName)
    await expect(promise).rejects.toHaveProperty(['registry'], registry)
    await expect(promise).rejects.toHaveProperty(['code'], 'ERR_PNPM_AUTH_TOKEN_MALFORMED_JSON')
  })
  // `token` present but not a string is also considered malformed.
  test('throws AuthTokenMalformedJsonError when token is not a string', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ token: 12345 }),
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    await expect(fetchAuthToken({ context, idToken, packageName, registry }))
      .rejects.toThrow(AuthTokenMalformedJsonError)
  })
  // A JSON `null` body is malformed (guards against `typeof null === 'object'`).
  test('throws AuthTokenMalformedJsonError when JSON response is null', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => null,
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    await expect(fetchAuthToken({ context, idToken, packageName, registry }))
      .rejects.toThrow(AuthTokenMalformedJsonError)
  })
  // Top-level non-object JSON (e.g. a bare string) is malformed as well.
  test('throws AuthTokenMalformedJsonError when JSON response is not an object', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => 'string response',
    }))
    const context: AuthTokenContext = {
      fetch: mockFetch,
    }
    await expect(fetchAuthToken({ context, idToken, packageName, registry }))
      .rejects.toThrow(AuthTokenMalformedJsonError)
  })
})

View File

@@ -0,0 +1,347 @@
import { jest } from '@jest/globals'
import {
type IdTokenContext,
type IdTokenFetchOptions,
IdTokenGitHubWorkflowIncorrectPermissionsError,
IdTokenGitHubInvalidResponseError,
IdTokenGitHubJsonInterruptedError,
IdTokenGitHubJsonInvalidValueError,
getIdToken,
} from '../src/oidc/idToken.js'
describe('getIdToken', () => {
  const registry = 'https://registry.npmjs.org'
  // Outside any supported CI provider, OIDC is simply unavailable: no token
  // and no network activity.
  test('returns undefined when not in GitHub Actions or GitLab', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: false },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn() as IdTokenContext['globalInfo'],
      process: { env: {} },
    }
    const result = await getIdToken({ context, registry })
    expect(result).toBeUndefined()
    expect(context.fetch).not.toHaveBeenCalled()
  })
  // A pre-provisioned NPM_ID_TOKEN short-circuits the GitHub Actions flow.
  test('returns NPM_ID_TOKEN from environment when available', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn() as IdTokenContext['globalInfo'],
      process: { env: { NPM_ID_TOKEN: 'test-token-from-env' } },
    }
    const result = await getIdToken({ context, registry })
    expect(result).toBe('test-token-from-env')
    expect(context.fetch).not.toHaveBeenCalled()
  })
  // Same short-circuit applies on GitLab CI.
  test('returns NPM_ID_TOKEN from environment in GitLab', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn() as IdTokenContext['globalInfo'],
      process: { env: { NPM_ID_TOKEN: 'test-token-gitlab' } },
    }
    const result = await getIdToken({ context, registry })
    expect(result).toBe('test-token-gitlab')
    expect(context.fetch).not.toHaveBeenCalled()
  })
  // GitLab has no token-request endpoint equivalent, so without NPM_ID_TOKEN
  // there is nothing to fetch — returns undefined rather than erroring.
  test('returns undefined for GitLab when NPM_ID_TOKEN is not set', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn() as IdTokenContext['globalInfo'],
      process: { env: {} },
    }
    const result = await getIdToken({ context, registry })
    expect(result).toBeUndefined()
    expect(context.fetch).not.toHaveBeenCalled()
  })
  // On GitHub Actions, missing ACTIONS_ID_TOKEN_REQUEST_* variables indicate
  // the workflow lacks `id-token: write` permissions — a hard error.
  test('throws error when GitHub Actions environment variables are missing', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn(),
      process: { env: {} },
    }
    await expect(getIdToken({ context, registry }))
      .rejects.toThrow(IdTokenGitHubWorkflowIncorrectPermissionsError)
  })
  // Both request variables are required; the token alone is insufficient.
  test('throws error when only ACTIONS_ID_TOKEN_REQUEST_TOKEN is set', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn(),
      process: { env: { ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token' } },
    }
    await expect(getIdToken({ context, registry }))
      .rejects.toThrow(IdTokenGitHubWorkflowIncorrectPermissionsError)
  })
  // ...and the URL alone is insufficient too.
  test('throws error when only ACTIONS_ID_TOKEN_REQUEST_URL is set', async () => {
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: jest.fn() as IdTokenContext['fetch'],
      globalInfo: jest.fn(),
      process: { env: { ACTIONS_ID_TOKEN_REQUEST_URL: 'https://example.com' } },
    }
    await expect(getIdToken({ context, registry }))
      .rejects.toThrow(IdTokenGitHubWorkflowIncorrectPermissionsError)
  })
  // Happy path on GitHub Actions: GET to the request URL with the registry
  // host as the `audience` query parameter and a Bearer request token.
  test('fetches ID token from GitHub Actions successfully', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ value: 'fetched-id-token' }),
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    const result = await getIdToken({ context, registry })
    expect(result).toBe('fetched-id-token')
    expect(mockFetch).toHaveBeenCalledTimes(1)
    expect(mockFetch).toHaveBeenCalledWith(
      'https://actions.example.com/token?audience=npm%3Aregistry.npmjs.org',
      expect.objectContaining({
        headers: {
          Accept: 'application/json',
          Authorization: 'Bearer request-token',
        },
        method: 'GET',
      } as Partial<IdTokenFetchOptions>)
    )
  })
  // fetchRetry*/fetchTimeout options map onto the fetch client's
  // `retry`/`timeout` shape.
  test('passes fetch options correctly', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ value: 'token' }),
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    const options = {
      fetchRetries: 3,
      fetchRetryFactor: 2,
      fetchRetryMaxtimeout: 60000,
      fetchRetryMintimeout: 1000,
      fetchTimeout: 30000,
    }
    await getIdToken({ context, registry, options })
    expect(mockFetch).toHaveBeenCalledTimes(1)
    expect(mockFetch).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        retry: {
          factor: options.fetchRetryFactor,
          maxTimeout: options.fetchRetryMaxtimeout,
          minTimeout: options.fetchRetryMintimeout,
          retries: options.fetchRetries,
        },
        timeout: options.fetchTimeout,
      } as Partial<IdTokenFetchOptions>)
    )
  })
  // The request is logged through the injected `globalInfo` with method, URL,
  // status, and a duration derived from two Date.now() samples (1500 - 1000).
  test('logs fetch information via globalInfo', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ value: 'token' }),
    }))
    const mockGlobalInfo = jest.fn()
    let dateIndex = 0
    const mockDateNowTable = [1000, 1500]
    const mockDateNow = jest.fn(() => {
      const result = mockDateNowTable[dateIndex]
      dateIndex += 1
      return result
    })
    const context: IdTokenContext = {
      Date: { now: mockDateNow },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: mockGlobalInfo,
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await getIdToken({ context, registry })
    expect(mockDateNow).toHaveBeenCalledTimes(2)
    expect(mockGlobalInfo).toHaveBeenCalledWith('GET https://actions.example.com/token?audience=npm%3Aregistry.npmjs.org 200 500ms')
  })
  // Non-2xx from the Actions token endpoint → invalid-response error.
  test('throws error when fetch response is not ok', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 401,
      json: async () => ({ code: 'UNAUTHORIZED', message: 'Unauthorized' }),
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await expect(getIdToken({ context, registry })).rejects.toThrow(IdTokenGitHubInvalidResponseError)
  })
  // A 200 whose body fails to parse → json-interrupted error.
  test('throws error when JSON parsing fails', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => {
        throw new Error('JSON parse error')
      },
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await expect(getIdToken({ context, registry })).rejects.toThrow(IdTokenGitHubJsonInterruptedError)
  })
  // Valid JSON lacking the `value` field → invalid-value error.
  test('throws error when JSON response is missing value field', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({}),
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await expect(getIdToken({ context, registry })).rejects.toThrow(IdTokenGitHubJsonInvalidValueError)
  })
  // A non-string `value` is rejected with the same error class.
  test('throws error when JSON response value is not a string', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ value: 123 }),
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await expect(getIdToken({ context, registry })).rejects.toThrow(IdTokenGitHubJsonInvalidValueError)
  })
  // A JSON `null` body is likewise an invalid value
  // (guards against `typeof null === 'object'`).
  test('throws error when JSON response is null', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => null,
    }))
    const context: IdTokenContext = {
      Date: { now: jest.fn(() => 1000) },
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      globalInfo: jest.fn(),
      process: {
        env: {
          ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'request-token',
          ACTIONS_ID_TOKEN_REQUEST_URL: 'https://actions.example.com/token',
        },
      },
    }
    await expect(getIdToken({ context, registry })).rejects.toThrow(IdTokenGitHubJsonInvalidValueError)
  })
})

View File

@@ -0,0 +1,482 @@
import { jest } from '@jest/globals'
import {
type ProvenanceContext,
type ProvenanceFetchOptions,
ProvenanceMalformedIdTokenError,
ProvenanceInsufficientInformationError,
ProvenanceFailedToFetchVisibilityError,
determineProvenance,
} from '../src/oidc/provenance.js'
describe('determineProvenance', () => {
  const registry = 'https://registry.npmjs.org'
  const packageName = '@pnpm/test-package'
  const authToken = 'test-auth-token'
  // Helper to create a valid JWT-like token: base64url header + payload plus a
  // dummy signature segment. Only the payload is inspected by the code under test.
  function createIdToken (payload: Record<string, unknown>): string {
    const header = { alg: 'RS256', typ: 'JWT' }
    const headerB64 = Buffer.from(JSON.stringify(header)).toString('base64url')
    const payloadB64 = Buffer.from(JSON.stringify(payload)).toString('base64url')
    return `${headerB64}.${payloadB64}.signature`
  }
  // An ID token without the three dot-separated JWT segments is rejected
  // before any network request is made.
  test('throws ProvenanceMalformedIdTokenError when idToken is malformed (no dots)', async () => {
    const mockFetch = jest.fn() as ProvenanceContext['fetch']
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    await expect(determineProvenance({
      authToken,
      idToken: 'not-a-jwt-token',
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceMalformedIdTokenError)
    expect(mockFetch).not.toHaveBeenCalled()
  })
  // A token with a dot but missing the payload segment is equally malformed.
  test('throws ProvenanceMalformedIdTokenError when idToken has only one part', async () => {
    const mockFetch = jest.fn() as ProvenanceContext['fetch']
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    await expect(determineProvenance({
      authToken,
      idToken: 'header.',
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceMalformedIdTokenError)
  })
  // GitHub Actions: provenance requires a public repository
  // (`repository_visibility` claim); private repos fail fast, no fetch.
  test('throws ProvenanceInsufficientInformationError for GitHub Actions with non-public repository', async () => {
    const mockFetch = jest.fn() as ProvenanceContext['fetch']
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'private' })
    await expect(determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceInsufficientInformationError)
    expect(mockFetch).not.toHaveBeenCalled()
  })
  // GitLab: the equivalent claim is `project_visibility`.
  test('throws ProvenanceInsufficientInformationError for GitLab with non-public project', async () => {
    const mockFetch = jest.fn() as ProvenanceContext['fetch']
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: true },
      fetch: mockFetch,
      process: { env: { SIGSTORE_ID_TOKEN: 'token' } },
    }
    const idToken = createIdToken({ project_visibility: 'private' })
    await expect(determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceInsufficientInformationError)
    expect(mockFetch).not.toHaveBeenCalled()
  })
  // GitLab additionally requires SIGSTORE_ID_TOKEN in the environment.
  test('throws ProvenanceInsufficientInformationError for GitLab without SIGSTORE_ID_TOKEN', async () => {
    const mockFetch = jest.fn() as ProvenanceContext['fetch']
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ project_visibility: 'public' })
    await expect(determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceInsufficientInformationError)
    expect(mockFetch).not.toHaveBeenCalled()
  })
  // Happy path on GitHub Actions: public repo + registry reports the package
  // as public → provenance is enabled. Also verifies the visibility request
  // (URL object, GET, JSON Accept, Bearer auth).
  test('returns true when package is public in GitHub Actions', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ public: true }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const result = await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    expect(result).toBe(true)
    expect(mockFetch).toHaveBeenCalledTimes(1)
    const expectedOptions = expect.objectContaining({
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${authToken}`,
      },
      method: 'GET',
    } as Partial<ProvenanceFetchOptions>)
    expect(mockFetch).toHaveBeenCalledWith(
      expect.any(URL),
      expectedOptions
    )
    expect(mockFetch).toHaveBeenCalledWith(
      expect.objectContaining({
        href: expect.stringContaining(`/-/package/${encodeURIComponent(packageName)}/visibility`),
      }),
      expectedOptions
    )
  })
  // Happy path on GitLab: public project + SIGSTORE_ID_TOKEN present.
  test('returns true when package is public in GitLab', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ public: true }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: false, GITLAB: true },
      fetch: mockFetch,
      process: { env: { SIGSTORE_ID_TOKEN: 'token' } },
    }
    const idToken = createIdToken({ project_visibility: 'public' })
    const result = await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    expect(result).toBe(true)
    expect(mockFetch).toHaveBeenCalledTimes(1)
  })
  // Registry says the package is private → no provenance (undefined), not an error.
  test('returns undefined when package visibility is not public', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ public: false }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const result = await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    expect(result).toBeUndefined()
  })
  // A visibility response without the `public` field is treated as not public.
  test('returns undefined when visibility response is missing public field', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({}),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const result = await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    expect(result).toBeUndefined()
  })
  // fetchRetry*/fetchTimeout options map onto the fetch client's
  // `retry`/`timeout` shape.
  test('passes fetch options correctly', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ public: true }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const options = {
      fetchRetries: 4,
      fetchRetryFactor: 2.5,
      fetchRetryMaxtimeout: 90000,
      fetchRetryMintimeout: 1500,
      fetchTimeout: 40000,
    }
    await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
      options,
    })
    expect(mockFetch).toHaveBeenCalledTimes(1)
    expect(mockFetch).toHaveBeenCalledWith(
      expect.anything(),
      expect.objectContaining({
        retry: {
          factor: options.fetchRetryFactor,
          maxTimeout: options.fetchRetryMaxtimeout,
          minTimeout: options.fetchRetryMintimeout,
          retries: options.fetchRetries,
        },
        timeout: options.fetchTimeout,
      } as Partial<ProvenanceFetchOptions>)
    )
  })
  // Non-2xx from the visibility endpoint → dedicated error carrying the
  // status and the registry's error payload.
  test('throws ProvenanceFailedToFetchVisibilityError when fetch fails', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 404,
      json: async () => ({ code: 'NOT_FOUND', message: 'Package not found' }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    await expect(determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })).rejects.toThrow(ProvenanceFailedToFetchVisibilityError)
    // NOTE(review): this second invocation repeats the call above just to get a
    // promise handle for property assertions — the first call could likely be
    // dropped. Behavior is unaffected either way.
    const promise = determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    await expect(promise).rejects.toBeInstanceOf(ProvenanceFailedToFetchVisibilityError)
    await expect(promise).rejects.toHaveProperty(['status'], 404)
    await expect(promise).rejects.toHaveProperty(['packageName'], packageName)
    await expect(promise).rejects.toHaveProperty(['registry'], registry)
    await expect(promise).rejects.toHaveProperty(['errorResponse', 'code'], 'NOT_FOUND')
    await expect(promise).rejects.toHaveProperty(['errorResponse', 'message'], 'Package not found')
    await expect(promise).rejects.toMatchObject({ message: expect.stringContaining('NOT_FOUND: Package not found') })
  })
  // Payload with only `code`: the message contains the code but no
  // "code: message" separator.
  test('handles visibility fetch error with only code', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 401,
      json: async () => ({ code: 'UNAUTHORIZED' }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const promise = determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    await expect(promise).rejects.toBeInstanceOf(ProvenanceFailedToFetchVisibilityError)
    await expect(promise).rejects.toMatchObject({ message: expect.stringContaining('UNAUTHORIZED') })
    await expect(promise).rejects.toMatchObject({ message: expect.not.stringContaining(': ') })
  })
  // Payload with only `message`: the message text is surfaced on its own.
  test('handles visibility fetch error with only message', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 500,
      json: async () => ({ message: 'Internal server error' }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const promise = determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    await expect(promise).rejects.toBeInstanceOf(ProvenanceFailedToFetchVisibilityError)
    await expect(promise).rejects.toMatchObject({ message: expect.stringContaining('Internal server error') })
  })
  // Empty error payload → generic "an unknown error" wording.
  test('handles visibility fetch error with no error details', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 503,
      json: async () => ({}),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const promise = determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    await expect(promise).rejects.toBeInstanceOf(ProvenanceFailedToFetchVisibilityError)
    await expect(promise).rejects.toMatchObject({ message: expect.stringContaining('an unknown error') })
  })
  // Unparseable error body: the status is kept and `errorResponse` is left
  // undefined rather than masking the HTTP failure with a parse error.
  test('handles visibility fetch error when JSON parsing fails', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: false,
      status: 500,
      json: async () => {
        throw new Error('JSON parse error')
      },
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const promise = determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    await expect(promise).rejects.toBeInstanceOf(ProvenanceFailedToFetchVisibilityError)
    await expect(promise).rejects.toHaveProperty(['status'], 500)
    await expect(promise).rejects.toHaveProperty(['errorResponse'], undefined)
  })
  // Scoped package names must appear percent-encoded in the visibility URL.
  test('encodes package name in URL', async () => {
    const mockFetch = jest.fn(async () => ({
      ok: true,
      status: 200,
      json: async () => ({ public: true }),
    }))
    const context: ProvenanceContext = {
      ciInfo: { GITHUB_ACTIONS: true },
      fetch: mockFetch,
      process: { env: {} },
    }
    const idToken = createIdToken({ repository_visibility: 'public' })
    const packageName = '@scope/package'
    await determineProvenance({
      authToken,
      idToken,
      packageName,
      registry,
      context,
    })
    expect(mockFetch.mock.calls).toStrictEqual([[
      expect.any(URL),
      expect.anything(),
    ]])
    expect(mockFetch.mock.calls).toStrictEqual([[
      expect.objectContaining({
        href: expect.stringContaining(encodeURIComponent(packageName)),
      }),
      expect.anything(),
    ]])
  })
})

View File

@@ -0,0 +1,57 @@
import { optionsWithOtpEnv } from '../src/otpEnv.js'
describe('optionsWithOtpEnv', () => {
  // Verifies precedence between the `otp` CLI option and the PNPM_CONFIG_OTP
  // environment variable: an explicit --otp always wins, and the input object
  // is returned untouched (same reference) unless the env var must be merged in.
  test('returns the same unchanged options when neither --otp nor PNPM_CONFIG_OTP is defined', () => {
    const input: Record<string, string> = {
      foo: 'hello',
      bar: 'world',
    }
    const expectedOutput = { ...input }
    const actualOutput = optionsWithOtpEnv(input, {})
    expect(actualOutput).toBe(input)
    expect(actualOutput).toStrictEqual(expectedOutput)
  })
  // Fixed typo in the title: the flag is `--otp`, not `--opt`.
  test('returns the same unchanged options when --otp is defined without PNPM_CONFIG_OTP', () => {
    const otp = 'example one-time password'
    const input: Record<string, string> = {
      foo: 'hello',
      bar: 'world',
      otp,
    }
    const expectedOutput = { ...input }
    const actualOutput = optionsWithOtpEnv(input, {})
    expect(actualOutput).toBe(input)
    expect(actualOutput).toStrictEqual(expectedOutput)
    expect(actualOutput.otp).toBe(otp)
  })
  // Fixed typo in the title: the flag is `--otp`, not `--opt`.
  // The CLI-provided otp must take precedence over the environment variable.
  test('returns the same unchanged options when --otp is defined with PNPM_CONFIG_OTP', () => {
    const otp = 'example one-time password'
    const input: Record<string, string> = {
      foo: 'hello',
      bar: 'world',
      otp,
    }
    const expectedOutput = { ...input }
    const PNPM_CONFIG_OTP = 'different one-time password'
    const actualOutput = optionsWithOtpEnv(input, { PNPM_CONFIG_OTP })
    expect(actualOutput).toBe(input)
    expect(actualOutput).toStrictEqual(expectedOutput)
    expect(actualOutput.otp).toBe(otp)
    expect(actualOutput.otp).not.toBe(PNPM_CONFIG_OTP)
  })
  // Only when --otp is absent does the env var flow in — producing a new
  // object rather than mutating the caller's input.
  test('returns an options with otp when PNPM_CONFIG_OTP is defined without --otp', () => {
    const input: Record<string, string> = {
      foo: 'hello',
      bar: 'world',
    }
    const PNPM_CONFIG_OTP = 'one-time password from env'
    const expectedOutput = { ...input, otp: PNPM_CONFIG_OTP }
    const actualOutput = optionsWithOtpEnv(input, { PNPM_CONFIG_OTP })
    expect(actualOutput).not.toBe(input)
    expect(actualOutput).toStrictEqual(expectedOutput)
    expect(actualOutput.otp).toBe(PNPM_CONFIG_OTP)
  })
})

View File

@@ -274,7 +274,9 @@ test('publish: package with all possible fields in publishConfig', async () => {
name: 'test-publish-config',
version: '1.0.0',
bin: './published-bin.js',
bin: {
'test-publish-config': './published-bin.js',
},
main: './published.js',
module: './published.mjs',
types: './published-types.d.ts',
@@ -836,32 +838,37 @@ test('publish: with specified publish branch name', async () => {
}, [])
})
test('publish: exit with non-zero code when publish tgz', async () => {
test('publish: errors when publishing a non-existing tgz', async () => {
prepare({
name: 'test-publish-package.json',
version: '0.0.2',
})
const result = await publish.handler({
const promise = publish.handler({
...DEFAULT_OPTS,
argv: { original: ['publish', './non-exists.tgz', '--no-git-checks'] },
dir: process.cwd(),
gitChecks: false,
}, [
'./non-exists.tgz',
])
expect(result?.exitCode).not.toBe(0)
// NOTE: normally this should be a PnpmError, but we'd like to keep the code
// simple so we just let the internal functions throw error for now.
await expect(promise).rejects.toHaveProperty(['code'], 'ENOENT')
await expect(promise).rejects.toHaveProperty(['path'], expect.stringContaining('non-exists.tgz'))
})
test('publish: provenance', async () => {
// This test doesn't work. Verdaccio doesn't support OIDC, neither does local environment.
test.skip('publish: provenance', async () => {
prepare({
name: 'test-publish-package.json',
name: 'test-publish-package-oidc.json',
version: '0.0.2',
})
await publish.handler({
...DEFAULT_OPTS,
provenance: true,
argv: { original: ['publish', '--provenance'] },
dir: process.cwd(),
}, [])

View File

@@ -280,7 +280,7 @@ test('recursive publish writes publish summary', async () => {
}
})
test('when publish some package throws an error, exit code should be non-zero', async () => {
test('errors on fake registry', async () => {
preparePackages([
{
name: '@pnpmtest/test-recursive-publish-project-5',
@@ -292,16 +292,26 @@ test('when publish some package throws an error, exit code should be non-zero',
},
])
// Throw ENEEDAUTH error when publish.
fs.writeFileSync('.npmrc', 'registry=https://__fake_npm_registry__.com', 'utf8')
const fakeRegistry = 'https://__fake_npm_registry__.com'
const result = await publish.handler({
const promise = publish.handler({
...DEFAULT_OPTS,
...await filterPackagesFromDir(process.cwd(), []),
rawConfig: {
...DEFAULT_OPTS.rawConfig,
registry: fakeRegistry,
},
registries: {
...DEFAULT_OPTS.registries,
default: fakeRegistry,
},
dir: process.cwd(),
recursive: true,
force: true,
}, [])
expect(result?.exitCode).toBe(1)
// NOTE: normally this should be a PnpmError, but we'd like to keep the code
// simple so we just let the internal functions throw error for now.
await expect(promise).rejects.toHaveProperty(['code'], 'ENOTFOUND')
await expect(promise).rejects.toHaveProperty(['hostname'], '__fake_npm_registry__.com')
})

View File

@@ -0,0 +1,53 @@
import {
type NormalizedRegistryUrl,
type RegistryConfigKey,
type SupportedRegistryUrlInfo,
allRegistryConfigKeys,
parseSupportedRegistryUrl,
} from '../src/registryConfigKeys.js'
describe('parseSupportedRegistryUrl', () => {
type Case = [string, SupportedRegistryUrlInfo | undefined]
const createValue = (
normalizedUrl: NormalizedRegistryUrl,
longestConfigKey: RegistryConfigKey
): SupportedRegistryUrlInfo => ({ normalizedUrl, longestConfigKey })
test.each([
['https://example.com/foo/bar/', createValue('https://example.com/foo/bar/', '//example.com/foo/bar/')],
['https://example.com/foo/bar', createValue('https://example.com/foo/bar/', '//example.com/foo/bar/')],
['http://example.com/foo/bar/', createValue('http://example.com/foo/bar/', '//example.com/foo/bar/')],
['http://example.com/foo/bar', createValue('http://example.com/foo/bar/', '//example.com/foo/bar/')],
['https://example.com/', createValue('https://example.com/', '//example.com/')],
['https://example.com', createValue('https://example.com/', '//example.com/')],
['http://example.com/', createValue('http://example.com/', '//example.com/')],
['http://example.com', createValue('http://example.com/', '//example.com/')],
['ftp://example.com/', undefined],
['sftp://example.com/', undefined],
['file:///example.tgz', undefined],
] as Case[])('%p → %p', (registryUrl, registryInfo) => {
expect(parseSupportedRegistryUrl(registryUrl)).toStrictEqual(registryInfo)
})
})
// Tests for the allRegistryConfigKeys generator: it enumerates config keys
// from the most specific path down to the bare hostname, and rejects
// malformed keys when the generator is first advanced.
describe('allRegistryConfigKeys', () => {
  test('lists all keys from longest to shortest', () => {
    const keys = [...allRegistryConfigKeys('//example.com/foo/bar/')]
    expect(keys).toStrictEqual([
      '//example.com/foo/bar/',
      '//example.com/foo/',
      '//example.com/',
    ])
  })
  test('rejects keys without hostname', () => {
    const pull = () => allRegistryConfigKeys('///').next()
    expect(pull).toThrow(new RangeError('Registry config key cannot be without hostname'))
  })
  test('rejects keys that do not start with double slash', () => {
    // Generators are lazy, so the validation error only fires on .next().
    const advance = (key: RegistryConfigKey) => () => allRegistryConfigKeys(key).next()
    expect(advance('https://example.com' as RegistryConfigKey))
      .toThrow(new RangeError('The string "https://example.com" is not a valid registry config key'))
    expect(advance('' as RegistryConfigKey))
      .toThrow(new RangeError('The string "" is not a valid registry config key'))
  })
})

View File

@@ -1,48 +0,0 @@
import { removePnpmSpecificOptions } from '../lib/publish.js'
// Unit tests for removePnpmSpecificOptions, which strips pnpm-only CLI flags
// (and, where applicable, their values) from an argv list, while leaving npm
// options and positional arguments such as tarball paths untouched.
describe('removePnpmSpecificOptions', () => {
  // Asserts that filtering `argv` produces exactly `remaining`.
  const expectFiltered = (argv: string[], remaining: string[]): void => {
    expect(removePnpmSpecificOptions(argv)).toEqual(remaining)
  }
  it('should remove --no-git-checks', () => {
    expectFiltered(['--no-git-checks', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should preserve tarball path when using --no-git-checks', () => {
    expectFiltered(['--no-git-checks', './tarball-name.tgz'], ['./tarball-name.tgz'])
  })
  it('should remove --embed-readme', () => {
    expectFiltered(['--embed-readme', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should remove --no-embed-readme', () => {
    expectFiltered(['--no-embed-readme', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should remove --publish-branch with its value', () => {
    expectFiltered(['--publish-branch', 'main', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should remove --publish-branch without value (next is another option)', () => {
    // The value is only consumed when it does not look like another flag.
    expectFiltered(['--publish-branch', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should remove --npm-path with its value', () => {
    expectFiltered(['--npm-path', '/usr/bin/npm', '--tag', 'latest'], ['--tag', 'latest'])
  })
  it('should preserve npm options', () => {
    expectFiltered(
      ['--tag', 'latest', '--access', 'public', '--dry-run'],
      ['--tag', 'latest', '--access', 'public', '--dry-run']
    )
  })
  it('should handle complex case with multiple options', () => {
    expectFiltered(['--no-git-checks', '--tag', 'latest', './tarball.tgz'], ['--tag', 'latest', './tarball.tgz'])
  })
})

View File

@@ -4,6 +4,7 @@ import execa from 'execa'
const REGISTRY = `http://localhost:${REGISTRY_MOCK_PORT}`
export const DEFAULT_OPTS = {
authInfos: {},
argv: {
original: [],
},
@@ -41,6 +42,7 @@ export const DEFAULT_OPTS = {
sort: true,
cacheDir: '../cache',
strictSsl: false,
sslConfigs: {},
userAgent: 'pnpm',
userConfig: {},
useRunningStoreServer: false,

View File

@@ -39,9 +39,6 @@
{
"path": "../../exec/lifecycle"
},
{
"path": "../../exec/run-npm"
},
{
"path": "../../fs/packlist"
},
@@ -49,7 +46,7 @@
"path": "../../hooks/pnpmfile"
},
{
"path": "../../network/auth-header"
"path": "../../network/fetch"
},
{
"path": "../../packages/error"