diff --git a/.commitlintrc.js b/.commitlintrc.js
index 5b0b1a5232728..e9c80b9247279 100644
--- a/.commitlintrc.js
+++ b/.commitlintrc.js
@@ -5,6 +5,7 @@ module.exports = {
rules: {
'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'deps', 'chore']],
'header-max-length': [2, 'always', 80],
- 'subject-case': [0, 'always', ['lower-case', 'sentence-case', 'start-case']],
+ 'subject-case': [0],
+ 'body-max-line-length': [0],
},
}
diff --git a/.eslintrc.js b/.eslintrc.js
index 4d943ad316745..b9ba04d34368a 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -11,8 +11,10 @@ const localConfigs = readdir(__dirname)
module.exports = {
root: true,
ignorePatterns: [
+ 'tap-testdir*/',
'docs/**',
'smoke-tests/**',
+ 'mock-globals/**',
'mock-registry/**',
'workspaces/**',
],
diff --git a/.eslintrc.local.js b/.eslintrc.local.js
new file mode 100644
index 0000000000000..2dce9d2badc08
--- /dev/null
+++ b/.eslintrc.local.js
@@ -0,0 +1,37 @@
+const { resolve, relative } = require('path')
+
+// Create an override to lockdown a file to es6 syntax only
+// and only allow it to require an allowlist of files
+const rel = (p) => relative(__dirname, resolve(__dirname, p))
+const braces = (a) => a.length > 1 ? `{${a.map(rel).join(',')}}` : a[0]
+
+const es6Files = (e) => Object.entries(e).map(([file, allow]) => ({
+ files: `./${rel(file)}`,
+ parserOptions: {
+ ecmaVersion: 6,
+ },
+ rules: Array.isArray(allow) ? {
+ 'node/no-restricted-require': ['error', [{
+ name: ['/**', `!${__dirname}/${braces(allow)}`],
+ message: `This file can only require: ${allow.join(',')}`,
+ }]],
+ } : {},
+}))
+
+module.exports = {
+ rules: {
+ 'no-console': 'error',
+ },
+ overrides: es6Files({
+ 'index.js': ['lib/cli.js'],
+ 'bin/npm-cli.js': ['lib/cli.js'],
+ 'lib/cli.js': ['lib/cli/validate-engines.js'],
+ 'lib/cli/validate-engines.js': ['package.json'],
+ // TODO: This file should also have its requires restricted as well since it
+ // is an entry point but it currently pulls in config definitions which have
+ // a large require graph, so that is not currently feasible. A future config
+ // refactor should keep that in mind and see if only config definitions can
+ // be exported in a way that is compatible with ES6.
+ 'bin/npx-cli.js': null,
+ }),
+}
diff --git a/.eslintrc.local.json b/.eslintrc.local.json
deleted file mode 100644
index 2ff50f91ec326..0000000000000
--- a/.eslintrc.local.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "rules": {
- "no-console": "error"
- }
-}
diff --git a/.gitattributes b/.gitattributes
index ef4b94f9e45f9..5d3dbc3b3ac65 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,2 +1,27 @@
-/node_modules/** linguist-generated=false
-/package-lock.json linguist-generated=false
+# normalize all line endings by default
+* text=auto
+
+# our shell/bin scripts always need to be LF
+/bin/* text eol=lf
+/workspaces/arborist/bin/index.js text eol=lf
+/configure text eol=lf
+
+# our cmd scripts always need to be CRLF
+/bin/**/*.cmd text eol=crlf
+
+# ignore all line endings in node_modules since we don't control that
+/node_modules/** -text
+
+# the files we write should be LF so they can be generated cross platform
+/node_modules/.gitignore text eol=lf
+/workspaces/arborist/test/fixtures/.gitignore text eol=lf
+/DEPENDENCIES.md text eol=lf
+/AUTHORS text eol=lf
+
+# fixture tarballs should be treated as binary
+/workspaces/*/test/fixtures/**/*.tgz binary
+
+# these hint to GitHub to show these files as not generated so they default to
+# showing the full diff in pull requests
+/node_modules/** linguist-generated=false
+/package-lock.json linguist-generated=false
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
new file mode 100644
index 0000000000000..f285bcce4c81e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -0,0 +1,63 @@
+name: 🐞 Bug
+description: File a bug/issue against the latest version of npm
+title: "[BUG] "
+labels: [Bug, Needs Triage]
+body:
+- type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem.
+ options:
+ - label: I have searched the existing issues
+ required: true
+- type: checkboxes
+ attributes:
+ label: This issue exists in the latest npm version
+ description: Please make sure you have installed the latest npm and verified it is still an issue.
+ options:
+ - label: I am using the latest npm
+ required: true
+- type: textarea
+ attributes:
+ label: Current Behavior
+ description: A clear & concise description of what you're experiencing.
+ validations:
+ required: false
+- type: textarea
+ attributes:
+ label: Expected Behavior
+ description: A clear & concise description of what you expected to happen.
+ validations:
+ required: false
+- type: textarea
+ attributes:
+ label: Steps To Reproduce
+ description: Steps to reproduce the behavior.
+ value: |
+ 1. In this environment...
+ 2. With this config...
+ 3. Run '...'
+ 4. See error...
+ validations:
+ required: false
+- type: textarea
+ attributes:
+ label: Environment
+ description: |
+ examples:
+ - **`npm -v`**: **npm**: 10.0.0
+ - **`node -v`**: **Node.js**: 18.0.0
+ - **OS Name**: Ubuntu 20.04
+ - **System Model Name**: Macbook Pro
+ - **`npm config ls`**: `; "user" config from ...`
+ value: |
+ - npm:
+ - Node.js:
+ - OS Name:
+ - System Model Name:
+ - npm config:
+ ```ini
+ ; copy and paste output from `npm config ls` here
+ ```
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/bug_8.yml b/.github/ISSUE_TEMPLATE/bug_8.yml
deleted file mode 100644
index f6855c4deba48..0000000000000
--- a/.github/ISSUE_TEMPLATE/bug_8.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: 🐞 Bug v8
-description: File a bug/issue against v8.x
-title: "[BUG] "
-labels: [Bug, Needs Triage, Release 8.x]
-body:
-- type: checkboxes
- attributes:
- label: Is there an existing issue for this?
- description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem.
- options:
- - label: I have searched the existing issues
- required: true
-- type: checkboxes
- attributes:
- label: This issue exists in the latest npm version
- description: Please make sure you have installed the latest npm and verified it is still an issue.
- options:
- - label: I am using the latest npm
- required: true
-- type: textarea
- attributes:
- label: Current Behavior
- description: A clear & concise description of what you're experiencing.
- validations:
- required: false
-- type: textarea
- attributes:
- label: Expected Behavior
- description: A clear & concise description of what you expected to happen.
- validations:
- required: false
-- type: textarea
- attributes:
- label: Steps To Reproduce
- description: Steps to reproduce the behavior.
- value: |
- 1. In this environment...
- 2. With this config...
- 3. Run '...'
- 4. See error...
- validations:
- required: false
-- type: textarea
- attributes:
- label: Environment
- description: |
- examples:
- - **`npm -v`**: **npm**: 7.6.3
- - **`node -v`**: **Node.js**: 13.14.0
- - **OS Name**: Ubuntu 20.04
- - **System Model Name**: Macbook Pro
- - **`npm config ls`**: `; "user" config from ...`
- value: |
- - npm:
- - Node.js:
- - OS Name:
- - System Model Name:
- - npm config:
- ```ini
- ; copy and paste output from `npm config ls` here
- ```
- validations:
- required: false
diff --git a/.github/ISSUE_TEMPLATE/bug_9.yml b/.github/ISSUE_TEMPLATE/bug_9.yml
deleted file mode 100644
index c29c0e3ba89de..0000000000000
--- a/.github/ISSUE_TEMPLATE/bug_9.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: 🐞 Bug v9
-description: File a bug/issue against v9.x
-title: "[BUG] "
-labels: [Bug, Needs Triage, Release 9.x]
-body:
-- type: checkboxes
- attributes:
- label: Is there an existing issue for this?
- description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem.
- options:
- - label: I have searched the existing issues
- required: true
-- type: checkboxes
- attributes:
- label: This issue exists in the latest npm version
- description: Please make sure you have installed the latest npm and verified it is still an issue.
- options:
- - label: I am using the latest npm
- required: true
-- type: textarea
- attributes:
- label: Current Behavior
- description: A clear & concise description of what you're experiencing.
- validations:
- required: false
-- type: textarea
- attributes:
- label: Expected Behavior
- description: A clear & concise description of what you expected to happen.
- validations:
- required: false
-- type: textarea
- attributes:
- label: Steps To Reproduce
- description: Steps to reproduce the behavior.
- value: |
- 1. In this environment...
- 2. With this config...
- 3. Run '...'
- 4. See error...
- validations:
- required: false
-- type: textarea
- attributes:
- label: Environment
- description: |
- examples:
- - **`npm -v`**: **npm**: 7.6.3
- - **`node -v`**: **Node.js**: 13.14.0
- - **OS Name**: Ubuntu 20.04
- - **System Model Name**: Macbook Pro
- - **`npm config ls`**: `; "user" config from ...`
- value: |
- - npm:
- - Node.js:
- - OS Name:
- - System Model Name:
- - npm config:
- ```ini
- ; copy and paste output from `npm config ls` here
- ```
- validations:
- required: false
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 07e3b979df202..8bcac1c6dfdd9 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -4,7 +4,7 @@ contact_links:
url: https://github.community/c/software-development/47
about: Find/file tickets with the community
- name: ⭐️ Feature Request
- url: https://github.com/npm/feedback
+ url: https://github.com/orgs/community/discussions/categories/npm
about: Add your request or discuss the project w/ the community
- name: 📃 RFC
url: https://github.com/npm/rfcs
diff --git a/.github/actions/create-check/action.yml b/.github/actions/create-check/action.yml
new file mode 100644
index 0000000000000..aa24a5b0f0581
--- /dev/null
+++ b/.github/actions/create-check/action.yml
@@ -0,0 +1,52 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+name: 'Create Check'
+inputs:
+ name:
+ required: true
+ token:
+ required: true
+ sha:
+ required: true
+ check-name:
+ default: ''
+outputs:
+ check-id:
+ value: ${{ steps.create-check.outputs.check_id }}
+runs:
+ using: "composite"
+ steps:
+ - name: Get Workflow Job
+ uses: actions/github-script@v7
+ id: workflow
+ env:
+ JOB_NAME: "${{ inputs.name }}"
+ SHA: "${{ inputs.sha }}"
+ with:
+ result-encoding: string
+ script: |
+ const { repo: { owner, repo}, runId, serverUrl } = context
+ const { JOB_NAME, SHA } = process.env
+
+ const job = await github.rest.actions.listJobsForWorkflowRun({
+ owner,
+ repo,
+ run_id: runId,
+ per_page: 100
+ }).then(r => r.data.jobs.find(j => j.name.endsWith(JOB_NAME)))
+
+ return [
+        `This check is associated with ${serverUrl}/${owner}/${repo}/commit/${SHA}.`,
+ 'Run logs:',
+ job?.html_url || `could not be found for a job ending with: "${JOB_NAME}"`,
+ ].join(' ')
+ - name: Create Check
+ uses: LouisBrunner/checks-action@v1.6.0
+ id: create-check
+ with:
+ token: ${{ inputs.token }}
+ sha: ${{ inputs.sha }}
+ status: in_progress
+ name: ${{ inputs.check-name || inputs.name }}
+ output: |
+ {"summary":"${{ steps.workflow.outputs.result }}"}
diff --git a/.github/actions/install-latest-npm/action.yml b/.github/actions/install-latest-npm/action.yml
new file mode 100644
index 0000000000000..8339dbf03882d
--- /dev/null
+++ b/.github/actions/install-latest-npm/action.yml
@@ -0,0 +1,58 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+name: 'Install Latest npm'
+description: 'Install the latest version of npm compatible with the Node version'
+inputs:
+ node:
+ description: 'Current Node version'
+ required: true
+runs:
+ using: "composite"
+ steps:
+ # node 10/12/14 ship with npm@6, which is known to fail when updating itself in windows
+ - name: Update Windows npm
+ if: |
+ runner.os == 'Windows' && (
+ startsWith(inputs.node, 'v10.') ||
+ startsWith(inputs.node, 'v12.') ||
+ startsWith(inputs.node, 'v14.')
+ )
+ shell: cmd
+ run: |
+ curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz
+ tar xf npm-7.5.4.tgz
+ cd package
+ node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz
+ cd ..
+ rmdir /s /q package
+ - name: Install Latest npm
+ shell: bash
+ env:
+ NODE_VERSION: ${{ inputs.node }}
+ working-directory: ${{ runner.temp }}
+ run: |
+ MATCH=""
+ SPECS=("latest" "next-10" "next-9" "next-8" "next-7" "next-6")
+
+ echo "node@$NODE_VERSION"
+
+ for SPEC in ${SPECS[@]}; do
+ ENGINES=$(npm view npm@$SPEC --json | jq -r '.engines.node')
+ echo "Checking if node@$NODE_VERSION satisfies npm@$SPEC ($ENGINES)"
+
+ if npx semver -r "$ENGINES" "$NODE_VERSION" > /dev/null; then
+ MATCH=$SPEC
+ echo "Found compatible version: npm@$MATCH"
+ break
+ fi
+ done
+
+ if [ -z $MATCH ]; then
+ echo "Could not find a compatible version of npm for node@$NODE_VERSION"
+ exit 1
+ fi
+
+ npm i --prefer-online --no-fund --no-audit -g npm@$MATCH
+ - name: npm Version
+ shell: bash
+ run: npm -v
diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml
index fa272ea63ee22..589177f79660c 100644
--- a/.github/workflows/audit.yml
+++ b/.github/workflows/audit.yml
@@ -18,18 +18,22 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps -- --package-lock
+ run: node scripts/resetdeps.js --package-lock
- name: Run Production Audit
run: node . audit --omit=dev
- name: Run Full Audit
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
deleted file mode 100644
index 018eeae7e4974..0000000000000
--- a/.github/workflows/benchmark.yml
+++ /dev/null
@@ -1,79 +0,0 @@
-name: Benchmark - CLI
-
-on:
- pull_request:
- branches:
- - '*'
- paths:
- - lib/**
- issue_comment:
- types:
- - created
- - edited
-
-jobs:
- trigger-benchmark:
- runs-on: ubuntu-latest
- steps:
- - name: Incoming Pull Request
- if: |
- github.event_name == 'pull_request' || (
- github.event_name == 'issue_comment' &&
- github.event.issue.pull_request &&
- github.event.issue.state == 'open' &&
- startsWith(github.event.comment.body, '@npm-cli-bot benchmark this')
- )
- env:
- # gh cli uses these env vars for owner/repo/token
- GH_REPO: "npm/benchmarks"
- GITHUB_TOKEN: ${{ secrets.BENCHMARK_DISPATCH_TOKEN }}
- run: |
- EVENT_NAME="${{ github.event_name }}"
- OWNER="${{ github.event.repository.owner.login }}"
- REPO="${{ github.event.repository.name }}"
- PR=""
-
- if [[ "$EVENT_NAME" == "pull_request" ]]; then
- if [[ "$GITHUB_TOKEN" == "" ]]; then
- echo "No auth - from fork pull request, exiting"
- exit 0
- fi
- PR="${{ github.event.pull_request.number }}"
- else
- PR="${{ github.event.issue.number }}"
- SENDER="${{ github.event.comment.user.login }}"
- ROLE=$(gh api repos/${OWNER}/${REPO}/collaborators/${SENDER}/permission -q '.permission')
-
- if [[ "$ROLE" != "admin" ]]; then
- echo "${SENDER} is ${ROLE}, not an admin, exiting"
- exit 0
- fi
-
- # add emoji to comment if user is an admin to signal
- # benchmark is running
- COMMENT_NODE_ID="${{ github.event.comment.node_id }}"
- QUERY='mutation ($inputData:AddReactionInput!) {
- addReaction (input:$inputData) {
- reaction { content }
- }
- }'
- echo '{
- "query": "'${QUERY}'",
- "variables": {
- "inputData": {
- "subjectId": "'"${COMMENT_NODE_ID}"'",
- "content": "ROCKET"
- }
- }
- }' | gh api graphql --input -
- fi
-
- EVENT="${EVENT_NAME} ${OWNER}/${REPO}#${PR}"
- echo '{
- "event_type": "'"$EVENT"'",
- "client_payload": {
- "pr_id": "'"$PR"'",
- "repo": "'"$REPO"'",
- "owner": "'"$OWNER"'"
- }
- }' | gh api repos/{owner}/{repo}/dispatches --input -
diff --git a/.github/workflows/ci-libnpmaccess.yml b/.github/workflows/ci-libnpmaccess.yml
index e73e76f6cf013..b44d964b93aa4 100644
--- a/.github/workflows/ci-libnpmaccess.yml
+++ b/.github/workflows/ci-libnpmaccess.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmaccess/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmaccess/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmaccess
+ run: npm run lint --ignore-scripts -w libnpmaccess
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmaccess
+ run: npm run postlint --ignore-scripts -w libnpmaccess
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmaccess
+ run: npm test --ignore-scripts -w libnpmaccess
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmdiff.yml b/.github/workflows/ci-libnpmdiff.yml
index 02dd24d48c718..dbebd2c95d0be 100644
--- a/.github/workflows/ci-libnpmdiff.yml
+++ b/.github/workflows/ci-libnpmdiff.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmdiff/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmdiff/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmdiff
+ run: npm run lint --ignore-scripts -w libnpmdiff
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmdiff
+ run: npm run postlint --ignore-scripts -w libnpmdiff
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmdiff
+ run: npm test --ignore-scripts -w libnpmdiff
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmexec.yml b/.github/workflows/ci-libnpmexec.yml
index 5f73f3bf05b9c..e51248c69481a 100644
--- a/.github/workflows/ci-libnpmexec.yml
+++ b/.github/workflows/ci-libnpmexec.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmexec/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmexec/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmexec
+ run: npm run lint --ignore-scripts -w libnpmexec
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmexec
+ run: npm run postlint --ignore-scripts -w libnpmexec
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmexec
+ run: npm test --ignore-scripts -w libnpmexec
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmfund.yml b/.github/workflows/ci-libnpmfund.yml
index 114446ace98fe..b628fe60e3d3f 100644
--- a/.github/workflows/ci-libnpmfund.yml
+++ b/.github/workflows/ci-libnpmfund.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmfund/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmfund/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmfund
+ run: npm run lint --ignore-scripts -w libnpmfund
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmfund
+ run: npm run postlint --ignore-scripts -w libnpmfund
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmfund
+ run: npm test --ignore-scripts -w libnpmfund
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmhook.yml b/.github/workflows/ci-libnpmhook.yml
index 414e122591cca..8512c98767407 100644
--- a/.github/workflows/ci-libnpmhook.yml
+++ b/.github/workflows/ci-libnpmhook.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmhook/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmhook/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmhook
+ run: npm run lint --ignore-scripts -w libnpmhook
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmhook
+ run: npm run postlint --ignore-scripts -w libnpmhook
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmhook
+ run: npm test --ignore-scripts -w libnpmhook
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmorg.yml b/.github/workflows/ci-libnpmorg.yml
index 89140eed3cd11..3cd3c4b13040e 100644
--- a/.github/workflows/ci-libnpmorg.yml
+++ b/.github/workflows/ci-libnpmorg.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmorg/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmorg/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmorg
+ run: npm run lint --ignore-scripts -w libnpmorg
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmorg
+ run: npm run postlint --ignore-scripts -w libnpmorg
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmorg
+ run: npm test --ignore-scripts -w libnpmorg
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmpack.yml b/.github/workflows/ci-libnpmpack.yml
index 2f9e787941395..ba21f86ff8f72 100644
--- a/.github/workflows/ci-libnpmpack.yml
+++ b/.github/workflows/ci-libnpmpack.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmpack/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmpack/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmpack
+ run: npm run lint --ignore-scripts -w libnpmpack
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmpack
+ run: npm run postlint --ignore-scripts -w libnpmpack
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmpack
+ run: npm test --ignore-scripts -w libnpmpack
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmpublish.yml b/.github/workflows/ci-libnpmpublish.yml
index cbd72f52d4f58..768bf8526a3ff 100644
--- a/.github/workflows/ci-libnpmpublish.yml
+++ b/.github/workflows/ci-libnpmpublish.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmpublish/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmpublish/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmpublish
+ run: npm run lint --ignore-scripts -w libnpmpublish
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmpublish
+ run: npm run postlint --ignore-scripts -w libnpmpublish
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmpublish
+ run: npm test --ignore-scripts -w libnpmpublish
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmsearch.yml b/.github/workflows/ci-libnpmsearch.yml
index 7c11951b8c858..991ad7b103f85 100644
--- a/.github/workflows/ci-libnpmsearch.yml
+++ b/.github/workflows/ci-libnpmsearch.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmsearch/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmsearch/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmsearch
+ run: npm run lint --ignore-scripts -w libnpmsearch
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmsearch
+ run: npm run postlint --ignore-scripts -w libnpmsearch
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmsearch
+ run: npm test --ignore-scripts -w libnpmsearch
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmteam.yml b/.github/workflows/ci-libnpmteam.yml
index c89c09fcf12b1..5ba1b481ee998 100644
--- a/.github/workflows/ci-libnpmteam.yml
+++ b/.github/workflows/ci-libnpmteam.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmteam/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmteam/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmteam
+ run: npm run lint --ignore-scripts -w libnpmteam
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmteam
+ run: npm run postlint --ignore-scripts -w libnpmteam
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmteam
+ run: npm test --ignore-scripts -w libnpmteam
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-libnpmversion.yml b/.github/workflows/ci-libnpmversion.yml
index 6fa4a45b4c4dd..17b656654582d 100644
--- a/.github/workflows/ci-libnpmversion.yml
+++ b/.github/workflows/ci-libnpmversion.yml
@@ -9,8 +9,8 @@ on:
- workspaces/libnpmversion/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/libnpmversion/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w libnpmversion
+ run: npm run lint --ignore-scripts -w libnpmversion
- name: Post Lint
- run: node . run postlint --ignore-scripts -w libnpmversion
+ run: npm run postlint --ignore-scripts -w libnpmversion
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w libnpmversion
+ run: npm test --ignore-scripts -w libnpmversion
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-npmcli-arborist.yml b/.github/workflows/ci-npmcli-arborist.yml
index e1a7f82a04549..95026777fa969 100644
--- a/.github/workflows/ci-npmcli-arborist.yml
+++ b/.github/workflows/ci-npmcli-arborist.yml
@@ -9,8 +9,8 @@ on:
- workspaces/arborist/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/arborist/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w @npmcli/arborist
+ run: npm run lint --ignore-scripts -w @npmcli/arborist
- name: Post Lint
- run: node . run postlint --ignore-scripts -w @npmcli/arborist
+ run: npm run postlint --ignore-scripts -w @npmcli/arborist
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w @npmcli/arborist
+ run: npm test --ignore-scripts -w @npmcli/arborist
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-npmcli-config.yml b/.github/workflows/ci-npmcli-config.yml
index 72cc302e721ad..cb8db1ef0641d 100644
--- a/.github/workflows/ci-npmcli-config.yml
+++ b/.github/workflows/ci-npmcli-config.yml
@@ -9,8 +9,8 @@ on:
- workspaces/config/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- workspaces/config/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w @npmcli/config
+ run: npm run lint --ignore-scripts -w @npmcli/config
- name: Post Lint
- run: node . run postlint --ignore-scripts -w @npmcli/config
+ run: npm run postlint --ignore-scripts -w @npmcli/config
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,61 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
+ - 16.14.0
- 16.x
- 18.0.0
- 18.x
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.14.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 16.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.0.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w @npmcli/config
+ run: npm test --ignore-scripts -w @npmcli/config
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-npmcli-docs.yml b/.github/workflows/ci-npmcli-docs.yml
index 017d354a538fc..6585662edc54f 100644
--- a/.github/workflows/ci-npmcli-docs.yml
+++ b/.github/workflows/ci-npmcli-docs.yml
@@ -9,8 +9,8 @@ on:
- docs/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- docs/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w @npmcli/docs
+ run: npm run lint --ignore-scripts -w @npmcli/docs
- name: Post Lint
- run: node . run postlint --ignore-scripts -w @npmcli/docs
+ run: npm run postlint --ignore-scripts -w @npmcli/docs
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,35 +65,48 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 18.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w @npmcli/docs
+ run: npm test --ignore-scripts -w @npmcli/docs
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
compare-docs:
@@ -97,7 +118,7 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Git User
@@ -105,12 +126,20 @@ jobs:
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Build Docs
run: |
node . run build -w docs
@@ -121,7 +150,7 @@ jobs:
run: |
git clean -fd
git checkout ${{ github.event.pull_request.base.ref }}
- node . run resetdeps
+ node scripts/resetdeps.js
node . run build -w docs
- name: Diff Man
run: diff -r --color=always man/ man-update/ || true
diff --git a/.github/workflows/ci-npmcli-mock-globals.yml b/.github/workflows/ci-npmcli-mock-globals.yml
new file mode 100644
index 0000000000000..213a5d7cf8ec0
--- /dev/null
+++ b/.github/workflows/ci-npmcli-mock-globals.yml
@@ -0,0 +1,122 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+name: CI - @npmcli/mock-globals
+
+on:
+ workflow_dispatch:
+ pull_request:
+ paths:
+ - mock-globals/**
+ push:
+ branches:
+ - latest
+ - release/v*
+ paths:
+ - mock-globals/**
+ schedule:
+ # "At 09:00 UTC (02:00 PT) on Monday" https://crontab.guru/#0_9_*_*_1
+ - cron: "0 9 * * 1"
+
+jobs:
+ lint:
+ name: Lint
+ if: github.repository_owner == 'npm'
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ shell: bash
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Git User
+ run: |
+ git config --global user.email "npm-cli+bot@github.com"
+ git config --global user.name "npm CLI robot"
+ - name: Setup Node
+ uses: actions/setup-node@v4
+ id: node
+ with:
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
+ cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
+ - name: Reset Deps
+ run: node scripts/resetdeps.js
+ - name: Lint
+ run: npm run lint --ignore-scripts -w @npmcli/mock-globals
+ - name: Post Lint
+ run: npm run postlint --ignore-scripts -w @npmcli/mock-globals
+
+ test:
+ name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
+ if: github.repository_owner == 'npm'
+ strategy:
+ fail-fast: false
+ matrix:
+ platform:
+ - name: Linux
+ os: ubuntu-latest
+ shell: bash
+ - name: macOS
+ os: macos-latest
+ shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
+ - name: Windows
+ os: windows-latest
+ shell: cmd
+ node-version:
+ - 18.17.0
+ - 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.17.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.5.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
+ runs-on: ${{ matrix.platform.os }}
+ defaults:
+ run:
+ shell: ${{ matrix.platform.shell }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Git User
+ run: |
+ git config --global user.email "npm-cli+bot@github.com"
+ git config --global user.name "npm CLI robot"
+ - name: Setup Node
+ uses: actions/setup-node@v4
+ id: node
+ with:
+ node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
+ cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
+ - name: Reset Deps
+ run: node scripts/resetdeps.js
+ - name: Add Problem Matcher
+ run: echo "::add-matcher::.github/matchers/tap.json"
+ - name: Test
+ run: npm test --ignore-scripts -w @npmcli/mock-globals
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-npmcli-mock-registry.yml b/.github/workflows/ci-npmcli-mock-registry.yml
index db394f789c9f8..494f9ee4e14a4 100644
--- a/.github/workflows/ci-npmcli-mock-registry.yml
+++ b/.github/workflows/ci-npmcli-mock-registry.yml
@@ -9,8 +9,8 @@ on:
- mock-registry/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- mock-registry/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w @npmcli/mock-registry
+ run: npm run lint --ignore-scripts -w @npmcli/mock-registry
- name: Post Lint
- run: node . run postlint --ignore-scripts -w @npmcli/mock-registry
+ run: npm run postlint --ignore-scripts -w @npmcli/mock-registry
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,58 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
- - 16.x
- - 18.0.0
+ - 18.17.0
- 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.17.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.5.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w @npmcli/mock-registry
+ run: npm test --ignore-scripts -w @npmcli/mock-registry
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-npmcli-smoke-tests.yml b/.github/workflows/ci-npmcli-smoke-tests.yml
index d6103908fcff7..d322fa0394c13 100644
--- a/.github/workflows/ci-npmcli-smoke-tests.yml
+++ b/.github/workflows/ci-npmcli-smoke-tests.yml
@@ -9,8 +9,8 @@ on:
- smoke-tests/**
push:
branches:
- - main
- latest
+ - release/v*
paths:
- smoke-tests/**
schedule:
@@ -27,22 +27,30 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
- run: node . run lint --ignore-scripts -w @npmcli/smoke-tests
+ run: npm run lint --ignore-scripts -w @npmcli/smoke-tests
- name: Post Lint
- run: node . run postlint --ignore-scripts -w @npmcli/smoke-tests
+ run: npm run postlint --ignore-scripts -w @npmcli/smoke-tests
test:
name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -57,38 +65,58 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
- - 16.x
- - 18.0.0
+ - 18.17.0
- 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.17.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.5.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Install Latest npm
+ uses: ./.github/actions/install-latest-npm
+ with:
+ node: ${{ steps.node.outputs.node-version }}
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
- run: node . test --ignore-scripts -w @npmcli/smoke-tests
+ run: npm test --ignore-scripts -w @npmcli/smoke-tests
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
diff --git a/.github/workflows/ci-release.yml b/.github/workflows/ci-release.yml
index 10378175bb7d3..bd6cbf8ab4f1d 100644
--- a/.github/workflows/ci-release.yml
+++ b/.github/workflows/ci-release.yml
@@ -27,75 +27,44 @@ jobs:
run:
shell: bash
steps:
- - name: Get Workflow Job
- uses: actions/github-script@v6
- if: inputs.check-sha
- id: check-output
- env:
- JOB_NAME: "Lint All"
- MATRIX_NAME: ""
- with:
- script: |
- const { owner, repo } = context.repo
-
- const { data } = await github.rest.actions.listJobsForWorkflowRun({
- owner,
- repo,
- run_id: context.runId,
- per_page: 100
- })
-
- const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
- const job = data.jobs.find(j => j.name.endsWith(jobName))
- const jobUrl = job?.html_url
-
- const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`
-
- let summary = `This check is assosciated with ${shaUrl}\n\n`
-
- if (jobUrl) {
- summary += `For run logs, click here: ${jobUrl}`
- } else {
- summary += `Run logs could not be found for a job with name: "${jobName}"`
- }
-
- return { summary }
- - name: Create Check
- uses: LouisBrunner/checks-action@v1.3.1
- id: check
- if: inputs.check-sha
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- status: in_progress
- name: Lint All
- sha: ${{ inputs.check-sha }}
- output: ${{ steps.check-output.outputs.result }}
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
+ - name: Create Check
+ id: create-check
+ if: ${{ inputs.check-sha }}
+ uses: ./.github/actions/create-check
+ with:
+ name: "Lint All"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ sha: ${{ inputs.check-sha }}
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
run: node . run lint --ignore-scripts -ws -iwr --if-present
- name: Post Lint
run: node . run postlint --ignore-scripts -ws -iwr --if-present
- name: Conclude Check
- uses: LouisBrunner/checks-action@v1.3.1
- if: steps.check.outputs.check_id && always()
+ uses: LouisBrunner/checks-action@v1.6.0
+ if: always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
- check_id: ${{ steps.check.outputs.check_id }}
+ check_id: ${{ steps.create-check.outputs.check-id }}
test-all:
name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
@@ -110,95 +79,80 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
- - 16.x
- - 18.0.0
+ - 18.17.0
- 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.17.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.5.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- - name: Get Workflow Job
- uses: actions/github-script@v6
- if: inputs.check-sha
- id: check-output
- env:
- JOB_NAME: "Test All"
- MATRIX_NAME: " - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
- with:
- script: |
- const { owner, repo } = context.repo
-
- const { data } = await github.rest.actions.listJobsForWorkflowRun({
- owner,
- repo,
- run_id: context.runId,
- per_page: 100
- })
-
- const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
- const job = data.jobs.find(j => j.name.endsWith(jobName))
- const jobUrl = job?.html_url
-
- const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`
-
- let summary = `This check is assosciated with ${shaUrl}\n\n`
-
- if (jobUrl) {
- summary += `For run logs, click here: ${jobUrl}`
- } else {
- summary += `Run logs could not be found for a job with name: "${jobName}"`
- }
-
- return { summary }
- - name: Create Check
- uses: LouisBrunner/checks-action@v1.3.1
- id: check
- if: inputs.check-sha
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- status: in_progress
- name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
- sha: ${{ inputs.check-sha }}
- output: ${{ steps.check-output.outputs.result }}
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
+ - name: Create Check
+ id: create-check
+ if: ${{ inputs.check-sha }}
+ uses: ./.github/actions/create-check
+ with:
+ name: "Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ sha: ${{ inputs.check-sha }}
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
run: node . test --ignore-scripts -ws -iwr --if-present
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
- name: Conclude Check
- uses: LouisBrunner/checks-action@v1.3.1
- if: steps.check.outputs.check_id && always()
+ uses: LouisBrunner/checks-action@v1.6.0
+ if: always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
- check_id: ${{ steps.check.outputs.check_id }}
+ check_id: ${{ steps.create-check.outputs.check-id }}
smoke-publish:
+ # This can't be tested on Windows because our node_modules directory
+ # checks in symlinks which are not supported there. This should be
+ # fixed somehow, because this means some forms of local development
+ # are likely broken on Windows as well.
name: Smoke Publish - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
if: github.repository_owner == 'npm'
strategy:
@@ -208,94 +162,50 @@ jobs:
- name: Linux
os: ubuntu-latest
shell: bash
- - name: macOS
- os: macos-latest
- shell: bash
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
- - 16.x
- - 18.0.0
+ - 18.17.0
- 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- - name: Get Workflow Job
- uses: actions/github-script@v6
- if: inputs.check-sha
- id: check-output
- env:
- JOB_NAME: "Smoke Publish"
- MATRIX_NAME: " - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
- with:
- script: |
- const { owner, repo } = context.repo
-
- const { data } = await github.rest.actions.listJobsForWorkflowRun({
- owner,
- repo,
- run_id: context.runId,
- per_page: 100
- })
-
- const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
- const job = data.jobs.find(j => j.name.endsWith(jobName))
- const jobUrl = job?.html_url
-
- const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`
-
- let summary = `This check is assosciated with ${shaUrl}\n\n`
-
- if (jobUrl) {
- summary += `For run logs, click here: ${jobUrl}`
- } else {
- summary += `Run logs could not be found for a job with name: "${jobName}"`
- }
-
- return { summary }
- - name: Create Check
- uses: LouisBrunner/checks-action@v1.3.1
- id: check
- if: inputs.check-sha
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- status: in_progress
- name: Smoke Publish - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
- sha: ${{ inputs.check-sha }}
- output: ${{ steps.check-output.outputs.result }}
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
+ - name: Create Check
+ id: create-check
+ if: ${{ inputs.check-sha }}
+ uses: ./.github/actions/create-check
+ with:
+ name: "Smoke Publish - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
+ token: ${{ secrets.GITHUB_TOKEN }}
+ sha: ${{ inputs.check-sha }}
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
- - name: Pack
- run: |
- NPM_VERSION="$(node . --version)-$GITHUB_SHA.0"
- node . version $NPM_VERSION --ignore-scripts
- node scripts/publish.js --pack-destination=$RUNNER_TEMP
- node . install --global $RUNNER_TEMP/npm-$NPM_VERSION.tgz
- node . install -w smoke-tests --ignore-scripts --no-audit --no-fund
- node scripts/remove-files.js
- # call installed npm instead of local source since we are testing
- # the packed tarball that we just installed globally
- SMOKE_PUBLISH_NPM=1 npm test -w smoke-tests --ignore-scripts
+ run: node scripts/resetdeps.js
+ - name: Smoke Publish
+ run: ./scripts/smoke-publish-test.sh
- name: Conclude Check
- uses: LouisBrunner/checks-action@v1.3.1
- if: steps.check.outputs.check_id && always()
+ uses: LouisBrunner/checks-action@v1.6.0
+ if: steps.create-check.outputs.check-id && always()
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
- check_id: ${{ steps.check.outputs.check_id }}
+ check_id: ${{ steps.create-check.outputs.check-id }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 639bf4a0ce49a..5198cbeecb765 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -8,15 +8,17 @@ on:
paths-ignore:
- docs/**
- smoke-tests/**
+ - mock-globals/**
- mock-registry/**
- workspaces/**
push:
branches:
- - main
- latest
+ - release/v*
paths-ignore:
- docs/**
- smoke-tests/**
+ - mock-globals/**
- mock-registry/**
- workspaces/**
schedule:
@@ -33,18 +35,22 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Lint
run: node . run lint --ignore-scripts
- name: Post Lint
@@ -63,40 +69,56 @@ jobs:
- name: macOS
os: macos-latest
shell: bash
+ - name: macOS
+ os: macos-13
+ shell: bash
- name: Windows
os: windows-latest
shell: cmd
node-version:
- - 14.17.0
- - 14.x
- - 16.13.0
- - 16.x
- - 18.0.0
+ - 18.17.0
- 18.x
+ - 20.5.0
+ - 20.x
+ - 22.x
+ exclude:
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.17.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 18.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.5.0
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 20.x
+ - platform: { name: macOS, os: macos-13, shell: bash }
+ node-version: 22.x
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
node-version: ${{ matrix.node-version }}
+ check-latest: contains(matrix.node-version, '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Add Problem Matcher
run: echo "::add-matcher::.github/matchers/tap.json"
- name: Test
run: node . test --ignore-scripts
- name: Check Git Status
- if: matrix && matrix.platform.os != 'windows-latest'
run: node scripts/git-dirty.js
licenses:
@@ -108,18 +130,22 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Check Licenses
run: node . run licenses
@@ -132,19 +158,60 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Run Smoke Tests
run: node . test -w smoke-tests --ignore-scripts
- name: Check Git Status
run: node scripts/git-dirty.js
+
+ windows-shims:
+ name: Windows Shims Tests
+ runs-on: windows-latest
+ defaults:
+ run:
+ shell: cmd
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Git User
+ run: |
+ git config --global user.email "npm-cli+bot@github.com"
+ git config --global user.name "npm CLI robot"
+ - name: Setup Node
+ uses: actions/setup-node@v4
+ id: node
+ with:
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
+ cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
+ - name: Reset Deps
+ run: node scripts/resetdeps.js
+ - name: Setup WSL
+ uses: Vampire/setup-wsl@v2.0.1
+ - name: Setup Cygwin
+ uses: egor-tensin/setup-cygwin@v4.0.1
+ with:
+ install-dir: C:\cygwin64
+ - name: Run Windows Shims Tests
+ run: node . test --ignore-scripts -- test/bin/windows-shims.js --no-coverage
+ env:
+ WINDOWS_SHIMS_TEST: true
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 66b9498a685b9..1e05817c06c03 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -5,12 +5,12 @@ name: CodeQL
on:
push:
branches:
- - main
- latest
+ - release/v*
pull_request:
branches:
- - main
- latest
+ - release/v*
schedule:
# "At 10:00 UTC (03:00 PT) on Monday" https://crontab.guru/#0_10_*_*_1
- cron: "0 10 * * 1"
@@ -25,7 +25,7 @@ jobs:
security-events: write
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
diff --git a/.github/workflows/create-node-pr.yml b/.github/workflows/create-node-pr.yml
index f5cb5e88c8c2a..c903220dbd2ca 100644
--- a/.github/workflows/create-node-pr.yml
+++ b/.github/workflows/create-node-pr.yml
@@ -26,7 +26,7 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Git User
@@ -34,12 +34,16 @@ jobs:
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Checkout Node
uses: actions/checkout@v3
with:
diff --git a/.github/workflows/node-integration.yml b/.github/workflows/node-integration.yml
new file mode 100644
index 0000000000000..49beea2115d6f
--- /dev/null
+++ b/.github/workflows/node-integration.yml
@@ -0,0 +1,495 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+name: nodejs integration
+
+on:
+ workflow_call:
+ inputs:
+ nodeVersion:
+ description: 'nodejs version'
+ required: true
+ type: string
+ default: nightly
+ npmVersion:
+ description: 'npm version'
+ required: true
+ type: string
+ default: git
+ installFlags:
+ description: 'extra flags to pass to npm install'
+ required: false
+ type: string
+ default: ''
+
+ workflow_dispatch:
+ inputs:
+ nodeVersion:
+ description: 'nodejs version'
+ required: true
+ type: string
+ default: nightly
+ npmVersion:
+ description: 'npm version'
+ required: true
+ type: string
+ default: git
+ installFlags:
+ description: 'extra flags to pass to npm install'
+ required: false
+ type: string
+ default: ''
+
+jobs:
+ build-nodejs:
+ name: build nodejs@${{ inputs.nodeVersion }} npm@${{ inputs.npmVersion }}
+ runs-on: ubuntu-latest
+ outputs:
+ nodeVersion: ${{ steps.build-nodejs.outputs.nodeVersion }}
+ steps:
+ - name: setup ccache
+ uses: Chocobo1/setup-ccache-action@v1
+ with:
+ override_cache_key: nodejs-${{ inputs.nodeVersion }}
+ - name: build nodejs
+ id: build-nodejs
+ run: |
+ echo "::group::setup"
+ set -eo pipefail
+ npmDir="${RUNNER_TEMP}/npm"
+ sourceDir="${RUNNER_TEMP}/src"
+ targetDir="${RUNNER_TEMP}/build"
+ npmFile="${RUNNER_TEMP}/npm.tgz"
+ sourceFile="${RUNNER_TEMP}/source.tgz"
+ targetFile="${RUNNER_TEMP}/build.tgz"
+ echo "::endgroup::"
+
+ echo "::group::finding nodejs version matching ${{ inputs.nodeVersion }}"
+ if [[ "${{ inputs.nodeVersion }}" == "nightly" ]]; then
+ nodeVersion=$(curl -sSL https://nodejs.org/download/nightly/index.json | jq -r .[0].version)
+ nodeUrl="https://nodejs.org/download/nightly/${nodeVersion}/node-${nodeVersion}.tar.gz"
+ else
+ nodeVersion=$(curl -sSL https://nodejs.org/download/release/index.json | jq -r 'map(select(.version | test("^v${{ inputs.nodeVersion }}"))) | .[0].version')
+ if [[ -z "$nodeVersion" ]]; then
+ echo "::error ::unable to find released nodejs version matching: ${{ inputs.nodeVersion }}"
+ exit 1
+ fi
+ nodeUrl="https://nodejs.org/download/release/${nodeVersion}/node-${nodeVersion}.tar.gz"
+ fi
+ echo "nodeVersion=${nodeVersion}" >> $GITHUB_OUTPUT
+ echo "::endgroup::"
+
+ echo "::group::extracting source from $nodeUrl"
+ mkdir -p "$sourceDir"
+ curl -sSL "$nodeUrl" | tar xz -C "$sourceDir" --strip=1
+ echo "::endgroup::"
+
+ echo "::group::cloning npm"
+ mkdir -p "$npmDir"
+ git clone https://github.com/npm/cli.git "$npmDir"
+ npmVersion=$(cat "${npmDir}/package.json" | jq -r '"\(.version)-git"')
+ echo "::endgroup::"
+
+ if [[ "${{ inputs.npmVersion }}" != "git" ]]; then
+ npmVersion="${{ inputs.npmVersion }}"
+ npmVersion="${npmVersion#v}"
+ echo "::group::checking out npm@${npmVersion}"
+ pushd "$npmDir" >/dev/null
+ taggedSha=$(git show-ref --tags "v${npmVersion}" | cut -d' ' -f1)
+ git reset --hard "$taggedSha"
+ publishedSha=$(curl -sSL https://registry.npmjs.org/npm | jq -r --arg ver "$npmVersion" '.versions[$ver].gitHead')
+ if [[ "$taggedSha" != "$publishedSha" ]]; then
+ echo "::warning ::git tag ($taggedSha) differs from published tag ($publishedSha)"
+ fi
+ popd >/dev/null
+ echo "::endgroup::"
+ fi
+
+ echo "::group::packing npm release $npmVersion"
+ pushd "$npmDir" >/dev/null
+ node scripts/resetdeps.js
+ npmtarball="$(node . pack --loglevel=silent --json | jq -r .[0].filename)"
+ tar czf "$npmFile" -C "$npmDir" .
+ popd >/dev/null
+ echo "npm=$npmFile" >> $GITHUB_OUTPUT
+ echo "::endgroup::"
+
+ echo "::group::updating nodejs bundled npm"
+ rm -rf "${sourceDir}/deps/npm"
+ mkdir -p "${sourceDir}/deps/npm"
+ tar xfz "${npmDir}/${npmtarball}" -C "${sourceDir}/deps/npm" --strip=1
+ echo "::endgroup::"
+
+ echo "::group::packing nodejs source"
+ tar cfz "$sourceFile" -C "$sourceDir" .
+ echo "source=$sourceFile" >> $GITHUB_OUTPUT
+ echo "::endgroup::"
+
+ echo "::group::building nodejs"
+ mkdir -p "$targetDir"
+ pushd "$sourceDir" >/dev/null
+ ./configure --prefix="$targetDir"
+ make -j4 install
+ popd >/dev/null
+ echo "::endgroup::"
+
+ echo "::group::packing nodejs build"
+ tar cfz "$targetFile" -C "$targetDir" .
+ echo "build=$targetFile" >> $GITHUB_OUTPUT
+ echo "::endgroup::"
+ - name: upload artifacts
+ uses: actions/upload-artifact@v3
+ with:
+ name: nodejs-${{ steps.build-nodejs.outputs.nodeVersion }}
+ path: |
+ ${{ steps.build-nodejs.outputs.source }}
+ ${{ steps.build-nodejs.outputs.build }}
+ ${{ steps.build-nodejs.outputs.npm }}
+
+ test-nodejs:
+ name: test nodejs
+ runs-on: ubuntu-latest
+ needs:
+ - build-nodejs
+ steps:
+ - name: setup ccache
+ uses: Chocobo1/setup-ccache-action@v1
+ with:
+ override_cache_key: nodejs-${{ inputs.nodeVersion }}
+ - name: download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: nodejs-${{ needs.build-nodejs.outputs.nodeVersion }}
+ - name: test nodejs
+ run: |
+ set -e
+ tar xf source.tgz
+ ./configure
+ make -j4 test-only
+
+ test-npm:
+ name: test npm
+ runs-on: ubuntu-latest
+ needs:
+ - build-nodejs
+ steps:
+ - name: download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: nodejs-${{ needs.build-nodejs.outputs.nodeVersion }}
+ path: ${{ runner.temp }}
+ - name: install nodejs ${{ needs.build-nodejs.outputs.nodeVersion }}
+ id: install
+ run: |
+ set -e
+ mkdir -p "${RUNNER_TEMP}/node"
+ tar xf "${RUNNER_TEMP}/build.tgz" -C "${RUNNER_TEMP}/node"
+
+ mkdir -p "${RUNNER_TEMP}/npm"
+ tar xf "${RUNNER_TEMP}/npm.tgz" -C "${RUNNER_TEMP}/npm"
+
+ echo "${RUNNER_TEMP}/node/bin" >> $GITHUB_PATH
+ echo "cache=$(npm config get cache)" >> $GITHUB_OUTPUT
+ - name: setup npm cache
+ uses: actions/cache@v3
+ with:
+ path: ${{ steps.install.outputs.cache }}
+ key: npm-tests
+ - run: node --version
+ - run: npm --version
+ - name: test npm
+ run: |
+ echo "::group::setup"
+ set -e
+ cd "${RUNNER_TEMP}/npm"
+ echo "::endgroup::"
+
+ echo "::group::npm run resetdeps"
+ node scripts/resetdeps.js
+ echo "::endgroup::"
+
+ echo "::group::npm link"
+ node . link
+ echo "::endgroup::"
+
+ STEPEXIT=0
+ FINALEXIT=0
+
+ set +e
+ echo "::group::npm test"
+ node . test --ignore-scripts
+ STEPEXIT=$?
+ if [[ $STEPEXIT -ne 0 ]]; then
+ echo "::warning ::npm test failed, exit: $STEPEXIT"
+ FINALEXIT=$STEPEXIT
+ fi
+ echo "::endgroup::"
+
+ for workspace in $(npm query .workspace | jq -r .[].name); do
+ echo "::group::npm test -w $workspace"
+ node . test -w $workspace --if-present --ignore-scripts
+ STEPEXIT=$?
+ if [[ $STEPEXIT -ne 0 ]]; then
+ echo "::warning ::npm test -w $workspace failed, exit: $STEPEXIT"
+ FINALEXIT=$STEPEXIT
+ fi
+ echo "::endgroup::"
+ done
+
+ exit $FINALEXIT
+
+ generate-matrix:
+ name: generate matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.generate-matrix.outputs.matrix }}
+ steps:
+ - name: install dependencies
+ run: |
+ npm install --no-save --no-audit --no-fund citgm npm-package-arg
+ - name: generate matrix
+ id: generate-matrix
+ uses: actions/github-script@v6
+ env:
+ NODE_VERSION: "${{ inputs.nodeVersion }}"
+ INSTALL_FLAGS: "${{ inputs.installFlags }}"
+ with:
+ script: |
+ const { NODE_VERSION, INSTALL_FLAGS } = process.env
+
+ const { execSync } = require('child_process')
+ const npa = require('npm-package-arg')
+
+ const lookup = require('./node_modules/citgm/lib/lookup.json')
+
+ const matchesKeyword = (value) => {
+ const keywords = ['ubuntu', 'debian', 'linux', 'x86', '>=11', '>=12', '>=16', '>=17']
+ if (value === true ||
+ (typeof value === 'string' && keywords.includes(value)) ||
+ (Array.isArray(value) && keywords.some((keyword) => value.includes(keyword) || value.includes(true)))) {
+ return true
+ }
+
+ return false
+ }
+
+ // this is a manually updated list of packages that we know currently fail
+ const knownFailures = [
+ 'body-parser', // json parsing error difference
+ 'clinic', // unknown, lots of failures
+ 'ember-cli', // timeout in nodejs ci, one failing test for us that timed out
+ 'express', // body-parser is what actually fails, it's used in a test
+ 'https-proxy-agent', // looks ssl related
+ 'node-gyp', // one test consistently times out
+ 'resolve', // compares results to require.resolve and fails, also missing inspector/promises
+ 'uuid', // tests that crypto.getRandomValues throws but it doesn't
+ 'weak', // doesn't seem to build in node >12
+ 'mkdirp', // failing actions in own repo
+ 'undici', // test failure in node >=18, unable to root cause
+ ]
+
+ if (NODE_VERSION === '18') {
+ knownFailures.push('multer')
+ } else if (NODE_VERSION === '19') {
+ // empty block
+ } else if (NODE_VERSION === 'nightly') {
+ // fails in node 20, looks like a streams issue
+ knownFailures.push('fastify')
+ // esbuild barfs on node 20.0.0-pre
+ knownFailures.push('serialport')
+ }
+
+ // this is a manually updated list of packages that are flaky
+ const supplementalFlaky = [
+ 'pino', // flaky test test/transport/core.test.js:401
+ 'tough-cookie', // race condition test/node_util_fallback_test.js:87
+ ]
+
+ const matrix = []
+ for (const package in lookup) {
+ const meta = lookup[package]
+
+ // we omit npm from the matrix because its tests are run as their own job
+ if (matchesKeyword(meta.skip) || meta.yarn === true || package === 'npm') {
+ continue
+ }
+
+ const install_flags = ['--no-audit', '--no-fund']
+ if (meta.install) {
+ install_flags.push(meta.install.slice(1))
+ }
+ if (INSTALL_FLAGS) {
+ install_flags.push(INSTALL_FLAGS)
+ }
+ const context = JSON.parse(execSync(`npm show ${package} --json`))
+ const test = meta.scripts ? meta.scripts.map((script) => `npm run ${script}`) : ['npm test']
+
+ const repo = npa(meta.repo || context.repository.url).hosted
+ const details = {}
+ if (meta.useGitClone) {
+ details.repo = repo.https()
+ } else {
+ if (meta.ignoreGitHead) {
+ details.url = repo.tarball()
+ } else {
+ details.url = repo.tarball({ committish: context.gitHead })
+ }
+ }
+
+ const env = { ...meta.envVar, NODE_VERSION }
+ matrix.push({
+ package,
+ version: context.version,
+ env,
+ install_flags: install_flags.join(' '),
+ commands: [...test],
+ flaky: matchesKeyword(meta.flaky) || supplementalFlaky.includes(package),
+ knownFailure: knownFailures.includes(package),
+ ...details,
+ })
+ }
+ core.setOutput('matrix', matrix)
+
+ citgm:
+ name: citgm - ${{ matrix.package }}@${{ matrix.version }} ${{ matrix.flaky && '(flaky)' || '' }} ${{ matrix.knownFailure && '(known failure)' || '' }}
+ runs-on: ubuntu-latest
+ needs:
+ - build-nodejs
+ - generate-matrix
+ strategy:
+ fail-fast: false
+ matrix:
+ include: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
+ steps:
+ - name: download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: nodejs-${{ needs.build-nodejs.outputs.nodeVersion }}
+ path: ${{ runner.temp }}
+ - name: install nodejs ${{ needs.build-nodejs.outputs.nodeVersion }}
+ id: install
+ run: |
+ set -e
+ mkdir -p "${RUNNER_TEMP}/node"
+ tar xf "${RUNNER_TEMP}/build.tgz" -C "${RUNNER_TEMP}/node"
+ echo "nodedir=${RUNNER_TEMP}/node" >> $GITHUB_OUTPUT
+
+ echo "${RUNNER_TEMP}/node/bin" >> $GITHUB_PATH
+ echo "cache=$(npm config get cache)" >> $GITHUB_OUTPUT
+ - name: setup npm cache
+ uses: actions/cache@v3
+ with:
+ path: ${{ steps.install.outputs.cache }}
+ key: npm-${{ matrix.package }}
+ - run: node --version
+ - run: npm --version
+ - name: download source
+ id: download
+ run: |
+ set -eo pipefail
+ TARGET_DIR="${RUNNER_TEMP}/${{ matrix.package }}"
+ mkdir -p "$TARGET_DIR"
+ echo "target=$TARGET_DIR" >> $GITHUB_OUTPUT
+
+ if [[ -n "${{ matrix.repo }}" ]]; then
+ export GIT_TERMINAL_PROMPT=0
+ export GIT_SSH_COMMAND="ssh -oBatchMode=yes"
+ git clone --recurse-submodules "${{ matrix.repo }}" "$TARGET_DIR"
+ elif [[ -n "${{ matrix.url }}" ]]; then
+ curl -sSL "${{ matrix.url }}" | tar xz -C "$TARGET_DIR" --strip=1
+ fi
+
+ if [[ -f "${TARGET_DIR}/package-lock.json" ]]; then
+ lockfileVersion=$(cat "${TARGET_DIR}/package-lock.json" | jq .lockfileVersion)
+ echo "lockfileVersion=$lockfileVersion" >> $GITHUB_OUTPUT
+ fi
+ - name: npm install ${{ matrix.install_flags }}
+ id: npm-install
+ working-directory: ${{ steps.download.outputs.target }}
+ run: |
+ set +e
+ npm install --nodedir="${{steps.install.outputs.nodedir }}" ${{ matrix.install_flags }}
+ exitcode=$?
+ if [[ $exitcode -ne 0 && "${{ matrix.knownFailure }}" == "true" ]]; then
+ echo "::warning ::npm install failed, exit $exitcode"
+ echo "failed=true" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+ exit $exitcode
+ - name: verify lockfileVersion unchanged
+ working-directory: ${{ steps.download.outputs.target }}
+ if: ${{ steps.download.outputs.lockfileVersion && steps.npm-install.outputs.failed != 'true' }}
+ run: |
+ if [[ -f "package-lock.json" ]]; then
+ newLockfileVersion=$(cat "package-lock.json" | jq .lockfileVersion)
+ if [[ "$newLockfileVersion" -ne "${{ steps.download.outputs.lockfileVersion }}" ]]; then
+ if [[ "${{ steps.download.outputs.lockfileVersion }}" -ne 1 ]]; then
+ echo "::error ::lockfileVersion changed from ${{ steps.download.outputs.lockfileVersion }} to $newLockfileVersion"
+ exit 1
+ fi
+ fi
+ fi
+ - name: npm ls
+ working-directory: ${{ steps.download.outputs.target }}
+ if: ${{ steps.npm-install.outputs.failed != 'true' }}
+ run: |
+ npm ls
+ - name: ${{ join(matrix.commands, ' && ') }}
+ id: command
+ continue-on-error: true
+ timeout-minutes: 10
+ env: ${{ matrix.env }}
+ working-directory: ${{ steps.download.outputs.target }}
+ if: ${{ steps.npm-install.outputs.failed != 'true' }}
+ run: |
+ set +e
+ FINALEXIT=0
+ STEPEXIT=0
+
+ export npm_config_nodedir="${{ steps.install.outputs.nodedir }}"
+
+ # inlining some patches to make tests run
+ if [[ "${{ matrix.package }}" == "undici" ]]; then
+ sed -i.bak 's/--experimental-wasm-simd //' package.json
+ rm -f package.json.bak
+ sed -i.bak 's/--experimental-wasm-simd//' .taprc
+ rm -f .taprc.bak
+ fi
+
+ for row in $(echo '${{ toJSON(matrix.commands) }}' | jq -r '.[] | @base64'); do
+ FAILCOUNT=0
+ COMMAND=$(echo "$row" | base64 --decode)
+ echo "::group::$COMMAND"
+ $COMMAND
+ STEPEXIT=$?
+ if [[ $STEPEXIT -ne 0 ]]; then
+ FAILCOUNT=1
+ if [[ "${{ matrix.knownFailure }}" == "true" || "$NODE_VERSION" == "nightly" ]]; then
+ echo "::warning ::$COMMAND failed, exit: $STEPEXIT"
+ elif [[ "${{ matrix.flaky }}" ]]; then
+ while [[ $STEPEXIT -ne 0 && $FAILCOUNT -lt 3 ]]; do
+ $COMMAND
+ STEPEXIT=$?
+ if [[ $STEPEXIT -ne 0 ]]; then
+ ((FAILCOUNT=FAILCOUNT+1))
+ fi
+ done
+
+ if [[ $STEPEXIT -ne 0 ]]; then
+ echo "::warning ::$COMMAND still failing after $FAILCOUNT attempts, exit: $STEPEXIT"
+ fi
+ else
+ FINALEXIT=$STEPEXIT
+ echo "::error ::$COMMAND failed, exit: $STEPEXIT"
+ fi
+ fi
+ echo "::endgroup::"
+ done
+ exit $FINALEXIT
+ - name: Set conclusion
+ env: ${{ matrix.env }}
+ run: |
+ EXIT=1
+ if [[ "${{ steps.command.outcome }}" == "success" || "${{ matrix.flaky }}" == "true" || "${{ matrix.knownFailure }}" == "true" || $NODE_VERSION == "nightly" ]]; then
+ EXIT=0
+ fi
+ exit $EXIT
diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
index 3d008208833f3..2c27dec822336 100644
--- a/.github/workflows/pull-request.yml
+++ b/.github/workflows/pull-request.yml
@@ -20,7 +20,7 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Git User
@@ -28,18 +28,22 @@ jobs:
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Run Commitlint on Commits
id: commit
continue-on-error: true
- run: |
- npx --offline commitlint -V --from origin/${{ github.base_ref }} --to ${{ github.event.pull_request.head.sha }}
+ run: npx --offline commitlint -V --from 'origin/${{ github.base_ref }}' --to ${{ github.event.pull_request.head.sha }}
- name: Run Commitlint on PR Title
if: steps.commit.outcome == 'failure'
- run: |
- echo ${{ github.event.pull_request.title }} | npx --offline commitlint -V
+ env:
+ PR_TITLE: ${{ github.event.pull_request.title }}
+ run: echo "$PR_TITLE" | npx --offline commitlint -V
diff --git a/.github/workflows/release-integration.yml b/.github/workflows/release-integration.yml
new file mode 100644
index 0000000000000..cfb18e6abc8ba
--- /dev/null
+++ b/.github/workflows/release-integration.yml
@@ -0,0 +1,31 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+name: Release Integration
+
+on:
+ workflow_dispatch:
+ inputs:
+ releases:
+ required: true
+ type: string
+ description: 'A json array of releases. Required fields: publish: tagName, publishTag. publish check: pkgName, version'
+ workflow_call:
+ inputs:
+ releases:
+ required: true
+ type: string
+ description: 'A json array of releases. Required fields: publish: tagName, publishTag. publish check: pkgName, version'
+
+jobs:
+ publish:
+ strategy:
+ fail-fast: false
+ matrix:
+ nodeVersion:
+ - 18
+ - 20
+ - nightly
+ uses: ./.github/workflows/node-integration.yml
+ with:
+ nodeVersion: ${{ matrix.nodeVersion }}
+ npmVersion: ${{ fromJSON(inputs.releases)[0].version }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 36cb489fc469b..98c2034291478 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -3,10 +3,8 @@
name: Release
on:
- workflow_dispatch:
push:
branches:
- - main
- latest
- release/v*
@@ -19,12 +17,12 @@ jobs:
release:
outputs:
pr: ${{ steps.release.outputs.pr }}
- releases: ${{ steps.release.outputs.releases }}
- release-flags: ${{ steps.release.outputs.release-flags }}
- branch: ${{ steps.release.outputs.pr-branch }}
+ pr-branch: ${{ steps.release.outputs.pr-branch }}
pr-number: ${{ steps.release.outputs.pr-number }}
- comment-id: ${{ steps.pr-comment.outputs.result }}
- check-id: ${{ steps.check.outputs.check_id }}
+ pr-sha: ${{ steps.release.outputs.pr-sha }}
+ releases: ${{ steps.release.outputs.releases }}
+ comment-id: ${{ steps.create-comment.outputs.comment-id || steps.update-comment.outputs.comment-id }}
+ check-id: ${{ steps.create-check.outputs.check-id }}
name: Release
if: github.repository_owner == 'npm'
runs-on: ubuntu-latest
@@ -33,106 +31,74 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
- name: Release Please
id: release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: |
- npx --offline template-oss-release-please ${{ github.ref_name }} ${{ github.event_name }}
- - name: Post Pull Request Comment
+ run: npx --offline template-oss-release-please --branch="${{ github.ref_name }}" --backport="" --defaultTag="latest"
+ - name: Create Release Manager Comment Text
if: steps.release.outputs.pr-number
- uses: actions/github-script@v6
- id: pr-comment
- env:
- PR_NUMBER: ${{ steps.release.outputs.pr-number }}
- REF_NAME: ${{ github.ref_name }}
+ uses: actions/github-script@v7
+ id: comment-text
with:
+ result-encoding: string
script: |
- const { REF_NAME, PR_NUMBER } = process.env
- const repo = { owner: context.repo.owner, repo: context.repo.repo }
- const issue = { ...repo, issue_number: PR_NUMBER }
-
- const { data: workflow } = await github.rest.actions.getWorkflowRun({ ...repo, run_id: context.runId })
-
- let body = '## Release Manager\n\n'
-
- const comments = await github.paginate(github.rest.issues.listComments, issue)
- let commentId = comments?.find(c => c.user.login === 'github-actions[bot]' && c.body.startsWith(body))?.id
-
- body += `Release workflow run: ${workflow.html_url}\n\n#### Force CI to Update This Release\n\n`
- body += `This PR will be updated and CI will run for every non-\`chore:\` commit that is pushed to \`latest\`. `
- body += `To force CI to update this PR, run this command:\n\n`
- body += `\`\`\`\ngh workflow run release.yml -r ${REF_NAME}\n\`\`\``
-
- if (commentId) {
- await github.rest.issues.updateComment({ ...repo, comment_id: commentId, body })
- } else {
- const { data: comment } = await github.rest.issues.createComment({ ...issue, body })
- commentId = comment?.id
- }
-
- return commentId
- - name: Get Workflow Job
- uses: actions/github-script@v6
- if: steps.release.outputs.pr-sha
- id: check-output
- env:
- JOB_NAME: "Release"
- MATRIX_NAME: ""
+ const { runId, repo: { owner, repo } } = context
+ const { data: workflow } = await github.rest.actions.getWorkflowRun({ owner, repo, run_id: runId })
+            return ['## Release Manager', `Release workflow run: ${workflow.html_url}`].join('\n\n')
+ - name: Find Release Manager Comment
+ uses: peter-evans/find-comment@v2
+ if: steps.release.outputs.pr-number
+ id: found-comment
with:
- script: |
- const { owner, repo } = context.repo
-
- const { data } = await github.rest.actions.listJobsForWorkflowRun({
- owner,
- repo,
- run_id: context.runId,
- per_page: 100
- })
-
- const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
- const job = data.jobs.find(j => j.name.endsWith(jobName))
- const jobUrl = job?.html_url
-
- const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.release.outputs.pr-sha }}`
-
- let summary = `This check is assosciated with ${shaUrl}\n\n`
-
- if (jobUrl) {
- summary += `For run logs, click here: ${jobUrl}`
- } else {
- summary += `Run logs could not be found for a job with name: "${jobName}"`
- }
-
- return { summary }
+ issue-number: ${{ steps.release.outputs.pr-number }}
+ comment-author: 'github-actions[bot]'
+ body-includes: '## Release Manager'
+ - name: Create Release Manager Comment
+ id: create-comment
+ if: steps.release.outputs.pr-number && !steps.found-comment.outputs.comment-id
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ issue-number: ${{ steps.release.outputs.pr-number }}
+ body: ${{ steps.comment-text.outputs.result }}
+ - name: Update Release Manager Comment
+ id: update-comment
+ if: steps.release.outputs.pr-number && steps.found-comment.outputs.comment-id
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ steps.found-comment.outputs.comment-id }}
+ body: ${{ steps.comment-text.outputs.result }}
+ edit-mode: 'replace'
- name: Create Check
- uses: LouisBrunner/checks-action@v1.3.1
- id: check
+ id: create-check
+ uses: ./.github/actions/create-check
if: steps.release.outputs.pr-sha
with:
+ name: "Release"
token: ${{ secrets.GITHUB_TOKEN }}
- status: in_progress
- name: Release
sha: ${{ steps.release.outputs.pr-sha }}
- output: ${{ steps.check-output.outputs.result }}
update:
needs: release
outputs:
sha: ${{ steps.commit.outputs.sha }}
- check-id: ${{ steps.check.outputs.check_id }}
+ check-id: ${{ steps.create-check.outputs.check-id }}
name: Update - Release
if: github.repository_owner == 'npm' && needs.release.outputs.pr
runs-on: ubuntu-latest
@@ -141,29 +107,40 @@ jobs:
shell: bash
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- ref: ${{ needs.release.outputs.branch }}
+ ref: ${{ needs.release.outputs.pr-branch }}
- name: Setup Git User
run: |
git config --global user.email "npm-cli+bot@github.com"
git config --global user.name "npm CLI robot"
- name: Setup Node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
+ id: node
with:
- node-version: 18.x
+ node-version: 22.x
+ check-latest: contains('22.x', '.x')
cache: npm
+ - name: Check Git Status
+ run: node scripts/git-dirty.js
- name: Reset Deps
- run: node . run resetdeps
+ run: node scripts/resetdeps.js
+ - name: Create Release Manager Checklist Text
+ id: comment-text
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: node . exec --offline -- template-oss-release-manager --pr="${{ needs.release.outputs.pr-number }}" --backport="" --defaultTag="latest" --lockfile
+ - name: Append Release Manager Comment
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ needs.release.outputs.comment-id }}
+ body: ${{ steps.comment-text.outputs.result }}
+ edit-mode: 'append'
- name: Run Post Pull Request Actions
env:
- RELEASE_PR_NUMBER: ${{ needs.release.outputs.pr-number }}
- RELEASE_COMMENT_ID: ${{ needs.release.outputs.comment-id }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: |
- node . exec --offline -- template-oss-release-manager --lockfile=true
- node . run rp-pull-request --ignore-scripts -ws -iwr --if-present
+ run: node . run rp-pull-request --ignore-scripts -ws -iwr --if-present -- --pr="${{ needs.release.outputs.pr-number }}" --commentId="${{ needs.release.outputs.comment-id }}"
- name: Commit
id: commit
env:
@@ -171,53 +148,17 @@ jobs:
run: |
git commit --all --amend --no-edit || true
git push --force-with-lease
- echo "::set-output name=sha::$(git rev-parse HEAD)"
- - name: Get Workflow Job
- uses: actions/github-script@v6
- if: steps.commit.outputs.sha
- id: check-output
- env:
- JOB_NAME: "Update - Release"
- MATRIX_NAME: ""
- with:
- script: |
- const { owner, repo } = context.repo
-
- const { data } = await github.rest.actions.listJobsForWorkflowRun({
- owner,
- repo,
- run_id: context.runId,
- per_page: 100
- })
-
- const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
- const job = data.jobs.find(j => j.name.endsWith(jobName))
- const jobUrl = job?.html_url
-
- const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.commit.outputs.sha }}`
-
- let summary = `This check is assosciated with ${shaUrl}\n\n`
-
- if (jobUrl) {
- summary += `For run logs, click here: ${jobUrl}`
- } else {
- summary += `Run logs could not be found for a job with name: "${jobName}"`
- }
-
- return { summary }
+ echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
- name: Create Check
- uses: LouisBrunner/checks-action@v1.3.1
- id: check
- if: steps.commit.outputs.sha
+ id: create-check
+ uses: ./.github/actions/create-check
with:
+ name: "Update - Release"
+ check-name: "Release"
token: ${{ secrets.GITHUB_TOKEN }}
- status: in_progress
- name: Release
sha: ${{ steps.commit.outputs.sha }}
- output: ${{ steps.check-output.outputs.result }}
- name: Conclude Check
- uses: LouisBrunner/checks-action@v1.3.1
- if: needs.release.outputs.check-id && always()
+ uses: LouisBrunner/checks-action@v1.6.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
conclusion: ${{ job.status }}
@@ -229,7 +170,7 @@ jobs:
if: needs.release.outputs.pr
uses: ./.github/workflows/ci-release.yml
with:
- ref: ${{ needs.release.outputs.branch }}
+ ref: ${{ needs.release.outputs.pr-branch }}
check-sha: ${{ needs.update.outputs.sha }}
post-ci:
@@ -241,8 +182,8 @@ jobs:
run:
shell: bash
steps:
- - name: Get Needs Result
- id: needs-result
+ - name: Get CI Conclusion
+ id: conclusion
run: |
result=""
if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
@@ -252,17 +193,18 @@ jobs:
else
result="success"
fi
- echo "::set-output name=result::$result"
+ echo "result=$result" >> $GITHUB_OUTPUT
- name: Conclude Check
- uses: LouisBrunner/checks-action@v1.3.1
- if: needs.update.outputs.check-id && always()
+ uses: LouisBrunner/checks-action@v1.6.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
- conclusion: ${{ steps.needs-result.outputs.result }}
+ conclusion: ${{ steps.conclusion.outputs.result }}
check_id: ${{ needs.update.outputs.check-id }}
post-release:
needs: release
+ outputs:
+ comment-id: ${{ steps.create-comment.outputs.comment-id }}
name: Post Release - Release
if: github.repository_owner == 'npm' && needs.release.outputs.releases
runs-on: ubuntu-latest
@@ -270,21 +212,91 @@ jobs:
run:
shell: bash
steps:
- - name: Checkout
- uses: actions/checkout@v3
- - name: Setup Git User
- run: |
- git config --global user.email "npm-cli+bot@github.com"
- git config --global user.name "npm CLI robot"
- - name: Setup Node
- uses: actions/setup-node@v3
- with:
- node-version: 18.x
- cache: npm
- - name: Reset Deps
- run: node . run resetdeps
- - name: Run Post Release Actions
+ - name: Create Release PR Comment Text
+ id: comment-text
+ uses: actions/github-script@v7
env:
RELEASES: ${{ needs.release.outputs.releases }}
+ with:
+ result-encoding: string
+ script: |
+ const releases = JSON.parse(process.env.RELEASES)
+ const { runId, repo: { owner, repo } } = context
+ const issue_number = releases[0].prNumber
+ const runUrl = `https://github.com/${owner}/${repo}/actions/runs/${runId}`
+
+ return [
+ '## Release Workflow\n',
+ ...releases.map(r => `- \`${r.pkgName}@${r.version}\` ${r.url}`),
+ `- Workflow run: :arrows_counterclockwise: ${runUrl}`,
+ ].join('\n')
+ - name: Create Release PR Comment
+ id: create-comment
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ issue-number: ${{ fromJSON(needs.release.outputs.releases)[0].prNumber }}
+ body: ${{ steps.comment-text.outputs.result }}
+
+ release-integration:
+ needs: release
+ name: Release Integration
+ if: needs.release.outputs.releases
+ uses: ./.github/workflows/release-integration.yml
+ with:
+ releases: ${{ needs.release.outputs.releases }}
+
+ post-release-integration:
+ needs: [ release, release-integration, post-release ]
+ name: Post Release Integration - Release
+ if: github.repository_owner == 'npm' && needs.release.outputs.releases && always()
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ shell: bash
+ steps:
+ - name: Get Post Release Conclusion
+ id: conclusion
run: |
- node . run rp-release --ignore-scripts --if-present ${{ join(fromJSON(needs.release.outputs.release-flags), ' ') }}
+ if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
+ result="x"
+ elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
+ result="heavy_multiplication_x"
+ else
+ result="white_check_mark"
+ fi
+ echo "result=$result" >> $GITHUB_OUTPUT
+ - name: Find Release PR Comment
+ uses: peter-evans/find-comment@v2
+ id: found-comment
+ with:
+ issue-number: ${{ fromJSON(needs.release.outputs.releases)[0].prNumber }}
+ comment-author: 'github-actions[bot]'
+ body-includes: '## Release Workflow'
+ - name: Create Release PR Comment Text
+ id: comment-text
+ if: steps.found-comment.outputs.comment-id
+ uses: actions/github-script@v7
+ env:
+ RESULT: ${{ steps.conclusion.outputs.result }}
+ BODY: ${{ steps.found-comment.outputs.comment-body }}
+ with:
+ result-encoding: string
+ script: |
+ const { RESULT, BODY } = process.env
+ const body = [BODY.replace(/(Workflow run: :)[a-z_]+(:)/, `$1${RESULT}$2`)]
+ if (RESULT !== 'white_check_mark') {
+ body.push(':rotating_light::rotating_light::rotating_light:')
+ body.push([
+ '@npm/cli-team: The post-release workflow failed for this release.',
+ 'Manual steps may need to be taken after examining the workflow output.'
+ ].join(' '))
+ body.push(':rotating_light::rotating_light::rotating_light:')
+ }
+ return body.join('\n\n').trim()
+ - name: Update Release PR Comment
+ if: steps.comment-text.outputs.result
+ uses: peter-evans/create-or-update-comment@v3
+ with:
+ comment-id: ${{ steps.found-comment.outputs.comment-id }}
+ body: ${{ steps.comment-text.outputs.result }}
+ edit-mode: 'replace'
diff --git a/.gitignore b/.gitignore
index dd81bed4846dc..2a10ae9bd8888 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,8 @@
# ignore everything in the root
/*
+# transient test directories
+tap-testdir*/
# keep these
!**/.gitignore
@@ -38,8 +40,10 @@
!/SECURITY.md
!/tap-snapshots/
!/test/
+!/tsconfig.json
!/docs/
!/smoke-tests/
+!/mock-globals/
!/mock-registry/
!/workspaces/
/workspaces/*
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 354b45e27c90f..c9d6c5d3b823f 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,16 +1,16 @@
{
- ".": "9.2.0",
- "workspaces/arborist": "6.1.5",
- "workspaces/libnpmaccess": "7.0.1",
- "workspaces/libnpmdiff": "5.0.6",
- "workspaces/libnpmexec": "5.0.6",
- "workspaces/libnpmfund": "4.0.6",
- "workspaces/libnpmhook": "9.0.1",
- "workspaces/libnpmorg": "5.0.1",
- "workspaces/libnpmpack": "5.0.6",
- "workspaces/libnpmpublish": "7.0.6",
- "workspaces/libnpmsearch": "6.0.1",
- "workspaces/libnpmteam": "5.0.1",
- "workspaces/libnpmversion": "4.0.1",
- "workspaces/config": "6.1.0"
+ ".": "10.8.2",
+ "workspaces/arborist": "7.5.4",
+ "workspaces/libnpmaccess": "8.0.6",
+ "workspaces/libnpmdiff": "6.1.4",
+ "workspaces/libnpmexec": "8.1.3",
+ "workspaces/libnpmfund": "5.0.12",
+ "workspaces/libnpmhook": "10.0.5",
+ "workspaces/libnpmorg": "6.0.6",
+ "workspaces/libnpmpack": "7.0.4",
+ "workspaces/libnpmpublish": "9.0.9",
+ "workspaces/libnpmsearch": "7.0.6",
+ "workspaces/libnpmteam": "6.0.5",
+ "workspaces/libnpmversion": "6.0.3",
+ "workspaces/config": "8.3.4"
}
diff --git a/AUTHORS b/AUTHORS
index af0d5ed1a0c62..90a1d94834c8d 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -867,3 +867,81 @@ Andrew Dawes
sosoba
Aron
HenryNguyen5 <6404866+HenryNguyen5@users.noreply.github.com>
+Eric Mutta
+Peally <102741826+Peallyz@users.noreply.github.com>
+James Shaw
+Brian DeHamer
+Santosh Heigrujam
+Rohan Mukherjee
+Andreas Deininger
+Tuukka Hastrup
+David Tanner
+Jeff Mealo
+Kevin Rouchut
+Stafford Williams
+CharlieWONG
+James Henry
+Kashyap Kaki <30841403+kashyapkaki@users.noreply.github.com>
+Darryl Tec
+Michaël Bitard
+may <63159454+m4rch3n1ng@users.noreply.github.com>
+Rayyan Ul Haq <31252332+Rayyan98@users.noreply.github.com>
+DaviDevMod <98312056+DaviDevMod@users.noreply.github.com>
+Mike Ribbons
+Rahul <122141535+rahulio96@users.noreply.github.com>
+AaronHamilton965 <91709196+AaronHamilton965@users.noreply.github.com>
+Emmanuel Ferdman
+P-Chan
+Rahul
+Francesco Sardone
+joaootavios
+Saquib
+Dan Rose
+Yuku Kotani
+Vlad-Ștefan Harbuz <291640+vladh@users.noreply.github.com>
+siemhesda <143130929+siemhesda@users.noreply.github.com>
+Carl
+jpg619 <141764922+jpg619@users.noreply.github.com>
+Frazer Smith
+Aaron <2738518+NeonArray@users.noreply.github.com>
+Wes Todd
+Mike McCready <66998419+MikeMcC399@users.noreply.github.com>
+Piotr Kmieć <103869106+Gekuro@users.noreply.github.com>
+Santoshraj2 <143222321+Santoshraj2@users.noreply.github.com>
+Jamie Tanna
+三咲智子 Kevin Deng
+Manuel Spigolon
+Robin
+Juan Julián Merelo Guervós
+Le Michel <95184938+Ptitet@users.noreply.github.com>
+Zearin
+Jan T. Sott
+Daniel Kaplan
+Andrii Romasiun <35810911+Blaumaus@users.noreply.github.com>
+Rita Aktay
+GoodDaisy <90915921+GoodDaisy@users.noreply.github.com>
+Aleks Sobieraj
+Roberto Basile
+rveerd
+Jason Ho
+Christian Oliff
+Jamie King
+David LJ
+Uiolee <22849383+uiolee@users.noreply.github.com>
+Vinicius Lourenço <12551007+H4ad@users.noreply.github.com>
+roni-berlin <72336831+roni-berlin@users.noreply.github.com>
+Marc Bernard <59966492+mbtools@users.noreply.github.com>
+Erik Williamson
+Hong Xu
+Thomas Reggi
+Mottle
+klm-turing
+klm
+Avinal Kumar
+milaninfy <111582375+milaninfy@users.noreply.github.com>
+Reggi
+Norman Perrin
+Leo Balter <301201+leobalter@users.noreply.github.com>
+drew4237 <57016082+drew4237@users.noreply.github.com>
+AmirHossein Sakhravi
+Hiroo Ono <49257691+oikumene@users.noreply.github.com>
diff --git a/CHANGELOG.md b/CHANGELOG.md
index bdb6c046e49d2..0d1c704a2ea8d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,435 +1,1016 @@
# Changelog
-## [9.2.0](https://github.com/npm/cli/compare/v9.1.3...v9.2.0) (2022-12-07)
+## [10.8.2](https://github.com/npm/cli/compare/v10.8.1...v10.8.2) (2024-07-09)
-### Features
+### Bug Fixes
-* [`cf57ffa`](https://github.com/npm/cli/commit/cf57ffa90088fcf5b028cc02938baae6228b5a40) [#5888](https://github.com/npm/cli/pull/5888) discrete npm doctor commands (#5888) (@wraithgar)
+* [`3101a40`](https://github.com/npm/cli/commit/3101a4045bd7030f232d6cb2ae61339214968321) [#7631](https://github.com/npm/cli/pull/7631) limit concurrent open files during 'npm cache verify' (#7631) (@oikumene)
+* [`2273183`](https://github.com/npm/cli/commit/22731831e22011e32fa0ca12178e242c2ee2b33d) [#7595](https://github.com/npm/cli/pull/7595) outdated: fixed wanted range for alias with version range (#7595) (@milaninfy)
+* [`15be6dd`](https://github.com/npm/cli/commit/15be6dd33bfab8bdfaf8c3dece435d7139c1bf6d) [#7574](https://github.com/npm/cli/pull/7574) don't try parsing workspaces if none exist (@wraithgar)
-### Bug Fixes
+### Documentation
-* [`dfd5d46`](https://github.com/npm/cli/commit/dfd5d461e0ee2163e210cc136d2bb7873dfeb363) [#5932](https://github.com/npm/cli/pull/5932) ignore implicit workspaces for completion (#5932) (@wraithgar)
+* [`ac937d4`](https://github.com/npm/cli/commit/ac937d4f29b27fb877f79f33034ee1144c7202c4) [#7616](https://github.com/npm/cli/pull/7616) install: add save-peer flag (#7616) (@drew4237)
+* [`55639ef`](https://github.com/npm/cli/commit/55639efd2d3094ca0931ddf9276c93f6880cd6e6) [#7615](https://github.com/npm/cli/pull/7615) use git+https in package.com url examples (#7615) (@MikeMcC399)
+* [`93883bb`](https://github.com/npm/cli/commit/93883bb6459208a916584cad8c6c72a315cf32af) [#7582](https://github.com/npm/cli/pull/7582) Improve manpage section for `package.json` `funding` properties (#7582) (@kemitchell)
+* [`92e71e6`](https://github.com/npm/cli/commit/92e71e6b0c7889e243e6b54ef8b4eb9656de95f8) [#7576](https://github.com/npm/cli/pull/7576) fix links to community discussions (#7576) (@leobalter)
### Dependencies
-* [`2f2b146`](https://github.com/npm/cli/commit/2f2b1469565894ec777e6eb77fea7b607b797adb) [#5936](https://github.com/npm/cli/pull/5936) `npm-packlist@7.0.4` (#5936)
-* [`372d158`](https://github.com/npm/cli/commit/372d158d2637120600a95abee64355ed1cb6f990) [#5935](https://github.com/npm/cli/pull/5935) `minimatch@5.1.1` (#5935)
-* [`0e6c28b`](https://github.com/npm/cli/commit/0e6c28ba093f8c5d35df98afca28e842b247004b) [#5934](https://github.com/npm/cli/pull/5934) `ci-info@3.7.0` (#5934)
-* [`0a3fe00`](https://github.com/npm/cli/commit/0a3fe000e2723ae6fdb8b1d3154fd3835057c992) [#5933](https://github.com/npm/cli/pull/5933) `minipass@4.0.0`
-* [`6b77340`](https://github.com/npm/cli/commit/6b7734009ecd939fbb3d382cb92eb0cdbec7dcd3) `tar@6.1.13`
-* [`cf0a174`](https://github.com/npm/cli/commit/cf0a17407abc577c27420a1c8a4a0c08c7cefce9) `ssri@10.0.1`
-* [`3da9a1a`](https://github.com/npm/cli/commit/3da9a1a4ebcf1779035b5f9ae985c087f617efe3) `pacote@15.0.7`
-* [`fee9b66`](https://github.com/npm/cli/commit/fee9b6686892a1c7f976c36ddd5d89b70c416817) `npm-registry-fetch@14.0.3`
-* [`e940917`](https://github.com/npm/cli/commit/e940917befcdaf44ee7e24d31b540f4de8507734) `cacache@17.0.3`
-* [`875bd56`](https://github.com/npm/cli/commit/875bd56c33ca5eef80c2a50a11808445f2a39a2a) `npm-package-arg@10.1.0`
-* [`280b7a4`](https://github.com/npm/cli/commit/280b7a445e4a83d70980cf3c436745a1faa50c67) [#5927](https://github.com/npm/cli/pull/5927) `npm-packlist@7.0.3`
-* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v6.1.5): `@npmcli/arborist@6.1.5`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v7.0.1): `libnpmaccess@7.0.1`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v5.0.6): `libnpmdiff@5.0.6`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v5.0.6): `libnpmexec@5.0.6`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v4.0.6): `libnpmfund@4.0.6`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v9.0.1): `libnpmhook@9.0.1`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v5.0.1): `libnpmorg@5.0.1`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v5.0.6): `libnpmpack@5.0.6`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v7.0.6): `libnpmpublish@7.0.6`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v6.0.1): `libnpmsearch@6.0.1`
-* [Workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v5.0.1): `libnpmteam@5.0.1`
-
-## [9.1.3](https://github.com/npm/cli/compare/v9.1.2...v9.1.3) (2022-11-30)
+* [`1c1adae`](https://github.com/npm/cli/commit/1c1adaeeb59401db32d3d1ac4074654c87cd6f25) [#7636](https://github.com/npm/cli/pull/7636) `npm-pick-manifest@9.1.0`
+* [`5e4fa18`](https://github.com/npm/cli/commit/5e4fa18dc02cd6b99df51e220661d82b2db12c3d) [#7636](https://github.com/npm/cli/pull/7636) `socks-proxy-agent@8.0.4`
+* [`d8fa116`](https://github.com/npm/cli/commit/d8fa116f142044d3774439cf64dd51e8acb5bcff) [#7636](https://github.com/npm/cli/pull/7636) `https-proxy-agent@7.0.5`
+* [`76dab91`](https://github.com/npm/cli/commit/76dab917eb22f5079cbb8bc052ec7c5bb07e9389) [#7636](https://github.com/npm/cli/pull/7636) `normalize-package-data@6.0.2`
+* [`094c4ea`](https://github.com/npm/cli/commit/094c4ea17398d092a8b2c631564209c10183d417) [#7636](https://github.com/npm/cli/pull/7636) `minimatch@9.0.5`
+* [`1c8d41d`](https://github.com/npm/cli/commit/1c8d41ddafc5810511d54fd227ce964c503464e0) [#7636](https://github.com/npm/cli/pull/7636) `@npmcli/git@5.0.8`
+* [`e5451e1`](https://github.com/npm/cli/commit/e5451e1e91d7a71546f21daca604fb956c1cac3f) [#7605](https://github.com/npm/cli/pull/7605) `jackspeak@3.4.0`
+* [`7b584d3`](https://github.com/npm/cli/commit/7b584d3b60ff118fd04f3ee5693816d9e948ff1b) [#7605](https://github.com/npm/cli/pull/7605) `foreground-child@3.2.1`
+* [`941d0d7`](https://github.com/npm/cli/commit/941d0d7b596ec4cc2d21f86fb03876e2c6360987) [#7605](https://github.com/npm/cli/pull/7605) `debug@4.3.5`
+* [`8b8ce7a`](https://github.com/npm/cli/commit/8b8ce7a23543a4d28c850fa27cd411de68a74cfd) [#7605](https://github.com/npm/cli/pull/7605) `glob@10.4.2`
+* [`4646768`](https://github.com/npm/cli/commit/46467680d0e75c95406e46b5e2f754d2b7df1f7b) [#7605](https://github.com/npm/cli/pull/7605) `npm-registry-fetch@17.1.0`
+* [`6f0d7ce`](https://github.com/npm/cli/commit/6f0d7cec260993d269294e05e18eacf23935b78a) [#7605](https://github.com/npm/cli/pull/7605) `@npmcli/redact@2.0.1`
+* [`29204c8`](https://github.com/npm/cli/commit/29204c8ad4be0a1ffbca6f40393b00278bb34b3e) [#7605](https://github.com/npm/cli/pull/7605) `@npmcli/package-json@5.2.0`
+* [`04d6910`](https://github.com/npm/cli/commit/04d6910e474003762b4606837960b4eb10d7bcd9) [#7574](https://github.com/npm/cli/pull/7574) `@npmcli/package-json@5.1.1`
+* [`4ef4830`](https://github.com/npm/cli/commit/4ef4830dd792c2f23d3ffc7a10f797fc4ac8e5cb) [#7574](https://github.com/npm/cli/pull/7574) remove read-package-json-fast
+## [10.8.2](https://github.com/npm/cli/compare/v10.8.1...v10.8.2) (2024-07-09)
### Bug Fixes
-* [`ffbdea2`](https://github.com/npm/cli/commit/ffbdea286a08eeaf40ab83eea5bfe0602dc6bbcd) [#5894](https://github.com/npm/cli/pull/5894) npm pack filename on scoped packages (#5894) (@HenryNguyen5)
-* [`c26d708`](https://github.com/npm/cli/commit/c26d708428a96da530092759b5ff6d67c7282348) [#5884](https://github.com/npm/cli/pull/5884) validate username at get-identity (#5884) (@sosoba, @nlf)
+* [`3101a40`](https://github.com/npm/cli/commit/3101a4045bd7030f232d6cb2ae61339214968321) [#7631](https://github.com/npm/cli/pull/7631) limit concurrent open files during 'npm cache verify' (#7631) (@oikumene)
+* [`2273183`](https://github.com/npm/cli/commit/22731831e22011e32fa0ca12178e242c2ee2b33d) [#7595](https://github.com/npm/cli/pull/7595) outdated: fixed wanted range for alias with version range (#7595) (@milaninfy)
+* [`15be6dd`](https://github.com/npm/cli/commit/15be6dd33bfab8bdfaf8c3dece435d7139c1bf6d) [#7574](https://github.com/npm/cli/pull/7574) don't try parsing workspaces if none exist (@wraithgar)
### Documentation
-* [`ea948dc`](https://github.com/npm/cli/commit/ea948dceac5cfeef437c97874ab26c3275e75766) [#5881](https://github.com/npm/cli/pull/5881) update description of npm exec (#5881) (@styfle, @wraithgar)
-* [`40f2c21`](https://github.com/npm/cli/commit/40f2c213d75a252665311b4f8775d297390aeb70) [#5865](https://github.com/npm/cli/pull/5865) ci-info url (#5865) (@wraithgar)
-* [`681a45b`](https://github.com/npm/cli/commit/681a45bb48acd57aa64cb3241ea4915f5a12e029) [#5875](https://github.com/npm/cli/pull/5875) run the command for directory workspaces (#5875) (@1aron)
-* [`681a45b`](https://github.com/npm/cli/commit/681a45bb48acd57aa64cb3241ea4915f5a12e029) [#5875](https://github.com/npm/cli/pull/5875) add workspace directory example (#5875) (@1aron)
+* [`ac937d4`](https://github.com/npm/cli/commit/ac937d4f29b27fb877f79f33034ee1144c7202c4) [#7616](https://github.com/npm/cli/pull/7616) install: add save-peer flag (#7616) (@drew4237)
+* [`55639ef`](https://github.com/npm/cli/commit/55639efd2d3094ca0931ddf9276c93f6880cd6e6) [#7615](https://github.com/npm/cli/pull/7615) use git+https in package.com url examples (#7615) (@MikeMcC399)
+* [`93883bb`](https://github.com/npm/cli/commit/93883bb6459208a916584cad8c6c72a315cf32af) [#7582](https://github.com/npm/cli/pull/7582) Improve manpage section for `package.json` `funding` properties (#7582) (@kemitchell)
+* [`92e71e6`](https://github.com/npm/cli/commit/92e71e6b0c7889e243e6b54ef8b4eb9656de95f8) [#7576](https://github.com/npm/cli/pull/7576) fix links to community discussions (#7576) (@leobalter)
### Dependencies
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.1.3...arborist-v6.1.4): `@npmcli/arborist@6.1.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.4...libnpmdiff-v5.0.5): `libnpmdiff@5.0.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.4...libnpmexec-v5.0.5): `libnpmexec@5.0.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.4...libnpmfund-v4.0.5): `libnpmfund@4.0.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.4...libnpmpack-v5.0.5): `libnpmpack@5.0.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.4...libnpmpublish-v7.0.5): `libnpmpublish@7.0.5`
+* [`1c1adae`](https://github.com/npm/cli/commit/1c1adaeeb59401db32d3d1ac4074654c87cd6f25) [#7636](https://github.com/npm/cli/pull/7636) `npm-pick-manifest@9.1.0`
+* [`5e4fa18`](https://github.com/npm/cli/commit/5e4fa18dc02cd6b99df51e220661d82b2db12c3d) [#7636](https://github.com/npm/cli/pull/7636) `socks-proxy-agent@8.0.4`
+* [`d8fa116`](https://github.com/npm/cli/commit/d8fa116f142044d3774439cf64dd51e8acb5bcff) [#7636](https://github.com/npm/cli/pull/7636) `https-proxy-agent@7.0.5`
+* [`76dab91`](https://github.com/npm/cli/commit/76dab917eb22f5079cbb8bc052ec7c5bb07e9389) [#7636](https://github.com/npm/cli/pull/7636) `normalize-package-data@6.0.2`
+* [`094c4ea`](https://github.com/npm/cli/commit/094c4ea17398d092a8b2c631564209c10183d417) [#7636](https://github.com/npm/cli/pull/7636) `minimatch@9.0.5`
+* [`1c8d41d`](https://github.com/npm/cli/commit/1c8d41ddafc5810511d54fd227ce964c503464e0) [#7636](https://github.com/npm/cli/pull/7636) `@npmcli/git@5.0.8`
+* [`e5451e1`](https://github.com/npm/cli/commit/e5451e1e91d7a71546f21daca604fb956c1cac3f) [#7605](https://github.com/npm/cli/pull/7605) `jackspeak@3.4.0`
+* [`7b584d3`](https://github.com/npm/cli/commit/7b584d3b60ff118fd04f3ee5693816d9e948ff1b) [#7605](https://github.com/npm/cli/pull/7605) `foreground-child@3.2.1`
+* [`941d0d7`](https://github.com/npm/cli/commit/941d0d7b596ec4cc2d21f86fb03876e2c6360987) [#7605](https://github.com/npm/cli/pull/7605) `debug@4.3.5`
+* [`8b8ce7a`](https://github.com/npm/cli/commit/8b8ce7a23543a4d28c850fa27cd411de68a74cfd) [#7605](https://github.com/npm/cli/pull/7605) `glob@10.4.2`
+* [`4646768`](https://github.com/npm/cli/commit/46467680d0e75c95406e46b5e2f754d2b7df1f7b) [#7605](https://github.com/npm/cli/pull/7605) `npm-registry-fetch@17.1.0`
+* [`6f0d7ce`](https://github.com/npm/cli/commit/6f0d7cec260993d269294e05e18eacf23935b78a) [#7605](https://github.com/npm/cli/pull/7605) `@npmcli/redact@2.0.1`
+* [`29204c8`](https://github.com/npm/cli/commit/29204c8ad4be0a1ffbca6f40393b00278bb34b3e) [#7605](https://github.com/npm/cli/pull/7605) `@npmcli/package-json@5.2.0`
+* [`04d6910`](https://github.com/npm/cli/commit/04d6910e474003762b4606837960b4eb10d7bcd9) [#7574](https://github.com/npm/cli/pull/7574) `@npmcli/package-json@5.1.1`
+* [`4ef4830`](https://github.com/npm/cli/commit/4ef4830dd792c2f23d3ffc7a10f797fc4ac8e5cb) [#7574](https://github.com/npm/cli/pull/7574) remove read-package-json-fast
+
+### Chores
+
+* [`2490b49`](https://github.com/npm/cli/commit/2490b492cc8d9a351841d2c643b13a47a3254f41) [#7621](https://github.com/npm/cli/pull/7621) remove .github/workflows/benchmark.yml (#7621) (@wraithgar)
+* [`3b8b111`](https://github.com/npm/cli/commit/3b8b11161ee2f88817dcc19b4770040d5bc73261) [#7605](https://github.com/npm/cli/pull/7605) update devDependencies in lockfile (@wraithgar)
+
+### Chores
-## [9.1.2](https://github.com/npm/cli/compare/v9.1.1...v9.1.2) (2022-11-16)
+* [`2490b49`](https://github.com/npm/cli/commit/2490b492cc8d9a351841d2c643b13a47a3254f41) [#7621](https://github.com/npm/cli/pull/7621) remove .github/workflows/benchmark.yml (#7621) (@wraithgar)
+* [`3b8b111`](https://github.com/npm/cli/commit/3b8b11161ee2f88817dcc19b4770040d5bc73261) [#7605](https://github.com/npm/cli/pull/7605) update devDependencies in lockfile (@wraithgar)
+
+## [10.8.1](https://github.com/npm/cli/compare/v10.8.0...v10.8.1) (2024-05-29)
### Bug Fixes
-* [`d9654cf`](https://github.com/npm/cli/commit/d9654cffd7024ec2d068147868978fc994d696e4) [#5861](https://github.com/npm/cli/pull/5861) remove unwanted package.json entries (#5861) (@wraithgar)
+* [`6b55646`](https://github.com/npm/cli/commit/6b556468f9d6ed62c681954bfe6ad012315e3b53) [#7569](https://github.com/npm/cli/pull/7569) exec: look in workspace and root for bin entries (#7569) (@wraithgar)
+* [`e4c7a41`](https://github.com/npm/cli/commit/e4c7a410f590e5c9c81f6410f600181a4deef005) [#7564](https://github.com/npm/cli/pull/7564) publish: skip workspace packages marked private on publish (#7564) (@milaninfy)
+* [`8f94ae8`](https://github.com/npm/cli/commit/8f94ae8c4d36cd397c53e8dc9f54d4332bf1b847) [#7556](https://github.com/npm/cli/pull/7556) utils/tar: index access while match is null (#7555) (#7556) (@NormanPerrin)
+* [`2d1d8d0`](https://github.com/npm/cli/commit/2d1d8d0ef18a10ac7938380884745f1d3c3cb078) [#7559](https://github.com/npm/cli/pull/7559) adds `node:` specifier to all native node modules (#7559) (@reggi)
+* [`7d89b55`](https://github.com/npm/cli/commit/7d89b55341160459e0fcd3374c3720d758b16339) [#7490](https://github.com/npm/cli/pull/7490) ci: rm workspace node_modules (#7490) (@reggi)
+* [`9122fb6`](https://github.com/npm/cli/commit/9122fb65ac05d793a69f4fdcbd03b59595adf937) [#7516](https://github.com/npm/cli/pull/7516) cache: add both full and minified packument to cache (#7516) (@milaninfy)
+* [`9e6686b`](https://github.com/npm/cli/commit/9e6686bb965fa6843ea483cb802f14282eabf3e5) [#7545](https://github.com/npm/cli/pull/7545) send proper otp token on web auth (#7545) (@wraithgar)
+* [`b1db070`](https://github.com/npm/cli/commit/b1db070cb4a497fd91fd61cf197d28b5d47274bb) [#7534](https://github.com/npm/cli/pull/7534) refactor: use output.buffer and set explicit json mode in query (#7534) (@lukekarrys)
+* [`53cda32`](https://github.com/npm/cli/commit/53cda32aa35f3a592fb0ddf37e43c028f93ef613) [#7542](https://github.com/npm/cli/pull/7542) refactor ls to use output.buffer for json (#7542) (@lukekarrys)
+* [`61d5771`](https://github.com/npm/cli/commit/61d57719e6443978c5e9a91ca7a63397be3065fa) [#7541](https://github.com/npm/cli/pull/7541) remove json.stringify from all commands (#7541) (@lukekarrys)
+* [`4dfc7d2`](https://github.com/npm/cli/commit/4dfc7d20b58eaa3d231ef5dc86b9802e1c0d0e68) [#7540](https://github.com/npm/cli/pull/7540) pass strings to JSON.stringify in --json mode (#7540) (@lukekarrys)
+* [`3cefdf6`](https://github.com/npm/cli/commit/3cefdf6eaab5bfb4371149f674dc95e9b9c54853) [#7538](https://github.com/npm/cli/pull/7538) outdated: return array for outdated deps from multiple workspaces (@lukekarrys)
+* [`ef4c975`](https://github.com/npm/cli/commit/ef4c975f2784f70ab86a3e7e6e40c99279e5a3e3) [#7508](https://github.com/npm/cli/pull/7508) view: dont immediately exit on first workspace 404 (#7508) (@lukekarrys)
+
+### Documentation
+
+* [`fd6479f`](https://github.com/npm/cli/commit/fd6479f85b9cf14a23cb4f9a049e0ea68632d8e9) [#7560](https://github.com/npm/cli/pull/7560) update publish docs: dist-tag + publish case (#7560) (@davidlj95)
### Dependencies
-* [`a351685`](https://github.com/npm/cli/commit/a351685c4951b1d9e2ba86bc99e3706688813438) [#5858](https://github.com/npm/cli/pull/5858) move from @npmcli/ci-detect to ci-info (#5858)
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.1.2...arborist-v6.1.3): `@npmcli/arborist@6.1.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.3...libnpmdiff-v5.0.4): `libnpmdiff@5.0.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.3...libnpmexec-v5.0.4): `libnpmexec@5.0.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.3...libnpmfund-v4.0.4): `libnpmfund@4.0.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.3...libnpmpack-v5.0.4): `libnpmpack@5.0.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.3...libnpmpublish-v7.0.4): `libnpmpublish@7.0.4`
+* [`e3f0fd4`](https://github.com/npm/cli/commit/e3f0fd45d97569a0d6c2a67e179e4a5f13eb9442) [#7568](https://github.com/npm/cli/pull/7568) `@npmcli/package-json@5.1.1`
+* [`447a8d7`](https://github.com/npm/cli/commit/447a8d7f3124bf9f69453098ce31b56bad20efd7) [#7566](https://github.com/npm/cli/pull/7566) `spdx-license-ids@3.0.18`
+* [`83fed2e`](https://github.com/npm/cli/commit/83fed2e8ec253051d60babdf2b10e2f2c64df318) [#7566](https://github.com/npm/cli/pull/7566) `sigstore@2.3.1`
+* [`41291ba`](https://github.com/npm/cli/commit/41291bab34d3f7351c94e97e89ac1f85b0950188) [#7566](https://github.com/npm/cli/pull/7566) `@sigstore/tuf@2.3.4`
+* [`18b42a4`](https://github.com/npm/cli/commit/18b42a46abdbc26a28f2e16ea74a1812d8de4c4d) [#7566](https://github.com/npm/cli/pull/7566) `glob@10.4.1`
+* [`5c6759d`](https://github.com/npm/cli/commit/5c6759decb00dfbe377c0ad41f85a3222f79a36a) [#7566](https://github.com/npm/cli/pull/7566) `postcss-selector-parser@6.1.0`
+* [`2508a83`](https://github.com/npm/cli/commit/2508a83e6d2936d15c210b9dee41098131ed6aff) [#7566](https://github.com/npm/cli/pull/7566) `is-cidr@5.1.0`
+* [`6278fe4`](https://github.com/npm/cli/commit/6278fe430bb7c3ecfae730f9ea084501e57c0e2c) [#7566](https://github.com/npm/cli/pull/7566) `ini@4.1.3`
+## [10.8.1](https://github.com/npm/cli/compare/v10.8.0...v10.8.1) (2024-05-29)
-## [9.1.1](https://github.com/npm/cli/compare/v9.1.0...v9.1.1) (2022-11-09)
+### Bug Fixes
+
+* [`6b55646`](https://github.com/npm/cli/commit/6b556468f9d6ed62c681954bfe6ad012315e3b53) [#7569](https://github.com/npm/cli/pull/7569) exec: look in workspace and root for bin entries (#7569) (@wraithgar)
+* [`e4c7a41`](https://github.com/npm/cli/commit/e4c7a410f590e5c9c81f6410f600181a4deef005) [#7564](https://github.com/npm/cli/pull/7564) publish: skip workspace packages marked private on publish (#7564) (@milaninfy)
+* [`8f94ae8`](https://github.com/npm/cli/commit/8f94ae8c4d36cd397c53e8dc9f54d4332bf1b847) [#7556](https://github.com/npm/cli/pull/7556) utils/tar: index access while match is null (#7555) (#7556) (@NormanPerrin)
+* [`2d1d8d0`](https://github.com/npm/cli/commit/2d1d8d0ef18a10ac7938380884745f1d3c3cb078) [#7559](https://github.com/npm/cli/pull/7559) adds `node:` specifier to all native node modules (#7559) (@reggi)
+* [`7d89b55`](https://github.com/npm/cli/commit/7d89b55341160459e0fcd3374c3720d758b16339) [#7490](https://github.com/npm/cli/pull/7490) ci: rm workspace node_modules (#7490) (@reggi)
+* [`9122fb6`](https://github.com/npm/cli/commit/9122fb65ac05d793a69f4fdcbd03b59595adf937) [#7516](https://github.com/npm/cli/pull/7516) cache: add both full and minified packument to cache (#7516) (@milaninfy)
+* [`9e6686b`](https://github.com/npm/cli/commit/9e6686bb965fa6843ea483cb802f14282eabf3e5) [#7545](https://github.com/npm/cli/pull/7545) send proper otp token on web auth (#7545) (@wraithgar)
+* [`b1db070`](https://github.com/npm/cli/commit/b1db070cb4a497fd91fd61cf197d28b5d47274bb) [#7534](https://github.com/npm/cli/pull/7534) refactor: use output.buffer and set explicit json mode in query (#7534) (@lukekarrys)
+* [`53cda32`](https://github.com/npm/cli/commit/53cda32aa35f3a592fb0ddf37e43c028f93ef613) [#7542](https://github.com/npm/cli/pull/7542) refactor ls to use output.buffer for json (#7542) (@lukekarrys)
+* [`61d5771`](https://github.com/npm/cli/commit/61d57719e6443978c5e9a91ca7a63397be3065fa) [#7541](https://github.com/npm/cli/pull/7541) remove json.stringify from all commands (#7541) (@lukekarrys)
+* [`4dfc7d2`](https://github.com/npm/cli/commit/4dfc7d20b58eaa3d231ef5dc86b9802e1c0d0e68) [#7540](https://github.com/npm/cli/pull/7540) pass strings to JSON.stringify in --json mode (#7540) (@lukekarrys)
+* [`3cefdf6`](https://github.com/npm/cli/commit/3cefdf6eaab5bfb4371149f674dc95e9b9c54853) [#7538](https://github.com/npm/cli/pull/7538) outdated: return array for outdated deps from multiple workspaces (@lukekarrys)
+* [`ef4c975`](https://github.com/npm/cli/commit/ef4c975f2784f70ab86a3e7e6e40c99279e5a3e3) [#7508](https://github.com/npm/cli/pull/7508) view: dont immediately exit on first workspace 404 (#7508) (@lukekarrys)
### Documentation
-* [`1bff064`](https://github.com/npm/cli/commit/1bff0640ccb8414e2d416a5cf9d64e9ff03c6403) [#5819](https://github.com/npm/cli/pull/5819) config: document `npm config fix` (#5819) (@wraithgar)
+* [`fd6479f`](https://github.com/npm/cli/commit/fd6479f85b9cf14a23cb4f9a049e0ea68632d8e9) [#7560](https://github.com/npm/cli/pull/7560) update publish docs: dist-tag + publish case (#7560) (@davidlj95)
### Dependencies
-* [`335c7e4`](https://github.com/npm/cli/commit/335c7e4348f5505fad33b8a78348a02a82b91426) [#5813](https://github.com/npm/cli/pull/5813) `cacache@17.0.2`
-* [`878ddfb`](https://github.com/npm/cli/commit/878ddfb5b68c03bdcd7d7da8dae92c4947942801) `@npmcli/fs@3.1.0`
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.1.1...arborist-v6.1.2): `@npmcli/arborist@6.1.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.2...libnpmdiff-v5.0.3): `libnpmdiff@5.0.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.2...libnpmexec-v5.0.3): `libnpmexec@5.0.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.2...libnpmfund-v4.0.3): `libnpmfund@4.0.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.2...libnpmpack-v5.0.3): `libnpmpack@5.0.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.2...libnpmpublish-v7.0.3): `libnpmpublish@7.0.3`
+* [`e3f0fd4`](https://github.com/npm/cli/commit/e3f0fd45d97569a0d6c2a67e179e4a5f13eb9442) [#7568](https://github.com/npm/cli/pull/7568) `@npmcli/package-json@5.1.1`
+* [`447a8d7`](https://github.com/npm/cli/commit/447a8d7f3124bf9f69453098ce31b56bad20efd7) [#7566](https://github.com/npm/cli/pull/7566) `spdx-license-ids@3.0.18`
+* [`83fed2e`](https://github.com/npm/cli/commit/83fed2e8ec253051d60babdf2b10e2f2c64df318) [#7566](https://github.com/npm/cli/pull/7566) `sigstore@2.3.1`
+* [`41291ba`](https://github.com/npm/cli/commit/41291bab34d3f7351c94e97e89ac1f85b0950188) [#7566](https://github.com/npm/cli/pull/7566) `@sigstore/tuf@2.3.4`
+* [`18b42a4`](https://github.com/npm/cli/commit/18b42a46abdbc26a28f2e16ea74a1812d8de4c4d) [#7566](https://github.com/npm/cli/pull/7566) `glob@10.4.1`
+* [`5c6759d`](https://github.com/npm/cli/commit/5c6759decb00dfbe377c0ad41f85a3222f79a36a) [#7566](https://github.com/npm/cli/pull/7566) `postcss-selector-parser@6.1.0`
+* [`2508a83`](https://github.com/npm/cli/commit/2508a83e6d2936d15c210b9dee41098131ed6aff) [#7566](https://github.com/npm/cli/pull/7566) `is-cidr@5.1.0`
+* [`6278fe4`](https://github.com/npm/cli/commit/6278fe430bb7c3ecfae730f9ea084501e57c0e2c) [#7566](https://github.com/npm/cli/pull/7566) `ini@4.1.3`
+
+### Chores
-## [9.1.0](https://github.com/npm/cli/compare/v9.0.1...v9.1.0) (2022-11-02)
+* [`2d84091`](https://github.com/npm/cli/commit/2d840917bbeff6a1eb5accc10a5ec1c2280b9470) [#7568](https://github.com/npm/cli/pull/7568) fix snapshots for updated @npmcli/package-json (@wraithgar)
+* [`6574dc9`](https://github.com/npm/cli/commit/6574dc98705cf0a88ea6fe41a57d0d0adfcea439) [#7566](https://github.com/npm/cli/pull/7566) dev dependency updates (@wraithgar)
+
+### Chores
+
+* [`2d84091`](https://github.com/npm/cli/commit/2d840917bbeff6a1eb5accc10a5ec1c2280b9470) [#7568](https://github.com/npm/cli/pull/7568) fix snapshots for updated @npmcli/package-json (@wraithgar)
+* [`6574dc9`](https://github.com/npm/cli/commit/6574dc98705cf0a88ea6fe41a57d0d0adfcea439) [#7566](https://github.com/npm/cli/pull/7566) dev dependency updates (@wraithgar)
+
+## [10.8.0](https://github.com/npm/cli/compare/v10.7.0...v10.8.0) (2024-05-15)
### Features
-* [`706b3d3`](https://github.com/npm/cli/commit/706b3d3f227de43a095263926d2eef2b4e4cf2a9) [#5779](https://github.com/npm/cli/pull/5779) set --no-audit when installing outside of a project (like --global) (@fritzy)
+* [`1e375c1`](https://github.com/npm/cli/commit/1e375c1f8d16ac114b615c2a2f374099345b0b69) [#7442](https://github.com/npm/cli/pull/7442) create exit handler class (#7442) (@lukekarrys)
### Bug Fixes
-* [`1f5382d`](https://github.com/npm/cli/commit/1f5382dada181cda41f1504974de1e69a6c1ad7f) [#5789](https://github.com/npm/cli/pull/5789) don't set `stdioString` for any spawn/run-script calls (@lukekarrys)
-* [`8fd614a`](https://github.com/npm/cli/commit/8fd614af5d6de970a6bbcffc538564d2a809411a) use promiseSpawn.open instead of opener (@nlf)
-* [`41843ad`](https://github.com/npm/cli/commit/41843ad8a20bd20aacad2bb37fe473f2e76d5306) use an absolute path to notepad.exe by default, correct docs (@nlf)
-* [`0c5834e`](https://github.com/npm/cli/commit/0c5834ed635833ef49fe10cc888025a5debebe21) [#5758](https://github.com/npm/cli/pull/5758) use hosted-git-info to parse registry urls (#5758) (@lukekarrys)
+* [`d5c3289`](https://github.com/npm/cli/commit/d5c32899b6ffc6254c96f62a06a854bb2c2b95c5) [#7513](https://github.com/npm/cli/pull/7513) refactor: use output buffer and error for more commands (#7513) (@lukekarrys)
+* [`12f103c`](https://github.com/npm/cli/commit/12f103ce55ed21c9c04f87a101fb64d55ac02d3c) [#7533](https://github.com/npm/cli/pull/7533) add first param titles to logs where missing (#7533) (@lukekarrys)
+* [`badeac2`](https://github.com/npm/cli/commit/badeac28faf9fde5f8c05d235219be840999a646) [#7521](https://github.com/npm/cli/pull/7521) config: use redact on config output (#7521) (@lukekarrys)
+* [`76aef74`](https://github.com/npm/cli/commit/76aef7423ab7e47a5f9b73849b47ba029730d75a) [#7520](https://github.com/npm/cli/pull/7520) view: refactor exec and execWorkspaces to call same methods (#7520) (@lukekarrys)
+* [`b54cdb8`](https://github.com/npm/cli/commit/b54cdb836d6c4146a1aa8e1a5fe9655ba2ed0a6a) [#7515](https://github.com/npm/cli/pull/7515) refactor: create new error output primitives (#7515) (@lukekarrys)
+* [`e40454c`](https://github.com/npm/cli/commit/e40454c35f75b5b814e7b5167c8a8b05664246f3) [#7506](https://github.com/npm/cli/pull/7506) view: dont unwrap arrays in json mode (#7506) (@lukekarrys)
+* [`6f64148`](https://github.com/npm/cli/commit/6f6414829fd82704233fbb56375b167495a0aaf5) require stdout to be a TTY for progress (#7507) (@lukekarrys)
+* [`db62910`](https://github.com/npm/cli/commit/db6291036f076bf0251b74a504bd5b693c29c4bb) [#7504](https://github.com/npm/cli/pull/7504) config: be more aggressive about hiding protected values (#7504) (@wraithgar)
+* [`6d456bb`](https://github.com/npm/cli/commit/6d456bba46d6afe1e2cf9464908e6ad99375cb7c) [#7497](https://github.com/npm/cli/pull/7497) dont write log file for completion commands (#7497) (@lukekarrys)
+* [`722c0fa`](https://github.com/npm/cli/commit/722c0faa387ae6e35886f08eefb238c03ae85db1) [#7463](https://github.com/npm/cli/pull/7463) limit packument cache size based on heap size (@wraithgar)
+* [`ca1a68d`](https://github.com/npm/cli/commit/ca1a68d14d184f2535720ed4715f388965ade21a) [#7474](https://github.com/npm/cli/pull/7474) log if `npm deprecate` does not match any version (#7474) (@mbtools)
+* [`261ea19`](https://github.com/npm/cli/commit/261ea193c96aaa73ce5630e21c6a31de9f19ef5b) [#7457](https://github.com/npm/cli/pull/7457) run input.start around help and opening urls (@lukekarrys)
+* [`4ab6cf4`](https://github.com/npm/cli/commit/4ab6cf4a9e7fca64f95422f4099b33cdbb9efa25) [#7459](https://github.com/npm/cli/pull/7459) publish: validate dist-tag (#7459) (@reggi)
### Documentation
-* [`ce6745c`](https://github.com/npm/cli/commit/ce6745c806d721f5e3c455a65fd44bfe03e9d2ae) [#5763](https://github.com/npm/cli/pull/5763) fixed some typos (#5763) (@AndrewDawes)
+* [`b2ce025`](https://github.com/npm/cli/commit/b2ce0250e32abaaaf60d895cda210914bdf903ea) [#7518](https://github.com/npm/cli/pull/7518) suggest correct bin entry (#7518) (@Santoshraj2)
+* [`bdd2aae`](https://github.com/npm/cli/commit/bdd2aae12b213815b5d800902b0a9722b263a03c) [#7502](https://github.com/npm/cli/pull/7502) remove obsolete removal using make uninstall (#7502) (@avinal)
+* [`c3d2819`](https://github.com/npm/cli/commit/c3d281984ed363ed03d6a7abe083f301c1dd2c88) [#7496](https://github.com/npm/cli/pull/7496) npm help json/global command on windows (#7496) (@klm-turing, @lukekarrys)
+* [`268303c`](https://github.com/npm/cli/commit/268303c3b40551ae558f201841d3d5977769a7c9) [#7479](https://github.com/npm/cli/pull/7479) add npm version to every local help output (#7479) (@klm-turing)
+* [`e39d422`](https://github.com/npm/cli/commit/e39d422d69c2275ed1e3a606447a9b9d87bdca4f) [#7473](https://github.com/npm/cli/pull/7473) suggest "npm repo" for showing the repo of a package (#7473) (@full-stop)
+* [`f6fff32`](https://github.com/npm/cli/commit/f6fff3295d19b63003cf49eb1c4805f453c5390a) [#7433](https://github.com/npm/cli/pull/7433) clarify what peerDependenciesMeta does (#7433) (@xuhdev, @wraithgar)
### Dependencies
-* [`b89c19e`](https://github.com/npm/cli/commit/b89c19e9a7674b0bd9d336c14dee1bf381843648) [#5795](https://github.com/npm/cli/pull/5795) `cli-table3@0.6.3`
-* [`6b6dfca`](https://github.com/npm/cli/commit/6b6dfca191cb8f7871f755b926fd5ae223ba697a) `fastest-levenshtein@1.0.16`
-* [`9972ed1`](https://github.com/npm/cli/commit/9972ed1423d7a4f7ca03a34f5aa69321b81850fd) `@npmcli/ci-detect@3.0.1`
-* [`024e612`](https://github.com/npm/cli/commit/024e612f55fc9906b49065dbabbee8b8261eb4eb) `abbrev@2.0.0`
-* [`66f9bcd`](https://github.com/npm/cli/commit/66f9bcd10b8d8cb635593c526727056581c7955d) `nopt@7.0.0`
-* [`5730d17`](https://github.com/npm/cli/commit/5730d17198e066077cb3ea6f78753746afc13603) `tar@6.1.12`
-* [`2fef570`](https://github.com/npm/cli/commit/2fef570caf00bd92a3a4cf0b2bc4ce56fd8bd594) `node-gyp@9.3.0`
-* [`abfb28b`](https://github.com/npm/cli/commit/abfb28b249183b8c033f8e7acc1546150cdac137) `@npmcli/run-script@6.0.0`
-* [`205e2fd`](https://github.com/npm/cli/commit/205e2fdde91f4f21d92ccf0bf9e1ab9ab3053167) `pacote@15.0.6`
-* [`ac25863`](https://github.com/npm/cli/commit/ac25863a33b75620ac9edf4057bfb9409028636a) remove opener, `@npmcli/promise-spawn@6.0.1`, `@npmcli/run-script@5.1.1`, `@npmcli/git@4.0.3`, `pacote@15.0.5`, `which@3.0.0`
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.1.0...arborist-v6.1.1): `@npmcli/arborist@6.1.1`
-* [Workspace](https://github.com/npm/cli/compare/config-v6.0.1...config-v6.1.0): `@npmcli/config@6.1.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.1...libnpmdiff-v5.0.2): `libnpmdiff@5.0.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.1...libnpmexec-v5.0.2): `libnpmexec@5.0.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.1...libnpmfund-v4.0.2): `libnpmfund@4.0.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.1...libnpmpack-v5.0.2): `libnpmpack@5.0.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.1...libnpmpublish-v7.0.2): `libnpmpublish@7.0.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmversion-v4.0.0...libnpmversion-v4.0.1): `libnpmversion@4.0.1`
-
-## [9.0.1](https://github.com/npm/cli/compare/v9.0.0...v9.0.1) (2022-10-26)
+* [`1cdc662`](https://github.com/npm/cli/commit/1cdc662bd2835531fbe790011a00f88ddb5f6868) [#7522](https://github.com/npm/cli/pull/7522) `@tufjs/repo-mock@2.0.1`
+* [`898bcfd`](https://github.com/npm/cli/commit/898bcfda5c5ac192b2cf5f47d0b939794c1b2164) [#7522](https://github.com/npm/cli/pull/7522) `@sigstore/protobuf-specs@0.3.2`
+* [`fec3c94`](https://github.com/npm/cli/commit/fec3c947d7dcc71071a8f527aa5bd81f47015486) [#7522](https://github.com/npm/cli/pull/7522) `path-scurry@1.11.1`
+* [`cb85973`](https://github.com/npm/cli/commit/cb8597316a8d53815835901ae9d5756d4dc481ea) [#7522](https://github.com/npm/cli/pull/7522) `glob@10.3.15`
+* [`e189873`](https://github.com/npm/cli/commit/e18987371399f508cb224e159987b10ddb922bb8) [#7498](https://github.com/npm/cli/pull/7498) `@sigstore/sign@2.3.1`
+* [`c2b28f9`](https://github.com/npm/cli/commit/c2b28f9d6cba12e88f849e5b4a82607e2c218a16) [#7498](https://github.com/npm/cli/pull/7498) `minipass@7.1.1`
+* [`9064ffc`](https://github.com/npm/cli/commit/9064ffc6c85309de2e9e798fdc6caca209f5fa18) [#7498](https://github.com/npm/cli/pull/7498) `@sigstore/tuf@2.3.3`
+* [`fd42986`](https://github.com/npm/cli/commit/fd429866c79cc001979135857c019d7d2873f291) [#7498](https://github.com/npm/cli/pull/7498) `@npmcli/fs@3.1.1`
+* [`4e53e33`](https://github.com/npm/cli/commit/4e53e33757c88ca9c413e3943b17e0cb246e955c) [#7498](https://github.com/npm/cli/pull/7498) `semver@7.6.2`
+* [`f078c82`](https://github.com/npm/cli/commit/f078c8224f6775d53da98f310531524c616e6099) [#7495](https://github.com/npm/cli/pull/7495) `glob@10.3.14`
+* [`58f773c`](https://github.com/npm/cli/commit/58f773c99742ef55ac2a9eca23c27b32800c2cf1) [#7495](https://github.com/npm/cli/pull/7495) `path-scurry@1.11.0`
+* [`ea0b07d`](https://github.com/npm/cli/commit/ea0b07da149767265f11d5d77d2156e2c9f43e63) [#7482](https://github.com/npm/cli/pull/7482) `pacote@18.0.6`
+* [`8d161a4`](https://github.com/npm/cli/commit/8d161a414160dab7a930b1668c3af3ba280e8532) [#7482](https://github.com/npm/cli/pull/7482) `semver@7.6.1`
+* [`5b2317b`](https://github.com/npm/cli/commit/5b2317b472342428c6521d7b0d550d0fcc9bb202) [#7463](https://github.com/npm/cli/pull/7463) add lru-cache
+* [`26fefb8`](https://github.com/npm/cli/commit/26fefb82b3bd812009b8b627e3c19032a931aade) [#7480](https://github.com/npm/cli/pull/7480) `promzard@1.0.2`
+* [`2146e1f`](https://github.com/npm/cli/commit/2146e1f83ae94debecfaf08ef32e319c02223c12) [#7480](https://github.com/npm/cli/pull/7480) `npm-bundled@3.0.1`
+* [`ff6c5d1`](https://github.com/npm/cli/commit/ff6c5d161b52e8961e0c2ebf0467bc1382ef72d2) [#7480](https://github.com/npm/cli/pull/7480) `minipass-fetch@3.0.5`
+* [`419f9b9`](https://github.com/npm/cli/commit/419f9b9d9d6806d56b68d96bd50f7d25274a8f48) [#7480](https://github.com/npm/cli/pull/7480) `cmd-shim@6.0.3`
+* [`dade2c8`](https://github.com/npm/cli/commit/dade2c88d23289d57351d614feaa876d9e1e17f4) [#7480](https://github.com/npm/cli/pull/7480) `minipass@7.1.0`
+* [`18e5312`](https://github.com/npm/cli/commit/18e53129f0f3a19725e377b336336aa85ade3ba5) [#7480](https://github.com/npm/cli/pull/7480) `validate-npm-package-name@5.0.1`
+* [`d440011`](https://github.com/npm/cli/commit/d44001164f66d15daa3fd27da004194478b7c99c) [#7480](https://github.com/npm/cli/pull/7480) `npm-user-validate@2.0.1`
+* [`552113e`](https://github.com/npm/cli/commit/552113e7a663efdcebfcbcc6148b1d51be55596b) [#7480](https://github.com/npm/cli/pull/7480) `ignore-walk@6.0.5`
+* [`7e15b6d`](https://github.com/npm/cli/commit/7e15b6d56abbf47456c12fa2d5688d5d187a0ae7) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/metavuln-calculator@7.1.1`
+* [`8b20f8c`](https://github.com/npm/cli/commit/8b20f8c8ba70e43ad222538fc396dedb071b1680) [#7480](https://github.com/npm/cli/pull/7480) `ssri@10.0.6`
+* [`a9a6dcd`](https://github.com/npm/cli/commit/a9a6dcd4427ec82e491a2cad5672d8183e12180f) [#7480](https://github.com/npm/cli/pull/7480) `pacote@18.0.5`
+* [`e2fdb65`](https://github.com/npm/cli/commit/e2fdb651cda9ec603f009f5713a5a2b489d49e15) [#7480](https://github.com/npm/cli/pull/7480) `npm-pick-manifest@9.0.1`
+* [`310a7a5`](https://github.com/npm/cli/commit/310a7a5583d14da761d38b7421ebb6cee65600b6) [#7480](https://github.com/npm/cli/pull/7480) `normalize-package-data@6.0.1`
+* [`e71f541`](https://github.com/npm/cli/commit/e71f541b020de7940faccffab68d0255c4079e1a) [#7480](https://github.com/npm/cli/pull/7480) `nopt@7.2.1`
+* [`18c3b40`](https://github.com/npm/cli/commit/18c3b4058c7f721ff585de2f2766e53da897e16e) [#7480](https://github.com/npm/cli/pull/7480) `json-parse-even-better-errors@3.0.2`
+* [`4c5bf77`](https://github.com/npm/cli/commit/4c5bf77af6db3b447f9b9abc3b67b211d7bb82b8) [#7480](https://github.com/npm/cli/pull/7480) `init-package-json@6.0.3`
+* [`714e3e1`](https://github.com/npm/cli/commit/714e3e1e1ce014cba71db41c2d6c02d9dd53fcd3) [#7480](https://github.com/npm/cli/pull/7480) `hosted-git-info@7.0.2`
+* [`f94d672`](https://github.com/npm/cli/commit/f94d6726a6ca96cad0da88ea499fa22f35b7c4c0) [#7480](https://github.com/npm/cli/pull/7480) `cacache@18.0.3`
+* [`43331e4`](https://github.com/npm/cli/commit/43331e4d0647c3af4cc2aa3db8b47d797584a6d8) [#7480](https://github.com/npm/cli/pull/7480) `bin-links@4.0.4`
+* [`8234412`](https://github.com/npm/cli/commit/823441219ce63d7863aede8b22b4b1d07021fd22) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/promise-spawn@7.0.2`
+* [`6dfaebb`](https://github.com/npm/cli/commit/6dfaebb8f08acf992ac36faf4db8b650e8e55eae) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/git@5.0.7`
+* [`63ef498`](https://github.com/npm/cli/commit/63ef498bf2916a882a92c0b9fe6de6728584694a) [#7457](https://github.com/npm/cli/pull/7457) `npm-registry-fetch@17.0.1`
+* [`4cbc2d4`](https://github.com/npm/cli/commit/4cbc2d402174933052c7addd6ea55b1ecee202c5) [#7457](https://github.com/npm/cli/pull/7457) `npm-profile@10.0.0`
+
+## [10.8.0](https://github.com/npm/cli/compare/v10.7.0...v10.8.0) (2024-05-15)
+
+### Features
+
+* [`1e375c1`](https://github.com/npm/cli/commit/1e375c1f8d16ac114b615c2a2f374099345b0b69) [#7442](https://github.com/npm/cli/pull/7442) create exit handler class (#7442) (@lukekarrys)
+
+### Bug Fixes
+
+* [`d5c3289`](https://github.com/npm/cli/commit/d5c32899b6ffc6254c96f62a06a854bb2c2b95c5) [#7513](https://github.com/npm/cli/pull/7513) refactor: use output buffer and error for more commands (#7513) (@lukekarrys)
+* [`12f103c`](https://github.com/npm/cli/commit/12f103ce55ed21c9c04f87a101fb64d55ac02d3c) [#7533](https://github.com/npm/cli/pull/7533) add first param titles to logs where missing (#7533) (@lukekarrys)
+* [`badeac2`](https://github.com/npm/cli/commit/badeac28faf9fde5f8c05d235219be840999a646) [#7521](https://github.com/npm/cli/pull/7521) config: use redact on config output (#7521) (@lukekarrys)
+* [`76aef74`](https://github.com/npm/cli/commit/76aef7423ab7e47a5f9b73849b47ba029730d75a) [#7520](https://github.com/npm/cli/pull/7520) view: refactor exec and execWorkspaces to call same methods (#7520) (@lukekarrys)
+* [`b54cdb8`](https://github.com/npm/cli/commit/b54cdb836d6c4146a1aa8e1a5fe9655ba2ed0a6a) [#7515](https://github.com/npm/cli/pull/7515) refactor: create new error output primitives (#7515) (@lukekarrys)
+* [`e40454c`](https://github.com/npm/cli/commit/e40454c35f75b5b814e7b5167c8a8b05664246f3) [#7506](https://github.com/npm/cli/pull/7506) view: dont unwrap arrays in json mode (#7506) (@lukekarrys)
+* [`6f64148`](https://github.com/npm/cli/commit/6f6414829fd82704233fbb56375b167495a0aaf5) require stdout to be a TTY for progress (#7507) (@lukekarrys)
+* [`db62910`](https://github.com/npm/cli/commit/db6291036f076bf0251b74a504bd5b693c29c4bb) [#7504](https://github.com/npm/cli/pull/7504) config: be more aggressive about hiding protected values (#7504) (@wraithgar)
+* [`6d456bb`](https://github.com/npm/cli/commit/6d456bba46d6afe1e2cf9464908e6ad99375cb7c) [#7497](https://github.com/npm/cli/pull/7497) dont write log file for completion commands (#7497) (@lukekarrys)
+* [`722c0fa`](https://github.com/npm/cli/commit/722c0faa387ae6e35886f08eefb238c03ae85db1) [#7463](https://github.com/npm/cli/pull/7463) limit packument cache size based on heap size (@wraithgar)
+* [`ca1a68d`](https://github.com/npm/cli/commit/ca1a68d14d184f2535720ed4715f388965ade21a) [#7474](https://github.com/npm/cli/pull/7474) log if `npm deprecate` does not match any version (#7474) (@mbtools)
+* [`261ea19`](https://github.com/npm/cli/commit/261ea193c96aaa73ce5630e21c6a31de9f19ef5b) [#7457](https://github.com/npm/cli/pull/7457) run input.start around help and openining urls (@lukekarrys)
+* [`4ab6cf4`](https://github.com/npm/cli/commit/4ab6cf4a9e7fca64f95422f4099b33cdbb9efa25) [#7459](https://github.com/npm/cli/pull/7459) publish: validate dist-tag (#7459) (@reggi)
### Documentation
-* [`b5fadd0`](https://github.com/npm/cli/commit/b5fadd0cec392f4bf6d60fa1358f96400be94667) [#5742](https://github.com/npm/cli/pull/5742) Better npx link (#5742) (@mrienstra)
+* [`b2ce025`](https://github.com/npm/cli/commit/b2ce0250e32abaaaf60d895cda210914bdf903ea) [#7518](https://github.com/npm/cli/pull/7518) suggest correct bin entry (#7518) (@Santoshraj2)
+* [`bdd2aae`](https://github.com/npm/cli/commit/bdd2aae12b213815b5d800902b0a9722b263a03c) [#7502](https://github.com/npm/cli/pull/7502) remove obsolete removal using make uninstall (#7502) (@avinal)
+* [`c3d2819`](https://github.com/npm/cli/commit/c3d281984ed363ed03d6a7abe083f301c1dd2c88) [#7496](https://github.com/npm/cli/pull/7496) npm help json/global command on windows (#7496) (@klm-turing, @lukekarrys)
+* [`268303c`](https://github.com/npm/cli/commit/268303c3b40551ae558f201841d3d5977769a7c9) [#7479](https://github.com/npm/cli/pull/7479) add npm version to every local help output (#7479) (@klm-turing)
+* [`e39d422`](https://github.com/npm/cli/commit/e39d422d69c2275ed1e3a606447a9b9d87bdca4f) [#7473](https://github.com/npm/cli/pull/7473) suggest "npm repo" for showing the repo of a package (#7473) (@full-stop)
+* [`f6fff32`](https://github.com/npm/cli/commit/f6fff3295d19b63003cf49eb1c4805f453c5390a) [#7433](https://github.com/npm/cli/pull/7433) clarify what peerDependenciesMeta does (#7433) (@xuhdev, @wraithgar)
### Dependencies
-* [`de6618e`](https://github.com/npm/cli/commit/de6618e93182ba00b4be516db1efb3c51efa17ba) [#5757](https://github.com/npm/cli/pull/5757) `@npmcli/promise-spawn@5.0.0` (#5757)
-* [`5625274`](https://github.com/npm/cli/commit/562527456d3862d871d042fa4ff6e38354e320ea) [#5755](https://github.com/npm/cli/pull/5755) `hosted-git-info@6.1.0` (#5755)
-* [`32bdd68`](https://github.com/npm/cli/commit/32bdd686ccf826050075e770ffddf7401efa79c9) [#5754](https://github.com/npm/cli/pull/5754) `npm-packlist@7.0.2` (#5754)
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0...arborist-v6.1.0): `@npmcli/arborist@6.1.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.0...libnpmdiff-v5.0.1): `libnpmdiff@5.0.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0...libnpmexec-v5.0.1): `libnpmexec@5.0.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0...libnpmfund-v4.0.1): `libnpmfund@4.0.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0...libnpmpack-v5.0.1): `libnpmpack@5.0.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0...libnpmpublish-v7.0.1): `libnpmpublish@7.0.1`
-
-## [9.0.0](https://github.com/npm/cli/compare/v9.0.0-pre.6...v9.0.0) (2022-10-19)
+* [`1cdc662`](https://github.com/npm/cli/commit/1cdc662bd2835531fbe790011a00f88ddb5f6868) [#7522](https://github.com/npm/cli/pull/7522) `@tufjs/repo-mock@2.0.1`
+* [`898bcfd`](https://github.com/npm/cli/commit/898bcfda5c5ac192b2cf5f47d0b939794c1b2164) [#7522](https://github.com/npm/cli/pull/7522) `@sigstore/protobuf-specs@0.3.2`
+* [`fec3c94`](https://github.com/npm/cli/commit/fec3c947d7dcc71071a8f527aa5bd81f47015486) [#7522](https://github.com/npm/cli/pull/7522) `path-scurry@1.11.1`
+* [`cb85973`](https://github.com/npm/cli/commit/cb8597316a8d53815835901ae9d5756d4dc481ea) [#7522](https://github.com/npm/cli/pull/7522) `glob@10.3.15`
+* [`e189873`](https://github.com/npm/cli/commit/e18987371399f508cb224e159987b10ddb922bb8) [#7498](https://github.com/npm/cli/pull/7498) `@sigstore/sign@2.3.1`
+* [`c2b28f9`](https://github.com/npm/cli/commit/c2b28f9d6cba12e88f849e5b4a82607e2c218a16) [#7498](https://github.com/npm/cli/pull/7498) `minipass@7.1.1`
+* [`9064ffc`](https://github.com/npm/cli/commit/9064ffc6c85309de2e9e798fdc6caca209f5fa18) [#7498](https://github.com/npm/cli/pull/7498) `@sigstore/tuf@2.3.3`
+* [`fd42986`](https://github.com/npm/cli/commit/fd429866c79cc001979135857c019d7d2873f291) [#7498](https://github.com/npm/cli/pull/7498) `@npmcli/fs@3.1.1`
+* [`4e53e33`](https://github.com/npm/cli/commit/4e53e33757c88ca9c413e3943b17e0cb246e955c) [#7498](https://github.com/npm/cli/pull/7498) `semver@7.6.2`
+* [`f078c82`](https://github.com/npm/cli/commit/f078c8224f6775d53da98f310531524c616e6099) [#7495](https://github.com/npm/cli/pull/7495) `glob@10.3.14`
+* [`58f773c`](https://github.com/npm/cli/commit/58f773c99742ef55ac2a9eca23c27b32800c2cf1) [#7495](https://github.com/npm/cli/pull/7495) `path-scurry@1.11.0`
+* [`ea0b07d`](https://github.com/npm/cli/commit/ea0b07da149767265f11d5d77d2156e2c9f43e63) [#7482](https://github.com/npm/cli/pull/7482) `pacote@18.0.6`
+* [`8d161a4`](https://github.com/npm/cli/commit/8d161a414160dab7a930b1668c3af3ba280e8532) [#7482](https://github.com/npm/cli/pull/7482) `semver@7.6.1`
+* [`5b2317b`](https://github.com/npm/cli/commit/5b2317b472342428c6521d7b0d550d0fcc9bb202) [#7463](https://github.com/npm/cli/pull/7463) add lru-cache
+* [`26fefb8`](https://github.com/npm/cli/commit/26fefb82b3bd812009b8b627e3c19032a931aade) [#7480](https://github.com/npm/cli/pull/7480) `promzard@1.0.2`
+* [`2146e1f`](https://github.com/npm/cli/commit/2146e1f83ae94debecfaf08ef32e319c02223c12) [#7480](https://github.com/npm/cli/pull/7480) `npm-bundled@3.0.1`
+* [`ff6c5d1`](https://github.com/npm/cli/commit/ff6c5d161b52e8961e0c2ebf0467bc1382ef72d2) [#7480](https://github.com/npm/cli/pull/7480) `minipass-fetch@3.0.5`
+* [`419f9b9`](https://github.com/npm/cli/commit/419f9b9d9d6806d56b68d96bd50f7d25274a8f48) [#7480](https://github.com/npm/cli/pull/7480) `cmd-shim@6.0.3`
+* [`dade2c8`](https://github.com/npm/cli/commit/dade2c88d23289d57351d614feaa876d9e1e17f4) [#7480](https://github.com/npm/cli/pull/7480) `minipass@7.1.0`
+* [`18e5312`](https://github.com/npm/cli/commit/18e53129f0f3a19725e377b336336aa85ade3ba5) [#7480](https://github.com/npm/cli/pull/7480) `validate-npm-package-name@5.0.1`
+* [`d440011`](https://github.com/npm/cli/commit/d44001164f66d15daa3fd27da004194478b7c99c) [#7480](https://github.com/npm/cli/pull/7480) `npm-user-validate@2.0.1`
+* [`552113e`](https://github.com/npm/cli/commit/552113e7a663efdcebfcbcc6148b1d51be55596b) [#7480](https://github.com/npm/cli/pull/7480) `ignore-walk@6.0.5`
+* [`7e15b6d`](https://github.com/npm/cli/commit/7e15b6d56abbf47456c12fa2d5688d5d187a0ae7) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/metavuln-calculator@7.1.1`
+* [`8b20f8c`](https://github.com/npm/cli/commit/8b20f8c8ba70e43ad222538fc396dedb071b1680) [#7480](https://github.com/npm/cli/pull/7480) `ssri@10.0.6`
+* [`a9a6dcd`](https://github.com/npm/cli/commit/a9a6dcd4427ec82e491a2cad5672d8183e12180f) [#7480](https://github.com/npm/cli/pull/7480) `pacote@18.0.5`
+* [`e2fdb65`](https://github.com/npm/cli/commit/e2fdb651cda9ec603f009f5713a5a2b489d49e15) [#7480](https://github.com/npm/cli/pull/7480) `npm-pick-manifest@9.0.1`
+* [`310a7a5`](https://github.com/npm/cli/commit/310a7a5583d14da761d38b7421ebb6cee65600b6) [#7480](https://github.com/npm/cli/pull/7480) `normalize-package-data@6.0.1`
+* [`e71f541`](https://github.com/npm/cli/commit/e71f541b020de7940faccffab68d0255c4079e1a) [#7480](https://github.com/npm/cli/pull/7480) `nopt@7.2.1`
+* [`18c3b40`](https://github.com/npm/cli/commit/18c3b4058c7f721ff585de2f2766e53da897e16e) [#7480](https://github.com/npm/cli/pull/7480) `json-parse-even-better-errors@3.0.2`
+* [`4c5bf77`](https://github.com/npm/cli/commit/4c5bf77af6db3b447f9b9abc3b67b211d7bb82b8) [#7480](https://github.com/npm/cli/pull/7480) `init-package-json@6.0.3`
+* [`714e3e1`](https://github.com/npm/cli/commit/714e3e1e1ce014cba71db41c2d6c02d9dd53fcd3) [#7480](https://github.com/npm/cli/pull/7480) `hosted-git-info@7.0.2`
+* [`f94d672`](https://github.com/npm/cli/commit/f94d6726a6ca96cad0da88ea499fa22f35b7c4c0) [#7480](https://github.com/npm/cli/pull/7480) `cacache@18.0.3`
+* [`43331e4`](https://github.com/npm/cli/commit/43331e4d0647c3af4cc2aa3db8b47d797584a6d8) [#7480](https://github.com/npm/cli/pull/7480) `bin-links@4.0.4`
+* [`8234412`](https://github.com/npm/cli/commit/823441219ce63d7863aede8b22b4b1d07021fd22) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/promise-spawn@7.0.2`
+* [`6dfaebb`](https://github.com/npm/cli/commit/6dfaebb8f08acf992ac36faf4db8b650e8e55eae) [#7480](https://github.com/npm/cli/pull/7480) `@npmcli/git@5.0.7`
+* [`63ef498`](https://github.com/npm/cli/commit/63ef498bf2916a882a92c0b9fe6de6728584694a) [#7457](https://github.com/npm/cli/pull/7457) `npm-registry-fetch@17.0.1`
+* [`4cbc2d4`](https://github.com/npm/cli/commit/4cbc2d402174933052c7addd6ea55b1ecee202c5) [#7457](https://github.com/npm/cli/pull/7457) `npm-profile@10.0.0`
+
+### Chores
+
+* [`10256e8`](https://github.com/npm/cli/commit/10256e8cbe3583b65cb253a19ba9ee64f21c9584) [#7522](https://github.com/npm/cli/pull/7522) dev dependency updates (@wraithgar)
+* [`dcfc3de`](https://github.com/npm/cli/commit/dcfc3deba2bb0066ec5d8b1870719f730adc2e97) [#7517](https://github.com/npm/cli/pull/7517) convert run-script tests to snapshots (#7517) (@lukekarrys)
+* [`8add914`](https://github.com/npm/cli/commit/8add914841775f239acd838b1d8d52b251d0e1b2) [#7505](https://github.com/npm/cli/pull/7505) create single bug report issue template (#7505) (@lukekarrys)
+* [`7c7fba4`](https://github.com/npm/cli/commit/7c7fba4fc92528287e1f2ef5fb8256e60760c201) [#7500](https://github.com/npm/cli/pull/7500) benchmarks: fix emoji reaction to comment (#7500) (@lukekarrys)
+* [`d3b9587`](https://github.com/npm/cli/commit/d3b958756144d71c25cc4bd2020e040a940fbe3b) [#7494](https://github.com/npm/cli/pull/7494) disable progress on npm pack test (@wraithgar)
+* [`67ebb66`](https://github.com/npm/cli/commit/67ebb66d3ffa61582f8dd63ddbb5679649a5d2a4) [#7494](https://github.com/npm/cli/pull/7494) disable color in config tests (@wraithgar)
+* [`2ec2e75`](https://github.com/npm/cli/commit/2ec2e75ee81361a850bca643341d240441870334) [#7494](https://github.com/npm/cli/pull/7494) disable progress on shellout exit tests (@wraithgar)
+* [`e9fdc9a`](https://github.com/npm/cli/commit/e9fdc9a792666d5b9aa73833c65516bafcb80863) [#7483](https://github.com/npm/cli/pull/7483) create smoke-publish-test.sh script (#7483) (@lukekarrys)
+* [`1524cfd`](https://github.com/npm/cli/commit/1524cfd94daaca131ab16180c27323999fab07b3) [#7484](https://github.com/npm/cli/pull/7484) remove extra quotes from benchmark event_type (#7484) (@lukekarrys)
+* [`9c4d3c4`](https://github.com/npm/cli/commit/9c4d3c402c77bd7aaa514ee9e02d7fd87223343e) [#7467](https://github.com/npm/cli/pull/7467) template-oss-apply (@lukekarrys)
+* [`2b7ec54`](https://github.com/npm/cli/commit/2b7ec54f52f9e8aee568ccb4e34ce4a5733af21a) [#7467](https://github.com/npm/cli/pull/7467) `template-oss@4.22.0` (@lukekarrys)
+* [`8ded848`](https://github.com/npm/cli/commit/8ded848b099297a12a81ec008d6229f3ad3494a6) [#7457](https://github.com/npm/cli/pull/7457) remove doctor snapshot stack traces (@lukekarrys)
+
+## [10.7.0](https://github.com/npm/cli/compare/v10.6.0...v10.7.0) (2024-04-30)
### Features
-* [`e3b004c`](https://github.com/npm/cli/commit/e3b004c0d6dfcb153c4734af12afb09897e20932) [#5727](https://github.com/npm/cli/pull/5727) move cli and all workspaces out of prerelease mode (@lukekarrys)
+* [`7e349f4`](https://github.com/npm/cli/commit/7e349f45363bb8dbe1cc803f8b48befc01aae7fd) [#7432](https://github.com/npm/cli/pull/7432) add spinner (#7432) (@lukekarrys)
-### Dependencies
+### Bug Fixes
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.5...arborist-v6.0.0): `@npmcli/arborist@6.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmaccess-v7.0.0-pre.2...libnpmaccess-v7.0.0): `libnpmaccess@7.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.0-pre.3...libnpmdiff-v5.0.0): `libnpmdiff@5.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.5...libnpmexec-v5.0.0): `libnpmexec@5.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.5...libnpmfund-v4.0.0): `libnpmfund@4.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmhook-v9.0.0-pre.1...libnpmhook-v9.0.0): `libnpmhook@9.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmorg-v5.0.0-pre.1...libnpmorg-v5.0.0): `libnpmorg@5.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0-pre.4...libnpmpack-v5.0.0): `libnpmpack@5.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0-pre.4...libnpmpublish-v7.0.0): `libnpmpublish@7.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmsearch-v6.0.0-pre.1...libnpmsearch-v6.0.0): `libnpmsearch@6.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmteam-v5.0.0-pre.1...libnpmteam-v5.0.0): `libnpmteam@5.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmversion-v4.0.0-pre.1...libnpmversion-v4.0.0): `libnpmversion@4.0.0`
-
-## [9.0.0-pre.6](https://github.com/npm/cli/compare/v9.0.0-pre.5...v9.0.0-pre.6) (2022-10-19)
+* [`d679ce8`](https://github.com/npm/cli/commit/d679ce80fd9b761b2323777ec5cd84ebc5c164fe) [#7449](https://github.com/npm/cli/pull/7449) linting: no-unused-vars (@wraithgar)
+* [`2558283`](https://github.com/npm/cli/commit/25582837b0ebb8c2de2932c04224332b11d0e1d5) [#7450](https://github.com/npm/cli/pull/7450) powershell: fallback to script root if globalPrefix does not exist (#7450) (@lukekarrys)
+* [`bc4c342`](https://github.com/npm/cli/commit/bc4c3426c35201baff7230c2b3beee2cf5936296) [#7448](https://github.com/npm/cli/pull/7448) account for registries with no publisher in search (#7448) (@wraithgar)
+* [`bcc781a`](https://github.com/npm/cli/commit/bcc781abf10b46aa5b04da996fc4602524be9b0e) [#7439](https://github.com/npm/cli/pull/7439) move run-script banners to stderr when in json mode (#7439) (@lukekarrys)
+* [`104fcb5`](https://github.com/npm/cli/commit/104fcb53282653a1c5610a6590afecc207920a6b) [#7438](https://github.com/npm/cli/pull/7438) remove doctor log for each file permission check (#7438) (@lukekarrys)
+* [`5230647`](https://github.com/npm/cli/commit/52306473da03123ef5623e9e152e10285c8097f3) [#7422](https://github.com/npm/cli/pull/7422) rewrite powershell scripts to use PSScriptRoot (#7422) (@lukekarrys)
+* [`71cbd91`](https://github.com/npm/cli/commit/71cbd91b6f01875a99eeae989ea67489bdd0178d) [#7421](https://github.com/npm/cli/pull/7421) hide banner for exec and explore (#7421) (@lukekarrys)
+* [`57ebebf`](https://github.com/npm/cli/commit/57ebebf03d55d4eda2b6439149a97b595a191aaf) [#7418](https://github.com/npm/cli/pull/7418) update repository.url in package.json (#7418) (@wraithgar)
-### ⚠️ BREAKING CHANGES
+### Documentation
+
+* [`1674136`](https://github.com/npm/cli/commit/1674136bc14b0f708bb01f75d65474346c863bd9) [#7441](https://github.com/npm/cli/pull/7441) correct search help output (#7441) (@wraithgar)
+
+### Dependencies
-* `npm` now outputs some json errors on stdout. Previously `npm` would output all json formatted errors on stderr, making it difficult to parse as the stderr stream usually has logs already written to it. In the future, `npm` will differentiate between errors and crashes. Errors, such as `E404` and `ERESOLVE`, will be handled and will continue to be output on stdout. In the case of a crash, `npm` will log the error as usual but will not attempt to display it as json, even in `--json` mode. Moving a case from the category of an error to a crash will not be considered a breaking change. For more information see npm/rfcs#482.
-* `npm config set` will no longer accept deprecated or invalid config options.
-* `timing` and `loglevel` changes
- - `timing` has been removed as a value for `--loglevel`
- - `--timing` will show timing information regardless of
- `--loglevel`, except when `--silent`
-* deprecate boolean install flags in favor of `--install-strategy`
- * deprecate --global-style, --global now sets --install-strategy=shallow
- * deprecate --legacy-bundling, now sets --install-strategy=nested
-* npm will no longer attempt to modify ownership of files it creates
-* this package no longer attempts to change file ownership automatically
-* this package no longer attempts to change file ownership automatically
+* [`80eec03`](https://github.com/npm/cli/commit/80eec03462e5747cb4434d43aff25939826b7850) [#7453](https://github.com/npm/cli/pull/7453) `@npmcli/redact@2.0.0`
+* [`a7145d4`](https://github.com/npm/cli/commit/a7145d422485fcbcb9427efa775c15180c7ee1c2) [#7453](https://github.com/npm/cli/pull/7453) `npm-registry-fetch@17.0.0`
+* [`a785766`](https://github.com/npm/cli/commit/a785766325141335cde39d43eb631062e32d6605) [#7453](https://github.com/npm/cli/pull/7453) `pacote@18.0.3`
+* [`65d76db`](https://github.com/npm/cli/commit/65d76dbd2fb11c83141302500ec4a3f5128ff12f) [#7453](https://github.com/npm/cli/pull/7453) `npm-profile@9.0.2`
+* [`cadc0f0`](https://github.com/npm/cli/commit/cadc0f0cad8909755ae8ac72f2dd5802a0d34943) [#7449](https://github.com/npm/cli/pull/7449) hoist production copy of sprintf-js
+* [`2cffdfe`](https://github.com/npm/cli/commit/2cffdfef45b9a13d189f5059cc69fc7319620fda) [#7449](https://github.com/npm/cli/pull/7449) `lru-cache@10.2.2`
+* [`432efb5`](https://github.com/npm/cli/commit/432efb5ee313ac5bd08642a9cef2b7c52da23ea9) [#7449](https://github.com/npm/cli/pull/7449) `make-fetch-happen@13.0.1`
+* [`9da5738`](https://github.com/npm/cli/commit/9da57388ebd5c643c2a95bbf63abc745cad45ccc) [#7437](https://github.com/npm/cli/pull/7437) `@npmcli/run-script@8.1.0` (#7437)
+* [`762888a`](https://github.com/npm/cli/commit/762888a3b603704c7c53a94a704b8a7f3edea918) [#7429](https://github.com/npm/cli/pull/7429) update dependencies for workspaces (#7429)
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.5.1): `@npmcli/arborist@7.5.1`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.3.1): `@npmcli/config@8.3.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.5): `libnpmaccess@8.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.1.1): `libnpmdiff@6.1.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v8.1.0): `libnpmexec@8.1.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.9): `libnpmfund@5.0.9`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.4): `libnpmhook@10.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.5): `libnpmorg@6.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v7.0.1): `libnpmpack@7.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.7): `libnpmpublish@9.0.7`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.4): `libnpmsearch@7.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.4): `libnpmteam@6.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v6.0.1): `libnpmversion@6.0.1`
+
+### Chores
+
+* [`356c374`](https://github.com/npm/cli/commit/356c374817711845719902184547e1e0ac359b22) [#7453](https://github.com/npm/cli/pull/7453) fix test fixture for new redact behavior (@wraithgar)
+* [`2fba4b7`](https://github.com/npm/cli/commit/2fba4b7b6218900fd895daf5218682edeb4253a4) [#7449](https://github.com/npm/cli/pull/7449) update devDependencies in lockfile (@wraithgar)
+
+## [10.6.0](https://github.com/npm/cli/compare/v10.5.2...v10.6.0) (2024-04-25)
### Features
-* [`d3543e9`](https://github.com/npm/cli/commit/d3543e945e721783dcb83385935f282a4bb32cf3) output json formatted errors on stdout (#5716) (@lukekarrys)
-* [`be642c6`](https://github.com/npm/cli/commit/be642c6b8e3df40fd43b0110b30d3ecd44086016) refuse to set deprecated/invalid config (#5719) (@wraithgar)
-* [`332914b`](https://github.com/npm/cli/commit/332914b48b616099e586893b1df21480b7ddb733) separate configs for `--timing` and `--loglevel` (@lukekarrys)
-* [`f653785`](https://github.com/npm/cli/commit/f6537855e1a34b84251993a49e1ee362082ada37) deprecated `key`, `cert` config options and updated registry scoped auth docs (@fritzy)
-* [`de2d33f`](https://github.com/npm/cli/commit/de2d33f3ed42e187803bdd31db4f7a12f08f353c) add --install-strategy=hoisted|nested|shallow, deprecate --global-style, --legacy-bundling (#5709) (@fritzy)
-* [`58065bc`](https://github.com/npm/cli/commit/58065bc679e6968742b5b15fa2fb82dd9e8ae988) [#5704](https://github.com/npm/cli/pull/5704) do not alter file ownership (@nlf)
-* [`475e9b6`](https://github.com/npm/cli/commit/475e9b6c0c978a104dd2ee47bde22b0a031a95f9) [#5703](https://github.com/npm/cli/pull/5703) do not alter file ownership (@nlf)
+* [`9123de4`](https://github.com/npm/cli/commit/9123de4d282bfd19ea17ad613f5a2acab0e0e162) [#7373](https://github.com/npm/cli/pull/7373) do all output over proc-log events (@lukekarrys)
+* [`9622597`](https://github.com/npm/cli/commit/9622597399ec93224fddf90a9209a98dbcfd6b2f) [#7339](https://github.com/npm/cli/pull/7339) refactor terminal display (#7339) (@lukekarrys)
### Bug Fixes
-* [`6ffa5b7`](https://github.com/npm/cli/commit/6ffa5b7bbb8fd7cae1a0b955a1f762661ec5e9ed) `npm hook ls` duplicates hook name prefixes (#5295) (@gennadiygashev)
-* [`1afe5ba`](https://github.com/npm/cli/commit/1afe5ba9647d1f0f55bf0a4bace543965d05daed) account for new npm-package-arg behavior (@wraithgar)
-* [`353b5bb`](https://github.com/npm/cli/commit/353b5bb92c3f7899526536b597252b44aa8a712d) [#5710](https://github.com/npm/cli/pull/5710) remove chownr and mkdirp-infer-owner (@nlf)
+* [`0e74ee4`](https://github.com/npm/cli/commit/0e74ee42cbd2cbe438e64a2426767dad1868e70d) [#7416](https://github.com/npm/cli/pull/7416) clean up npm object (#7416) (@wraithgar)
+* [`c060e60`](https://github.com/npm/cli/commit/c060e60a7ce767f6fdb32782c0d373e717df7856) [#7415](https://github.com/npm/cli/pull/7415) return command and argv from load (@lukekarrys)
+* [`180b919`](https://github.com/npm/cli/commit/180b919d2322e55ec5d58bbd476c3ecc31880479) [#7415](https://github.com/npm/cli/pull/7415) remove unused npm.setCmd method (@lukekarrys)
+* [`0708b3b`](https://github.com/npm/cli/commit/0708b3b2dbfc811b0e692133557b6b2e698519fd) [#7414](https://github.com/npm/cli/pull/7414) use name of level instead of label for logging prefixes (#7414) (@lukekarrys)
+* [`7f4e667`](https://github.com/npm/cli/commit/7f4e66772ee631158b47fcfcd8e22b7b6b9b9cce) [#7403](https://github.com/npm/cli/pull/7403) redact when displaying non-ascii arguments (@lukekarrys)
+* [`06202f0`](https://github.com/npm/cli/commit/06202f0e13d91f5ee6edfe2da6ee21bafbf18cca) [#7403](https://github.com/npm/cli/pull/7403) store unref promises for awaiting in tests (@lukekarrys)
+* [`e5f1948`](https://github.com/npm/cli/commit/e5f1948a569224c59a9a4fdeb0e3b82312fb87c6) [#7403](https://github.com/npm/cli/pull/7403) run update notifier after exec but before waiting (@lukekarrys)
+* [`f309c1c`](https://github.com/npm/cli/commit/f309c1cd8ab26b9cdc3b1224499aba345edfc582) [#7403](https://github.com/npm/cli/pull/7403) refactor: move timer and error logfile messages to lib/npm (@lukekarrys)
+* [`43e6194`](https://github.com/npm/cli/commit/43e619480568493433af78208e2b3c48843185ef) [#7403](https://github.com/npm/cli/pull/7403) dont show run script banners in silent (@lukekarrys)
+* [`694dba9`](https://github.com/npm/cli/commit/694dba9fd9a8d71facc7b832bf8f00ca9468e68a) [#7403](https://github.com/npm/cli/pull/7403) dont expose as many public properties of timers (@lukekarrys)
+* [`7ca6d84`](https://github.com/npm/cli/commit/7ca6d8490b675c9d4c402ae8e41c9afd5871aa35) [#7403](https://github.com/npm/cli/pull/7403) use proc-log META for flush and force (@lukekarrys)
+* [`2538438`](https://github.com/npm/cli/commit/25384388e01d1c9d6c4cae4a49149407b0024176) [#7403](https://github.com/npm/cli/pull/7403) dont write timing logs to file unless requested (@lukekarrys)
+* [`7e04417`](https://github.com/npm/cli/commit/7e04417d4b084043300d1cdd5b4dce8ea7184d9e) [#7403](https://github.com/npm/cli/pull/7403) rename base-cmd to match other commands (@lukekarrys)
+* [`fc68547`](https://github.com/npm/cli/commit/fc68547eb9b06a1a6a2a0feb7e422accec50230d) [#7403](https://github.com/npm/cli/pull/7403) remove some npm.load timers and exit earlier for --versions (@lukekarrys)
+* [`28019d5`](https://github.com/npm/cli/commit/28019d50fdb1b2395199516694180edc7b4f8dd6) [#7403](https://github.com/npm/cli/pull/7403) cleanup: move cli specific files to separate dir (@lukekarrys)
+* [`469f788`](https://github.com/npm/cli/commit/469f7885ca47f79bbd3c7171dc56a471a3e422a2) [#7403](https://github.com/npm/cli/pull/7403) cleanup: newlines and whitespace (@lukekarrys)
+* [`4ab6401`](https://github.com/npm/cli/commit/4ab64013236925e7bceb9f88cfcc81f4a65f292f) [#7403](https://github.com/npm/cli/pull/7403) cleanup: dont nest files utils dir (@lukekarrys)
+* [`78447d7`](https://github.com/npm/cli/commit/78447d7a35fab870456ba66eee408b2baddca23e) [#7399](https://github.com/npm/cli/pull/7399) prefer fs/promises over promisify (#7399) (@lukekarrys)
+* [`d531f8b`](https://github.com/npm/cli/commit/d531f8b9237fabf640d421a8c4ea3c4284e00f0a) [#7407](https://github.com/npm/cli/pull/7407) Remove table output from search and tar summary (@wraithgar)
+* [`c209e98`](https://github.com/npm/cli/commit/c209e989b405fa3e86df7015c22e6840e18313b8) [#7401](https://github.com/npm/cli/pull/7401) Remove table output from many commands (@wraithgar)
+* [`ad7ab8c`](https://github.com/npm/cli/commit/ad7ab8c19994c1d2a452278edba65968185d3871) [#7388](https://github.com/npm/cli/pull/7388) perf: lazy loading optimizations (#7388) (@wraithgar)
+* [`8eae4b3`](https://github.com/npm/cli/commit/8eae4b3b30a7375f0f9dd4172a8b683efe0bb354) [#7385](https://github.com/npm/cli/pull/7385) token: properly await registry request (#7385) (@wraithgar)
+* [`9216d59`](https://github.com/npm/cli/commit/9216d5985fb3e2a2754be5bf0b1d43fc321cf723) [#7377](https://github.com/npm/cli/pull/7377) better output colors (#7377) (@wraithgar)
+* [`6512112`](https://github.com/npm/cli/commit/65121122d99855541f63aa787f8ee8bb4eea4a3f) [#7378](https://github.com/npm/cli/pull/7378) use proc-log for all timers (@lukekarrys)
+* [`2a80dab`](https://github.com/npm/cli/commit/2a80dab2ed1feedf3cd3af8ff7f41846567b2fd3) [#7370](https://github.com/npm/cli/pull/7370) typo in `npm access` usage (#7370) (@mbtools)
### Documentation
-* [`9e74d3e`](https://github.com/npm/cli/commit/9e74d3e847c4bc0abc630fbe81328e011d6f0187) update supported engines in readme (#5725) (@lukekarrys)
+* [`dfa4cab`](https://github.com/npm/cli/commit/dfa4cabdccdd1c82cc160bb435c29c45ed05fdf4) [#7401](https://github.com/npm/cli/pull/7401) update output examples (@wraithgar)
+* [`140b9c9`](https://github.com/npm/cli/commit/140b9c988d08beb6c3cd95b67cd4135ed32a19e6) [#7382](https://github.com/npm/cli/pull/7382) package-json: note that line endings are inferred (#7382) (@ertw)
+* [`c16dd4e`](https://github.com/npm/cli/commit/c16dd4e46b58d6c4490284e6edc5c255862e322b) [#7367](https://github.com/npm/cli/pull/7367) Document that overrides only work in the root `package.json` (#7367) (@s100)
### Dependencies
-* [`88137a3`](https://github.com/npm/cli/commit/88137a329c8ad418db265dd465768a7cf5ebccb1) `npmlog@7.0.1`
-* [`2008ea6`](https://github.com/npm/cli/commit/2008ea6a807acbd97912799adfe97f276202cea6) `npm-package-arg@10.0.0`, `pacote@15.0.2`
-* [`aa01072`](https://github.com/npm/cli/commit/aa010722996ef6de46e1bb937c6f8a94dc2844fa) [#5707](https://github.com/npm/cli/pull/5707) update the following dependencies
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.4...arborist-v6.0.0-pre.5): `@npmcli/arborist@6.0.0-pre.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmaccess-v7.0.0-pre.1...libnpmaccess-v7.0.0-pre.2): `libnpmaccess@7.0.0-pre.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.0-pre.2...libnpmdiff-v5.0.0-pre.3): `libnpmdiff@5.0.0-pre.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.4...libnpmexec-v5.0.0-pre.5): `libnpmexec@5.0.0-pre.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.4...libnpmfund-v4.0.0-pre.5): `libnpmfund@4.0.0-pre.5`
-* [Workspace](https://github.com/npm/cli/compare/libnpmhook-v9.0.0-pre.0...libnpmhook-v9.0.0-pre.1): `libnpmhook@9.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmorg-v5.0.0-pre.0...libnpmorg-v5.0.0-pre.1): `libnpmorg@5.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0-pre.3...libnpmpack-v5.0.0-pre.4): `libnpmpack@5.0.0-pre.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0-pre.3...libnpmpublish-v7.0.0-pre.4): `libnpmpublish@7.0.0-pre.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmsearch-v6.0.0-pre.0...libnpmsearch-v6.0.0-pre.1): `libnpmsearch@6.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmteam-v5.0.0-pre.0...libnpmteam-v5.0.0-pre.1): `libnpmteam@5.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmversion-v4.0.0-pre.0...libnpmversion-v4.0.0-pre.1): `libnpmversion@4.0.0-pre.1`
-
-## [9.0.0-pre.5](https://github.com/npm/cli/compare/v9.0.0-pre.4...v9.0.0-pre.5) (2022-10-13)
+* [`36adff3`](https://github.com/npm/cli/commit/36adff36c41f56315fe582e1e4dda29060f7fdf7) [#7408](https://github.com/npm/cli/pull/7408) `pacote@18.0.2`
+* [`486d46c`](https://github.com/npm/cli/commit/486d46cd5b5678ad1ab6c23ee12cf7559477805a) [#7408](https://github.com/npm/cli/pull/7408) `@npmcli/installed-package-contents@2.1.0`
+* [`157d0ae`](https://github.com/npm/cli/commit/157d0aebfe5710880d0c91bddee970316b8a6612) [#7408](https://github.com/npm/cli/pull/7408) `@npmcli/package-json@5.1.0`
+* [`b0ca163`](https://github.com/npm/cli/commit/b0ca16310d9db944dd13f80ecce534c65eea42c5) [#7409](https://github.com/npm/cli/pull/7409) remove @npmcli/disparity-colors
+* [`c77f035`](https://github.com/npm/cli/commit/c77f035fc18fdc0a9eb3ef0a7d1faf2c7aec6af3) [#7407](https://github.com/npm/cli/pull/7407) remove columnify
+* [`2ec690d`](https://github.com/npm/cli/commit/2ec690da99fb441cdd7069480b42b6302e098005) [#7401](https://github.com/npm/cli/pull/7401) move cli-table3 to devDependencies
+* [`fc6e291`](https://github.com/npm/cli/commit/fc6e291e9c2154c2e76636cb7ebf0a17be307585) [#7392](https://github.com/npm/cli/pull/7392) `proc-log@4.2.0` (#7392)
+* [`38ed048`](https://github.com/npm/cli/commit/38ed048ac0d7a36785dbff0eeca3618cb7f084c5) [#7378](https://github.com/npm/cli/pull/7378) `@npmcli/metavuln-calculator@7.1.0`
+* [`7678a3d`](https://github.com/npm/cli/commit/7678a3d92835457bb402c82e4ca7ea3fa734d23b) [#7378](https://github.com/npm/cli/pull/7378) `proc-log@4.1.0`
+* [`87f6c09`](https://github.com/npm/cli/commit/87f6c094ac47f4e6eb5d5d6a03a0ad97711b51e9) [#7373](https://github.com/npm/cli/pull/7373) `@npmcli/metavuln-calculator@7.0.1`
+* [`b8f8b41`](https://github.com/npm/cli/commit/b8f8b414d8ad9635e3efedc6e491c8c6e3df0973) [#7373](https://github.com/npm/cli/pull/7373) `@npmcli/run-script@8.0.0`
+* [`79f79c7`](https://github.com/npm/cli/commit/79f79c7460be8a74f2b77c647100bcefd89b2efa) [#7373](https://github.com/npm/cli/pull/7373) `proc-log@4.0.0`
+* [`9027266`](https://github.com/npm/cli/commit/90272661b16d861a5926af8ec394d32ec0f307fd) [#7373](https://github.com/npm/cli/pull/7373) `pacote@18.0.0`
+* [`ee4b3e0`](https://github.com/npm/cli/commit/ee4b3e0e741545045dc03741c7147560961d867d) [#7373](https://github.com/npm/cli/pull/7373) `npm-registry-fetch@16.2.1`
+* [`0e7789b`](https://github.com/npm/cli/commit/0e7789b7d9ec88c89edcdade9fc898c131ed492a) [#7373](https://github.com/npm/cli/pull/7373) `npm-profile@9.0.1`
+* [`ac98fd3`](https://github.com/npm/cli/commit/ac98fd3a8514f2552555d2b8af74a52e64888797) [#7373](https://github.com/npm/cli/pull/7373) `npm-package-arg@11.0.2`
+* [`9351570`](https://github.com/npm/cli/commit/93515700efbb2147a6e929cf117da9e6e87c0aca) [#7373](https://github.com/npm/cli/pull/7373) `@npmcli/package-json@5.0.3`
+* [`d3a0cfa`](https://github.com/npm/cli/commit/d3a0cfad06ddffe6a3d4968257b7993aea68fc7c) [#7373](https://github.com/npm/cli/pull/7373) `@npmcli/git@5.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.5.0): `@npmcli/arborist@7.5.0`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.3.0): `@npmcli/config@8.3.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.4): `libnpmaccess@8.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.1.0): `libnpmdiff@6.1.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v8.0.0): `libnpmexec@8.0.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.8): `libnpmfund@5.0.8`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.3): `libnpmhook@10.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.4): `libnpmorg@6.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v7.0.0): `libnpmpack@7.0.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.6): `libnpmpublish@9.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.3): `libnpmsearch@7.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.3): `libnpmteam@6.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v6.0.0): `libnpmversion@6.0.0`
+
+### Chores
+
+* [`3cbc258`](https://github.com/npm/cli/commit/3cbc25800bbd14a8b22ee3feddad715b3a6436d7) [#7403](https://github.com/npm/cli/pull/7403) test for early version exit (@lukekarrys)
+* [`08e0c0e`](https://github.com/npm/cli/commit/08e0c0ed1ad7bc2f12b4f358eb3e6b18bdbca053) [#7403](https://github.com/npm/cli/pull/7403) move sigstore json files to fixtures dir (@lukekarrys)
+* [`ea66e95`](https://github.com/npm/cli/commit/ea66e9534f4b9004f8b4c53a2f8adb7688ee9b36) [#7380](https://github.com/npm/cli/pull/7380) remove licensee as a devDependency (#7380) (@lukekarrys, @wraithgar)
+* [`9333e9d`](https://github.com/npm/cli/commit/9333e9df9b6ad3143f084a25b0e7ea347c8094d3) [#7386](https://github.com/npm/cli/pull/7386) reduce windows shim test flakes (#7386) (@lukekarrys)
+
+## [10.5.2](https://github.com/npm/cli/compare/v10.5.1...v10.5.2) (2024-04-10)
-### ⚠️ BREAKING CHANGES
+### Bug Fixes
+
+* [`ef381b1`](https://github.com/npm/cli/commit/ef381b1449c99e174437665aa767e7a9b60edf22) [#7363](https://github.com/npm/cli/pull/7363) use @npmcli/redact for url cleaning (#7363) (@lukekarrys)
+* [`3760dd2`](https://github.com/npm/cli/commit/3760dd275aaa53cd2cee92e6a7b90aaf62f663cf) [#7361](https://github.com/npm/cli/pull/7361) perf: do less work loading config (#7361) (@wraithgar)
+* [`64bcf4c`](https://github.com/npm/cli/commit/64bcf4cf09e284ca18988615ab8696b55d27d829) [#7360](https://github.com/npm/cli/pull/7360) perf: only initialize workspaces when we are inside a workspace (#7360) (@H4ad)
+* [`5a28a29`](https://github.com/npm/cli/commit/5a28a29799aac2c89b4e7a3d5c1d1d880346b743) [#7352](https://github.com/npm/cli/pull/7352) perf: lazy load workspace dependency (#7352) (@H4ad)
+* [`5fc0f9d`](https://github.com/npm/cli/commit/5fc0f9dfdac3f95fd2eb6855beb2020941b2a361) [#7347](https://github.com/npm/cli/pull/7347) lazy load validate npm package name on error message (#7347) (@H4ad)
+* [`c929ed1`](https://github.com/npm/cli/commit/c929ed180b3d3361d1541737a4c3c9932cd480c6) [#7321](https://github.com/npm/cli/pull/7321) prioritize CLI flags over publishConfig settings (#7321) (@roni-berlin)
+* [`70497cb`](https://github.com/npm/cli/commit/70497cbcd4e6b32948253b644ccd7a3f0ee29270) [#7346](https://github.com/npm/cli/pull/7346) perf: avoid importing the entire semver package for update-notifier (#7346) (@H4ad)
+
+### Documentation
+
+* [`90ba1c9`](https://github.com/npm/cli/commit/90ba1c9fd6c1ea89173ed7712cf1da205732b164) [#7340](https://github.com/npm/cli/pull/7340) fix incorrect ISO 8601 reference (#7340) (@emmanuel-ferdman)
+
+### Dependencies
+
+* [`699a1de`](https://github.com/npm/cli/commit/699a1de9d22e6c552141f87e468f5b37ed825206) [#7362](https://github.com/npm/cli/pull/7362) `@npmcli/map-workspaces@3.0.6`
+* [`49fb9b7`](https://github.com/npm/cli/commit/49fb9b7cc22a348f608ab443c6953d950955c9e2) [#7362](https://github.com/npm/cli/pull/7362) `socks@2.8.3`
+* [`f69052e`](https://github.com/npm/cli/commit/f69052e16c21ce87c3f8f53a3769cc42ae68db63) [#7362](https://github.com/npm/cli/pull/7362) `@npmcli/package-json@5.0.2`
+* [`c18a0ad`](https://github.com/npm/cli/commit/c18a0ade8c4e7f6e35ffa937aa7afe06351b4b60) [#7357](https://github.com/npm/cli/pull/7357) `sigstore@2.3.0`
+* [`fd4153b`](https://github.com/npm/cli/commit/fd4153b3ccbc8d49b3aff90fed5e853f35fd6beb) [#7357](https://github.com/npm/cli/pull/7357) `socks@2.8.2`
+* [`d6b705a`](https://github.com/npm/cli/commit/d6b705a474a39b83b3d75516a95e34af92793034) [#7357](https://github.com/npm/cli/pull/7357) `postcss-selector-parser@6.0.16`
+* [`248c177`](https://github.com/npm/cli/commit/248c1771b8b406337ec96cffc6b8bc8f9b3b55b4) [#7357](https://github.com/npm/cli/pull/7357) `hasown@2.0.2`
+* [`4af9e86`](https://github.com/npm/cli/commit/4af9e863d1a3ad4a934315113745bad4f3c29bb0) [#7357](https://github.com/npm/cli/pull/7357) `builtins@5.1.0`
+* [`7546b56`](https://github.com/npm/cli/commit/7546b5608d86fdefc5d2f080cd9c0223150ab2f7) [#7357](https://github.com/npm/cli/pull/7357) `@npmcli/agent@2.2.2`
+* [`d38fd4f`](https://github.com/npm/cli/commit/d38fd4f6035a6ec9fbd366079d6761cf9c25b471) [#7357](https://github.com/npm/cli/pull/7357) `spdx-expression-parse@4.0.0`
+* [`913b326`](https://github.com/npm/cli/commit/913b326f62319c95df3b576dd71cdbb04be26782) [#7357](https://github.com/npm/cli/pull/7357) `is-cidr@5.0.5`
+* [`84bbbd4`](https://github.com/npm/cli/commit/84bbbd4b61be12025cfa952c9d2a522a1b996245) [#7357](https://github.com/npm/cli/pull/7357) `@npmcli/package-json@5.0.1`
+* [`a0f5048`](https://github.com/npm/cli/commit/a0f504808b81af7442f47343366724eea1bc8623) [#7357](https://github.com/npm/cli/pull/7357) `@npmcli/git@5.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.4.2): `@npmcli/arborist@7.4.2`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.2.2): `@npmcli/config@8.2.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.9): `libnpmdiff@6.0.9`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.10): `libnpmexec@7.0.10`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.7): `libnpmfund@5.0.7`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.9): `libnpmpack@6.0.9`
+
+### Chores
+
+* [`81be28d`](https://github.com/npm/cli/commit/81be28de88fb5f3951e782a474548214f61eef70) [#7357](https://github.com/npm/cli/pull/7357) dev dependency updates (@wraithgar)
+
+## [10.5.1](https://github.com/npm/cli/compare/v10.5.0...v10.5.1) (2024-04-03)
-* the presence of auth related settings that are not scoped to a specific registry found in a config file is no longer supported and will throw errors
-* the `node-version` and `npm-version` configs have been removed.
-* links generated from git urls will now use `HEAD` instead of `master` as the default ref
+### Bug Fixes
+
+* [`17d97d2`](https://github.com/npm/cli/commit/17d97d266807f69cfc2a7a3982e8db126f90c48a) [#7334](https://github.com/npm/cli/pull/7334) use @npmcli/redact for log redactions (@lukekarrys)
+* [`8cab136`](https://github.com/npm/cli/commit/8cab136f731c69be079be08d79e3514e01bbd563) [#7324](https://github.com/npm/cli/pull/7324) ensure maxSockets is respected (#7324) (@lukekarrys)
+* [`9bffa13`](https://github.com/npm/cli/commit/9bffa13e0b96efe1039d9075fdcb11d5946b2f90) [#7320](https://github.com/npm/cli/pull/7320) query: properly return :missing nodes (#7320) (@wraithgar)
+* [`9d4e85f`](https://github.com/npm/cli/commit/9d4e85f2379eded50b54f4e0b6f307031037f1ec) [#7297](https://github.com/npm/cli/pull/7297) search: include searchlimit option in help output (#7297) (@10xLaCroixDrinker)
+* [`bdb3c28`](https://github.com/npm/cli/commit/bdb3c28167f757060474ef2b82c92f3a1f210972) [#7274](https://github.com/npm/cli/pull/7274) added check for dry-run (#7274) (@cod1r)
+* [`7f1ab88`](https://github.com/npm/cli/commit/7f1ab8822a8d50403338595ab9f218e4d63f37fa) [#7271](https://github.com/npm/cli/pull/7271) more lightweight npm bin discovery in windows (#7271) (@wraithgar)
+
+### Documentation
+
+* [`1da5cf0`](https://github.com/npm/cli/commit/1da5cf0ace30d89edf05833b91ce80467c7864bd) [#7198](https://github.com/npm/cli/pull/7198) add link to engines section (#7198) (@uiolee)
+* [`1114a12`](https://github.com/npm/cli/commit/1114a12f2b4691d403d0863d4dca44f25580f57d) [#7307](https://github.com/npm/cli/pull/7307) fix incorrect npm audit key response link (#7307) (@davidlj95)
+* [`9807caf`](https://github.com/npm/cli/commit/9807cafbaf274eca2a0abbd04a9b2b55e850de9d) [#7304](https://github.com/npm/cli/pull/7304) update audit docs with provenance info (#7304) (@bdehamer)
+* [`e1ecfa7`](https://github.com/npm/cli/commit/e1ecfa7829be91282373862669b92ef42b9e48df) [#7285](https://github.com/npm/cli/pull/7285) Correct capitalization for URL (#7285) (@coliff)
+
+### Dependencies
+
+* [`87a61fc`](https://github.com/npm/cli/commit/87a61fc8bb65c950cda389ab3d14ae250ab2345d) [#7334](https://github.com/npm/cli/pull/7334) `npm-registry-fetch@16.2.0`
+* [`5469614`](https://github.com/npm/cli/commit/54696148f25986bcdf39e1acb5aca4bf09e7d1a0) [#7327](https://github.com/npm/cli/pull/7327) `init-package-json@6.0.2`
+* [`5469614`](https://github.com/npm/cli/commit/54696148f25986bcdf39e1acb5aca4bf09e7d1a0) [#7327](https://github.com/npm/cli/pull/7327) `promzard@1.0.1`
+* [`5469614`](https://github.com/npm/cli/commit/54696148f25986bcdf39e1acb5aca4bf09e7d1a0) [#7327](https://github.com/npm/cli/pull/7327) `read@3.0.1` (#7327)
+* [`9ccff72`](https://github.com/npm/cli/commit/9ccff72c332e6062e6ebcf8123c7888d8d617091) [#7329](https://github.com/npm/cli/pull/7329) `tar@6.2.1`
+* [`7201a00`](https://github.com/npm/cli/commit/7201a00632f5cedf50101e8411a4b3c514439efb) [#7329](https://github.com/npm/cli/pull/7329) `node-gyp@10.1.0`
+* [`6fd94f2`](https://github.com/npm/cli/commit/6fd94f249f43080ae183da36b971981e8ad00882) [#7329](https://github.com/npm/cli/pull/7329) `minimatch@9.0.4`
+* [`b048592`](https://github.com/npm/cli/commit/b048592a9583dca6f75a9c837edee57ab4e12ab0) [#7329](https://github.com/npm/cli/pull/7329) `ini@4.1.2`
+* [`c54a84a`](https://github.com/npm/cli/commit/c54a84ab5fdd7513913518734c0ece5f3d3e39c9) [#7329](https://github.com/npm/cli/pull/7329) `glob@10.3.12`
+* [`6853531`](https://github.com/npm/cli/commit/6853531da30bc8fecb776c823144766915b5e421) [#7329](https://github.com/npm/cli/pull/7329) `cli-table3@0.6.4`
+* [`c9315cb`](https://github.com/npm/cli/commit/c9315cb240be02babbbd99585f7ef23679e1d963) [#7329](https://github.com/npm/cli/pull/7329) `binary-extensions@2.3.0`
+* [`5bb0031`](https://github.com/npm/cli/commit/5bb003147423a644969c04222e2ba1b6cf407e6f) [#7329](https://github.com/npm/cli/pull/7329) `@sigstore/tuf@2.3.2`
+* [`8cab136`](https://github.com/npm/cli/commit/8cab136f731c69be079be08d79e3514e01bbd563) [#7324](https://github.com/npm/cli/pull/7324) `agent-base@7.1.1` (@lukekarrys)
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.4.1): `@npmcli/arborist@7.4.1`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.2.1): `@npmcli/config@8.2.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.3): `libnpmaccess@8.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.8): `libnpmdiff@6.0.8`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.9): `libnpmexec@7.0.9`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.6): `libnpmfund@5.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.2): `libnpmhook@10.0.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.3): `libnpmorg@6.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.8): `libnpmpack@6.0.8`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.5): `libnpmpublish@9.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.2): `libnpmsearch@7.0.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.2): `libnpmteam@6.0.2`
+
+### Chores
+
+* [`8cab136`](https://github.com/npm/cli/commit/8cab136f731c69be079be08d79e3514e01bbd563) [#7324](https://github.com/npm/cli/pull/7324) add smoke-test for large prod installs (@lukekarrys)
+* [`0dab381`](https://github.com/npm/cli/commit/0dab3810e6b84e1b4afcc2a315c878fffccb328b) [#7258](https://github.com/npm/cli/pull/7258) `jsdom@24.0.0` (#7258) (@wraithgar)
+* [`af3c48e`](https://github.com/npm/cli/commit/af3c48e074d03caebaa8ed24d39405329f545497) [#7262](https://github.com/npm/cli/pull/7262) test refactor (#7262) (@wraithgar)
+
+## [10.5.0](https://github.com/npm/cli/compare/v10.4.0...v10.5.0) (2024-02-28)
### Features
-* [`a09e19d`](https://github.com/npm/cli/commit/a09e19d88f046e54e8d75343883635a1bd056310) [#5696](https://github.com/npm/cli/pull/5696) introduce the `npm config fix` command (@nlf)
-* [`d2963c6`](https://github.com/npm/cli/commit/d2963c67b992b9b3b9dd32f6f41cbbe4bcc580c8) explicitly validate config within the cli (@nlf)
-* [`a5fec08`](https://github.com/npm/cli/commit/a5fec08348add7e75fa2498e6a9efe608b20aa8b) rewrite docs generation (@lukekarrys)
+* [`2366edc`](https://github.com/npm/cli/commit/2366edcaf2b32b5d1c6a7c03184c59eef0e08eae) [#7218](https://github.com/npm/cli/pull/7218) query: add :vuln pseudo selector (@wraithgar)
+* [`4f3ddbb`](https://github.com/npm/cli/commit/4f3ddbbe88df7c94d1e06e660928a962e973f332) [#5966](https://github.com/npm/cli/pull/5966) add --expect-entries to `npm query` (@wraithgar)
### Bug Fixes
-* [`a35c784`](https://github.com/npm/cli/commit/a35c784f8c25dce05b4173edd6c3f8e7913d7b50) [#5691](https://github.com/npm/cli/pull/5691) config: remove `node-version` and `npm-version` (@wraithgar)
+* [`818957c`](https://github.com/npm/cli/commit/818957c0f88c859bf3ea90ff440ec5d9d9e990b9) [#7158](https://github.com/npm/cli/pull/7158) pack, publish: default foreground-scripts to true (#7158) (@ljharb)
+* [`d04111d`](https://github.com/npm/cli/commit/d04111d48ca59fce27909712b328fe5cfc4d016d) [#7197](https://github.com/npm/cli/pull/7197) view: filter out invalid semver (#7197) (@wraithgar)
+* [`b0a3ba0`](https://github.com/npm/cli/commit/b0a3ba0b99ce5920722244f1ccb8b830826abef4) [#7195](https://github.com/npm/cli/pull/7195) prevent adding invalid dist-tag (#7195) (@wraithgar)
### Documentation
-* [`a8532eb`](https://github.com/npm/cli/commit/a8532eb39504584cef452152948e015cef8c010a) [#5661](https://github.com/npm/cli/pull/5661) typo missing parentheses (@hbrls)
-* [`542efdb`](https://github.com/npm/cli/commit/542efdb0a31f663cd899bc6d2ddad8fa88c20bc8) update `folders` page for modern npm (@shalvah)
+* [`c4741fe`](https://github.com/npm/cli/commit/c4741fee8b4e6cdca90c6773385ff710c8b3f7f5) [#7254](https://github.com/npm/cli/pull/7254) Remove additional example and comments about uninstall script (#7254) (@rveerd)
+* [`686a622`](https://github.com/npm/cli/commit/686a622480d32eabf2d69982e422ba3dcd8a6f7c) [#7247](https://github.com/npm/cli/pull/7247) scope: update example command to work in windows (#7247) (@robertobasile84)
+* [`95b5057`](https://github.com/npm/cli/commit/95b505738a73ba740227a41b8c7c87013af5acaf) [#5966](https://github.com/npm/cli/pull/5966) clarify in-range and out-of-range in dependency selectors (@wraithgar)
+* [`5b7184f`](https://github.com/npm/cli/commit/5b7184f3aaf5a9ca58418b6d029616088964ed0a) [#7190](https://github.com/npm/cli/pull/7190) workspaces: fix grammar (#7190) (@alekstech)
+* [`0dd03f9`](https://github.com/npm/cli/commit/0dd03f9450e0cf57fa85ad2ef74b5a54f3c775a9) [#7182](https://github.com/npm/cli/pull/7182) fix typos (#7182) (@GoodDaisy)
### Dependencies
-* [`cee3fd9`](https://github.com/npm/cli/commit/cee3fd9905c7eb0a5cb26a8c9c08c5db48becd15) `@npmcli/config@5.0.0`
-* [`2a740b1`](https://github.com/npm/cli/commit/2a740b14c3789d80825b1345f2e99765fcb90351) [#5692](https://github.com/npm/cli/pull/5692) `hosted-git-info@6.0.0`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0-pre.2...libnpmpack-v5.0.0-pre.3): `libnpmpack@5.0.0-pre.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0-pre.2...libnpmpublish-v7.0.0-pre.3): `libnpmpublish@7.0.0-pre.3`
+* [`16d4c9f`](https://github.com/npm/cli/commit/16d4c9f0e48a18719f1461460504a4228f8f663d) [#7218](https://github.com/npm/cli/pull/7218) `@npmcli/query@3.1.0`
+* [`06247d1`](https://github.com/npm/cli/commit/06247d18fd3573d48b220512e84f87eaab06210a) [#7242](https://github.com/npm/cli/pull/7242) `spdx-license-ids@3.0.17`
+* [`79d1e0b`](https://github.com/npm/cli/commit/79d1e0bc9f781b02af876f0615595976958ca410) [#7242](https://github.com/npm/cli/pull/7242) `spdx-exceptions@2.5.0`
+* [`67e853a`](https://github.com/npm/cli/commit/67e853a507ab31af5b82fd3fcbcb2fe2f18cad5d) [#7242](https://github.com/npm/cli/pull/7242) `socks@2.8.0`
+* [`4c9fe4b`](https://github.com/npm/cli/commit/4c9fe4be4df39960cfadef2ca07465c90d9ee414) [#7242](https://github.com/npm/cli/pull/7242) `lru-cache@10.2.0`
+* [`4a50c5a`](https://github.com/npm/cli/commit/4a50c5adaa0f28a8e9f6433c34b0eedfc73885a1) [#7242](https://github.com/npm/cli/pull/7242) `https-proxy-agent@7.0.4`
+* [`ed3f254`](https://github.com/npm/cli/commit/ed3f254a09d8f0da49e927e8b7fbd81c1232208e) [#7242](https://github.com/npm/cli/pull/7242) `http-proxy-agent@7.0.2`
+* [`8ec4fd5`](https://github.com/npm/cli/commit/8ec4fd541be937a965e685ad4dbbfb57ae3b2462) [#7242](https://github.com/npm/cli/pull/7242) `hasown@2.0.1`
+* [`98b1189`](https://github.com/npm/cli/commit/98b11894b770065979c8fefb87861c156ad0c895) [#7242](https://github.com/npm/cli/pull/7242) `@npmcli/agent@2.2.1`
+* [`dafa903`](https://github.com/npm/cli/commit/dafa903fe9083343fd17bdd3a5acc6f9acc8f356) [#7242](https://github.com/npm/cli/pull/7242) `sigstore@2.2.2`
+* [`d0bcb76`](https://github.com/npm/cli/commit/d0bcb76bd635eefcb1fae39e831e227a1852ef68) [#7242](https://github.com/npm/cli/pull/7242) `diff@5.2.0`
+* [`a13808e`](https://github.com/npm/cli/commit/a13808e2e8ed0507e12ad27f8b212e5b0b6ccae8) [#7242](https://github.com/npm/cli/pull/7242) `semver@7.6.0`
+* [`d6521ac`](https://github.com/npm/cli/commit/d6521ac9c43ccf909ff4d1564dffa0f07e260503) [#7242](https://github.com/npm/cli/pull/7242) `@sigstore/tuf@2.3.1`
+* [`43cac2f`](https://github.com/npm/cli/commit/43cac2f990aefca283d49e26ff83ba5d6fb28313) [#7242](https://github.com/npm/cli/pull/7242) `ip@2.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.4.0): `@npmcli/arborist@7.4.0`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.2.0): `@npmcli/config@8.2.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.7): `libnpmdiff@6.0.7`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.8): `libnpmexec@7.0.8`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.5): `libnpmfund@5.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.7): `libnpmpack@6.0.7`
+
+### Chores
+
+* [`d6bc684`](https://github.com/npm/cli/commit/d6bc6840edce1957c0ea0ed618819cbfe23fa611) [#7242](https://github.com/npm/cli/pull/7242) update devDependencies in lockfile (@wraithgar)
+
+## [10.4.0](https://github.com/npm/cli/compare/v10.3.0...v10.4.0) (2024-01-24)
+
+### Features
+
+* [`35a098c`](https://github.com/npm/cli/commit/35a098cc54ef208c53e7329bfb65a7e5656c559b) [#7175](https://github.com/npm/cli/pull/7175) display tree diff on `--long` (@wraithgar)
+* [`dffca29`](https://github.com/npm/cli/commit/dffca29f0690be82692eaa44a1dd02a0807a70ae) [#7174](https://github.com/npm/cli/pull/7174) format: print `--dry-run` diffs in table format (#7174) (@ritaaktay)
+* [`6d5f9ac`](https://github.com/npm/cli/commit/6d5f9acd0dc7f755a9ef4d27e1dac7076ec7e469) [#7133](https://github.com/npm/cli/pull/7133) dedupe - display difference when `--dry-run` is enabled (#7133) (@Blaumaus)
+
+### Bug Fixes
+
+* [`ec06f77`](https://github.com/npm/cli/commit/ec06f7712bbdc3b38c546593e9970962fee127ed) [#7175](https://github.com/npm/cli/pull/7175) inline diff table code w/ summary code (@wraithgar)
+* [`d4ebfba`](https://github.com/npm/cli/commit/d4ebfba91991423b52edf5f9431424faa6618073) [#7157](https://github.com/npm/cli/pull/7157) use util.stripVTControlCharacters instead of strip-ansi (@wraithgar)
+* [`81c95c7`](https://github.com/npm/cli/commit/81c95c7de71b40831ad46356d75ed56b20c66372) [#7063](https://github.com/npm/cli/pull/7063) don't reset update notifier duration on every check (#7063) (@wraithgar)
+
+### Documentation
-## [9.0.0-pre.4](https://github.com/npm/cli/compare/v9.0.0-pre.3...v9.0.0-pre.4) (2022-10-05)
+* [`2b7eaad`](https://github.com/npm/cli/commit/2b7eaade0620b3ea69e8b0b21335ed25af082351) [#7168](https://github.com/npm/cli/pull/7168) package-json: Reword warning about publishing local dependencies (#7168) (@DanKaplanSES)
+* [`67ab0f7`](https://github.com/npm/cli/commit/67ab0f745ad182fa8250cc1da53b618df2e0d95a) [#7142](https://github.com/npm/cli/pull/7142) update: Replace comma with period in run on sentence (#7142) (@DanKaplanSES)
+* [`05c69dc`](https://github.com/npm/cli/commit/05c69dc7ac70d27f1ec1d58ea0dd7111770fd0aa) [#7150](https://github.com/npm/cli/pull/7150) config: State default configuration file affected by `npm config set` (#7150) (@DanKaplanSES)
+* [`44f4518`](https://github.com/npm/cli/commit/44f45187ef8ee20c373e89407f3c32708bef9f0f) [#7149](https://github.com/npm/cli/pull/7149) Link to the config command in the npm configuration description (#7149) (@DanKaplanSES)
+* [`dd5699f`](https://github.com/npm/cli/commit/dd5699f7f0dcf9e5ad1c5549f3eaacaa1715f8eb) [#7152](https://github.com/npm/cli/pull/7152) update: Fix a typo and remove unneeded statement (#7152) (@DanKaplanSES)
+
+### Dependencies
+
+* [`ec77e81`](https://github.com/npm/cli/commit/ec77e81f5ecc3603bb7c9963f860a8c46f6a61ec) [#7124](https://github.com/npm/cli/pull/7124) `promise-call-limit@3.0.1`
+* [`e32189c`](https://github.com/npm/cli/commit/e32189c768181d58ab72825d67307a3a653652ac) [#7173](https://github.com/npm/cli/pull/7173) deduplicate tree
+* [`b7af4d9`](https://github.com/npm/cli/commit/b7af4d9bb96a86abee6a745b4a756b84d54d6e79) [#7173](https://github.com/npm/cli/pull/7173) `tar-stream@3.1.7`
+* [`6883743`](https://github.com/npm/cli/commit/68837432c0b78bf7a4382a443f35ce2484449952) [#7173](https://github.com/npm/cli/pull/7173) `@npmcli/run-script@7.0.4`
+* [`739634d`](https://github.com/npm/cli/commit/739634d76168f438934f1f8287618dabe2f3b968) [#7157](https://github.com/npm/cli/pull/7157) remove strip-ansi
+* [`162c82e`](https://github.com/npm/cli/commit/162c82e845d410ede643466f9f8af78a312296cc) [#7148](https://github.com/npm/cli/pull/7148) `pacote@17.0.6` (#7148) (@bdehamer)
+* [`a50b03b`](https://github.com/npm/cli/commit/a50b03b10046cf769cd328df96adcf292db5c067) [#7141](https://github.com/npm/cli/pull/7141) `sigstore@2.2.0` (#7141) (@bdehamer)
+* [`f696b51`](https://github.com/npm/cli/commit/f696b517a39bea1be0f7116983a5b4c8ed6f01b7) [#7132](https://github.com/npm/cli/pull/7132) `@sigstore/tuf@2.3.0` (#7132) (@bdehamer)
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.3.1): `@npmcli/arborist@7.3.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.6): `libnpmdiff@6.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.7): `libnpmexec@7.0.7`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.4): `libnpmfund@5.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.6): `libnpmpack@6.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.4): `libnpmpublish@9.0.4`
+
+### Chores
+
+* [`a82ccc5`](https://github.com/npm/cli/commit/a82ccc5f97a60698fb0ee413347a621d0662f493) [#7173](https://github.com/npm/cli/pull/7173) `nock@13.5.0` (@wraithgar)
+* [`dcaa99c`](https://github.com/npm/cli/commit/dcaa99c5138ad20a3210a8396772177aa1fa33da) [#7173](https://github.com/npm/cli/pull/7173) fix exec test (@wraithgar)
+* [`0d96080`](https://github.com/npm/cli/commit/0d96080fa8c08b5eb77bbd6ae64111379fa24465) [#7162](https://github.com/npm/cli/pull/7162) release: do not exclude docs directory from CLI release commits (#7162) (@lukekarrys)
+
+## [10.3.0](https://github.com/npm/cli/compare/v10.2.5...v10.3.0) (2024-01-10)
### Features
-* [`9609e9e`](https://github.com/npm/cli/commit/9609e9eed87c735f0319ac0af265f4d406cbf800) [#5605](https://github.com/npm/cli/pull/5605) use v3 lockfiles by default (#5605) (@fritzy)
+* [`6673c77`](https://github.com/npm/cli/commit/6673c77bc4222d0f1719449fe903b7461b3e6907) [#6914](https://github.com/npm/cli/pull/6914) add `--libc` option to override platform specific install (#6914) (@wraithgar, @Brooooooklyn)
### Bug Fixes
-* [`e4e8ae2`](https://github.com/npm/cli/commit/e4e8ae20aef9e27e57282e87e8757d5b364abb39) libnpmpack: obey foregroundScripts (@winterqt)
-* [`07fabc9`](https://github.com/npm/cli/commit/07fabc93007495f0926f4dd24b4350c07d92887d) [#5633](https://github.com/npm/cli/pull/5633) `npm link` should override `--install-links` (#5633) (@fritzy)
-* [`02fcbb6`](https://github.com/npm/cli/commit/02fcbb67e6b7cf78cd6dc996570b0ba58132de22) [#5634](https://github.com/npm/cli/pull/5634) ensure Arborist constructor gets passed around everywhere for pacote (#5634) (@nlf)
+* [`b7fc10a`](https://github.com/npm/cli/commit/b7fc10aa37bc2422f479119558f82e01bdaab6cd) [#7113](https://github.com/npm/cli/pull/7113) filter C0 and C1 control characters from logs and cli output (#7113) (@wraithgar)
+
+### Dependencies
+
+* [`3fd5213`](https://github.com/npm/cli/commit/3fd521393461639b6b768df43f0b5b71d3ee7dd2) [#7121](https://github.com/npm/cli/pull/7121) `npm-packlist@8.0.2`
+* [`5698415`](https://github.com/npm/cli/commit/56984154b445c479236effdb844888b1ef409250) [#7121](https://github.com/npm/cli/pull/7121) `postcss-selector-parser@6.0.15`
+* [`e56a4f3`](https://github.com/npm/cli/commit/e56a4f3dff9de349917d530074e698c644471080) [#7121](https://github.com/npm/cli/pull/7121) `are-we-there-yet@4.0.2`
+* [`8495b7c`](https://github.com/npm/cli/commit/8495b7c088118e383ff6f3c7ce744df9c0d8e106) [#7121](https://github.com/npm/cli/pull/7121) `cacache@18.0.2`
+* [`796d1c7`](https://github.com/npm/cli/commit/796d1c7507d69382181e515d6c6987930c9ec636) [#7121](https://github.com/npm/cli/pull/7121) `@npmcli/run-script@7.0.3`
+* [`3b7f6f2`](https://github.com/npm/cli/commit/3b7f6f274aceed4218e5e903752d879a41d3a547) [#7121](https://github.com/npm/cli/pull/7121) `@npmcli/promise-spawn@7.0.1`
+* [`7ecd146`](https://github.com/npm/cli/commit/7ecd146a3241723602693ca67c78ca5e2da8cb57) [#7121](https://github.com/npm/cli/pull/7121) `@npmcli/git@5.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.3.0): `@npmcli/arborist@7.3.0`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.1.0): `@npmcli/config@8.1.0`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.5): `libnpmdiff@6.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.6): `libnpmexec@7.0.6`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.3): `libnpmfund@5.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.5): `libnpmpack@6.0.5`
+
+### Chores
+
+* [`2fd8292`](https://github.com/npm/cli/commit/2fd8292eff1155ec84a53f6d4321e6a386df93c7) [#7121](https://github.com/npm/cli/pull/7121) work around @npmcli/git lazy loading (@wraithgar)
+* [`cd9a66d`](https://github.com/npm/cli/commit/cd9a66df2476d602fe3d36a6c3c6185f1bd8c676) [#7121](https://github.com/npm/cli/pull/7121) update devDependencies in lockfile (@wraithgar)
+* [`2dda715`](https://github.com/npm/cli/commit/2dda715c9e72eefe69b49ee07c87fa126c84ff10) [#7121](https://github.com/npm/cli/pull/7121) `tap@16.3.10` (@wraithgar)
+
+## [10.2.5](https://github.com/npm/cli/compare/v10.2.4...v10.2.5) (2023-12-06)
+
+### Bug Fixes
+
+* [`c7a592c`](https://github.com/npm/cli/commit/c7a592c9c400e73dd27264c11ad6459616023e4c) [#7061](https://github.com/npm/cli/pull/7061) dont use cache for update notifier manifest request (@lukekarrys)
+* [`7b952f6`](https://github.com/npm/cli/commit/7b952f64b882bd891fab5c21c7c3b49838c8a995) [#7049](https://github.com/npm/cli/pull/7049) unpublish: bubble up all errors parsing local package.json (#7049) (@wraithgar)
+* [`be4741f`](https://github.com/npm/cli/commit/be4741f5bc20239f11842f780047d91fda23935d) [#7039](https://github.com/npm/cli/pull/7039) unpublish bugfixes (#7039) (@wraithgar)
+* [`bc7f53d`](https://github.com/npm/cli/commit/bc7f53db793d362d2015d3e55ce121e6b4d3d91f) [#7036](https://github.com/npm/cli/pull/7036) reverse direction of SPDX SBOM dependency rels (#7036) (@bdehamer, @antonbauhofer)
+* [`11ec231`](https://github.com/npm/cli/commit/11ec231e895300e5b7292ac16685d37d1d5df3b9) [#7033](https://github.com/npm/cli/pull/7033) skip creation of log directory if `logs-max` is set to 0 (#7033) (@JJ)
+* [`6267f54`](https://github.com/npm/cli/commit/6267f543c2ac134c0f8433f8b48659a3949bf210) [#7005](https://github.com/npm/cli/pull/7005) properly catch missing url opener error on interactive prompt (#7005) (@wraithgar)
+
+### Dependencies
+
+* [`ff1204a`](https://github.com/npm/cli/commit/ff1204aff0651e32679ecd09d0a2a62de49f4eac) [#7058](https://github.com/npm/cli/pull/7058) `lru-cache@10.1.0`
+* [`c648020`](https://github.com/npm/cli/commit/c648020cdc0fa0916bc618b6d1191e68dcfc8d73) [#7058](https://github.com/npm/cli/pull/7058) `json-parse-even-better-errors@3.0.1`
+* [`53aa8f2`](https://github.com/npm/cli/commit/53aa8f2c110f38a10f98f976cb40f54ea6d95844) [#7058](https://github.com/npm/cli/pull/7058) `pacote@17.0.5`
+* [`2e5331c`](https://github.com/npm/cli/commit/2e5331c75df8606b1f92bf61c8612f5e7b0274aa) [#7058](https://github.com/npm/cli/pull/7058) `npm-packlist@8.0.1`
+* [`937b7b7`](https://github.com/npm/cli/commit/937b7b7b11ec21fbe373ab93f4e4e170625dd6b1) [#7058](https://github.com/npm/cli/pull/7058) `ignore-walk@6.0.4`
+* [`35371c8`](https://github.com/npm/cli/commit/35371c8796a08a269ac3f7017c35e5fcb7ef0968) [#7058](https://github.com/npm/cli/pull/7058) `cacache@18.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/arborist-v7.2.2): `@npmcli/arborist@7.2.2`
+* [workspace](https://github.com/npm/cli/releases/tag/config-v8.0.3): `@npmcli/config@8.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.2): `libnpmaccess@8.0.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.4): `libnpmdiff@6.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.5): `libnpmexec@7.0.5`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.2): `libnpmfund@5.0.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.1): `libnpmhook@10.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.2): `libnpmorg@6.0.2`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.4): `libnpmpack@6.0.4`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.3): `libnpmpublish@9.0.3`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.1): `libnpmsearch@7.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.1): `libnpmteam@6.0.1`
+* [workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v5.0.2): `libnpmversion@5.0.2`
+
+### Chores
+
+* [`f656b66`](https://github.com/npm/cli/commit/f656b669e549286844f2071b9b62cf23f7958034) [#7062](https://github.com/npm/cli/pull/7062) `@npmcli/template-oss@4.21.3` (#7062) (@lukekarrys)
+* [`9754b17`](https://github.com/npm/cli/commit/9754b173de26f3173e7f41eab34733fe9ba50f1d) [#7051](https://github.com/npm/cli/pull/7051) use global npm for workspace tests (@lukekarrys)
+* [`3891757`](https://github.com/npm/cli/commit/3891757f54d6d960cbf5f0d93d183d6424e8bed6) [#7051](https://github.com/npm/cli/pull/7051) `@npmcli/template-oss@4.21.2` (@lukekarrys)
+* [`71f70fa`](https://github.com/npm/cli/commit/71f70fa0e86448b20a63b9eec922ad25971a9377) [#7058](https://github.com/npm/cli/pull/7058) `nock@13.4.0` (@wraithgar)
+* [`43674a4`](https://github.com/npm/cli/commit/43674a449816e364265205e56270ad547718069c) [#7058](https://github.com/npm/cli/pull/7058) `tap@16.3.9` (@wraithgar)
+* [`4ba585c`](https://github.com/npm/cli/commit/4ba585ce0e1a2ea4591d64d7166b81b7fe92010b) [#7040](https://github.com/npm/cli/pull/7040) fix tests for zlib differences between node versions (#7040) (@wraithgar)
+
+## [10.2.4](https://github.com/npm/cli/compare/v10.2.3...v10.2.4) (2023-11-14)
+
+### Bug Fixes
+
+* [`cd291e7`](https://github.com/npm/cli/commit/cd291e7aa52e56fc45f8245e67c77e0ed3711b07) [#6995](https://github.com/npm/cli/pull/6995) refactor search formatting code (#6995) (@wraithgar)
+* [`f3a7380`](https://github.com/npm/cli/commit/f3a7380a45323dbf6c74015e418de3963fb11a69) [#6973](https://github.com/npm/cli/pull/6973) look in workspace for exec commands (#6973) (@wraithgar)
+* [`d11496b`](https://github.com/npm/cli/commit/d11496b26dfee5957e7e2a1b328f346b2aca9348) [#6977](https://github.com/npm/cli/pull/6977) pkg: properly output in workspace mode (#6977) (@wraithgar)
+* [`0f70088`](https://github.com/npm/cli/commit/0f7008851f1c250405e8dc326f15d535e8fc1eae) [#6969](https://github.com/npm/cli/pull/6969) correctly handle object licenses in SBOM generation (#6969) (@jamietanna)
+* [`dce3b08`](https://github.com/npm/cli/commit/dce3b0896ba81b2109fea42ab32edd8a3193324c) [#6951](https://github.com/npm/cli/pull/6951) properly catch missing url opener error (#6951) (@wraithgar)
### Documentation
-* [`f37caad`](https://github.com/npm/cli/commit/f37caad9e92c50ae949014f6bee6375d9299fb39) [#5606](https://github.com/npm/cli/pull/5606) accurately describe install-links effect on relative paths (#5606) (@lukekarrys)
-* [`97c32ed`](https://github.com/npm/cli/commit/97c32ed24d8fa2edcdbb9448839a1f1c9d8fb86f) [#5637](https://github.com/npm/cli/pull/5637) remove link to cache command (#5637) (@wraithgar)
-* [`130bc9f`](https://github.com/npm/cli/commit/130bc9fb31fcff956765493a9e3cec668867c30e) [#5626](https://github.com/npm/cli/pull/5626) Remove circular reference (#5626) (@giovanniPepi)
+* [`a38836c`](https://github.com/npm/cli/commit/a38836ce9d703f5012ff4d4a8a4e3b9a9aedc025) [#6616](https://github.com/npm/cli/pull/6616) add path usage for view command (#6616) (@RobinKnipe)
+* [`da18e4f`](https://github.com/npm/cli/commit/da18e4f9baa180beeb325a384759a26a19ac2919) [#6987](https://github.com/npm/cli/pull/6987) update npm-prune description (#6987) (@Eomm)
### Dependencies
-* [`5344d2c`](https://github.com/npm/cli/commit/5344d2ca9ffd1f6db473fd58b46b50179f899ff5) [#5644](https://github.com/npm/cli/pull/5644) `pacote@14.0.0`
-* [`6a43b31`](https://github.com/npm/cli/commit/6a43b31eab8bd392ed684d2f906259ddfe0f26b5) `@npmcli/metavuln-calculator@4.0.0`
-* [`501f8ca`](https://github.com/npm/cli/commit/501f8ca47bb042f19cdfca4026970caf7160f7f6) [#5640](https://github.com/npm/cli/pull/5640) `semver@7.3.8` (#5640)
-* [`8b072dc`](https://github.com/npm/cli/commit/8b072dc113190ed49b296a5f02650b7d8cbf384a) [#5639](https://github.com/npm/cli/pull/5639) `@npmcli/ci-detect@3.0.0` (#5639)
-* [`1ebbb44`](https://github.com/npm/cli/commit/1ebbb4454c09891ca2c9f9a11432c4a10ccf8c32) [#5638](https://github.com/npm/cli/pull/5638) `npm-profile@7.0.0` (#5638)
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.3...arborist-v6.0.0-pre.4): `@npmcli/arborist@6.0.0-pre.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.0-pre.1...libnpmdiff-v5.0.0-pre.2): `libnpmdiff@5.0.0-pre.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.3...libnpmexec-v5.0.0-pre.4): `libnpmexec@5.0.0-pre.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.3...libnpmfund-v4.0.0-pre.4): `libnpmfund@4.0.0-pre.4`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0-pre.1...libnpmpack-v5.0.0-pre.2): `libnpmpack@5.0.0-pre.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0-pre.1...libnpmpublish-v7.0.0-pre.2): `libnpmpublish@7.0.0-pre.2`
+* [`e9ec2f7`](https://github.com/npm/cli/commit/e9ec2f7005e7326d9a819978b251cf0a05062538) [#6994](https://github.com/npm/cli/pull/6994) `lru-cache@10.0.2`
+* [`faf9eff`](https://github.com/npm/cli/commit/faf9efffb7bfe6efde6e53830cb054b34c3a6ea3) [#6994](https://github.com/npm/cli/pull/6994) `is-core-module@2.13.1`
+* [`b00e780`](https://github.com/npm/cli/commit/b00e7808e370513e79ca31c261958377984e7444) [#6994](https://github.com/npm/cli/pull/6994) `@sigstore/sign@2.2.0`
+* [`4613774`](https://github.com/npm/cli/commit/461377426d998ed79400501b09e1ee67c32bee23) [#6994](https://github.com/npm/cli/pull/6994) hoisting newer deps in favor of older ones
+* [`54c4f7b`](https://github.com/npm/cli/commit/54c4f7b8705b2c9d5b8bc5bb846f4e7863735b7e) [#6994](https://github.com/npm/cli/pull/6994) `signal-exit@4.1.0`
+* [`8c5882f`](https://github.com/npm/cli/commit/8c5882f3eed305bbd3514d7143f9d92e9577e1b9) [#6994](https://github.com/npm/cli/pull/6994) `strip-ansi@7.1.0`
+* [`cd0c649`](https://github.com/npm/cli/commit/cd0c649ec2b421b59012854e61788a11a77194f2) [#6994](https://github.com/npm/cli/pull/6994) `ci-info@4.0.0`
+* [`a0a58b7`](https://github.com/npm/cli/commit/a0a58b735c418d8555a06890b63098c68f53106e) [#6994](https://github.com/npm/cli/pull/6994) `@sigstore/tuf@2.2.0`
+* [`b3a53c6`](https://github.com/npm/cli/commit/b3a53c6ab5fd933fc7f8258c155ed31be834393e) [#6949](https://github.com/npm/cli/pull/6949) `is-cidr@5.0.3` (#6949)
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v8.0.2): `@npmcli/config@8.0.2`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.4): `libnpmexec@7.0.4`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.2): `libnpmpublish@9.0.2`
+
+## [10.2.3](https://github.com/npm/cli/compare/v10.2.2...v10.2.3) (2023-11-02)
-## [9.0.0-pre.3](https://github.com/npm/cli/compare/v9.0.0-pre.2...v9.0.0-pre.3) (2022-09-30)
+### Dependencies
-### ⚠️ BREAKING CHANGES
+* [`b5dedf3`](https://github.com/npm/cli/commit/b5dedf39dd772192ed6639926ad4c99ff5dfd28a) [#6958](https://github.com/npm/cli/pull/6958) `node-gyp@10.0.1`
-* `npm pack` now follows a strict order of operations when applying ignore rules. If a files array is present in the package.json, then rules in .gitignore and .npmignore files from the root will be ignored.
-* `--timing` file changes:
- - When run with the `--timing` flag, `npm` now writes timing data to a
- file alongside the debug log data, respecting the `logs-dir` option and
- falling back to `<cache>/_logs/` dir, instead of directly inside the
- cache directory.
- - The timing file data is no longer newline delimited JSON, and instead
- each run will create a uniquely named `<ID>-timing.json` file, with the
- `<ID>` portion being the same as the debug log.
- - Finally, the data inside the file now has three top level keys,
- `metadata`, `timers`, and `unfinishedTimers` instead of everything being
- a top level key.
+## [10.2.2](https://github.com/npm/cli/compare/v10.2.1...v10.2.2) (2023-10-31)
-### Features
+### Bug Fixes
-* [`3ae796d`](https://github.com/npm/cli/commit/3ae796d937bd36a5b1b9fd6e9e8473b4f2ddc32d) implement new `npm-packlist` behavior (@lukekarrys)
-* [`e64d69a`](https://github.com/npm/cli/commit/e64d69aedecc0943425605b3a6dc68aec3ad93aa) [#5581](https://github.com/npm/cli/pull/5581) write eresolve error files to the logs directory (@lukekarrys)
-* [`3445da0`](https://github.com/npm/cli/commit/3445da0138f9eed9d73d2b3f5f451fcc1fa2e3fe) timings are now written alongside debug log files (@lukekarrys)
+* [`8ed6d28`](https://github.com/npm/cli/commit/8ed6d28fa14b40d7a05784ad0a1e80661256b466) [#6910](https://github.com/npm/cli/pull/6910) make npm link respect --no-save (#6910) (@Santoshraj2)
+* [`eacec5f`](https://github.com/npm/cli/commit/eacec5f49060d3dfcdc3c7043115619e4bb22864) [#6941](https://github.com/npm/cli/pull/6941) add back bin/node-gyp-bin/node-gyp files (#6941) (@lukekarrys)
+* [`b776753`](https://github.com/npm/cli/commit/b776753f9cfeab329169105f604bc55ed03bd0e1) [#6928](https://github.com/npm/cli/pull/6928) Grammar mistake in authentication error message (#6928) (@Gekuro, gek)
### Documentation
-* [`f0e7584`](https://github.com/npm/cli/commit/f0e758494698d9dd8a58d07bf71c87608c36869e) [#5601](https://github.com/npm/cli/pull/5601) update docs/logging for new --access default (@wraithgar)
+* [`c422a01`](https://github.com/npm/cli/commit/c422a01e1564d25148c821ee257196ebe60e8e6c) [#6924](https://github.com/npm/cli/pull/6924) use markdown links instead of html (@lukekarrys)
+* [`dd03aa0`](https://github.com/npm/cli/commit/dd03aa0b9acc535283daf39699de9831202348cb) [#6921](https://github.com/npm/cli/pull/6921) add v9 and above for lockfile version 3 (#6921) (@MikeMcC399)
### Dependencies
-* [`bc21552`](https://github.com/npm/cli/commit/bc2155247d00b7a868c414f4bc86993069b035f9) [#5603](https://github.com/npm/cli/pull/5603) `npm-package-arg@9.1.2`
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.2...arborist-v6.0.0-pre.3): `@npmcli/arborist@6.0.0-pre.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmdiff-v5.0.0-pre.0...libnpmdiff-v5.0.0-pre.1): `libnpmdiff@5.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.2...libnpmexec-v5.0.0-pre.3): `libnpmexec@5.0.0-pre.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.2...libnpmfund-v4.0.0-pre.3): `libnpmfund@4.0.0-pre.3`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpack-v5.0.0-pre.0...libnpmpack-v5.0.0-pre.1): `libnpmpack@5.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmpublish-v7.0.0-pre.0...libnpmpublish-v7.0.0-pre.1): `libnpmpublish@7.0.0-pre.1`
+* [`dfb6298`](https://github.com/npm/cli/commit/dfb6298c3eb9fb7ef452906765ac5f23ea6fec49) [#6937](https://github.com/npm/cli/pull/6937) `node-gyp@10.0.0` (#6937)
+* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v7.2.1): `@npmcli/arborist@7.2.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.3): `libnpmdiff@6.0.3`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.3): `libnpmexec@7.0.3`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.1): `libnpmfund@5.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.3): `libnpmpack@6.0.3`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v5.0.1): `libnpmversion@5.0.1`
-## [9.0.0-pre.2](https://github.com/npm/cli/compare/v9.0.0-pre.1...v9.0.0-pre.2) (2022-09-23)
+## [10.2.1](https://github.com/npm/cli/compare/v10.2.0...v10.2.1) (2023-10-18)
-### ⚠️ BREAKING CHANGES
+### Bug Fixes
-* the default `auth-type` config value is now `web`
-* `login`, `adduser`, and `auth-type` changes
- - This removes all `auth-type` configs except `web` and `legacy`.
- - `login` and `adduser` are now separate commands that send different data to the registry.
- - `auth-type` config values `web` and `legacy` only try
- their respective methods, npm no longer tries them all and waits to see
- which one doesn't fail.
+* [`35c92fe`](https://github.com/npm/cli/commit/35c92fec3d053d303cc8057faa0ff4fe6e7cdc8a) [#6902](https://github.com/npm/cli/pull/6902) Add check to pkg command to deal with empty values (#6902) (@NeonArray)
+* [`5b6172f`](https://github.com/npm/cli/commit/5b6172f01c88b73e83a75a508bbdcad92231ead5) [#6895](https://github.com/npm/cli/pull/6895) logout from custom registry (@wraithgar)
+* [`8423d4f`](https://github.com/npm/cli/commit/8423d4f133a40c8ceb0e1a75d23aa95fbf4f5b65) [#6895](https://github.com/npm/cli/pull/6895) delete auth from proper location on logout (@wraithgar)
+* [`0cfe9de`](https://github.com/npm/cli/commit/0cfe9de1c74b20d3e04ecc26ccf594196d101afe) [#6873](https://github.com/npm/cli/pull/6873) audit: spelling error in message (#6873) (@Fdawgs)
+
+### Documentation
+
+* [`5142735`](https://github.com/npm/cli/commit/5142735c462e285a7a7d9bcbd562885c6ef96c96) [#6894](https://github.com/npm/cli/pull/6894) update npm build description (#6894) (@siemhesda)
+* [`2e4b4ad`](https://github.com/npm/cli/commit/2e4b4ad8bef158def1b2302846ab294fe7a83de4) [#6861](https://github.com/npm/cli/pull/6861) npm publish content modification (#6861) (@jpg619)
+
+### Dependencies
+
+* [`96e1637`](https://github.com/npm/cli/commit/96e1637117b6614b5ad861d86d828746d5db356c) [#6915](https://github.com/npm/cli/pull/6915) `cmd-shim@6.0.2` (#6915)
+* [`b405da1`](https://github.com/npm/cli/commit/b405da1672e05d55bd22e476091891c443bcbeab) [#6899](https://github.com/npm/cli/pull/6899) `bin-links@4.0.3`
+* [`ef69d36`](https://github.com/npm/cli/commit/ef69d362fa81640ac3ca60a6e01921c17f7a76cb) [#6895](https://github.com/npm/cli/pull/6895) `npm-registry-fetch@16.1.0`
+* [`337c903`](https://github.com/npm/cli/commit/337c9038605b97431e06d2f470229f4370703b13) [#6882](https://github.com/npm/cli/pull/6882) `spdx-license-ids@3.0.16`
+* [`e6b0be7`](https://github.com/npm/cli/commit/e6b0be7d3b3cd7f66612f9adb6c4de829335b607) [#6882](https://github.com/npm/cli/pull/6882) `socks-proxy-agent@8.0.2`
+* [`ee6892e`](https://github.com/npm/cli/commit/ee6892e69079b07c0a8747d873018819a97e3877) [#6882](https://github.com/npm/cli/pull/6882) `readable-stream@4.4.2`
+* [`61c3ee9`](https://github.com/npm/cli/commit/61c3ee9a073528b30676ec66fdd29788ea7be09d) [#6882](https://github.com/npm/cli/pull/6882) `minipass@7.0.4`
+* [`14d31fd`](https://github.com/npm/cli/commit/14d31fdcc747f420158d254d0ac258a848bc888c) [#6882](https://github.com/npm/cli/pull/6882) `is-core-module@2.13.0`
+* [`03f3d2e`](https://github.com/npm/cli/commit/03f3d2e1d13cd12f23a946cfb9065b8e8fbe129b) [#6882](https://github.com/npm/cli/pull/6882) `https-proxy-agent@7.0.2`
+* [`e0163c6`](https://github.com/npm/cli/commit/e0163c6787f3877c3ad6c84d8af44378f7eed23b) [#6882](https://github.com/npm/cli/pull/6882) `are-we-there-yet@4.0.1`
+* [`fca804a`](https://github.com/npm/cli/commit/fca804adec57e176bb2a2e60bf84df44e661478f) [#6882](https://github.com/npm/cli/pull/6882) `ci-info@3.9.0`
+* [`6af582f`](https://github.com/npm/cli/commit/6af582f23bf046a224d5679e917977f0bb3f95e3) [#6882](https://github.com/npm/cli/pull/6882) `npm-install-checks@6.3.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v8.0.1): `@npmcli/config@8.0.1`
+
+## [10.2.0](https://github.com/npm/cli/compare/v10.1.0...v10.2.0) (2023-10-02)
### Features
-* [`66ed584`](https://github.com/npm/cli/commit/66ed58454418dd69c4cd8196ad8499e73f7e46e1) [#5551](https://github.com/npm/cli/pull/5551) default auth-type to web (#5551) (@wraithgar)
-* [`6ee5b32`](https://github.com/npm/cli/commit/6ee5b320d2eab58c18d50b861b3cfabe7f24124a) query: display `queryContext` in results (@nlf)
-* [`314311c`](https://github.com/npm/cli/commit/314311c61b8f341715c168199d52976ee3237077) [#5550](https://github.com/npm/cli/pull/5550) separate login/adduser, remove auth types (#5550) (@wraithgar)
+* [`7c459d2`](https://github.com/npm/cli/commit/7c459d28ca987264028d4d2ca21b0825493c1537) [#6801](https://github.com/npm/cli/pull/6801) add npm sbom command (#6801) (@bdehamer)
+* [`81a460f`](https://github.com/npm/cli/commit/81a460f6e6317aca2288d16cda591aa6541540c6) [#6732](https://github.com/npm/cli/pull/6732) add package-lock-only mode to npm query (@wraithgar)
+* [`0d29855`](https://github.com/npm/cli/commit/0d2985535c9cc3dfc3e1f355580570c9cce37d61) [#6732](https://github.com/npm/cli/pull/6732) add no-package-lock mode to npm audit (@wraithgar)
### Bug Fixes
-* [`0d90a01`](https://github.com/npm/cli/commit/0d90a011fff411c878ba4b44582f14ef7dbdceb1) [#5480](https://github.com/npm/cli/pull/5480) audit: add a condition to allow third-party registries returning E400 (#5480) (@juanheyns, Juan Heyns)
+* [`2207628`](https://github.com/npm/cli/commit/22076286a46499e3d0b3f8564b7ba07008317be4) [#6823](https://github.com/npm/cli/pull/6823) use strip-ansi module instead of internal regex (#6823) (@wraithgar)
+* [`d46d052`](https://github.com/npm/cli/commit/d46d0526be12eae2cd458fd08dd5c0a0320cc8bd) [#6798](https://github.com/npm/cli/pull/6798) tolerate null bugs URLs (#6798) (@vladh)
+* [`fb1b674`](https://github.com/npm/cli/commit/fb1b6741bd52d865b8f8a93ad3fd6c8afa758b6a) [#6758](https://github.com/npm/cli/pull/6758) deprecate: ignore implicit workspace mode (#6758) (@wraithgar)
### Documentation
-* [`2d756cb`](https://github.com/npm/cli/commit/2d756cbb05125dcb769f2ca4c1687e42568d5882) [#5527](https://github.com/npm/cli/pull/5527) add instruction to query objects with npm view (#5527) (@moonith)
-* [`8743366`](https://github.com/npm/cli/commit/874336699681ac37857167b2438fac19c059511c) [#5519](https://github.com/npm/cli/pull/5519) add hash to "tag" config link (#5519) (@mrienstra, @lukekarrys)
-* [`5645c51`](https://github.com/npm/cli/commit/5645c51410a730c4b9c6831cf81ab22efbe8c0ce) [#5521](https://github.com/npm/cli/pull/5521) link mentions of config parameters (#5521) (@mrienstra)
-* [`19762b4`](https://github.com/npm/cli/commit/19762b4ac4b10741ff53ddd315be1fd23d9b1e28) [#5529](https://github.com/npm/cli/pull/5529) modify Misleading doc about bins (@Hafizur046)
-* [`19762b4`](https://github.com/npm/cli/commit/19762b4ac4b10741ff53ddd315be1fd23d9b1e28) [#5529](https://github.com/npm/cli/pull/5529) modify misleading doc about package.json:bin (#5529) (@Hafizur046)
-* [`8402fd8`](https://github.com/npm/cli/commit/8402fd8780c5e0461850da882dca024f7df1a681) [#5547](https://github.com/npm/cli/pull/5547) add `:outdated` pseudo selector to docs (@nlf)
+* [`68031f2`](https://github.com/npm/cli/commit/68031f2ae1cd5d49b0fb263da1a7eae62712ff97) [#6844](https://github.com/npm/cli/pull/6844) update `CONTRIBUTING.md` to prevent errors (#6844) (@darcyclarke)
+* [`3ac703c`](https://github.com/npm/cli/commit/3ac703c95e7bb851d0f6145f1d612749ed479fef) [#6831](https://github.com/npm/cli/pull/6831) add `include` param to commands that have `omit` param (#6831) (@siemhesda)
+* [`03912db`](https://github.com/npm/cli/commit/03912dbaeb92559270ab3f7df75b507b2f35a119) [#6819](https://github.com/npm/cli/pull/6819) add init-specific params to init docs/help (#6819) (@wraithgar)
+* [`8088325`](https://github.com/npm/cli/commit/8088325281bc976e8a8aea4d7527b54f4e25fb5f) [#6800](https://github.com/npm/cli/pull/6800) Update npm-doctor.md (#6800) (@siemhesda)
### Dependencies
-* [`d030f10`](https://github.com/npm/cli/commit/d030f10fd535433e5a824df1b099f500a71075dd) `@npmcli/query@2.0.0`
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.1...arborist-v6.0.0-pre.2): `@npmcli/arborist@6.0.0-pre.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.1...libnpmexec-v5.0.0-pre.2): `libnpmexec@5.0.0-pre.2`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.1...libnpmfund-v4.0.0-pre.2): `libnpmfund@4.0.0-pre.2`
+* [`aa6728b`](https://github.com/npm/cli/commit/aa6728b1d003f0fc620b074ba0396a3e07f2db6a) [#6859](https://github.com/npm/cli/pull/6859) `tar@6.2.0`
+* [`ce9089f`](https://github.com/npm/cli/commit/ce9089f604a01297d3d2dd544283696a6297dce5) [#6859](https://github.com/npm/cli/pull/6859) `npm-package-arg@11.0.1`
+* [`39d7f04`](https://github.com/npm/cli/commit/39d7f046f1c39017b398cb242ad07e874484e86c) [#6859](https://github.com/npm/cli/pull/6859) `minipass@7.0.4`
+* [`0a47af5`](https://github.com/npm/cli/commit/0a47af509d66071908c7e0bf065dcf2f4c877669) [#6859](https://github.com/npm/cli/pull/6859) `hosted-git-info@7.0.1`
+* [`af93130`](https://github.com/npm/cli/commit/af93130fe949f07df23891286c634c77ecf38c53) [#6859](https://github.com/npm/cli/pull/6859) `glob@10.3.10`
+* [`3ebc474`](https://github.com/npm/cli/commit/3ebc4744433d906e5c491d183fc077ffe79958cf) [#6859](https://github.com/npm/cli/pull/6859) `@npmcli/query@3.0.1`
+* [`284cbfd`](https://github.com/npm/cli/commit/284cbfd168879b9277c9999e8a28dad8f72ecc02) [#6858](https://github.com/npm/cli/pull/6858) `@npmcli/agent@2.2.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v7.2.0): `@npmcli/arborist@7.2.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v8.0.0): `@npmcli/config@8.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.1): `libnpmaccess@8.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.2): `libnpmdiff@6.0.2`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.2): `libnpmexec@7.0.2`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v5.0.0): `libnpmfund@5.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.1): `libnpmorg@6.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.2): `libnpmpack@6.0.2`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.1): `libnpmpublish@9.0.1`
+
+## [10.1.0](https://github.com/npm/cli/compare/v10.0.0...v10.1.0) (2023-09-08)
-## [9.0.0-pre.1](https://github.com/npm/cli/compare/v9.0.0-pre.0...v9.0.0-pre.1) (2022-09-14)
+### Features
-### ⚠️ BREAKING CHANGES
+* [`1c93c44`](https://github.com/npm/cli/commit/1c93c4430300e3b3bd2cb5bab327c1732f470bca) [#6755](https://github.com/npm/cli/pull/6755) Add `--cpu` and `--os` option to override platform specific install (#6755) (@yukukotani)
+
+### Bug Fixes
+
+* [`7bf2374`](https://github.com/npm/cli/commit/7bf2374a1dde0e9b4a4345eeaafb23316a9a5a0b) [#6762](https://github.com/npm/cli/pull/6762) make `$npm_execpath` always point to npm (@rotu)
+
+### Documentation
-* renames most of the `npm access` subcommands
-* the api for libnpmaccess is different now
+* [`09d8e0a`](https://github.com/npm/cli/commit/09d8e0a20bd11f53a9fafac1fff4f1ec0b7b379e) [#6759](https://github.com/npm/cli/pull/6759) fix versions of node.js in readme (#6759) (@JoaoOtavioS)
+
+### Dependencies
+
+* [`f76066a`](https://github.com/npm/cli/commit/f76066a047e4a0e819149356b68a1c50fd30f9de) [#6771](https://github.com/npm/cli/pull/6771) `@npmcli/agent@2.1.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v7.1.0): `@npmcli/arborist@7.1.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v7.2.0): `@npmcli/config@7.2.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.1): `libnpmdiff@6.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.1): `libnpmexec@7.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v4.1.1): `libnpmfund@4.1.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.1): `libnpmpack@6.0.1`
+
+## [10.0.0](https://github.com/npm/cli/compare/v10.0.0-pre.1...v10.0.0) (2023-08-31)
### Features
-* [`9c32c6c`](https://github.com/npm/cli/commit/9c32c6c8d6fc5bdfd6af685731fe26920d7e5446) rewrite: rewrite `npm access` (@wraithgar)
-* [`854521b`](https://github.com/npm/cli/commit/854521baa49ef88ff9586ec2cc5f1fbaee7fa364) rewrite: Rewrite libnpmaccess (@wraithgar)
+* [`48a7b07`](https://github.com/npm/cli/commit/48a7b077d70cbe5bc808db6aae2c734aa202938a) remove prerelease flags (@lukekarrys)
+
+### Dependencies
+
+* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v7.0.0): `@npmcli/arborist@7.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v7.1.0): `@npmcli/config@7.1.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.0): `libnpmaccess@8.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.0): `libnpmdiff@6.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.0): `libnpmexec@7.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v4.1.0): `libnpmfund@4.1.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.0): `libnpmhook@10.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.0): `libnpmorg@6.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.0): `libnpmpack@6.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.0): `libnpmpublish@9.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.0): `libnpmsearch@7.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.0): `libnpmteam@6.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v5.0.0): `libnpmversion@5.0.0`
+
+## [10.0.0-pre.1](https://github.com/npm/cli/compare/v10.0.0-pre.0...v10.0.0-pre.1) (2023-08-31)
+
+### ⚠️ BREAKING CHANGES
+
+* support for node <=16.13 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
+* support for node 14 has been removed
### Bug Fixes
-* [`c3d7549`](https://github.com/npm/cli/commit/c3d75499cfd4e3601c6ca31621b2f693af466c4d) add tag to publish log message (@wraithgar)
+* [`b34ee65`](https://github.com/npm/cli/commit/b34ee65ad1c82b53d5b5b28595203e18163fe4df) [#6706](https://github.com/npm/cli/pull/6706) set objectMode for search filter stream (@lukekarrys)
+* [`6b251b1`](https://github.com/npm/cli/commit/6b251b1009648b36d49b83a2cc407c348fa225e0) [#6706](https://github.com/npm/cli/pull/6706) drop node 16.13.x support (@lukekarrys)
+* [`d857c4a`](https://github.com/npm/cli/commit/d857c4ac7321211848076d148a4bea46af7058fd) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`37a99eb`](https://github.com/npm/cli/commit/37a99eb98b8846ab9481cc4ebd7a7278a8bc89bd) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`ee7292e`](https://github.com/npm/cli/commit/ee7292ed78c362927736471e0584217b2000f493) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`8b0e755`](https://github.com/npm/cli/commit/8b0e755b78098d9c0800e69f0cc2f6a457ce28a6) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`5c8c6cc`](https://github.com/npm/cli/commit/5c8c6ccc0be6e544f6884ecc1189de02450b7dfc) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`d431647`](https://github.com/npm/cli/commit/d4316479a7894290586718e412d7c670316a36f2) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`b6f2205`](https://github.com/npm/cli/commit/b6f220569791d655ab3c423990356cee47ca5218) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`4caedd0`](https://github.com/npm/cli/commit/4caedd0e49641e9f1757f5622e5845b5b49c56c1) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`355bac8`](https://github.com/npm/cli/commit/355bac87eb66b105c9f0c2338ae37fed5f973b66) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`e3a377d`](https://github.com/npm/cli/commit/e3a377d3b047c0436e05096d70cc5697714e413d) [#6706](https://github.com/npm/cli/pull/6706) drop node14 support (@lukekarrys)
+* [`f916d33`](https://github.com/npm/cli/commit/f916d333c16b4f0433d8a304e856b73ed4f949cd) [#6715](https://github.com/npm/cli/pull/6715) allow searching packages with no description (@lukekarrys)
### Documentation
-* [`fd0eebe`](https://github.com/npm/cli/commit/fd0eebe4c2b55dd69972aff7de1b4db14ea6799a) update registry docs header (@hughlilly)
+* [`c736b62`](https://github.com/npm/cli/commit/c736b622b8504b07f5a19f631ade42dd40063269) [#6686](https://github.com/npm/cli/pull/6686) add missing bugs key in package-json.md (#6686) (@airscripts)
+* [`c1e01d9`](https://github.com/npm/cli/commit/c1e01d97da3b775edf104de158ee5db5cf027d0d) [#6680](https://github.com/npm/cli/pull/6680) Update package-json.md (#6680) (@p-chan, @ljharb)
### Dependencies
-* [Workspace](https://github.com/npm/cli/compare/arborist-v6.0.0-pre.0...arborist-v6.0.0-pre.1): `@npmcli/arborist@6.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmaccess-v7.0.0-pre.0...libnpmaccess-v7.0.0-pre.1): `libnpmaccess@7.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmexec-v5.0.0-pre.0...libnpmexec-v5.0.0-pre.1): `libnpmexec@5.0.0-pre.1`
-* [Workspace](https://github.com/npm/cli/compare/libnpmfund-v4.0.0-pre.0...libnpmfund-v4.0.0-pre.1): `libnpmfund@4.0.0-pre.1`
+* [`5ab3f7e`](https://github.com/npm/cli/commit/5ab3f7e944b12481cb1164175c7a79d24d5e3ac5) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/git@5.0.3`
+* [`eb41977`](https://github.com/npm/cli/commit/eb41977c56cbac88fa7d02f88dbf630cc652471a) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/run-script@7.0.1`
+* [`f30c9e3`](https://github.com/npm/cli/commit/f30c9e30c2a6d777ea31157a90fddadc81fd11d0) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/git@5.0.2`
+* [`f334466`](https://github.com/npm/cli/commit/f334466c53669e7debd4b9c67eafca74955509ee) [#6706](https://github.com/npm/cli/pull/6706) `pacote@17.0.4`
+* [`bb63bf9`](https://github.com/npm/cli/commit/bb63bf945b2db8f3074e7429aff6343721c55cd1) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/run-script@7.0.0`
+* [`75642c6`](https://github.com/npm/cli/commit/75642c6041195e093ef15ee2a42e1fc6a381c572) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/promise-spawn@7.0.0`
+* [`dbb18f4`](https://github.com/npm/cli/commit/dbb18f4778a97915cd8bbb737a807f3db51c4619) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/agent@2.1.0`
+* [`812aa6d`](https://github.com/npm/cli/commit/812aa6d2027ed42453b86b22f4cf8de25f6e0180) [#6706](https://github.com/npm/cli/pull/6706) `sigstore@2.1.0`
+* [`7fab9d3`](https://github.com/npm/cli/commit/7fab9d3d2efd71f505658216dc44d802bc3203a6) [#6706](https://github.com/npm/cli/pull/6706) `@sigstore/tuf@2.1.0`
+* [`12337cc`](https://github.com/npm/cli/commit/12337cc9d43bae2c5ad75e295b6a4d70e15a39cf) [#6706](https://github.com/npm/cli/pull/6706) `which@4.0.0`
+* [`b1ad3ad`](https://github.com/npm/cli/commit/b1ad3ad194d046aa6209a4efad961429b379393c) [#6706](https://github.com/npm/cli/pull/6706) `npm-packlist@8.0.0`
+* [`43831d0`](https://github.com/npm/cli/commit/43831d0fe4b02cb18d1c533f2831aaeedf5102e1) [#6706](https://github.com/npm/cli/pull/6706) `pacote@17.0.3`
+* [`44e8fec`](https://github.com/npm/cli/commit/44e8fec3f28ce3bdd0500b92cbcf8f211da3c866) [#6706](https://github.com/npm/cli/pull/6706) `pacote@17.0.2`
+* [`0d2e2c9`](https://github.com/npm/cli/commit/0d2e2c9d09ff760d8db09774fcd7ad417a88c4c7) [#6706](https://github.com/npm/cli/pull/6706) bump sigstore from 1.7.0 to 2.0.0
+* [`dbd5885`](https://github.com/npm/cli/commit/dbd5885364648d3f2fe1c7b672e8aeadcd06edd1) [#6706](https://github.com/npm/cli/pull/6706) `npm-profile@9.0.0`
+* [`2ee0fb3`](https://github.com/npm/cli/commit/2ee0fb3ac0c5e49f9eba545d6b05e20be1352414) [#6706](https://github.com/npm/cli/pull/6706) `npm-registry-fetch@16.0.0`
+* [`81ff4df`](https://github.com/npm/cli/commit/81ff4dfd17024efb068816c9b0824ffc709a7cc4) [#6706](https://github.com/npm/cli/pull/6706) `pacote@17.0.1`
+* [`2b23d44`](https://github.com/npm/cli/commit/2b23d44a9f0f01370d4999853aedecec4f1d8dd3) [#6706](https://github.com/npm/cli/pull/6706) hoist `read-package-json@7.0.0`
+* [`325ed05`](https://github.com/npm/cli/commit/325ed05be53b57096727fb962925bf362edf9730) [#6706](https://github.com/npm/cli/pull/6706) hoist `normalize-package-data@6.0.0`
+* [`c3a1a02`](https://github.com/npm/cli/commit/c3a1a021780d948a3023b622700b98aabb0df2f4) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/metavuln-calculator@7.0.0`
+* [`f1dd130`](https://github.com/npm/cli/commit/f1dd1305fdcba0b7f5496223b5a65f0fe7e29975) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/git@5.0.1`
+* [`10792ea`](https://github.com/npm/cli/commit/10792ea951a3ef8fc138f82d7b81484006213ce9) [#6706](https://github.com/npm/cli/pull/6706) `init-package-json@6.0.0`
+* [`cac0725`](https://github.com/npm/cli/commit/cac07256e7234d0782a4833dae207732c71fef95) [#6706](https://github.com/npm/cli/pull/6706) `pacote@17.0.0`
+* [`fd8beaf`](https://github.com/npm/cli/commit/fd8beaf4de23b8fbd9d5b968e10a5034d1a8f7bd) [#6706](https://github.com/npm/cli/pull/6706) `npm-pick-manifest@9.0.0`
+* [`65f435e`](https://github.com/npm/cli/commit/65f435ee0a088d6593d8e985c2519cdd783f9a6d) [#6706](https://github.com/npm/cli/pull/6706) hoist `lru-cache@10.0.1`
+* [`c784b57`](https://github.com/npm/cli/commit/c784b57b654d25e8d932e6fe415b87e75dcf9026) [#6706](https://github.com/npm/cli/pull/6706) `npm-package-arg@11.0.0`
+* [`d6b1790`](https://github.com/npm/cli/commit/d6b1790492d9bc96c196d85d8fc9fd98d62d0087) [#6706](https://github.com/npm/cli/pull/6706) `normalize-package-data@6.0.0`
+* [`2f03fb9`](https://github.com/npm/cli/commit/2f03fb9d8f25fd2b047d46edb608eb75f1f36017) [#6706](https://github.com/npm/cli/pull/6706) `make-fetch-happen@13.0.0`
+* [`729e893`](https://github.com/npm/cli/commit/729e893cf610de725142f72cc344d1c11f42d7af) [#6706](https://github.com/npm/cli/pull/6706) `hosted-git-info@7.0.0`
+* [`7af81c7`](https://github.com/npm/cli/commit/7af81c7360a6df31cdb0a8f18104b42656166378) [#6706](https://github.com/npm/cli/pull/6706) `cacache@18.0.0`
+* [`b0849ab`](https://github.com/npm/cli/commit/b0849ab6feb62bf307ee362389bfcaf0e85653be) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/package-json@5.0.0`
+* [`c9587d7`](https://github.com/npm/cli/commit/c9587d79c7c02aff4f53b093bf6702026ecea53a) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/git@5.0.0`
+* [`e28d426`](https://github.com/npm/cli/commit/e28d42674deb791d862e07756bb453190773e6ec) [#6706](https://github.com/npm/cli/pull/6706) `minipass-fetch@3.0.4`
+* [`61e9b00`](https://github.com/npm/cli/commit/61e9b00e096ce2e3122f1b21d22f3073ff22f2ce) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/metavuln-calculator@6.0.1`
+* [`2c5542d`](https://github.com/npm/cli/commit/2c5542d29ba207e7c5c4337ac9ad7f296188508a) [#6706](https://github.com/npm/cli/pull/6706) `minipass@7.0.3`
+* [`ede7f5e`](https://github.com/npm/cli/commit/ede7f5e74ad4d88559fec2532ddba2facbd7af7f) [#6706](https://github.com/npm/cli/pull/6706) `glob@10.3.3`
+* [`4c9eb17`](https://github.com/npm/cli/commit/4c9eb1703bd41555e4ef7c2fc087a349b90c9b4c) [#6706](https://github.com/npm/cli/pull/6706) `npm-install-checks@6.2.0`
+* [`88ece81`](https://github.com/npm/cli/commit/88ece8161021997cb5c22040b34d0dffff55fcf1) [#6706](https://github.com/npm/cli/pull/6706) `npm-pick-manifest@8.0.2`
+* [`9117a4f`](https://github.com/npm/cli/commit/9117a4fcf05291ce7609bcad5bb810df9a5158e7) [#6706](https://github.com/npm/cli/pull/6706) `ssri@10.0.5`
+* [`45f8d6f`](https://github.com/npm/cli/commit/45f8d6f15f82067f27d56357159a7f965b857f5d) [#6706](https://github.com/npm/cli/pull/6706) `make-fetch-happen@12.0.0`
+* [`f6f6a18`](https://github.com/npm/cli/commit/f6f6a18120b31626259cdd4da834524a034aa4cb) [#6706](https://github.com/npm/cli/pull/6706) `fs-minipass@3.0.3`
+* [`5eea975`](https://github.com/npm/cli/commit/5eea975437ab27d02afa2aaee59b2d4f98831df3) [#6706](https://github.com/npm/cli/pull/6706) `cacache@17.1.4`
+* [`ca33c98`](https://github.com/npm/cli/commit/ca33c9840533435bda634adefb61757f30fad5ab) [#6706](https://github.com/npm/cli/pull/6706) `@npmcli/metavuln-calculator@6.0.0`
+* [`7be541a`](https://github.com/npm/cli/commit/7be541a7a82cf1fb0de58953605b69c058f7efe0) [#6706](https://github.com/npm/cli/pull/6706) `npm-profile@8.0.0`
+* [`edbc25a`](https://github.com/npm/cli/commit/edbc25a5980c34e0d28aac7503475cd33e07f7d2) [#6706](https://github.com/npm/cli/pull/6706) `pacote@16.0.0`
+* [`5d0d859`](https://github.com/npm/cli/commit/5d0d8592cbf3b816d9fe44c36d390200ec15e87a) [#6706](https://github.com/npm/cli/pull/6706) `npm-registry-fetch@15.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/arborist-v7.0.0-pre.0): `@npmcli/arborist@7.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v7.0.1): `@npmcli/config@7.0.1`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmaccess-v8.0.0-pre.0): `libnpmaccess@8.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmdiff-v6.0.0-pre.0): `libnpmdiff@6.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmexec-v7.0.0-pre.0): `libnpmexec@7.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmfund-v4.0.20): `libnpmfund@4.0.20`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmhook-v10.0.0-pre.0): `libnpmhook@10.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmorg-v6.0.0-pre.0): `libnpmorg@6.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpack-v6.0.0-pre.0): `libnpmpack@6.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v9.0.0-pre.0): `libnpmpublish@9.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmsearch-v7.0.0-pre.0): `libnpmsearch@7.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmteam-v6.0.0-pre.0): `libnpmteam@6.0.0-pre.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmversion-v5.0.0-pre.0): `libnpmversion@5.0.0-pre.0`
+
+## [10.0.0-pre.0](https://github.com/npm/cli/compare/v9.8.1...v10.0.0-pre.0) (2023-07-26)
-## [9.0.0-pre.0](https://github.com/npm/cli/compare/v8.19.1...v9.0.0-pre.0) (2022-09-08)
-
-### ⚠ BREAKING CHANGES
+### ⚠️ BREAKING CHANGES
-* **workspaces:** all workspace packages are now compatible with the following semver range for node: `^14.17.0 || ^16.13.0 || >=18.0.0`
-* this removes the `npm birthday` command
-* this removes `npm set-script`
-* this changes the default value of `install-links` to true
-* this removes the `npm bin` command
-* `npm` is now compatible with the following semver range for node: `^14.17.0 || ^16.13.0 || >=18.0.0`
+* the "ci-name" config has been removed
+* npm no longer treats missing scripts as a special case in workspace mode. Use `if-present` to ignore missing scripts.
+* npm now supports node `^18.17.0 || >=20.5.0`
### Features
- * [`e95017a`](https://github.com/npm/cli/commit/e95017a07b041cbb3293e659dad853f76462c108) [#5485](https://github.com/npm/cli/pull/5485) feat(workspaces): update supported node engines in package.json (@lukekarrys)
- * [`49bbb2f`](https://github.com/npm/cli/commit/49bbb2fb9d56e02d94da652befaa3d445283090b) [#5455](https://github.com/npm/cli/pull/5455) feat: remove `npm birthday` (@wraithgar)
- * [`926f0ad`](https://github.com/npm/cli/commit/926f0adbd71949c905932a241a245b78c85ef643) [#5456](https://github.com/npm/cli/pull/5456) feat: remove `npm set-script` (@wraithgar)
- * [`2a8c2fc`](https://github.com/npm/cli/commit/2a8c2fcd124ce7d4b23a6c26552d097c6501ac74) [#5458](https://github.com/npm/cli/pull/5458) feat: default `install-links` to true (@wraithgar)
- * [`2e92800`](https://github.com/npm/cli/commit/2e9280072f9852466fa0944d3a0fdb0c8af156a9) [#5459](https://github.com/npm/cli/pull/5459) feat: remove `npm bin` (@wraithgar)
- * [`457d388`](https://github.com/npm/cli/commit/457d388c9a70b4bc6c2421f576c79fb7524ff259) [#5475](https://github.com/npm/cli/pull/5475) feat: update supported node engines in package.json (@wraithgar)
+* [`b6cf113`](https://github.com/npm/cli/commit/b6cf113f5199d3c23f632dbe35d8020515c6c623) [#6674](https://github.com/npm/cli/pull/6674) set engines and prerelease for npm 10 (#6674) (@lukekarrys)
### Bug Fixes
- * [`41481f8`](https://github.com/npm/cli/commit/41481f8bc1de0fb92a2d6aab3d4a43292d1a1db7) [#5475](https://github.com/npm/cli/pull/5475) fix: attempt more graceful failure in older node versions (@wraithgar)
+* [`e0d3edd`](https://github.com/npm/cli/commit/e0d3edd9908f8303abb9941bdd2f6e9aa31bc9d7) [#6641](https://github.com/npm/cli/pull/6641) remove "ci-name" config (@wraithgar)
+* [`0318f44`](https://github.com/npm/cli/commit/0318f442fe6c18275607a5d574c383f085484e6e) [#6641](https://github.com/npm/cli/pull/6641) remove implicit if-present logic from run-script workspaces (@wraithgar)
### Documentation
- * [`7fc2b6f`](https://github.com/npm/cli/commit/7fc2b6f3cc157c8727da9e480f1f552eae2451e2) [#5468](https://github.com/npm/cli/pull/5468) docs: remove duplicate description for `prepare` script (@kidonng)
- * [`285b39f`](https://github.com/npm/cli/commit/285b39f8d6915823fb424cca7161a0b445b86bd3) [#5324](https://github.com/npm/cli/pull/5324) docs: add documentation for expanded :semver selector (@nlf)
-
+* [`e5338af`](https://github.com/npm/cli/commit/e5338af3ca5d1aea78348f4894481eef3b1f7354) [#6672](https://github.com/npm/cli/pull/6672) remove link to deprecated `npm set-script` command (#6672) (@emmanuel-ferdman)
### Dependencies
-* The following workspace dependencies were updated
- * dependencies
- * @npmcli/arborist bumped from ^5.6.1 to ^6.0.0-pre.0
- * libnpmaccess bumped from ^6.0.4 to ^7.0.0-pre.0
- * libnpmdiff bumped from ^4.0.5 to ^5.0.0-pre.0
- * libnpmexec bumped from ^4.0.12 to ^5.0.0-pre.0
- * libnpmfund bumped from ^3.0.3 to ^4.0.0-pre.0
- * libnpmhook bumped from ^8.0.4 to ^9.0.0-pre.0
- * libnpmorg bumped from ^4.0.4 to ^5.0.0-pre.0
- * libnpmpack bumped from ^4.1.3 to ^5.0.0-pre.0
- * libnpmpublish bumped from ^6.0.5 to ^7.0.0-pre.0
- * libnpmsearch bumped from ^5.0.4 to ^6.0.0-pre.0
- * libnpmteam bumped from ^4.0.4 to ^5.0.0-pre.0
- * libnpmversion bumped from ^3.0.7 to ^4.0.0-pre.0
+* [Workspace](https://github.com/npm/cli/releases/tag/config-v7.0.0): `@npmcli/config@7.0.0`
+* [Workspace](https://github.com/npm/cli/releases/tag/libnpmpublish-v8.0.0): `libnpmpublish@8.0.0`
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e12e300210841..f05f05ae612b0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -19,7 +19,7 @@ $ git clone git@github.com:npm/cli.git npm
**2. Navigate into project & install development-specific dependencies...**
```bash
-$ cd ./npm && node . install
+$ cd ./npm && node ./scripts/resetdeps.js
```
**3. Write some code &/or add some tests...**
@@ -48,6 +48,28 @@ We use [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/).
We use [`tap`](https://node-tap.org/) for testing & expect that every new feature or bug fix comes with corresponding tests that validate the solutions. Tap also reports on code coverage and it will fail if that drops below 100%.
+To run your repository's version of the npm cli on your local machine, use the following commands:
+
+**npm commands:**
+```bash
+node .
+```
+
+**npx commands:**
+```bash
+node . exec
+```
+
+For example, instead of:
+```bash
+npm exec --
+```
+Use:
+```bash
+node . exec --
+```
+
+
## Performance & Benchmarks
We've set up an automated [benchmark](https://github.com/npm/benchmarks) integration that will run against all Pull Requests; Posting back a comment with the results of the run.
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 69b72b640df87..f612fd4a3834e 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -12,8 +12,8 @@ graph LR;
cacache-->ssri;
cacache-->unique-filename;
init-package-json-->npm-package-arg;
+ init-package-json-->npmcli-package-json["@npmcli/package-json"];
init-package-json-->promzard;
- init-package-json-->read-package-json;
init-package-json-->read;
init-package-json-->semver;
init-package-json-->validate-npm-package-name;
@@ -24,20 +24,17 @@ graph LR;
libnpmaccess-->npmcli-template-oss["@npmcli/template-oss"];
libnpmdiff-->npm-package-arg;
libnpmdiff-->npmcli-arborist["@npmcli/arborist"];
- libnpmdiff-->npmcli-disparity-colors["@npmcli/disparity-colors"];
libnpmdiff-->npmcli-eslint-config["@npmcli/eslint-config"];
libnpmdiff-->npmcli-installed-package-contents["@npmcli/installed-package-contents"];
libnpmdiff-->npmcli-template-oss["@npmcli/template-oss"];
libnpmdiff-->pacote;
libnpmexec-->bin-links;
- libnpmexec-->minify-registry-metadata;
libnpmexec-->npm-package-arg;
libnpmexec-->npmcli-arborist["@npmcli/arborist"];
libnpmexec-->npmcli-eslint-config["@npmcli/eslint-config"];
libnpmexec-->npmcli-mock-registry["@npmcli/mock-registry"];
libnpmexec-->npmcli-run-script["@npmcli/run-script"];
libnpmexec-->npmcli-template-oss["@npmcli/template-oss"];
- libnpmexec-->npmlog;
libnpmexec-->pacote;
libnpmexec-->proc-log;
libnpmexec-->read-package-json-fast;
@@ -62,8 +59,10 @@ graph LR;
libnpmpublish-->npm-package-arg;
libnpmpublish-->npm-registry-fetch;
libnpmpublish-->npmcli-eslint-config["@npmcli/eslint-config"];
+ libnpmpublish-->npmcli-mock-globals["@npmcli/mock-globals"];
libnpmpublish-->npmcli-mock-registry["@npmcli/mock-registry"];
libnpmpublish-->npmcli-template-oss["@npmcli/template-oss"];
+ libnpmpublish-->proc-log;
libnpmpublish-->semver;
libnpmpublish-->ssri;
libnpmsearch-->npm-registry-fetch;
@@ -81,6 +80,8 @@ graph LR;
libnpmversion-->semver;
make-fetch-happen-->cacache;
make-fetch-happen-->minipass-fetch;
+ make-fetch-happen-->npmcli-agent["@npmcli/agent"];
+ make-fetch-happen-->proc-log;
make-fetch-happen-->ssri;
nopt-->abbrev;
normalize-package-data-->hosted-git-info;
@@ -105,6 +106,7 @@ graph LR;
npm-->libnpmversion;
npm-->make-fetch-happen;
npm-->nopt;
+ npm-->normalize-package-data;
npm-->npm-audit-report;
npm-->npm-install-checks;
npm-->npm-package-arg;
@@ -120,22 +122,20 @@ graph LR;
npm-->npmcli-fs["@npmcli/fs"];
npm-->npmcli-git["@npmcli/git"];
npm-->npmcli-map-workspaces["@npmcli/map-workspaces"];
+ npm-->npmcli-mock-globals["@npmcli/mock-globals"];
npm-->npmcli-mock-registry["@npmcli/mock-registry"];
npm-->npmcli-package-json["@npmcli/package-json"];
npm-->npmcli-promise-spawn["@npmcli/promise-spawn"];
+ npm-->npmcli-redact["@npmcli/redact"];
npm-->npmcli-run-script["@npmcli/run-script"];
npm-->npmcli-smoke-tests["@npmcli/smoke-tests"];
npm-->npmcli-template-oss["@npmcli/template-oss"];
- npm-->npmlog;
npm-->pacote;
npm-->parse-conflict-json;
npm-->proc-log;
- npm-->read-package-json-fast;
- npm-->read-package-json;
npm-->read;
npm-->semver;
npm-->ssri;
- npm-->treeverse;
npm-->validate-npm-package-name;
npm-->write-file-atomic;
npm-bundled-->npm-normalize-package-bin;
@@ -154,6 +154,7 @@ graph LR;
npm-registry-fetch-->make-fetch-happen;
npm-registry-fetch-->minipass-fetch;
npm-registry-fetch-->npm-package-arg;
+ npm-registry-fetch-->npmcli-redact["@npmcli/redact"];
npm-registry-fetch-->proc-log;
npmcli-arborist-->bin-links;
npmcli-arborist-->cacache;
@@ -174,28 +175,31 @@ graph LR;
npmcli-arborist-->npmcli-node-gyp["@npmcli/node-gyp"];
npmcli-arborist-->npmcli-package-json["@npmcli/package-json"];
npmcli-arborist-->npmcli-query["@npmcli/query"];
+ npmcli-arborist-->npmcli-redact["@npmcli/redact"];
npmcli-arborist-->npmcli-run-script["@npmcli/run-script"];
npmcli-arborist-->npmcli-template-oss["@npmcli/template-oss"];
- npmcli-arborist-->npmlog;
npmcli-arborist-->pacote;
npmcli-arborist-->parse-conflict-json;
npmcli-arborist-->proc-log;
+ npmcli-arborist-->proggy;
npmcli-arborist-->read-package-json-fast;
npmcli-arborist-->semver;
npmcli-arborist-->ssri;
- npmcli-arborist-->treeverse;
npmcli-config-->ini;
npmcli-config-->nopt;
npmcli-config-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-config-->npmcli-map-workspaces["@npmcli/map-workspaces"];
+ npmcli-config-->npmcli-mock-globals["@npmcli/mock-globals"];
+ npmcli-config-->npmcli-package-json["@npmcli/package-json"];
npmcli-config-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-config-->proc-log;
- npmcli-config-->read-package-json-fast;
npmcli-config-->semver;
npmcli-docs-->ignore-walk;
npmcli-docs-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-docs-->npmcli-template-oss["@npmcli/template-oss"];
+ npmcli-docs-->semver;
npmcli-fs-->semver;
+ npmcli-git-->ini;
npmcli-git-->npm-pick-manifest;
npmcli-git-->npmcli-promise-spawn["@npmcli/promise-spawn"];
npmcli-git-->proc-log;
@@ -207,22 +211,30 @@ graph LR;
npmcli-metavuln-calculator-->cacache;
npmcli-metavuln-calculator-->json-parse-even-better-errors;
npmcli-metavuln-calculator-->pacote;
+ npmcli-metavuln-calculator-->proc-log;
npmcli-metavuln-calculator-->semver;
+ npmcli-mock-globals-->npmcli-eslint-config["@npmcli/eslint-config"];
+ npmcli-mock-globals-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-mock-registry-->npm-package-arg;
npmcli-mock-registry-->npmcli-arborist["@npmcli/arborist"];
npmcli-mock-registry-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-mock-registry-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-mock-registry-->pacote;
+ npmcli-package-json-->hosted-git-info;
npmcli-package-json-->json-parse-even-better-errors;
+ npmcli-package-json-->normalize-package-data;
+ npmcli-package-json-->npmcli-git["@npmcli/git"];
+ npmcli-package-json-->proc-log;
+ npmcli-package-json-->semver;
npmcli-run-script-->npmcli-node-gyp["@npmcli/node-gyp"];
+ npmcli-run-script-->npmcli-package-json["@npmcli/package-json"];
npmcli-run-script-->npmcli-promise-spawn["@npmcli/promise-spawn"];
- npmcli-run-script-->read-package-json-fast;
+ npmcli-run-script-->proc-log;
npmcli-smoke-tests-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-smoke-tests-->npmcli-mock-registry["@npmcli/mock-registry"];
npmcli-smoke-tests-->npmcli-promise-spawn["@npmcli/promise-spawn"];
npmcli-smoke-tests-->npmcli-template-oss["@npmcli/template-oss"];
- npmlog-->are-we-there-yet;
- npmlog-->gauge;
+ npmcli-smoke-tests-->semver;
pacote-->cacache;
pacote-->fs-minipass;
pacote-->npm-package-arg;
@@ -231,18 +243,14 @@ graph LR;
pacote-->npm-registry-fetch;
pacote-->npmcli-git["@npmcli/git"];
pacote-->npmcli-installed-package-contents["@npmcli/installed-package-contents"];
+ pacote-->npmcli-package-json["@npmcli/package-json"];
pacote-->npmcli-promise-spawn["@npmcli/promise-spawn"];
pacote-->npmcli-run-script["@npmcli/run-script"];
pacote-->proc-log;
- pacote-->read-package-json-fast;
- pacote-->read-package-json;
pacote-->ssri;
parse-conflict-json-->json-parse-even-better-errors;
promzard-->read;
read-->mute-stream;
- read-package-json-->json-parse-even-better-errors;
- read-package-json-->normalize-package-data;
- read-package-json-->npm-normalize-package-bin;
read-package-json-fast-->json-parse-even-better-errors;
read-package-json-fast-->npm-normalize-package-bin;
unique-filename-->unique-slug;
@@ -252,87 +260,67 @@ graph LR;
```mermaid
graph LR;
agent-base-->debug;
- agentkeepalive-->debug;
- agentkeepalive-->depd;
- agentkeepalive-->humanize-ms;
aggregate-error-->clean-stack;
aggregate-error-->indent-string;
- ansi-styles-->color-convert;
- are-we-there-yet-->delegates;
- are-we-there-yet-->readable-stream;
bin-links-->cmd-shim;
bin-links-->npm-normalize-package-bin;
bin-links-->read-cmd-shim;
bin-links-->write-file-atomic;
brace-expansion-->balanced-match;
- builtins-->semver;
- cacache-->chownr;
cacache-->fs-minipass;
cacache-->glob;
- cacache-->infer-owner;
cacache-->lru-cache;
cacache-->minipass-collect;
cacache-->minipass-flush;
cacache-->minipass-pipeline;
cacache-->minipass;
- cacache-->mkdirp;
cacache-->npmcli-fs["@npmcli/fs"];
- cacache-->npmcli-move-file["@npmcli/move-file"];
cacache-->p-map;
- cacache-->promise-inflight;
- cacache-->rimraf;
cacache-->ssri;
cacache-->tar;
cacache-->unique-filename;
- chalk-->ansi-styles;
- chalk-->supports-color;
cidr-regex-->ip-regex;
cli-columns-->string-width;
cli-columns-->strip-ansi;
- cli-table3-->colors-colors["@colors/colors"];
- cli-table3-->string-width;
- color-convert-->color-name;
- columnify-->strip-ansi;
- columnify-->wcwidth;
+ cross-spawn-->path-key;
+ cross-spawn-->shebang-command;
+ cross-spawn-->which;
debug-->ms;
- defaults-->clone;
encoding-->iconv-lite;
+ foreground-child-->cross-spawn;
+ foreground-child-->signal-exit;
fs-minipass-->minipass;
- gauge-->aproba;
- gauge-->color-support;
- gauge-->console-control-strings;
- gauge-->has-unicode;
- gauge-->signal-exit;
- gauge-->string-width;
- gauge-->strip-ansi;
- gauge-->wide-align;
- glob-->fs.realpath;
- glob-->inflight;
- glob-->inherits;
+ glob-->foreground-child;
+ glob-->jackspeak;
glob-->minimatch;
- glob-->once;
- glob-->path-is-absolute;
- has-->function-bind;
+ glob-->minipass;
+ glob-->package-json-from-dist;
+ glob-->path-scurry;
hosted-git-info-->lru-cache;
http-proxy-agent-->agent-base;
http-proxy-agent-->debug;
- http-proxy-agent-->tootallnate-once["@tootallnate/once"];
https-proxy-agent-->agent-base;
https-proxy-agent-->debug;
- humanize-ms-->ms;
iconv-lite-->safer-buffer;
ignore-walk-->minimatch;
- inflight-->once;
- inflight-->wrappy;
init-package-json-->npm-package-arg;
+ init-package-json-->npmcli-package-json["@npmcli/package-json"];
init-package-json-->promzard;
- init-package-json-->read-package-json;
init-package-json-->read;
init-package-json-->semver;
init-package-json-->validate-npm-package-license;
init-package-json-->validate-npm-package-name;
+ ip-address-->jsbn;
+ ip-address-->sprintf-js;
is-cidr-->cidr-regex;
- is-core-module-->has;
+ isaacs-cliui-->string-width-cjs;
+ isaacs-cliui-->string-width;
+ isaacs-cliui-->strip-ansi-cjs;
+ isaacs-cliui-->strip-ansi;
+ isaacs-cliui-->wrap-ansi-cjs;
+ isaacs-cliui-->wrap-ansi;
+ jackspeak-->isaacs-cliui["@isaacs/cliui"];
+ jackspeak-->pkgjs-parseargs["@pkgjs/parseargs"];
libnpmaccess-->nock;
libnpmaccess-->npm-package-arg;
libnpmaccess-->npm-registry-fetch;
@@ -345,7 +333,6 @@ graph LR;
libnpmdiff-->minimatch;
libnpmdiff-->npm-package-arg;
libnpmdiff-->npmcli-arborist["@npmcli/arborist"];
- libnpmdiff-->npmcli-disparity-colors["@npmcli/disparity-colors"];
libnpmdiff-->npmcli-eslint-config["@npmcli/eslint-config"];
libnpmdiff-->npmcli-installed-package-contents["@npmcli/installed-package-contents"];
libnpmdiff-->npmcli-template-oss["@npmcli/template-oss"];
@@ -357,15 +344,12 @@ graph LR;
libnpmexec-->ci-info;
libnpmexec-->just-extend;
libnpmexec-->just-safe-set;
- libnpmexec-->minify-registry-metadata;
- libnpmexec-->mkdirp;
libnpmexec-->npm-package-arg;
libnpmexec-->npmcli-arborist["@npmcli/arborist"];
libnpmexec-->npmcli-eslint-config["@npmcli/eslint-config"];
libnpmexec-->npmcli-mock-registry["@npmcli/mock-registry"];
libnpmexec-->npmcli-run-script["@npmcli/run-script"];
libnpmexec-->npmcli-template-oss["@npmcli/template-oss"];
- libnpmexec-->npmlog;
libnpmexec-->pacote;
libnpmexec-->proc-log;
libnpmexec-->read-package-json-fast;
@@ -399,15 +383,18 @@ graph LR;
libnpmpack-->pacote;
libnpmpack-->spawk;
libnpmpack-->tap;
- libnpmpublish-->lodash.clonedeep;
+ libnpmpublish-->ci-info;
libnpmpublish-->nock;
libnpmpublish-->normalize-package-data;
libnpmpublish-->npm-package-arg;
libnpmpublish-->npm-registry-fetch;
libnpmpublish-->npmcli-eslint-config["@npmcli/eslint-config"];
+ libnpmpublish-->npmcli-mock-globals["@npmcli/mock-globals"];
libnpmpublish-->npmcli-mock-registry["@npmcli/mock-registry"];
libnpmpublish-->npmcli-template-oss["@npmcli/template-oss"];
+ libnpmpublish-->proc-log;
libnpmpublish-->semver;
+ libnpmpublish-->sigstore;
libnpmpublish-->ssri;
libnpmpublish-->tap;
libnpmsearch-->nock;
@@ -430,22 +417,17 @@ graph LR;
libnpmversion-->require-inject;
libnpmversion-->semver;
libnpmversion-->tap;
- lru-cache-->yallist;
- make-fetch-happen-->agentkeepalive;
make-fetch-happen-->cacache;
make-fetch-happen-->http-cache-semantics;
- make-fetch-happen-->http-proxy-agent;
- make-fetch-happen-->https-proxy-agent;
make-fetch-happen-->is-lambda;
- make-fetch-happen-->lru-cache;
- make-fetch-happen-->minipass-collect;
make-fetch-happen-->minipass-fetch;
make-fetch-happen-->minipass-flush;
make-fetch-happen-->minipass-pipeline;
make-fetch-happen-->minipass;
make-fetch-happen-->negotiator;
+ make-fetch-happen-->npmcli-agent["@npmcli/agent"];
+ make-fetch-happen-->proc-log;
make-fetch-happen-->promise-retry;
- make-fetch-happen-->socks-proxy-agent;
make-fetch-happen-->ssri;
minimatch-->brace-expansion;
minipass-->yallist;
@@ -455,35 +437,35 @@ graph LR;
minipass-fetch-->minipass;
minipass-fetch-->minizlib;
minipass-flush-->minipass;
- minipass-json-stream-->jsonparse;
- minipass-json-stream-->minipass;
minipass-pipeline-->minipass;
minipass-sized-->minipass;
minizlib-->minipass;
minizlib-->yallist;
node-gyp-->env-paths;
+ node-gyp-->exponential-backoff;
node-gyp-->glob;
node-gyp-->graceful-fs;
node-gyp-->make-fetch-happen;
node-gyp-->nopt;
- node-gyp-->npmlog;
- node-gyp-->rimraf;
+ node-gyp-->proc-log;
node-gyp-->semver;
node-gyp-->tar;
node-gyp-->which;
nopt-->abbrev;
normalize-package-data-->hosted-git-info;
- normalize-package-data-->is-core-module;
normalize-package-data-->semver;
normalize-package-data-->validate-npm-package-license;
npm-->abbrev;
+ npm-->ajv-formats-draft2019;
+ npm-->ajv-formats;
+ npm-->ajv;
npm-->archy;
npm-->cacache;
npm-->chalk;
npm-->ci-info;
npm-->cli-columns;
npm-->cli-table3;
- npm-->columnify;
+ npm-->diff;
npm-->fastest-levenshtein;
npm-->fs-minipass;
npm-->glob;
@@ -505,16 +487,15 @@ graph LR;
npm-->libnpmsearch;
npm-->libnpmteam;
npm-->libnpmversion;
- npm-->licensee;
npm-->make-fetch-happen;
npm-->minimatch;
npm-->minipass-pipeline;
npm-->minipass;
- npm-->mkdirp;
npm-->ms;
npm-->nock;
npm-->node-gyp;
npm-->nopt;
+ npm-->normalize-package-data;
npm-->npm-audit-report;
npm-->npm-install-checks;
npm-->npm-package-arg;
@@ -530,36 +511,39 @@ graph LR;
npm-->npmcli-fs["@npmcli/fs"];
npm-->npmcli-git["@npmcli/git"];
npm-->npmcli-map-workspaces["@npmcli/map-workspaces"];
+ npm-->npmcli-mock-globals["@npmcli/mock-globals"];
npm-->npmcli-mock-registry["@npmcli/mock-registry"];
npm-->npmcli-package-json["@npmcli/package-json"];
npm-->npmcli-promise-spawn["@npmcli/promise-spawn"];
+ npm-->npmcli-redact["@npmcli/redact"];
npm-->npmcli-run-script["@npmcli/run-script"];
npm-->npmcli-smoke-tests["@npmcli/smoke-tests"];
npm-->npmcli-template-oss["@npmcli/template-oss"];
- npm-->npmlog;
npm-->p-map;
npm-->pacote;
npm-->parse-conflict-json;
npm-->proc-log;
npm-->qrcode-terminal;
- npm-->read-package-json-fast;
- npm-->read-package-json;
npm-->read;
npm-->remark-gfm;
npm-->remark-github;
npm-->remark;
+ npm-->rimraf;
npm-->semver;
+ npm-->sigstore-tuf["@sigstore/tuf"];
npm-->spawk;
+ npm-->spdx-expression-parse;
npm-->ssri;
+ npm-->supports-color;
npm-->tap;
npm-->tar;
npm-->text-table;
npm-->tiny-relative-date;
npm-->treeverse;
+ npm-->tufjs-repo-mock["@tufjs/repo-mock"];
npm-->validate-npm-package-name;
npm-->which;
npm-->write-file-atomic;
- npm-audit-report-->chalk;
npm-bundled-->npm-normalize-package-bin;
npm-install-checks-->semver;
npm-package-arg-->hosted-git-info;
@@ -573,22 +557,28 @@ graph LR;
npm-pick-manifest-->semver;
npm-profile-->npm-registry-fetch;
npm-profile-->proc-log;
+ npm-registry-fetch-->jsonparse;
npm-registry-fetch-->make-fetch-happen;
npm-registry-fetch-->minipass-fetch;
- npm-registry-fetch-->minipass-json-stream;
npm-registry-fetch-->minipass;
npm-registry-fetch-->minizlib;
npm-registry-fetch-->npm-package-arg;
+ npm-registry-fetch-->npmcli-redact["@npmcli/redact"];
npm-registry-fetch-->proc-log;
+ npmcli-agent-->agent-base;
+ npmcli-agent-->http-proxy-agent;
+ npmcli-agent-->https-proxy-agent;
+ npmcli-agent-->lru-cache;
+ npmcli-agent-->socks-proxy-agent;
npmcli-arborist-->benchmark;
npmcli-arborist-->bin-links;
npmcli-arborist-->cacache;
- npmcli-arborist-->chalk;
npmcli-arborist-->common-ancestor-path;
npmcli-arborist-->hosted-git-info;
npmcli-arborist-->isaacs-string-locale-compare["@isaacs/string-locale-compare"];
npmcli-arborist-->json-parse-even-better-errors;
npmcli-arborist-->json-stringify-nice;
+ npmcli-arborist-->lru-cache;
npmcli-arborist-->minify-registry-metadata;
npmcli-arborist-->minimatch;
npmcli-arborist-->nock;
@@ -606,37 +596,39 @@ graph LR;
npmcli-arborist-->npmcli-node-gyp["@npmcli/node-gyp"];
npmcli-arborist-->npmcli-package-json["@npmcli/package-json"];
npmcli-arborist-->npmcli-query["@npmcli/query"];
+ npmcli-arborist-->npmcli-redact["@npmcli/redact"];
npmcli-arborist-->npmcli-run-script["@npmcli/run-script"];
npmcli-arborist-->npmcli-template-oss["@npmcli/template-oss"];
- npmcli-arborist-->npmlog;
npmcli-arborist-->pacote;
npmcli-arborist-->parse-conflict-json;
npmcli-arborist-->proc-log;
+ npmcli-arborist-->proggy;
npmcli-arborist-->promise-all-reject-late;
npmcli-arborist-->promise-call-limit;
npmcli-arborist-->read-package-json-fast;
npmcli-arborist-->semver;
npmcli-arborist-->ssri;
npmcli-arborist-->tap;
+ npmcli-arborist-->tar-stream;
npmcli-arborist-->tcompare;
npmcli-arborist-->treeverse;
npmcli-arborist-->walk-up-path;
+ npmcli-config-->ci-info;
npmcli-config-->ini;
npmcli-config-->nopt;
npmcli-config-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-config-->npmcli-map-workspaces["@npmcli/map-workspaces"];
+ npmcli-config-->npmcli-mock-globals["@npmcli/mock-globals"];
+ npmcli-config-->npmcli-package-json["@npmcli/package-json"];
npmcli-config-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-config-->proc-log;
- npmcli-config-->read-package-json-fast;
npmcli-config-->semver;
npmcli-config-->tap;
npmcli-config-->walk-up-path;
- npmcli-disparity-colors-->ansi-styles;
npmcli-docs-->front-matter;
npmcli-docs-->ignore-walk;
npmcli-docs-->isaacs-string-locale-compare["@isaacs/string-locale-compare"];
npmcli-docs-->jsdom;
- npmcli-docs-->mkdirp;
npmcli-docs-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-docs-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-docs-->rehype-stringify;
@@ -644,12 +636,13 @@ graph LR;
npmcli-docs-->remark-man;
npmcli-docs-->remark-parse;
npmcli-docs-->remark-rehype;
+ npmcli-docs-->semver;
npmcli-docs-->tap;
npmcli-docs-->unified;
npmcli-docs-->yaml;
npmcli-fs-->semver;
+ npmcli-git-->ini;
npmcli-git-->lru-cache;
- npmcli-git-->mkdirp;
npmcli-git-->npm-pick-manifest;
npmcli-git-->npmcli-promise-spawn["@npmcli/promise-spawn"];
npmcli-git-->proc-log;
@@ -666,7 +659,12 @@ graph LR;
npmcli-metavuln-calculator-->cacache;
npmcli-metavuln-calculator-->json-parse-even-better-errors;
npmcli-metavuln-calculator-->pacote;
+ npmcli-metavuln-calculator-->proc-log;
npmcli-metavuln-calculator-->semver;
+ npmcli-mock-globals-->npmcli-eslint-config["@npmcli/eslint-config"];
+ npmcli-mock-globals-->npmcli-template-oss["@npmcli/template-oss"];
+ npmcli-mock-globals-->tap;
+ npmcli-mock-registry-->json-stringify-safe;
npmcli-mock-registry-->nock;
npmcli-mock-registry-->npm-package-arg;
npmcli-mock-registry-->npmcli-arborist["@npmcli/arborist"];
@@ -674,30 +672,29 @@ graph LR;
npmcli-mock-registry-->npmcli-template-oss["@npmcli/template-oss"];
npmcli-mock-registry-->pacote;
npmcli-mock-registry-->tap;
- npmcli-move-file-->mkdirp;
- npmcli-move-file-->rimraf;
+ npmcli-package-json-->glob;
+ npmcli-package-json-->hosted-git-info;
npmcli-package-json-->json-parse-even-better-errors;
+ npmcli-package-json-->normalize-package-data;
+ npmcli-package-json-->npmcli-git["@npmcli/git"];
+ npmcli-package-json-->proc-log;
+ npmcli-package-json-->semver;
npmcli-promise-spawn-->which;
npmcli-query-->postcss-selector-parser;
npmcli-run-script-->node-gyp;
npmcli-run-script-->npmcli-node-gyp["@npmcli/node-gyp"];
+ npmcli-run-script-->npmcli-package-json["@npmcli/package-json"];
npmcli-run-script-->npmcli-promise-spawn["@npmcli/promise-spawn"];
- npmcli-run-script-->read-package-json-fast;
+ npmcli-run-script-->proc-log;
npmcli-run-script-->which;
- npmcli-smoke-tests-->http-proxy;
- npmcli-smoke-tests-->just-extend;
- npmcli-smoke-tests-->just-safe-set;
npmcli-smoke-tests-->npmcli-eslint-config["@npmcli/eslint-config"];
npmcli-smoke-tests-->npmcli-mock-registry["@npmcli/mock-registry"];
npmcli-smoke-tests-->npmcli-promise-spawn["@npmcli/promise-spawn"];
npmcli-smoke-tests-->npmcli-template-oss["@npmcli/template-oss"];
+ npmcli-smoke-tests-->proxy;
+ npmcli-smoke-tests-->semver;
npmcli-smoke-tests-->tap;
npmcli-smoke-tests-->which;
- npmlog-->are-we-there-yet;
- npmlog-->console-control-strings;
- npmlog-->gauge;
- npmlog-->set-blocking;
- once-->wrappy;
p-map-->aggregate-error;
pacote-->cacache;
pacote-->fs-minipass;
@@ -708,35 +705,47 @@ graph LR;
pacote-->npm-registry-fetch;
pacote-->npmcli-git["@npmcli/git"];
pacote-->npmcli-installed-package-contents["@npmcli/installed-package-contents"];
+ pacote-->npmcli-package-json["@npmcli/package-json"];
pacote-->npmcli-promise-spawn["@npmcli/promise-spawn"];
pacote-->npmcli-run-script["@npmcli/run-script"];
pacote-->proc-log;
pacote-->promise-retry;
- pacote-->read-package-json-fast;
- pacote-->read-package-json;
+ pacote-->sigstore;
pacote-->ssri;
pacote-->tar;
parse-conflict-json-->json-parse-even-better-errors;
parse-conflict-json-->just-diff-apply;
parse-conflict-json-->just-diff;
+ path-scurry-->lru-cache;
+ path-scurry-->minipass;
postcss-selector-parser-->cssesc;
postcss-selector-parser-->util-deprecate;
promise-retry-->err-code;
promise-retry-->retry;
promzard-->read;
read-->mute-stream;
- read-package-json-->glob;
- read-package-json-->json-parse-even-better-errors;
- read-package-json-->normalize-package-data;
- read-package-json-->npm-normalize-package-bin;
read-package-json-fast-->json-parse-even-better-errors;
read-package-json-fast-->npm-normalize-package-bin;
- readable-stream-->inherits;
- readable-stream-->string_decoder;
- readable-stream-->util-deprecate;
- rimraf-->glob;
- semver-->lru-cache;
- socks-->ip;
+ shebang-command-->shebang-regex;
+ sigstore-->sigstore-bundle["@sigstore/bundle"];
+ sigstore-->sigstore-core["@sigstore/core"];
+ sigstore-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+ sigstore-->sigstore-sign["@sigstore/sign"];
+ sigstore-->sigstore-tuf["@sigstore/tuf"];
+ sigstore-->sigstore-verify["@sigstore/verify"];
+ sigstore-bundle-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+ sigstore-sign-->make-fetch-happen;
+ sigstore-sign-->proc-log;
+ sigstore-sign-->promise-retry;
+ sigstore-sign-->sigstore-bundle["@sigstore/bundle"];
+ sigstore-sign-->sigstore-core["@sigstore/core"];
+ sigstore-sign-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+ sigstore-tuf-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+ sigstore-tuf-->tuf-js;
+ sigstore-verify-->sigstore-bundle["@sigstore/bundle"];
+ sigstore-verify-->sigstore-core["@sigstore/core"];
+ sigstore-verify-->sigstore-protobuf-specs["@sigstore/protobuf-specs"];
+ socks-->ip-address;
socks-->smart-buffer;
socks-proxy-agent-->agent-base;
socks-proxy-agent-->debug;
@@ -746,31 +755,35 @@ graph LR;
spdx-expression-parse-->spdx-exceptions;
spdx-expression-parse-->spdx-license-ids;
ssri-->minipass;
+ string-width-->eastasianwidth;
string-width-->emoji-regex;
string-width-->is-fullwidth-code-point;
string-width-->strip-ansi;
- string_decoder-->safe-buffer;
strip-ansi-->ansi-regex;
- supports-color-->has-flag;
tar-->chownr;
tar-->fs-minipass;
tar-->minipass;
tar-->minizlib;
tar-->mkdirp;
tar-->yallist;
+ tuf-js-->debug;
+ tuf-js-->make-fetch-happen;
+ tuf-js-->tufjs-models["@tufjs/models"];
+ tufjs-models-->minimatch;
+ tufjs-models-->tufjs-canonical-json["@tufjs/canonical-json"];
unique-filename-->unique-slug;
unique-slug-->imurmurhash;
validate-npm-package-license-->spdx-correct;
validate-npm-package-license-->spdx-expression-parse;
- validate-npm-package-name-->builtins;
- wcwidth-->defaults;
which-->isexe;
- wide-align-->string-width;
+ wrap-ansi-->ansi-styles;
+ wrap-ansi-->string-width;
+ wrap-ansi-->strip-ansi;
write-file-atomic-->imurmurhash;
write-file-atomic-->signal-exit;
```
-## npm dependency heirarchy
+## npm dependency hierarchy
These are the groups of dependencies in npm that depend on each other.
Each group depends on packages lower down the chain, nothing depends on
@@ -781,9 +794,10 @@ packages higher up the chain.
- @npmcli/mock-registry, libnpmdiff, libnpmfund, libnpmpack
- @npmcli/arborist
- @npmcli/metavuln-calculator
- - pacote, libnpmhook, libnpmorg, libnpmsearch, libnpmteam, npm-profile
- - npm-registry-fetch, libnpmversion
- - @npmcli/git, make-fetch-happen, @npmcli/config, init-package-json
- - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, @npmcli/run-script, read-package-json, promzard
- - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, npm-packlist, normalize-package-data, @npmcli/package-json, bin-links, nopt, npmlog, parse-conflict-json, read
- - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, are-we-there-yet, gauge, treeverse, minify-registry-metadata, ini, @npmcli/disparity-colors, mute-stream, npm-audit-report, npm-user-validate
+ - pacote, libnpmversion
+ - @npmcli/run-script, @npmcli/config, libnpmhook, libnpmorg, libnpmsearch, libnpmteam, init-package-json, npm-profile
+ - @npmcli/package-json, npm-registry-fetch
+ - @npmcli/git, make-fetch-happen
+ - @npmcli/installed-package-contents, @npmcli/map-workspaces, cacache, npm-pick-manifest, promzard
+ - @npmcli/docs, @npmcli/fs, npm-bundled, read-package-json-fast, unique-filename, npm-install-checks, npm-package-arg, normalize-package-data, npm-packlist, bin-links, nopt, parse-conflict-json, @npmcli/mock-globals, read
+ - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, json-parse-even-better-errors, fs-minipass, ssri, unique-slug, @npmcli/promise-spawn, ini, hosted-git-info, proc-log, validate-npm-package-name, @npmcli/node-gyp, @npmcli/redact, @npmcli/agent, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, proggy, minify-registry-metadata, mute-stream, npm-audit-report, npm-user-validate
diff --git a/README.md b/README.md
index b88882fadfee0..3dc35a3842554 100644
--- a/README.md
+++ b/README.md
@@ -9,9 +9,8 @@
One of the following versions of [Node.js](https://nodejs.org/en/download/) must be installed to run **`npm`**:
-* `14.x.x` >= `14.17.0`
-* `16.x.x` >= `16.13.0`
-* `18.0.0` or higher
+* `18.x.x` >= `18.17.0`
+* `20.5.0` or higher
### Installation
@@ -27,17 +26,7 @@ curl -qL https://www.npmjs.com/install.sh | sh
#### Node Version Managers
-If you're looking to manage multiple versions of **`node`** &/or **`npm`**, consider using a "Node Version Manager" such as:
-
-* [**`nvm`**](https://github.com/nvm-sh/nvm)
-* [**`nvs`**](https://github.com/jasongin/nvs)
-* [**`nave`**](https://github.com/isaacs/nave)
-* [**`n`**](https://github.com/tj/n)
-* [**`volta`**](https://github.com/volta-cli/volta)
-* [**`nodenv`**](https://github.com/nodenv/nodenv)
-* [**`asdf-nodejs`**](https://github.com/asdf-vm/asdf-nodejs)
-* [**`nvm-windows`**](https://github.com/coreybutler/nvm-windows)
-* [**`fnm`**](https://github.com/Schniz/fnm)
+If you're looking to manage multiple versions of **`Node.js`** &/or **`npm`**, consider using a [node version manager](https://github.com/search?q=node+version+manager+archived%3Afalse&type=repositories&ref=advsearch)
### Usage
@@ -51,7 +40,7 @@ npm
* Note: you can also search docs locally with `npm help-search `
* [**Bug Tracker**](https://github.com/npm/cli/issues) - Search or submit bugs against the CLI
* [**Roadmap**](https://github.com/orgs/github/projects/4247/views/1?filterQuery=npm) - Track & follow along with our public roadmap
-* [**Feedback**](https://github.com/npm/feedback) - Contribute ideas & discussion around the npm registry, website & CLI
+* [**Community Feedback and Discussions**](https://github.com/orgs/community/discussions/categories/npm) - Contribute ideas & discussion around the npm registry, website & CLI
* [**RFCs**](https://github.com/npm/rfcs) - Contribute ideas & specifications for the API/design of the npm CLI
* [**Service Status**](https://status.npmjs.org/) - Monitor the current status & see incident reports for the website & registry
* [**Project Status**](https://npm.github.io/statusboard/) - See the health of all our maintained OSS projects in one view
diff --git a/SECURITY.md b/SECURITY.md
index 4e7c26c66ebd9..9cd2deaf4fd34 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -4,11 +4,10 @@ GitHub takes the security of our software products and services seriously, inclu
If you believe you have found a security vulnerability in this GitHub-owned open source repository, you can report it to us in one of two ways.
-If the vulnerability you have found is *not* [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) or if you do not wish to be considered for a bounty reward, please report the issue to us directly using [private vulnerability reporting](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+If the vulnerability you have found is *not* [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) or if you do not wish to be considered for a bounty reward, please report the issue to us directly through [opensource-security@github.com](mailto:opensource-security@github.com).
If the vulnerability you have found is [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) and you would like for your finding to be considered for a bounty reward, please submit the vulnerability to us through [HackerOne](https://hackerone.com/github) in order to be eligible to receive a bounty award.
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
Thanks for helping make GitHub safe for everyone.
-
diff --git a/bin/node-gyp-bin/node-gyp.cmd b/bin/node-gyp-bin/node-gyp.cmd
index 1ef2ae0c68fc4..083c9c58a502a 100755
--- a/bin/node-gyp-bin/node-gyp.cmd
+++ b/bin/node-gyp-bin/node-gyp.cmd
@@ -1,5 +1,5 @@
-if not defined npm_config_node_gyp (
- node "%~dp0\..\..\node_modules\node-gyp\bin\node-gyp.js" %*
-) else (
+if not defined npm_config_node_gyp (
+ node "%~dp0\..\..\node_modules\node-gyp\bin\node-gyp.js" %*
+) else (
node "%npm_config_node_gyp%" %*
-)
+)
diff --git a/bin/npm b/bin/npm
index a131a53543404..027dc9d128d22 100755
--- a/bin/npm
+++ b/bin/npm
@@ -1,4 +1,8 @@
#!/usr/bin/env bash
+
+# This is used by the Node.js installer, which expects the cygwin/mingw
+# shell script to already be present in the npm dependency folder.
+
(set -o igncr) 2>/dev/null && set -o igncr; # cygwin encoding fix
basedir=`dirname "$0"`
@@ -7,6 +11,16 @@ case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
+if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then
+ IS_WSL="true"
+fi
+
+function no_node_dir {
+ # if this didn't work, then everything else below will fail
+ echo "Could not determine Node.js install directory" >&2
+ exit 1
+}
+
NODE_EXE="$basedir/node.exe"
if ! [ -x "$NODE_EXE" ]; then
NODE_EXE="$basedir/node"
@@ -17,14 +31,21 @@ fi
# this path is passed to node.exe, so it needs to match whatever
# kind of paths Node.js thinks it's using, typically win32 paths.
-CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)')"
+CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)' 2> /dev/null)"
+if [ $? -ne 0 ]; then
+ # this fails under WSL 1 so add an additional message. we also suppress stderr above
+ # because the actual error raised is not helpful. in WSL 1 node.exe cannot handle
+ # output redirection properly. See https://github.com/microsoft/WSL/issues/2370
+ if [ "$IS_WSL" == "true" ]; then
+ echo "WSL 1 is not supported. Please upgrade to WSL 2 or above." >&2
+ fi
+ no_node_dir
+fi
+NPM_PREFIX_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-prefix.js"
NPM_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-cli.js"
-
-NPM_PREFIX=`"$NODE_EXE" "$NPM_CLI_JS" prefix -g`
+NPM_PREFIX=`"$NODE_EXE" "$NPM_PREFIX_JS"`
if [ $? -ne 0 ]; then
- # if this didn't work, then everything else below will fail
- echo "Could not determine Node.js install directory" >&2
- exit 1
+ no_node_dir
fi
NPM_PREFIX_NPM_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npm-cli.js"
@@ -34,7 +55,7 @@ NPM_WSL_PATH="/.."
# WSL can run Windows binaries, so we have to give it the win32 path
# however, WSL bash tests against posix paths, so we need to construct that
# to know if npm is installed globally.
-if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then
+if [ "$IS_WSL" == "true" ]; then
NPM_WSL_PATH=`wslpath "$NPM_PREFIX_NPM_CLI_JS"`
fi
if [ -f "$NPM_PREFIX_NPM_CLI_JS" ] || [ -f "$NPM_WSL_PATH" ]; then
diff --git a/bin/npm-prefix.js b/bin/npm-prefix.js
new file mode 100755
index 0000000000000..b0b0ace6a92ab
--- /dev/null
+++ b/bin/npm-prefix.js
@@ -0,0 +1,30 @@
+#!/usr/bin/env node
+// This is a single-use bin to help Windows discover the proper prefix for npm
+// without having to load all of npm first
+// It does not accept argv params
+
+const path = require('node:path')
+const Config = require('@npmcli/config')
+const { definitions, flatten, shorthands } = require('@npmcli/config/lib/definitions')
+const config = new Config({
+ npmPath: path.dirname(__dirname),
+ // argv is explicitly not looked at since prefix is not something that can be changed via argv
+ argv: [],
+ definitions,
+ flatten,
+ shorthands,
+ excludeNpmCwd: false,
+})
+
+async function main () {
+ try {
+ await config.load()
+ // eslint-disable-next-line no-console
+ console.log(config.globalPrefix)
+ } catch (err) {
+ // eslint-disable-next-line no-console
+ console.error(err)
+ process.exit(1)
+ }
+}
+main()
diff --git a/bin/npm.cmd b/bin/npm.cmd
index f111c59d1efb6..68af4b0fca09f 100755
--- a/bin/npm.cmd
+++ b/bin/npm.cmd
@@ -1,19 +1,20 @@
-:: Created by npm, please don't edit manually.
-@ECHO OFF
-
-SETLOCAL
-
-SET "NODE_EXE=%~dp0\node.exe"
-IF NOT EXIST "%NODE_EXE%" (
- SET "NODE_EXE=node"
-)
-
-SET "NPM_CLI_JS=%~dp0\node_modules\npm\bin\npm-cli.js"
-FOR /F "delims=" %%F IN ('CALL "%NODE_EXE%" "%NPM_CLI_JS%" prefix -g') DO (
- SET "NPM_PREFIX_NPM_CLI_JS=%%F\node_modules\npm\bin\npm-cli.js"
-)
-IF EXIST "%NPM_PREFIX_NPM_CLI_JS%" (
- SET "NPM_CLI_JS=%NPM_PREFIX_NPM_CLI_JS%"
-)
-
-"%NODE_EXE%" "%NPM_CLI_JS%" %*
+:: Created by npm, please don't edit manually.
+@ECHO OFF
+
+SETLOCAL
+
+SET "NODE_EXE=%~dp0\node.exe"
+IF NOT EXIST "%NODE_EXE%" (
+ SET "NODE_EXE=node"
+)
+
+SET "NPM_PREFIX_JS=%~dp0\node_modules\npm\bin\npm-prefix.js"
+SET "NPM_CLI_JS=%~dp0\node_modules\npm\bin\npm-cli.js"
+FOR /F "delims=" %%F IN ('CALL "%NODE_EXE%" "%NPM_PREFIX_JS%"') DO (
+ SET "NPM_PREFIX_NPM_CLI_JS=%%F\node_modules\npm\bin\npm-cli.js"
+)
+IF EXIST "%NPM_PREFIX_NPM_CLI_JS%" (
+ SET "NPM_CLI_JS=%NPM_PREFIX_NPM_CLI_JS%"
+)
+
+"%NODE_EXE%" "%NPM_CLI_JS%" %*
diff --git a/bin/npm.ps1 b/bin/npm.ps1
new file mode 100644
index 0000000000000..04a1fd478ef9d
--- /dev/null
+++ b/bin/npm.ps1
@@ -0,0 +1,32 @@
+#!/usr/bin/env pwsh
+
+$NODE_EXE="$PSScriptRoot/node.exe"
+if (-not (Test-Path $NODE_EXE)) {
+ $NODE_EXE="$PSScriptRoot/node"
+}
+if (-not (Test-Path $NODE_EXE)) {
+ $NODE_EXE="node"
+}
+
+$NPM_PREFIX_JS="$PSScriptRoot/node_modules/npm/bin/npm-prefix.js"
+$NPM_CLI_JS="$PSScriptRoot/node_modules/npm/bin/npm-cli.js"
+$NPM_PREFIX=(& $NODE_EXE $NPM_PREFIX_JS)
+
+if ($LASTEXITCODE -ne 0) {
+ Write-Host "Could not determine Node.js install directory"
+ exit 1
+}
+
+$NPM_PREFIX_NPM_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npm-cli.js"
+if (Test-Path $NPM_PREFIX_NPM_CLI_JS) {
+ $NPM_CLI_JS=$NPM_PREFIX_NPM_CLI_JS
+}
+
+# Support pipeline input
+if ($MyInvocation.ExpectingInput) {
+ $input | & $NODE_EXE $NPM_CLI_JS $args
+} else {
+ & $NODE_EXE $NPM_CLI_JS $args
+}
+
+exit $LASTEXITCODE
diff --git a/bin/npx b/bin/npx
index a34e3459b5a70..b8619ee9c5e37 100755
--- a/bin/npx
+++ b/bin/npx
@@ -11,6 +11,16 @@ case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
+if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then
+ IS_WSL="true"
+fi
+
+function no_node_dir {
+ # if this didn't work, then everything else below will fail
+ echo "Could not determine Node.js install directory" >&2
+ exit 1
+}
+
NODE_EXE="$basedir/node.exe"
if ! [ -x "$NODE_EXE" ]; then
NODE_EXE="$basedir/node"
@@ -19,17 +29,24 @@ if ! [ -x "$NODE_EXE" ]; then
NODE_EXE=node
fi
-# these paths are passed to node.exe, so they need to match whatever
+# this path is passed to node.exe, so it needs to match whatever
# kind of paths Node.js thinks it's using, typically win32 paths.
-CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)')"
+CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)' 2> /dev/null)"
if [ $? -ne 0 ]; then
- # if this didn't work, then everything else below will fail
- echo "Could not determine Node.js install directory" >&2
- exit 1
+ # this fails under WSL 1 so add an additional message. we also suppress stderr above
+ # because the actual error raised is not helpful. in WSL 1 node.exe cannot handle
+ # output redirection properly. See https://github.com/microsoft/WSL/issues/2370
+ if [ "$IS_WSL" == "true" ]; then
+ echo "WSL 1 is not supported. Please upgrade to WSL 2 or above." >&2
+ fi
+ no_node_dir
fi
-NPM_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-cli.js"
+NPM_PREFIX_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-prefix.js"
NPX_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npx-cli.js"
-NPM_PREFIX=`"$NODE_EXE" "$NPM_CLI_JS" prefix -g`
+NPM_PREFIX=`"$NODE_EXE" "$NPM_PREFIX_JS"`
+if [ $? -ne 0 ]; then
+ no_node_dir
+fi
NPM_PREFIX_NPX_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npx-cli.js"
# a path that will fail -f test on any posix bash
@@ -38,7 +55,7 @@ NPX_WSL_PATH="/.."
# WSL can run Windows binaries, so we have to give it the win32 path
# however, WSL bash tests against posix paths, so we need to construct that
# to know if npm is installed globally.
-if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then
+if [ "$IS_WSL" == "true" ]; then
NPX_WSL_PATH=`wslpath "$NPM_PREFIX_NPX_CLI_JS"`
fi
if [ -f "$NPM_PREFIX_NPX_CLI_JS" ] || [ -f "$NPX_WSL_PATH" ]; then
diff --git a/bin/npx-cli.js b/bin/npx-cli.js
index 75090aed41f1f..e2e1b87906abe 100755
--- a/bin/npx-cli.js
+++ b/bin/npx-cli.js
@@ -24,9 +24,9 @@ const removed = new Set([
...removedOpts,
])
-const { definitions, shorthands } = require('../lib/utils/config/index.js')
+const { definitions, shorthands } = require('@npmcli/config/lib/definitions')
const npmSwitches = Object.entries(definitions)
- .filter(([key, { type }]) => type === Boolean ||
+ .filter(([, { type }]) => type === Boolean ||
(Array.isArray(type) && type.includes(Boolean)))
.map(([key]) => key)
diff --git a/bin/npx.cmd b/bin/npx.cmd
index b79518ec50540..ab991abfc2562 100755
--- a/bin/npx.cmd
+++ b/bin/npx.cmd
@@ -1,20 +1,20 @@
-:: Created by npm, please don't edit manually.
-@ECHO OFF
-
-SETLOCAL
-
-SET "NODE_EXE=%~dp0\node.exe"
-IF NOT EXIST "%NODE_EXE%" (
- SET "NODE_EXE=node"
-)
-
-SET "NPM_CLI_JS=%~dp0\node_modules\npm\bin\npm-cli.js"
-SET "NPX_CLI_JS=%~dp0\node_modules\npm\bin\npx-cli.js"
-FOR /F "delims=" %%F IN ('CALL "%NODE_EXE%" "%NPM_CLI_JS%" prefix -g') DO (
- SET "NPM_PREFIX_NPX_CLI_JS=%%F\node_modules\npm\bin\npx-cli.js"
-)
-IF EXIST "%NPM_PREFIX_NPX_CLI_JS%" (
- SET "NPX_CLI_JS=%NPM_PREFIX_NPX_CLI_JS%"
-)
-
-"%NODE_EXE%" "%NPX_CLI_JS%" %*
+:: Created by npm, please don't edit manually.
+@ECHO OFF
+
+SETLOCAL
+
+SET "NODE_EXE=%~dp0\node.exe"
+IF NOT EXIST "%NODE_EXE%" (
+ SET "NODE_EXE=node"
+)
+
+SET "NPM_PREFIX_JS=%~dp0\node_modules\npm\bin\npm-prefix.js"
+SET "NPX_CLI_JS=%~dp0\node_modules\npm\bin\npx-cli.js"
+FOR /F "delims=" %%F IN ('CALL "%NODE_EXE%" "%NPM_PREFIX_JS%"') DO (
+ SET "NPM_PREFIX_NPX_CLI_JS=%%F\node_modules\npm\bin\npx-cli.js"
+)
+IF EXIST "%NPM_PREFIX_NPX_CLI_JS%" (
+ SET "NPX_CLI_JS=%NPM_PREFIX_NPX_CLI_JS%"
+)
+
+"%NODE_EXE%" "%NPX_CLI_JS%" %*
diff --git a/bin/npx.ps1 b/bin/npx.ps1
new file mode 100644
index 0000000000000..28dae51b22ca9
--- /dev/null
+++ b/bin/npx.ps1
@@ -0,0 +1,32 @@
+#!/usr/bin/env pwsh
+
+$NODE_EXE="$PSScriptRoot/node.exe"
+if (-not (Test-Path $NODE_EXE)) {
+ $NODE_EXE="$PSScriptRoot/node"
+}
+if (-not (Test-Path $NODE_EXE)) {
+ $NODE_EXE="node"
+}
+
+$NPM_PREFIX_JS="$PSScriptRoot/node_modules/npm/bin/npm-prefix.js"
+$NPX_CLI_JS="$PSScriptRoot/node_modules/npm/bin/npx-cli.js"
+$NPM_PREFIX=(& $NODE_EXE $NPM_PREFIX_JS)
+
+if ($LASTEXITCODE -ne 0) {
+ Write-Host "Could not determine Node.js install directory"
+ exit 1
+}
+
+$NPM_PREFIX_NPX_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npx-cli.js"
+if (Test-Path $NPM_PREFIX_NPX_CLI_JS) {
+ $NPX_CLI_JS=$NPM_PREFIX_NPX_CLI_JS
+}
+
+# Support pipeline input
+if ($MyInvocation.ExpectingInput) {
+ $input | & $NODE_EXE $NPX_CLI_JS $args
+} else {
+ & $NODE_EXE $NPX_CLI_JS $args
+}
+
+exit $LASTEXITCODE
diff --git a/docs/.eslintrc.js b/docs/.eslintrc.js
index 5db9f815536f1..f21d26eccec7d 100644
--- a/docs/.eslintrc.js
+++ b/docs/.eslintrc.js
@@ -10,6 +10,9 @@ const localConfigs = readdir(__dirname)
module.exports = {
root: true,
+ ignorePatterns: [
+ 'tap-testdir*/',
+ ],
extends: [
'@npmcli',
...localConfigs,
diff --git a/docs/.eslintrc.local.json b/docs/.eslintrc.local.json
new file mode 100644
index 0000000000000..2f2f707c490b9
--- /dev/null
+++ b/docs/.eslintrc.local.json
@@ -0,0 +1,5 @@
+{
+ "rules": {
+ "import/no-extraneous-dependencies": "off"
+ }
+}
diff --git a/docs/.gitignore b/docs/.gitignore
index 79af2bfcaa4d8..a96d056a7064e 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -2,6 +2,8 @@
# ignore everything in the root
/*
+# transient test directories
+tap-testdir*/
# keep these
!**/.gitignore
diff --git a/docs/bin/build.js b/docs/bin/build.js
index 6832274fde2e6..602596bc2d494 100644
--- a/docs/bin/build.js
+++ b/docs/bin/build.js
@@ -2,7 +2,7 @@ const run = require('../lib/build.js')
const { paths } = require('../lib/index')
run(paths)
- .then((res) => console.log(`Wrote ${res.length} files`))
+ .then((res) => console.error(`Wrote ${res.length} files`))
.catch((err) => {
process.exitCode = 1
console.error(err)
diff --git a/docs/lib/build.js b/docs/lib/build.js
index 563b4064277f1..cf047f3000938 100644
--- a/docs/lib/build.js
+++ b/docs/lib/build.js
@@ -93,35 +93,45 @@ const run = async ({ content, template, nav, man, html, md }) => {
: []),
])
+ const aliases = [
+ fullName === 'configuring-npm/package-json' && 'configuring-npm/npm-json',
+ fullName === 'configuring-npm/folders' && 'configuring-npm/npm-global',
+ ].filter(Boolean)
+
if (data.section) {
- const manSrc = applyTransforms(transformedSrc, [
+ const manSource = applyTransforms(transformedSrc, [
transform.helpLinks,
transform.man,
])
- const manPaths = [
- name,
- fullName === 'configuring-npm/package-json' && 'npm-json',
- fullName === 'configuring-npm/folders' && 'npm-global',
- ].filter(Boolean).map(p => applyTransforms(p, [transform.manPath]))
-
- acc.man.push(...manPaths.map((manPath) => ({
- path: manPath,
- fullPath: join(man, manPath),
- src: manSrc,
- })))
+ // Man page aliases are only the basename since the man pages have no hierarchy
+ acc.man.push(...[name, ...aliases.map(a => basename(a))]
+ .map((p) => applyTransforms(p, [transform.manPath]))
+ .map((manPath) => ({
+ path: manPath,
+ fullPath: join(man, manPath),
+ src: manSource,
+ }))
+ )
}
- acc.html.push({
- path: `${fullName}.html`,
- fullPath: join(html, `${fullName}.html`),
- src: applyTransforms(transformedSrc, [transform.html]),
- })
-
+ // html docs are used for npm help on Windows
+ const htmlSource = applyTransforms(transformedSrc, [transform.html])
+ acc.html.push(...[fullName, ...aliases].map((htmlPath) => ({
+ path: `${htmlPath}.html`,
+ fullPath: join(html, `${htmlPath}.html`),
+ src: htmlSource,
+ })))
+
+ // Markdown pages don't get aliases here. These are used to build the website so any redirects
+ // for these pages are applied in npm/documentation. Not ideal but there are also many more
+ // redirects that we would never apply to man or html docs pages
+ const mdSource = applyTransforms(transformedSrc, [transform.md])
acc.md.push({
path: childPath,
fullPath: join(md, childPath),
- src: applyTransforms(transformedSrc, [transform.md]),
+ src: mdSource,
})
+
return acc
}, { man: [], html: [], md: [] })
diff --git a/docs/lib/content/commands/npm-access.md b/docs/lib/content/commands/npm-access.md
index 819fe9ad323bb..312546f05c88e 100644
--- a/docs/lib/content/commands/npm-access.md
+++ b/docs/lib/content/commands/npm-access.md
@@ -57,8 +57,7 @@ You must have privileges to set the access of a package:
* You have been given read-write privileges for a package, either as a member
of a team or directly as an owner.
-If you have two-factor authentication enabled then you'll be prompted to
-provide an otp token, or may use the `--otp=...` option to specify it on
+If you have two-factor authentication enabled then you'll be prompted to provide a second factor, or may use the `--otp=...` option to specify it on
the command line.
If your account is not paid, then attempts to publish scoped packages will
diff --git a/docs/lib/content/commands/npm-audit.md b/docs/lib/content/commands/npm-audit.md
index 7a39b34d875be..3e5bc978b26e4 100644
--- a/docs/lib/content/commands/npm-audit.md
+++ b/docs/lib/content/commands/npm-audit.md
@@ -30,6 +30,13 @@ vulnerability is found. It may be useful in CI environments to include the
will cause the command to fail. This option does not filter the report
output, it simply changes the command's failure threshold.
+### Package lock
+
+By default npm requires a package-lock or shrinkwrap in order to run the
+audit. You can bypass the package lock with `--no-package-lock` but be
+aware the results may be different with every run, since npm will
+re-build the dependency tree each time.
+
### Audit Signatures
To ensure the integrity of packages you download from the public npm registry, or any registry that supports signatures, you can verify the registry signatures of downloaded packages using the npm CLI.
@@ -40,6 +47,13 @@ Registry signatures can be verified using the following `audit` command:
$ npm audit signatures
```
+The `audit signatures` command will also verify the provenance attestations of
+downloaded packages. Because provenance attestations are such a new feature,
+security features may be added to (or changed in) the attestation format over
+time. To ensure that you're always able to verify attestation signatures check
+that you're running the latest version of the npm CLI. Please note this often
+means updating npm beyond the version that ships with Node.js.
+
The npm CLI supports registry signatures and signing keys provided by any registry if the following conventions are followed:
1. Signatures are provided in the package's `packument` in each published version within the `dist` object:
@@ -74,13 +88,13 @@ The `sig` is generated using the following template: `${package.name}@${package.
Keys response:
-- `expires`: null or a simplified extended ISO 8601 format : `YYYY-MM-DDTHH:mm:ss.sssZ`
+- `expires`: null or a simplified extended [ISO 8601 format](https://en.wikipedia.org/wiki/ISO_8601): `YYYY-MM-DDTHH:mm:ss.sssZ`
- `keydid`: sha256 fingerprint of the public key
- `keytype`: only `ecdsa-sha2-nistp256` is currently supported by the npm CLI
- `scheme`: only `ecdsa-sha2-nistp256` is currently supported by the npm CLI
- `key`: base64 encoded public key
-See this example key's response from the public npm registry .
+See this [example key's response from the public npm registry](https://registry.npmjs.org/-/npm/v1/keys).
### Audit Endpoints
diff --git a/docs/lib/content/commands/npm-config.md b/docs/lib/content/commands/npm-config.md
index 1874aee418d00..c3a67f6349eb3 100644
--- a/docs/lib/content/commands/npm-config.md
+++ b/docs/lib/content/commands/npm-config.md
@@ -33,9 +33,10 @@ npm config set key=value [key=value...]
npm set key=value [key=value...]
```
-Sets each of the config keys to the value provided.
+Sets each of the config keys to the value provided. Modifies the user configuration
+file unless [`location`](/commands/npm-config#location) is passed.
-If value is omitted, then it sets it to an empty string.
+If value is omitted, the key will be removed from your config file entirely.
Note: for backwards compatibility, `npm config set key value` is supported
as an alias for `npm config set key=value`.
diff --git a/docs/lib/content/commands/npm-dist-tag.md b/docs/lib/content/commands/npm-dist-tag.md
index 34781ebe6777d..40484c63edad5 100644
--- a/docs/lib/content/commands/npm-dist-tag.md
+++ b/docs/lib/content/commands/npm-dist-tag.md
@@ -16,12 +16,12 @@ Add, remove, and enumerate distribution tags on a package:
or the [`--tag` config](/using-npm/config#tag) if not specified. If you have
two-factor authentication on auth-and-writes then you’ll need to include a
one-time password on the command line with
- `--otp `, or at the OTP prompt.
+ `--otp `, or go through a second factor flow based on your `authtype`.
* rm: Clear a tag that is no longer in use from the package. If you have
two-factor authentication on auth-and-writes then you’ll need to include
a one-time password on the command line with `--otp `,
- or at the OTP prompt.
+  or go through a second factor flow based on your `authtype`.
* ls: Show all of the dist-tags for a package, defaulting to the package in
the current prefix. This is the default action if none is specified.
diff --git a/docs/lib/content/commands/npm-doctor.md b/docs/lib/content/commands/npm-doctor.md
index a015521fc1b3e..b5c8126b272c0 100644
--- a/docs/lib/content/commands/npm-doctor.md
+++ b/docs/lib/content/commands/npm-doctor.md
@@ -1,7 +1,7 @@
---
title: npm-doctor
section: 1
-description: Check your npm environment
+description: Check the health of your npm environment
---
### Synopsis
@@ -34,20 +34,21 @@ there are any recommended changes, it will display them. By default npm
runs all of these checks. You can limit what checks are ran by
specifying them as extra arguments.
-#### `npm ping`
+#### `Connecting to the registry`
By default, npm installs from the primary npm registry,
-`registry.npmjs.org`. `npm doctor` hits a special ping endpoint within the
-registry. This can also be checked with `npm ping`. If this check fails,
-you may be using a proxy that needs to be configured, or may need to talk
-to your IT staff to get access over HTTPS to `registry.npmjs.org`.
+`registry.npmjs.org`. `npm doctor` hits a special connection testing
+endpoint within the registry. This can also be checked with `npm ping`.
+If this check fails, you may be using a proxy that needs to be
+configured, or may need to talk to your IT staff to get access over
+HTTPS to `registry.npmjs.org`.
This check is done against whichever registry you've configured (you can
see what that is by running `npm config get registry`), and if you're using
a private registry that doesn't support the `/whoami` endpoint supported by
the primary registry, this check may fail.
-#### `npm -v`
+#### `Checking npm version`
While Node.js may come bundled with a particular version of npm, it's the
policy of the CLI team that we recommend all users run `npm@latest` if they
@@ -57,7 +58,7 @@ support releases typically only receive critical security and regression
fixes. The team believes that the latest tested version of npm is almost
always likely to be the most functional and defect-free version of npm.
-#### `node -v`
+#### `Checking node version`
For most users, in most circumstances, the best version of Node will be the
latest long-term support (LTS) release. Those of you who want access to new
@@ -66,7 +67,7 @@ be running a newer version, and some may be required to run an older
version of Node because of enterprise change control policies. That's OK!
But in general, the npm team recommends that most users run Node.js LTS.
-#### `npm config get registry`
+#### `Checking configured npm registry`
You may be installing from private package registries for your project or
company. That's great! Others may be following tutorials or StackOverflow
@@ -75,7 +76,7 @@ Sometimes, this may entail changing the registry you're pointing at. This
part of `npm doctor` just lets you, and maybe whoever's helping you with
support, know that you're not using the default registry.
-#### `which git`
+#### `Checking for git executable in PATH`
While it's documented in the README, it may not be obvious that npm needs
Git installed to do many of the things that it does. Also, in some cases
diff --git a/docs/lib/content/commands/npm-install.md b/docs/lib/content/commands/npm-install.md
index a705da2cb32d3..1b783b1b13fc2 100644
--- a/docs/lib/content/commands/npm-install.md
+++ b/docs/lib/content/commands/npm-install.md
@@ -134,6 +134,8 @@ into a tarball (b).
* `-D, --save-dev`: Package will appear in your `devDependencies`.
+ * `--save-peer`: Package will appear in your `peerDependencies`.
+
* `-O, --save-optional`: Package will appear in your
`optionalDependencies`.
diff --git a/docs/lib/content/commands/npm-owner.md b/docs/lib/content/commands/npm-owner.md
index a4c5762358111..9ff67b5784c59 100644
--- a/docs/lib/content/commands/npm-owner.md
+++ b/docs/lib/content/commands/npm-owner.md
@@ -24,8 +24,8 @@ or you can't. Future versions may contain more fine-grained access levels, but
that is not implemented at this time.
If you have two-factor authentication enabled with `auth-and-writes` (see
-[`npm-profile`](/commands/npm-profile)) then you'll need to include an otp
-on the command line when changing ownership with `--otp`.
+[`npm-profile`](/commands/npm-profile)) then you'll need to go through a second factor
+flow when changing ownership or include an otp on the command line with `--otp`.
### Configuration
diff --git a/docs/lib/content/commands/npm-pkg.md b/docs/lib/content/commands/npm-pkg.md
index dbb2f27e5c9aa..ae49409f81f2e 100644
--- a/docs/lib/content/commands/npm-pkg.md
+++ b/docs/lib/content/commands/npm-pkg.md
@@ -135,6 +135,13 @@ Returned values are always in **json** format.
npm pkg delete scripts.build
```
+* `npm pkg fix`
+
+ Auto corrects common errors in your `package.json`. npm already
+ does this during `publish`, which leads to subtle (mostly harmless)
+ differences between the contents of your `package.json` file and the
+ manifest that npm uses during installation.
+
### Workspaces support
You can set/get/delete items across your configured workspaces by using the
@@ -175,5 +182,4 @@ npm pkg get name version --ws
* [npm install](/commands/npm-install)
* [npm init](/commands/npm-init)
* [npm config](/commands/npm-config)
-* [npm set-script](/commands/npm-set-script)
* [workspaces](/using-npm/workspaces)
diff --git a/docs/lib/content/commands/npm-prefix.md b/docs/lib/content/commands/npm-prefix.md
index 6268e253552bf..913e7eea3a7e8 100644
--- a/docs/lib/content/commands/npm-prefix.md
+++ b/docs/lib/content/commands/npm-prefix.md
@@ -36,7 +36,6 @@ npm prefix -g
### See Also
* [npm root](/commands/npm-root)
-* [npm bin](/commands/npm-bin)
* [npm folders](/configuring-npm/folders)
* [npm config](/commands/npm-config)
* [npmrc](/configuring-npm/npmrc)
diff --git a/docs/lib/content/commands/npm-profile.md b/docs/lib/content/commands/npm-profile.md
index 0c93cef0d60da..ba6613393d736 100644
--- a/docs/lib/content/commands/npm-profile.md
+++ b/docs/lib/content/commands/npm-profile.md
@@ -17,28 +17,17 @@ support this interface.
* `npm profile get []`: Display all of the properties of your
profile, or one or more specific properties. It looks like:
-```bash
-+-----------------+---------------------------+
-| name | example |
-+-----------------+---------------------------+
-| email | me@example.com (verified) |
-+-----------------+---------------------------+
-| two factor auth | auth-and-writes |
-+-----------------+---------------------------+
-| fullname | Example User |
-+-----------------+---------------------------+
-| homepage | |
-+-----------------+---------------------------+
-| freenode | |
-+-----------------+---------------------------+
-| twitter | |
-+-----------------+---------------------------+
-| github | |
-+-----------------+---------------------------+
-| created | 2015-02-26T01:38:35.892Z |
-+-----------------+---------------------------+
-| updated | 2017-10-02T21:29:45.922Z |
-+-----------------+---------------------------+
+```
+name: example
+email: e@example.com (verified)
+two-factor auth: auth-and-writes
+fullname: Example User
+homepage:
+freenode:
+twitter:
+github:
+created: 2015-02-26T01:38:35.892Z
+updated: 2017-10-02T21:29:45.922Z
```
* `npm profile set `: Set the value of a profile
diff --git a/docs/lib/content/commands/npm-prune.md b/docs/lib/content/commands/npm-prune.md
index d0871c55e8054..d1f48a67be1bc 100644
--- a/docs/lib/content/commands/npm-prune.md
+++ b/docs/lib/content/commands/npm-prune.md
@@ -16,10 +16,9 @@ then only packages matching one of the supplied names are removed.
Extraneous packages are those present in the `node_modules` folder that are
not listed as any package's dependency list.
-If the `--production` flag is specified or the `NODE_ENV` environment
+If the `--omit=dev` flag is specified or the `NODE_ENV` environment
variable is set to `production`, this command will remove the packages
-specified in your `devDependencies`. Setting `--no-production` will negate
-`NODE_ENV` being set to `production`.
+specified in your `devDependencies`.
If the `--dry-run` flag is used then no changes will actually be made.
diff --git a/docs/lib/content/commands/npm-publish.md b/docs/lib/content/commands/npm-publish.md
index f8481a99b4401..7c97401440670 100644
--- a/docs/lib/content/commands/npm-publish.md
+++ b/docs/lib/content/commands/npm-publish.md
@@ -20,7 +20,7 @@ scope-configured registry (see
A `package` is interpreted the same way as other commands (like
-`npm install` and can be:
+`npm install`) and can be:
* a) a folder containing a program described by a
[`package.json`](/configuring-npm/package-json) file
@@ -50,7 +50,7 @@ to the registry.
### Files included in package
-To see what will be included in your package, run `npx npm-packlist`. All
+To see what will be included in your package, run `npm pack --dry-run`. All
files are included by default, with the following exceptions:
- Certain files that are relevant to package installation and distribution
diff --git a/docs/lib/content/commands/npm-query.md b/docs/lib/content/commands/npm-query.md
index e6bf53f3de614..490eccffcc4b3 100644
--- a/docs/lib/content/commands/npm-query.md
+++ b/docs/lib/content/commands/npm-query.md
@@ -134,6 +134,32 @@ npm query ":type(git)" | jq 'map(.name)' | xargs -I {} npm why {}
...
```
+### Expecting a certain number of results
+
+One common use of `npm query` is to make sure there is only one version of
+a certain dependency in your tree. This is especially common for
+ecosystems that rely on `typescript` where having state split
+across two different but identically-named packages causes bugs. You
+can use the `--expect-results` or `--expect-result-count` in your setup
+to ensure that npm will exit with a non-zero exit code if your tree doesn't look
+like you want it to.
+
+
+```sh
+$ npm query '#react' --expect-result-count=1
+```
+
+Perhaps you want to quickly check if there are any production
+dependencies that could be updated:
+
+```sh
+$ npm query ':root>:outdated(in-range).prod' --no-expect-results
+```
+
+### Package lock only mode
+
+If package-lock-only is enabled, only the information in the package lock (or shrinkwrap) is loaded. This means that information from the package.json files of your dependencies will not be included in the result set (e.g. description, homepage, engines).
+
### Configuration
diff --git a/docs/lib/content/commands/npm-rebuild.md b/docs/lib/content/commands/npm-rebuild.md
index da3a088ac7689..aee332e37d8a1 100644
--- a/docs/lib/content/commands/npm-rebuild.md
+++ b/docs/lib/content/commands/npm-rebuild.md
@@ -10,14 +10,29 @@ description: Rebuild a package
### Description
-This command runs the `npm build` command on the matched folders. This is
-useful when you install a new version of node, and must recompile all your
-C++ addons with the new binary. It is also useful when installing with
-`--ignore-scripts` and `--no-bin-links`, to explicitly choose which
-packages to build and/or link bins.
-
-If one or more package specs are provided, then only packages with a
-name and version matching one of the specifiers will be rebuilt.
+This command does the following:
+
+1. Execute lifecycle scripts (`preinstall`, `install`, `postinstall`, `prepare`)
+2. Links bins depending on whether bin links are enabled
+
+This command is particularly useful in scenarios including but not limited to:
+
+1. Installing a new version of **node.js**, where you need to recompile all your C++ add-ons with the updated binary.
+2. Installing with `--ignore-scripts` and `--no-bin-links`, to explicitly choose which packages to build and/or link bins.
+
+If one or more package specs are provided, then only packages with a name and version matching one of the specifiers will be rebuilt.
+
+Usually, you should not need to run `npm rebuild` as it is already done for you as part of npm install (unless you suppressed these steps with `--ignore-scripts` or `--no-bin-links`).
+
+If there is a `binding.gyp` file in the root of your package, then npm will use a default install hook:
+
+```
+"scripts": {
+ "install": "node-gyp rebuild"
+}
+```
+
+This default behavior is suppressed if the `package.json` has its own `install` or `preinstall` scripts. It is also suppressed if the package specifies `"gypfile": false`.
### Configuration
diff --git a/docs/lib/content/commands/npm-root.md b/docs/lib/content/commands/npm-root.md
index 6bf6c3ee811ac..60b77bb5a839c 100644
--- a/docs/lib/content/commands/npm-root.md
+++ b/docs/lib/content/commands/npm-root.md
@@ -28,7 +28,6 @@ echo "Global packages installed in: ${global_node_modules}"
### See Also
* [npm prefix](/commands/npm-prefix)
-* [npm bin](/commands/npm-bin)
* [npm folders](/configuring-npm/folders)
* [npm config](/commands/npm-config)
* [npmrc](/configuring-npm/npmrc)
diff --git a/docs/lib/content/commands/npm-sbom.md b/docs/lib/content/commands/npm-sbom.md
new file mode 100644
index 0000000000000..a5ac81baf6704
--- /dev/null
+++ b/docs/lib/content/commands/npm-sbom.md
@@ -0,0 +1,223 @@
+---
+title: npm-sbom
+section: 1
+description: Generate a Software Bill of Materials (SBOM)
+---
+
+### Synopsis
+
+
+
+### Description
+
+The `npm sbom` command generates a Software Bill of Materials (SBOM) listing the
+dependencies for the current project. SBOMs can be generated in either
+[SPDX](https://spdx.dev/) or [CycloneDX](https://cyclonedx.org/) format.
+
+### Example CycloneDX SBOM
+
+```json
+{
+ "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json",
+ "bomFormat": "CycloneDX",
+ "specVersion": "1.5",
+ "serialNumber": "urn:uuid:09f55116-97e1-49cf-b3b8-44d0207e7730",
+ "version": 1,
+ "metadata": {
+ "timestamp": "2023-09-01T00:00:00.001Z",
+ "lifecycles": [
+ {
+ "phase": "build"
+ }
+ ],
+ "tools": [
+ {
+ "vendor": "npm",
+ "name": "cli",
+ "version": "10.1.0"
+ }
+ ],
+ "component": {
+ "bom-ref": "simple@1.0.0",
+ "type": "library",
+ "name": "simple",
+ "version": "1.0.0",
+ "scope": "required",
+ "author": "John Doe",
+ "description": "simple react app",
+ "purl": "pkg:npm/simple@1.0.0",
+ "properties": [
+ {
+ "name": "cdx:npm:package:path",
+ "value": ""
+ }
+ ],
+ "externalReferences": [],
+ "licenses": [
+ {
+ "license": {
+ "id": "MIT"
+ }
+ }
+ ]
+ }
+ },
+ "components": [
+ {
+ "bom-ref": "lodash@4.17.21",
+ "type": "library",
+ "name": "lodash",
+ "version": "4.17.21",
+ "scope": "required",
+ "author": "John-David Dalton",
+ "description": "Lodash modular utilities.",
+ "purl": "pkg:npm/lodash@4.17.21",
+ "properties": [
+ {
+ "name": "cdx:npm:package:path",
+ "value": "node_modules/lodash"
+ }
+ ],
+ "externalReferences": [
+ {
+ "type": "distribution",
+ "url": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
+ },
+ {
+ "type": "vcs",
+ "url": "git+https://github.com/lodash/lodash.git"
+ },
+ {
+ "type": "website",
+ "url": "https://lodash.com/"
+ },
+ {
+ "type": "issue-tracker",
+ "url": "https://github.com/lodash/lodash/issues"
+ }
+ ],
+ "hashes": [
+ {
+ "alg": "SHA-512",
+ "content": "bf690311ee7b95e713ba568322e3533f2dd1cb880b189e99d4edef13592b81764daec43e2c54c61d5c558dc5cfb35ecb85b65519e74026ff17675b6f8f916f4a"
+ }
+ ],
+ "licenses": [
+ {
+ "license": {
+ "id": "MIT"
+ }
+ }
+ ]
+ }
+ ],
+ "dependencies": [
+ {
+ "ref": "simple@1.0.0",
+ "dependsOn": [
+ "lodash@4.17.21"
+ ]
+ },
+ {
+ "ref": "lodash@4.17.21",
+ "dependsOn": []
+ }
+ ]
+}
+```
+
+### Example SPDX SBOM
+
+```json
+{
+ "spdxVersion": "SPDX-2.3",
+ "dataLicense": "CC0-1.0",
+ "SPDXID": "SPDXRef-DOCUMENT",
+ "name": "simple@1.0.0",
+ "documentNamespace": "http://spdx.org/spdxdocs/simple-1.0.0-bf81090e-8bbc-459d-bec9-abeb794e096a",
+ "creationInfo": {
+ "created": "2023-09-01T00:00:00.001Z",
+ "creators": [
+ "Tool: npm/cli-10.1.0"
+ ]
+ },
+ "documentDescribes": [
+ "SPDXRef-Package-simple-1.0.0"
+ ],
+ "packages": [
+ {
+ "name": "simple",
+ "SPDXID": "SPDXRef-Package-simple-1.0.0",
+ "versionInfo": "1.0.0",
+ "packageFileName": "",
+ "description": "simple react app",
+ "primaryPackagePurpose": "LIBRARY",
+ "downloadLocation": "NOASSERTION",
+ "filesAnalyzed": false,
+ "homepage": "NOASSERTION",
+ "licenseDeclared": "MIT",
+ "externalRefs": [
+ {
+ "referenceCategory": "PACKAGE-MANAGER",
+ "referenceType": "purl",
+ "referenceLocator": "pkg:npm/simple@1.0.0"
+ }
+ ]
+ },
+ {
+ "name": "lodash",
+ "SPDXID": "SPDXRef-Package-lodash-4.17.21",
+ "versionInfo": "4.17.21",
+ "packageFileName": "node_modules/lodash",
+ "description": "Lodash modular utilities.",
+ "downloadLocation": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "filesAnalyzed": false,
+ "homepage": "https://lodash.com/",
+ "licenseDeclared": "MIT",
+ "externalRefs": [
+ {
+ "referenceCategory": "PACKAGE-MANAGER",
+ "referenceType": "purl",
+ "referenceLocator": "pkg:npm/lodash@4.17.21"
+ }
+ ],
+ "checksums": [
+ {
+ "algorithm": "SHA512",
+ "checksumValue": "bf690311ee7b95e713ba568322e3533f2dd1cb880b189e99d4edef13592b81764daec43e2c54c61d5c558dc5cfb35ecb85b65519e74026ff17675b6f8f916f4a"
+ }
+ ]
+ }
+ ],
+ "relationships": [
+ {
+ "spdxElementId": "SPDXRef-DOCUMENT",
+ "relatedSpdxElement": "SPDXRef-Package-simple-1.0.0",
+ "relationshipType": "DESCRIBES"
+ },
+ {
+ "spdxElementId": "SPDXRef-Package-simple-1.0.0",
+ "relatedSpdxElement": "SPDXRef-Package-lodash-4.17.21",
+ "relationshipType": "DEPENDS_ON"
+ }
+ ]
+}
+```
+
+### Package lock only mode
+
+If package-lock-only is enabled, only the information in the package
+lock (or shrinkwrap) is loaded. This means that information from the
+package.json files of your dependencies will not be included in the
+result set (e.g. description, homepage, engines).
+
+### Configuration
+
+
+## See Also
+
+* [package spec](/using-npm/package-spec)
+* [dependency selectors](/using-npm/dependency-selectors)
+* [package.json](/configuring-npm/package-json)
+* [workspaces](/using-npm/workspaces)
+
diff --git a/docs/lib/content/commands/npm-team.md b/docs/lib/content/commands/npm-team.md
index 2672b466f75a1..d3b7ca58fe2af 100644
--- a/docs/lib/content/commands/npm-team.md
+++ b/docs/lib/content/commands/npm-team.md
@@ -20,11 +20,12 @@ as `@org:newteam` in these commands.
If you have two-factor authentication enabled in `auth-and-writes` mode, then
you can provide a code from your authenticator with `[--otp ]`.
-If you don't include this then you will be prompted.
+If you don't include this then you will be taken through a second factor flow based
+on your `authtype`.
* create / destroy:
Create a new team, or destroy an existing one. Note: You cannot remove the
- `developers` team, learn more.
+  `developers` team, [learn more](https://docs.npmjs.com/about-developers-team).
Here's how to create a new team `newteam` under the `org` org:
diff --git a/docs/lib/content/commands/npm-token.md b/docs/lib/content/commands/npm-token.md
index 81624ba6952f8..d4d9d6bd83cce 100644
--- a/docs/lib/content/commands/npm-token.md
+++ b/docs/lib/content/commands/npm-token.md
@@ -16,24 +16,14 @@ This lets you list, create and revoke authentication tokens.
Shows a table of all active authentication tokens. You can request
this as JSON with `--json` or tab-separated values with `--parseable`.
-```bash
-+--------+---------+------------+----------+----------------+
-| id | token | created | read-only | CIDR whitelist |
-+--------+---------+------------+----------+----------------+
-| 7f3134 | 1fa9ba… | 2017-10-02 | yes | |
-+--------+---------+------------+----------+----------------+
-| c03241 | af7aef… | 2017-10-02 | no | 192.168.0.1/24 |
-+--------+---------+------------+----------+----------------+
-| e0cf92 | 3a436a… | 2017-10-02 | no | |
-+--------+---------+------------+----------+----------------+
-| 63eb9d | 74ef35… | 2017-09-28 | no | |
-+--------+---------+------------+----------+----------------+
-| 2daaa8 | cbad5f… | 2017-09-26 | no | |
-+--------+---------+------------+----------+----------------+
-| 68c2fe | 127e51… | 2017-09-23 | no | |
-+--------+---------+------------+----------+----------------+
-| 6334e1 | 1dadd1… | 2017-09-23 | no | |
-+--------+---------+------------+----------+----------------+
+```
+Read only token npm_1f… with id 7f3134 created 2017-10-21
+
+Publish token npm_af… with id c03241 created 2017-10-02
+with IP Whitelist: 192.168.0.1/24
+
+Publish token npm_… with id e0cf92 created 2017-10-02
+
```
* `npm token create [--read-only] [--cidr=]`:
@@ -49,16 +39,8 @@ This lets you list, create and revoke authentication tokens.
website](https://docs.npmjs.com/creating-and-viewing-access-tokens)
for more information on generating automation tokens.
-```bash
-+----------------+--------------------------------------+
-| token | a73c9572-f1b9-8983-983d-ba3ac3cc913d |
-+----------------+--------------------------------------+
-| cidr_whitelist | |
-+----------------+--------------------------------------+
-| readonly | false |
-+----------------+--------------------------------------+
-| created | 2017-10-02T07:52:24.838Z |
-+----------------+--------------------------------------+
+```
+Created publish token a73c9572-f1b9-8983-983d-ba3ac3cc913d
```
* `npm token revoke `:
diff --git a/docs/lib/content/commands/npm-unpublish.md b/docs/lib/content/commands/npm-unpublish.md
index 76f4cd00bf061..741fc83cee9aa 100644
--- a/docs/lib/content/commands/npm-unpublish.md
+++ b/docs/lib/content/commands/npm-unpublish.md
@@ -8,9 +8,8 @@ description: Remove a package from the registry
-To learn more about how the npm registry treats unpublish, see our unpublish policies
+To learn more about how the npm registry treats unpublish, see our
+[unpublish policies](https://docs.npmjs.com/policies/unpublish).
### Warning
@@ -26,8 +25,12 @@ removing the tarball.
The npm registry will return an error if you are not [logged
in](/commands/npm-adduser).
-If you do not specify a version or if you remove all of a package's
-versions then the registry will remove the root package entry entirely.
+If you do not specify a package name at all, the name and version to be
+unpublished will be pulled from the project in the current directory.
+
+If you specify a package name but do not specify a version or if you
+remove all of a package's versions then the registry will remove the
+root package entry entirely.
Even if you unpublish a package version, that specific name and version
combination can never be reused. In order to publish the package again,
diff --git a/docs/lib/content/commands/npm-update.md b/docs/lib/content/commands/npm-update.md
index 7e4f0564d17d7..65919a4a7f914 100644
--- a/docs/lib/content/commands/npm-update.md
+++ b/docs/lib/content/commands/npm-update.md
@@ -24,7 +24,7 @@ If no package name is specified, all packages in the specified location (global
or local) will be updated.
Note that by default `npm update` will not update the semver values of direct
-dependencies in your project `package.json`, if you want to also update
+dependencies in your project `package.json`. If you want to also update
values in `package.json` you can run: `npm update --save` (or add the
`save=true` option to a [configuration file](/configuring-npm/npmrc)
to make that the default behavior).
@@ -76,7 +76,7 @@ However, if `app`'s `package.json` contains:
```
In this case, running `npm update` will install `dep1@1.1.2`. Even though the
-`latest` tag points to `1.2.2`, this version do not satisfy `~1.1.1`, which is
+`latest` tag points to `1.2.2`, this version does not satisfy `~1.1.1`, which is
equivalent to `>=1.1.1 <1.2.0`. So the highest-sorting version that satisfies
`~1.1.1` is used, which is `1.1.2`.
@@ -90,8 +90,7 @@ Suppose `app` has a caret dependency on a version below `1.0.0`, for example:
}
```
-`npm update` will install `dep1@0.2.0`, because there are no other
-versions which satisfy `^0.2.0`.
+`npm update` will install `dep1@0.2.0`.
If the dependence were on `^0.4.0`:
diff --git a/docs/lib/content/commands/npm-view.md b/docs/lib/content/commands/npm-view.md
index a74427f32a0e6..63ff520e7bd29 100644
--- a/docs/lib/content/commands/npm-view.md
+++ b/docs/lib/content/commands/npm-view.md
@@ -28,6 +28,13 @@ For example, to show the dependencies of the `ronn` package at version
npm view ronn@0.3.5 dependencies
```
+By default, `npm view` shows data about the current project context (by looking for a `package.json`).
+To show field data for the current project use a file path (i.e. `.`):
+
+```bash
+npm view . dependencies
+```
+
You can view child fields by separating them with a period.
To view the git repository URL for the latest version of `npm`, you would run the following command:
diff --git a/docs/lib/content/commands/npm.md b/docs/lib/content/commands/npm.md
index d47a34e74eaf2..5dac1b24bf66c 100644
--- a/docs/lib/content/commands/npm.md
+++ b/docs/lib/content/commands/npm.md
@@ -130,7 +130,7 @@ npm is extremely configurable. It reads its configuration options from
in the cli, env, or user config, then that file is parsed instead.
* Defaults:
npm's default configuration options are defined in
- lib/utils/config-defs.js. These must not be changed.
+ `lib/utils/config/definitions.js`. These must not be changed.
See [`config`](/using-npm/config) for much much more information.
@@ -154,7 +154,7 @@ Please be sure to follow the template and bug reporting guidelines.
Discuss new feature ideas on our discussion forum:
-*
+*
Or suggest formal RFC proposals:
diff --git a/docs/lib/content/commands/npx.md b/docs/lib/content/commands/npx.md
index 6d6aed6be2979..ca72b3e03bdae 100644
--- a/docs/lib/content/commands/npx.md
+++ b/docs/lib/content/commands/npx.md
@@ -145,7 +145,8 @@ This resulted in some shifts in its functionality:
always present in the executed process `PATH`.
- The `--npm` option is removed. `npx` will always use the `npm` it ships
with.
-- The `--node-arg` and `-n` options are removed.
+- The `--node-arg` and `-n` options have been removed. Use [`NODE_OPTIONS`](https://nodejs.org/api/cli.html#node_optionsoptions) instead: e.g.,
+ `NODE_OPTIONS="--trace-warnings --trace-exit" npx foo --random=true`
- The `--always-spawn` option is redundant, and thus removed.
- The `--shell` option is replaced with `--script-shell`, but maintained
in the `npx` executable for backwards compatibility.
diff --git a/docs/lib/content/configuring-npm/folders.md b/docs/lib/content/configuring-npm/folders.md
index 6295fd7e65d61..5fb4ca257fc33 100644
--- a/docs/lib/content/configuring-npm/folders.md
+++ b/docs/lib/content/configuring-npm/folders.md
@@ -72,7 +72,7 @@ Man pages are not installed on Windows systems.
#### Cache
See [`npm cache`](/commands/npm-cache). Cache files are stored in `~/.npm` on Posix, or
-`%AppData%/npm-cache` on Windows.
+`%LocalAppData%/npm-cache` on Windows.
This is controlled by the [`cache` config](/using-npm/config#cache) param.
diff --git a/docs/lib/content/configuring-npm/install.md b/docs/lib/content/configuring-npm/install.md
index 18b4421687ba9..d9c1d32631fa1 100644
--- a/docs/lib/content/configuring-npm/install.md
+++ b/docs/lib/content/configuring-npm/install.md
@@ -38,17 +38,8 @@ npm -v
Node version managers allow you to install and switch between multiple
versions of Node.js and npm on your system so you can test your
applications on multiple versions of npm to ensure they work for users on
-different versions.
-
-#### OSX or Linux Node version managers
-
-* [nvm](https://github.com/creationix/nvm)
-* [n](https://github.com/tj/n)
-
-#### Windows Node version managers
-
-* [nodist](https://github.com/marcelklehr/nodist)
-* [nvm-windows](https://github.com/coreybutler/nvm-windows)
+different versions. You can
+[search for them on GitHub](https://github.com/search?q=node+version+manager+archived%3Afalse&type=repositories&ref=advsearch).
### Using a Node installer to install Node.js and npm
diff --git a/docs/lib/content/configuring-npm/npmrc.md b/docs/lib/content/configuring-npm/npmrc.md
index 8cd532abc1c2d..0aa99fc271013 100644
--- a/docs/lib/content/configuring-npm/npmrc.md
+++ b/docs/lib/content/configuring-npm/npmrc.md
@@ -19,10 +19,10 @@ For a list of available configuration options, see
The four relevant files are:
-* per-project config file (/path/to/my/project/.npmrc)
-* per-user config file (~/.npmrc)
-* global config file ($PREFIX/etc/npmrc)
-* npm builtin config file (/path/to/npm/npmrc)
+* per-project config file (`/path/to/my/project/.npmrc`)
+* per-user config file (`~/.npmrc`)
+* global config file (`$PREFIX/etc/npmrc`)
+* npm builtin config file (`/path/to/npm/npmrc`)
All npm config files are an ini-formatted list of `key = value` parameters.
Environment variables can be replaced using `${VARIABLE_NAME}`. For
diff --git a/docs/lib/content/configuring-npm/package-json.md b/docs/lib/content/configuring-npm/package-json.md
index 68a4a62d2ae0a..755071c6f10bd 100644
--- a/docs/lib/content/configuring-npm/package-json.md
+++ b/docs/lib/content/configuring-npm/package-json.md
@@ -40,7 +40,7 @@ Some tips:
* Don't use the same name as a core Node module.
* Don't put "js" or "node" in the name. It's assumed that it's js, since
you're writing a package.json file, and you can specify the engine using
- the "engines" field. (See below.)
+ the "[engines](#engines)" field. (See below.)
* The name will probably be passed as an argument to require(), so it
should be something short, but also reasonably descriptive.
* You may want to check the npm registry to see if there's something by
@@ -75,7 +75,7 @@ your package as it's listed in `npm search`.
### homepage
-The url to the project homepage.
+The URL to the project homepage.
Example:
@@ -85,7 +85,7 @@ Example:
### bugs
-The url to your project's issue tracker and / or the email address to which
+The URL to your project's issue tracker and / or the email address to which
issues should be reported. These are helpful for people who encounter
issues with your package.
@@ -93,16 +93,18 @@ It should look like this:
```json
{
- "url" : "https://github.com/owner/project/issues",
- "email" : "project@hostname.com"
+ "bugs": {
+ "url": "https://github.com/owner/project/issues",
+ "email": "project@hostname.com"
+ }
}
```
You can specify either one or both values. If you want to provide only a
-url, you can specify the value for "bugs" as a simple string instead of an
+URL, you can specify the value for "bugs" as a simple string instead of an
object.
-If a url is provided, it will be used by the `npm bugs` command.
+If a URL is provided, it will be used by the `npm bugs` command.
### license
@@ -120,7 +122,7 @@ SPDX license identifier for the license you're using, like this:
You can check [the full list of SPDX license
IDs](https://spdx.org/licenses/). Ideally you should pick one that is
-[OSI](https://opensource.org/licenses/alphabetical) approved.
+[OSI](https://opensource.org/licenses/) approved.
If your package is licensed under multiple common licenses, use an [SPDX
license expression syntax version 2.0
@@ -223,23 +225,35 @@ npm also sets a top-level "maintainers" field with your npm user info.
### funding
You can specify an object containing a URL that provides up-to-date
-information about ways to help fund development of your package, or a
-string URL, or an array of these:
+information about ways to help fund development of your package, a
+string URL, or an array of objects and string URLs:
```json
{
"funding": {
"type" : "individual",
"url" : "http://example.com/donate"
- },
+ }
+}
+```
+```json
+{
"funding": {
"type" : "patreon",
"url" : "https://www.patreon.com/my-account"
- },
+ }
+}
+```
- "funding": "http://example.com/donate",
+```json
+{
+ "funding": "http://example.com/donate"
+}
+```
+```json
+{
"funding": [
{
"type" : "individual",
@@ -256,7 +270,7 @@ string URL, or an array of these:
Users can use the `npm fund` subcommand to list the `funding` URLs of all
dependencies of their project, direct and indirect. A shortcut to visit
-each funding url is also available when providing the project name such as:
+each funding URL is also available when providing the project name such as:
`npm fund ` (when there are multiple URLs, the first one will
be visited)
@@ -279,37 +293,49 @@ it will. The `.npmignore` file works just like a `.gitignore`. If there is
a `.gitignore` file, and `.npmignore` is missing, `.gitignore`'s contents
will be used instead.
-Files included with the "package.json#files" field _cannot_ be excluded
-through `.npmignore` or `.gitignore`.
-
Certain files are always included, regardless of settings:
* `package.json`
* `README`
* `LICENSE` / `LICENCE`
* The file in the "main" field
+* The file(s) in the "bin" field
`README` & `LICENSE` can have any case and extension.
-Conversely, some files are always ignored:
+Some files are always ignored by default:
+* `*.orig`
+* `.*.swp`
+* `.DS_Store`
+* `._*`
* `.git`
-* `CVS`
-* `.svn`
* `.hg`
* `.lock-wscript`
+* `.npmrc`
+* `.svn`
* `.wafpickle-N`
-* `.*.swp`
-* `.DS_Store`
-* `._*`
+* `CVS`
+* `config.gypi`
+* `node_modules`
* `npm-debug.log`
+* `package-lock.json` (use
+ [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json)
+ if you wish it to be published)
+* `pnpm-lock.yaml`
+* `yarn.lock`
+
+Most of these ignored files can be included specifically if included in
+the `files` globs. Exceptions to this are:
+
+* `.git`
* `.npmrc`
* `node_modules`
-* `config.gypi`
-* `*.orig`
-* `package-lock.json` (use
- [`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) if you wish
- it to be published)
+* `package-lock.json`
+* `pnpm-lock.yaml`
+* `yarn.lock`
+
+These cannot be included.
### main
@@ -323,7 +349,7 @@ This should be a module relative to the root of your package folder.
For most modules, it makes the most sense to have a main script and often
not much else.
-If `main` is not set it defaults to `index.js` in the package's root folder.
+If `main` is not set, it defaults to `index.js` in the package's root folder.
### browser
@@ -341,11 +367,11 @@ feature to install the "npm" executable.)
To use this, supply a `bin` field in your package.json which is a map of
command name to local file name. When this package is installed globally,
that file will be either linked inside the global bins directory or
-a cmd (Windows Command File) will be created which executes the specified
+a cmd (Windows Command File) will be created which executes the specified
file in the `bin` field, so it is available to run by `name` or `name.cmd` (on
-Windows PowerShell). When this package is installed as a dependency in another
+Windows PowerShell). When this package is installed as a dependency in another
package, the file will be linked where it will be available to that package
-either directly by `npm exec` or by name in other scripts when invoking them
+either directly by `npm exec` or by name in other scripts when invoking them
via `npm run-script`.
@@ -354,15 +380,15 @@ For example, myapp could have this:
```json
{
"bin": {
- "myapp": "./cli.js"
+ "myapp": "bin/cli.js"
}
}
```
-So, when you install myapp, in case of unix-like OS it'll create a symlink
-from the `cli.js` script to `/usr/local/bin/myapp` and in case of windows it
+So, when you install myapp, in case of unix-like OS it'll create a symlink
+from the `cli.js` script to `/usr/local/bin/myapp` and in case of windows it
will create a cmd file usually at `C:\Users\{Username}\AppData\Roaming\npm\myapp.cmd`
-which runs the `cli.js` script.
+which runs the `cli.js` script.
If you have a single executable, and its name should be the name of the
package, then you can just supply it as a string. For example:
@@ -371,7 +397,7 @@ package, then you can just supply it as a string. For example:
{
"name": "my-program",
"version": "1.2.5",
- "bin": "./path/to/program"
+ "bin": "path/to/program"
}
```
@@ -382,7 +408,7 @@ would be the same as this:
"name": "my-program",
"version": "1.2.5",
"bin": {
- "my-program": "./path/to/program"
+ "my-program": "path/to/program"
}
}
```
@@ -483,7 +509,7 @@ walking the folder.
### repository
Specify the place where your code lives. This is helpful for people who
-want to contribute. If the git repo is on GitHub, then the `npm docs`
+want to contribute. If the git repo is on GitHub, then the `npm repo`
command will be able to find you.
Do it like this:
@@ -492,14 +518,14 @@ Do it like this:
{
"repository": {
"type": "git",
- "url": "https://github.com/npm/cli.git"
+ "url": "git+https://github.com/npm/cli.git"
}
}
```
-The URL should be a publicly available (perhaps read-only) url that can be
+The URL should be a publicly available (perhaps read-only) URL that can be
handed directly to a VCS program without any modification. It should not
-be a url to an html project page that you put in your browser. It's for
+be a URL to an html project page that you put in your browser. It's for
computers.
For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the
@@ -527,8 +553,8 @@ which it lives:
{
"repository": {
"type": "git",
- "url": "https://github.com/facebook/react.git",
- "directory": "packages/react-dom"
+ "url": "git+https://github.com/npm/cli.git",
+ "directory": "workspaces/libnpmpublish"
}
}
```
@@ -622,7 +648,7 @@ install time.
#### Git URLs as Dependencies
-Git urls are of the form:
+Git URLs are of the form:
```bash
://[[:]@][:][:][/][# | #semver:]
@@ -669,7 +695,7 @@ will be rebuilt for every installation.
#### GitHub URLs
-As of version 1.1.65, you can refer to GitHub urls as just "foo":
+As of version 1.1.65, you can refer to GitHub URLs as just "foo":
"user/foo-project". Just as with git URLs, a `commit-ish` suffix can be
included. For example:
@@ -712,7 +738,7 @@ in which case they will be normalized to a relative path and added to your
This feature is helpful for local offline development and creating tests
that require npm installing where you don't want to hit an external server,
-but should not be used when publishing packages to the public registry.
+but should not be used when publishing your package to the public registry.
*note*: Packages linked by local path will not have their own
dependencies installed when `npm install` is ran in this case. You must
@@ -804,11 +830,12 @@ to express this. If you depend on features introduced in 1.5.2, use
### peerDependenciesMeta
-When a user installs your package, npm will emit warnings if packages
-specified in `peerDependencies` are not already installed. The
-`peerDependenciesMeta` field serves to provide npm more information on how
+The `peerDependenciesMeta` field serves to provide npm more information on how
your peer dependencies are to be used. Specifically, it allows peer
-dependencies to be marked as optional.
+dependencies to be marked as optional. npm will not automatically install
+optional peer dependencies. This allows you to
+integrate and interact with a variety of host packages without requiring
+all of them to be installed.
For example:
@@ -828,11 +855,6 @@ For example:
}
```
-Marking a peer dependency as optional ensures npm will not emit a warning
-if the `soy-milk` package is not installed on the host. This allows you to
-integrate and interact with a variety of host packages without requiring
-all of them to be installed.
-
### bundleDependencies
This defines an array of package names that will be bundled when publishing
@@ -875,7 +897,7 @@ none.
If a dependency can be used, but you would like npm to proceed if it cannot
be found or fails to install, then you may put it in the
`optionalDependencies` object. This is a map of package name to version or
-url, just like the `dependencies` object. The difference is that build
+URL, just like the `dependencies` object. The difference is that build
failures do not cause installation to fail. Running `npm install
--omit=optional` will prevent these dependencies from being installed.
@@ -914,6 +936,13 @@ Overrides provide a way to replace a package in your dependency tree with
another version, or another package entirely. These changes can be scoped as
specific or as vague as desired.
+Overrides are only considered in the root `package.json` file for a project.
+Overrides in installed dependencies (including
+[workspaces](/using-npm/workspaces)) are not considered in dependency tree
+resolution. Published packages may dictate their resolutions by pinning
+dependencies or using an
+[`npm-shrinkwrap.json`](/configuring-npm/npm-shrinkwrap-json) file.
+
To make sure the package `foo` is always installed as version `1.0.0` no matter
what version your dependencies rely on:
diff --git a/docs/lib/content/configuring-npm/package-lock-json.md b/docs/lib/content/configuring-npm/package-lock-json.md
index 4aa8dc375c89f..f3b012175fa0e 100644
--- a/docs/lib/content/configuring-npm/package-lock-json.md
+++ b/docs/lib/content/configuring-npm/package-lock-json.md
@@ -31,12 +31,14 @@ various purposes:
picture of the package tree, reducing the need to read `package.json`
files, and allowing for significant performance improvements.
+When `npm` creates or updates `package-lock.json`, it will infer line endings and indentation from `package.json` so that the formatting of both files matches.
+
### `package-lock.json` vs `npm-shrinkwrap.json`
Both of these files have the same format, and perform similar functions in
the root of a project.
-The difference is that `package-lock.json` cannot be published, and it will
+The difference is that `package-lock.json` cannot be published, and it will
be ignored if found in any place other than the root project.
In contrast, [npm-shrinkwrap.json](/configuring-npm/npm-shrinkwrap-json) allows
@@ -112,12 +114,9 @@ the npm registry. Lockfiles generated by npm v7 will contain
* No version provided: an "ancient" shrinkwrap file from a version of npm
prior to npm v5.
* `1`: The lockfile version used by npm v5 and v6.
-* `2`: The lockfile version used by npm v7, which is backwards compatible
- to v1 lockfiles.
-* `3`: The lockfile version used by npm v7, _without_ backwards
- compatibility affordances. This is used for the hidden lockfile at
- `node_modules/.package-lock.json`, and will likely be used in a future
- version of npm, once support for npm v6 is no longer relevant.
+* `2`: The lockfile version used by npm v7 and v8. Backwards compatible to v1
+ lockfiles.
+* `3`: The lockfile version used by npm v9 and above. Backwards compatible to npm v7.
npm will always attempt to get whatever data it can out of a lockfile, even
if it is not a version that it was designed to support.
diff --git a/docs/lib/content/nav.yml b/docs/lib/content/nav.yml
index 00e62eaaf94e9..6b7325fec5f12 100644
--- a/docs/lib/content/nav.yml
+++ b/docs/lib/content/nav.yml
@@ -150,6 +150,9 @@
- title: npm run-script
url: /commands/npm-run-script
description: Run arbitrary package scripts
+ - title: npm sbom
+ url: /commands/npm-sbom
+ description: Generate a Software Bill of Materials (SBOM)
- title: npm search
url: /commands/npm-search
description: Search for packages
diff --git a/docs/lib/content/using-npm/config.md b/docs/lib/content/using-npm/config.md
index 14972ea2fbaa1..ba0e54d8da9f9 100644
--- a/docs/lib/content/using-npm/config.md
+++ b/docs/lib/content/using-npm/config.md
@@ -6,6 +6,9 @@ description: More than you probably want to know about npm configuration
### Description
+This article details npm configuration in general. To learn about the `config` command,
+see [`npm config`](/commands/npm-config).
+
npm gets its configuration values from the following sources, sorted by priority:
#### Command Line Flags
diff --git a/docs/lib/content/using-npm/dependency-selectors.md b/docs/lib/content/using-npm/dependency-selectors.md
index 5cedf8fe84030..5f7e27ad21848 100644
--- a/docs/lib/content/using-npm/dependency-selectors.md
+++ b/docs/lib/content/using-npm/dependency-selectors.md
@@ -13,7 +13,7 @@ The [`npm query`](/commands/npm-query) command exposes a new dependency selector
- Unlocks the ability to answer complex, multi-faceted questions about dependencies, their relationships & associative metadata
- Consolidates redundant logic of similar query commands in `npm` (ex. `npm fund`, `npm ls`, `npm outdated`, `npm audit` ...)
-### Dependency Selector Syntax `v1.0.0`
+### Dependency Selector Syntax
#### Overview:
@@ -62,6 +62,7 @@ The [`npm query`](/commands/npm-query) command exposes a new dependency selector
- `:path()` [glob](https://www.npmjs.com/package/glob) matching based on dependencies path relative to the project
- `:type()` [based on currently recognized types](https://github.com/npm/npm-package-arg#result-object)
- `:outdated()` when a dependency is outdated
+- `:vuln()` when a dependency has a known vulnerability
##### `:semver(, [selector], [function])`
@@ -84,8 +85,8 @@ Some examples:
The `:outdated` pseudo selector retrieves data from the registry and returns information about which of your dependencies are outdated. The type parameter may be one of the following:
- `any` (default) a version exists that is greater than the current one
-- `in-range` a version exists that is greater than the current one, and satisfies at least one if its dependents
-- `out-of-range` a version exists that is greater than the current one, does not satisfy at least one of its dependents
+- `in-range` a version exists that is greater than the current one, and satisfies at least one of its parent's dependencies
+- `out-of-range` a version exists that is greater than the current one, does not satisfy at least one of its parent's dependencies
- `major` a version exists that is a semver major greater than the current one
- `minor` a version exists that is a semver minor greater than the current one
- `patch` a version exists that is a semver patch greater than the current one
@@ -99,14 +100,29 @@ In addition to the filtering performed by the pseudo selector, some extra data i
Some examples:
- `:root > :outdated(major)` returns every direct dependency that has a new semver major release
-- `.prod:outdated(in-range)` returns production dependencies that have a new release that satisfies at least one of its edges in
+- `.prod:outdated(in-range)` returns production dependencies that have a new release that satisfies at least one of its parent's dependencies
+
+##### `:vuln`
+
+The `:vuln` pseudo selector retrieves data from the registry and returns information about which of your dependencies has a known vulnerability. Only dependencies whose current version matches a vulnerability will be returned. For example if you have `semver@7.6.0` in your tree, a vulnerability for `semver` which affects versions `<=6.3.1` will not match.
+
+You can also filter results by certain attributes in advisories. Currently that includes `severity` and `cwe`. Note that severity filtering is done per severity, it does not include severities "higher" or "lower" than the one specified.
+
+In addition to the filtering performed by the pseudo selector, info about each relevant advisory will be added to the `queryContext` attribute of each node under the `advisories` attribute.
+
+Some examples:
+
+- `:root > .prod:vuln` returns direct production dependencies with any known vulnerability
+- `:vuln([severity=high])` returns only dependencies with a vulnerability with a `high` severity.
+- `:vuln([severity=high],[severity=moderate])` returns only dependencies with a vulnerability with a `high` or `moderate` severity.
+- `:vuln([cwe=1333])` returns only dependencies with a vulnerability that includes CWE-1333 (ReDoS)
#### [Attribute Selectors](https://developer.mozilla.org/en-US/docs/Web/CSS/Attribute_selectors)
The attribute selector evaluates the key/value pairs in `package.json` if they are `String`s.
- `[]` attribute selector (ie. existence of attribute)
-- `[attribute=value]` attribute value is equivalant...
+- `[attribute=value]` attribute value is equivalent...
- `[attribute~=value]` attribute value contains word...
- `[attribute*=value]` attribute value contains string...
- `[attribute|=value]` attribute value is equal to or starts with...
diff --git a/docs/lib/content/using-npm/registry.md b/docs/lib/content/using-npm/registry.md
index 8d5ac94160b33..035ede5b32a3a 100644
--- a/docs/lib/content/using-npm/registry.md
+++ b/docs/lib/content/using-npm/registry.md
@@ -35,7 +35,7 @@ Authentication configuration such as auth tokens and certificates are configured
specifically scoped to an individual registry. See
[Auth Related Configuration](/configuring-npm/npmrc#auth-related-configuration)
-When the default registry is used in a package-lock or shrinkwrap is has the
+When the default registry is used in a package-lock or shrinkwrap it has the
special meaning of "the currently configured registry". If you create a lock
file while using the default registry you can switch to another registry and
npm will install packages from the new registry, but if you create a lock
diff --git a/docs/lib/content/using-npm/removal.md b/docs/lib/content/using-npm/removal.md
index c5e13b6741b6d..3b94a7d18f9d7 100644
--- a/docs/lib/content/using-npm/removal.md
+++ b/docs/lib/content/using-npm/removal.md
@@ -12,11 +12,7 @@ So sad to see you go.
sudo npm uninstall npm -g
```
-Or, if that fails, get the npm source code, and do:
-
-```bash
-sudo make uninstall
-```
+Or, if that fails, please proceed to more severe uninstalling methods.
### More Severe Uninstalling
@@ -28,8 +24,8 @@ continue reading.
Note that this is only necessary for globally-installed packages. Local
installs are completely contained within a project's `node_modules`
-folder. Delete that folder, and everything is gone less a package's
-install script is particularly ill-behaved).
+folder. Delete that folder, and everything is gone unless a package's
+install script is particularly ill-behaved.
This assumes that you installed node and npm in the default place. If
you configured node with a different `--prefix`, or installed npm with a
diff --git a/docs/lib/content/using-npm/scope.md b/docs/lib/content/using-npm/scope.md
index 829e6cc408e6c..b43fa2e9ff381 100644
--- a/docs/lib/content/using-npm/scope.md
+++ b/docs/lib/content/using-npm/scope.md
@@ -127,7 +127,7 @@ host multiple scopes, but a scope only ever points to one registry.
You can also associate a scope with a registry using `npm config`:
```bash
-npm config set @myco:registry http://reg.example.com
+npm config set @myco:registry=http://reg.example.com
```
Once a scope is associated with a registry, any `npm install` for a package
diff --git a/docs/lib/content/using-npm/scripts.md b/docs/lib/content/using-npm/scripts.md
index 7395c520ef4e5..75f8929bd99fe 100644
--- a/docs/lib/content/using-npm/scripts.md
+++ b/docs/lib/content/using-npm/scripts.md
@@ -159,8 +159,6 @@ These are run from the scripts of ``
* `publish`
* `postpublish`
-`prepare` will not run during `--dry-run`
-
#### [`npm rebuild`](/commands/npm-rebuild)
* `preinstall`
@@ -296,18 +294,15 @@ For example, if your package.json contains this:
{
"scripts" : {
"install" : "scripts/install.js",
- "postinstall" : "scripts/install.js",
- "uninstall" : "scripts/uninstall.js"
+ "postinstall" : "scripts/install.js"
}
}
```
-then `scripts/install.js` will be called for the install
-and post-install stages of the lifecycle, and `scripts/uninstall.js`
-will be called when the package is uninstalled. Since
-`scripts/install.js` is running for two different phases, it would
-be wise in this case to look at the `npm_lifecycle_event` environment
-variable.
+then `scripts/install.js` will be called for the install and post-install
+stages of the lifecycle. Since `scripts/install.js` is running for two
+different phases, it would be wise in this case to look at the
+`npm_lifecycle_event` environment variable.
If you want to run a make command, you can do so. This works just
fine:
@@ -336,10 +331,8 @@ file.
### Best Practices
* Don't exit with a non-zero error code unless you *really* mean it.
- Except for uninstall scripts, this will cause the npm action to
- fail, and potentially be rolled back. If the failure is minor or
- only will prevent some optional features, then it's better to just
- print a warning and exit successfully.
+ If the failure is minor or only will prevent some optional features, then
+ it's better to just print a warning and exit successfully.
* Try not to use scripts to do what npm can do for you. Read through
[`package.json`](/configuring-npm/package-json) to see all the things that you can specify and enable
by simply describing your package appropriately. In general, this
diff --git a/docs/lib/content/using-npm/workspaces.md b/docs/lib/content/using-npm/workspaces.md
index bbfa2d8817eb2..cb545c0b46bf1 100644
--- a/docs/lib/content/using-npm/workspaces.md
+++ b/docs/lib/content/using-npm/workspaces.md
@@ -7,12 +7,12 @@ description: Working with workspaces
### Description
**Workspaces** is a generic term that refers to the set of features in the
-npm cli that provides support to managing multiple packages from your local
+npm cli that provides support for managing multiple packages from your local
file system from within a singular top-level, root package.
This set of features makes up for a much more streamlined workflow handling
-linked packages from the local file system. Automating the linking process
-as part of `npm install` and avoiding manually having to use `npm link` in
+linked packages from the local file system. It automates the linking process
+as part of `npm install` and removes the need to manually use `npm link` in
order to add references to packages that should be symlinked into the current
`node_modules` folder.
@@ -110,7 +110,7 @@ respect the provided `workspace` configuration.
### Using workspaces
-Given the [specifities of how Node.js handles module resolution](https://nodejs.org/dist/latest-v14.x/docs/api/modules.html#modules_all_together) it's possible to consume any defined workspace
+Given the [specifics of how Node.js handles module resolution](https://nodejs.org/dist/latest-v14.x/docs/api/modules.html#modules_all_together) it's possible to consume any defined workspace
by its declared `package.json` `name`. Continuing from the example defined
above, let's also create a Node.js script that will require the workspace `a`
example module, e.g:
diff --git a/docs/lib/index.js b/docs/lib/index.js
index deb715b38107a..b88d20cca3558 100644
--- a/docs/lib/index.js
+++ b/docs/lib/index.js
@@ -3,7 +3,7 @@ const { join, basename, resolve } = require('path')
const transformHTML = require('./transform-html.js')
const { version } = require('../../lib/npm.js')
const { aliases } = require('../../lib/utils/cmd-list')
-const { shorthands, definitions } = require('../../lib/utils/config/index.js')
+const { shorthands, definitions } = require('@npmcli/config/lib/definitions')
const DOC_EXT = '.md'
@@ -42,6 +42,17 @@ const getCommandByDoc = (docFile, docExt) => {
const srcName = name === 'npx' ? 'exec' : name
const { params, usage = [''], workspaces } = require(`../../lib/commands/${srcName}`)
const usagePrefix = name === 'npx' ? 'npx' : `npm ${name}`
+ if (params) {
+ for (const param of params) {
+ if (definitions[param].exclusive) {
+ for (const e of definitions[param].exclusive) {
+ if (!params.includes(e)) {
+ params.splice(params.indexOf(param) + 1, 0, e)
+ }
+ }
+ }
+ }
+ }
return {
name,
@@ -108,7 +119,7 @@ const replaceConfig = (src, { path }) => {
}
const allConfig = Object.entries(definitions).sort(sort)
- .map(([_, def]) => def.describe())
+ .map(([, def]) => def.describe())
.join('\n\n')
return src.replace(replacer, allConfig)
@@ -146,7 +157,7 @@ const replaceHelpLinks = (src) => {
const transformMan = (src, { data, unified, remarkParse, remarkMan }) => unified()
.use(remarkParse)
- .use(remarkMan)
+ .use(remarkMan, { version: `NPM@${version}` })
.processSync(`# ${data.title}(${data.section}) - ${data.description}\n\n${src}`)
.toString()
diff --git a/docs/lib/template.html b/docs/lib/template.html
index be3bafd61aa05..622dc327046ee 100644
--- a/docs/lib/template.html
+++ b/docs/lib/template.html
@@ -115,6 +115,11 @@
line-height: 1;
}
+header.title .version {
+ font-size: 0.8em;
+ color: #666666;
+}
+
footer#edit {
border-top: solid 1px #e1e4e8;
margin: 3em 0 4em 0;
@@ -138,7 +143,10 @@
-{{ title }}
+
+ {{ title }}
+ @{{ version }}
+
{{ description }}
diff --git a/docs/lib/transform-html.js b/docs/lib/transform-html.js
index c587d75e994f8..dc3dd4930d05d 100644
--- a/docs/lib/transform-html.js
+++ b/docs/lib/transform-html.js
@@ -1,4 +1,5 @@
const jsdom = require('jsdom')
+const { version } = require('../../package.json')
function transformHTML (
src,
@@ -36,6 +37,9 @@ function transformHTML (
case 'config.github_path':
return data[key.replace(/^config\./, '')]
+ case 'version':
+ return version
+
default:
throw new Error(`warning: unknown token '${token}' in ${path}`)
}
diff --git a/docs/package.json b/docs/package.json
index 704ff75bd24eb..d3f0ecbd26e92 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -5,36 +5,36 @@
"private": true,
"main": "lib/index.js",
"scripts": {
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
- "lintfix": "node .. run lint -- --fix",
+ "lintfix": "npm run lint -- --fix",
"snap": "tap",
"test": "tap",
- "posttest": "node .. run lint",
+ "posttest": "npm run lint",
"build": "node bin/build.js"
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/cli.git",
+ "url": "git+https://github.com/npm/cli.git",
"directory": "docs"
},
"devDependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.11.0",
+ "@npmcli/template-oss": "4.22.0",
"front-matter": "^4.0.2",
- "ignore-walk": "^6.0.0",
- "jsdom": "^20.0.3",
- "mkdirp": "^1.0.4",
+ "ignore-walk": "^6.0.5",
+ "jsdom": "^24.0.0",
"rehype-stringify": "^9.0.3",
"remark-gfm": "^3.0.1",
"remark-man": "^8.0.1",
"remark-parse": "^10.0.1",
"remark-rehype": "^10.1.0",
- "tap": "^16.3.2",
+ "semver": "^7.3.8",
+ "tap": "^16.3.8",
"unified": "^10.1.2",
- "yaml": "^2.1.3"
+ "yaml": "^2.2.1"
},
"author": "GitHub Inc.",
"license": "ISC",
@@ -43,7 +43,7 @@
"lib/"
],
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
},
"tap": {
"timeout": 600,
@@ -55,12 +55,11 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"ciVersions": "latest",
- "engines": "^14.17.0 || ^16.13.0 || >=18.0.0",
- "version": "4.11.0",
+ "version": "4.22.0",
"content": "../scripts/template-oss/index.js",
"workspaceRepo": {
"add": {
- ".github/workflows/ci-{{ pkgNameFs }}.yml": "ci-npmcli-docs.yml"
+ ".github/workflows/ci-{{ pkgNameFs }}.yml": "ci-npmcli-docs-yml.hbs"
}
}
}
diff --git a/lib/arborist-cmd.js b/lib/arborist-cmd.js
index 42699ece364ad..9d247d02fa181 100644
--- a/lib/arborist-cmd.js
+++ b/lib/arborist-cmd.js
@@ -1,10 +1,9 @@
-const log = require('./utils/log-shim.js')
+const { log } = require('proc-log')
+const BaseCommand = require('./base-cmd.js')
// This is the base for all commands whose execWorkspaces just gets
// a list of workspace names and passes it on to new Arborist() to
// be able to run a filtered Arborist.reify() at some point.
-
-const BaseCommand = require('./base-command.js')
class ArboristCmd extends BaseCommand {
get isArboristCmd () {
return true
diff --git a/lib/base-cmd.js b/lib/base-cmd.js
new file mode 100644
index 0000000000000..99ae6d7f43c70
--- /dev/null
+++ b/lib/base-cmd.js
@@ -0,0 +1,156 @@
+const { log } = require('proc-log')
+
+class BaseCommand {
+ static workspaces = false
+ static ignoreImplicitWorkspace = true
+
+ // these are all overridden by individual commands
+ static name = null
+ static description = null
+ static params = null
+
+ // this is a static so that we can read from it without instantiating a command
+ // which would require loading the config
+ static get describeUsage () {
+ const { definitions } = require('@npmcli/config/lib/definitions')
+ const { aliases: cmdAliases } = require('./utils/cmd-list')
+ const seenExclusive = new Set()
+ const wrapWidth = 80
+ const { description, usage = [''], name, params } = this
+
+ const fullUsage = [
+ `${description}`,
+ '',
+ 'Usage:',
+ ...usage.map(u => `npm ${name} ${u}`.trim()),
+ ]
+
+ if (params) {
+ let results = ''
+ let line = ''
+ for (const param of params) {
+ /* istanbul ignore next */
+ if (seenExclusive.has(param)) {
+ continue
+ }
+ const { exclusive } = definitions[param]
+ let paramUsage = `${definitions[param].usage}`
+ if (exclusive) {
+ const exclusiveParams = [paramUsage]
+ seenExclusive.add(param)
+ for (const e of exclusive) {
+ seenExclusive.add(e)
+ exclusiveParams.push(definitions[e].usage)
+ }
+ paramUsage = `${exclusiveParams.join('|')}`
+ }
+ paramUsage = `[${paramUsage}]`
+ if (line.length + paramUsage.length > wrapWidth) {
+ results = [results, line].filter(Boolean).join('\n')
+ line = ''
+ }
+ line = [line, paramUsage].filter(Boolean).join(' ')
+ }
+ fullUsage.push('')
+ fullUsage.push('Options:')
+ fullUsage.push([results, line].filter(Boolean).join('\n'))
+ }
+
+ const aliases = Object.entries(cmdAliases).reduce((p, [k, v]) => {
+ return p.concat(v === name ? k : [])
+ }, [])
+
+ if (aliases.length) {
+ const plural = aliases.length === 1 ? '' : 'es'
+ fullUsage.push('')
+ fullUsage.push(`alias${plural}: ${aliases.join(', ')}`)
+ }
+
+ fullUsage.push('')
+ fullUsage.push(`Run "npm help ${name}" for more info`)
+
+ return fullUsage.join('\n')
+ }
+
+ constructor (npm) {
+ this.npm = npm
+
+ const { config } = this.npm
+
+ if (!this.constructor.skipConfigValidation) {
+ config.validate()
+ }
+
+ if (config.get('workspaces') === false && config.get('workspace').length) {
+ throw new Error('Can not use --no-workspaces and --workspace at the same time')
+ }
+ }
+
+ get name () {
+ return this.constructor.name
+ }
+
+ get description () {
+ return this.constructor.description
+ }
+
+ get params () {
+ return this.constructor.params
+ }
+
+ get usage () {
+ return this.constructor.describeUsage
+ }
+
+ usageError (prefix = '') {
+ if (prefix) {
+ prefix += '\n\n'
+ }
+ return Object.assign(new Error(`\n${prefix}${this.usage}`), {
+ code: 'EUSAGE',
+ })
+ }
+
+ // Compare the number of entries with what was expected
+ checkExpected (entries) {
+ if (!this.npm.config.isDefault('expect-results')) {
+ const expected = this.npm.config.get('expect-results')
+ if (!!entries !== !!expected) {
+ log.warn(this.name, `Expected ${expected ? '' : 'no '}results, got ${entries}`)
+ process.exitCode = 1
+ }
+ } else if (!this.npm.config.isDefault('expect-result-count')) {
+ const expected = this.npm.config.get('expect-result-count')
+ if (expected !== entries) {
+ /* eslint-disable-next-line max-len */
+ log.warn(this.name, `Expected ${expected} result${expected === 1 ? '' : 's'}, got ${entries}`)
+ process.exitCode = 1
+ }
+ }
+ }
+
+ async setWorkspaces () {
+ const { relative } = require('node:path')
+
+ const includeWorkspaceRoot = this.isArboristCmd
+ ? false
+ : this.npm.config.get('include-workspace-root')
+
+ const prefixInsideCwd = relative(this.npm.localPrefix, process.cwd()).startsWith('..')
+ const relativeFrom = prefixInsideCwd ? this.npm.localPrefix : process.cwd()
+
+ const filters = this.npm.config.get('workspace')
+ const getWorkspaces = require('./utils/get-workspaces.js')
+ const ws = await getWorkspaces(filters, {
+ path: this.npm.localPrefix,
+ includeWorkspaceRoot,
+ relativeFrom,
+ })
+
+ this.workspaces = ws
+ this.workspaceNames = [...ws.keys()]
+ this.workspacePaths = [...ws.values()]
+ }
+}
+
+module.exports = BaseCommand
diff --git a/lib/base-command.js b/lib/base-command.js
deleted file mode 100644
index 0adff8e5d95ea..0000000000000
--- a/lib/base-command.js
+++ /dev/null
@@ -1,153 +0,0 @@
-// Base class for npm commands
-
-const { relative } = require('path')
-
-const ConfigDefinitions = require('./utils/config/definitions.js')
-const getWorkspaces = require('./workspaces/get-workspaces.js')
-
-const cmdAliases = require('./utils/cmd-list').aliases
-
-class BaseCommand {
- static workspaces = false
- static ignoreImplicitWorkspace = true
-
- constructor (npm) {
- this.wrapWidth = 80
- this.npm = npm
-
- const { config } = this.npm
-
- if (!this.constructor.skipConfigValidation) {
- config.validate()
- }
-
- if (config.get('workspaces') === false && config.get('workspace').length) {
- throw new Error('Can not use --no-workspaces and --workspace at the same time')
- }
- }
-
- get name () {
- return this.constructor.name
- }
-
- get description () {
- return this.constructor.description
- }
-
- get params () {
- return this.constructor.params
- }
-
- get usage () {
- const usage = [
- `${this.description}`,
- '',
- 'Usage:',
- ]
-
- if (!this.constructor.usage) {
- usage.push(`npm ${this.name}`)
- } else {
- usage.push(...this.constructor.usage.map(u => `npm ${this.name} ${u}`))
- }
-
- if (this.params) {
- usage.push('')
- usage.push('Options:')
- usage.push(this.wrappedParams)
- }
-
- const aliases = Object.keys(cmdAliases).reduce((p, c) => {
- if (cmdAliases[c] === this.name) {
- p.push(c)
- }
- return p
- }, [])
-
- if (aliases.length === 1) {
- usage.push('')
- usage.push(`alias: ${aliases.join(', ')}`)
- } else if (aliases.length > 1) {
- usage.push('')
- usage.push(`aliases: ${aliases.join(', ')}`)
- }
-
- usage.push('')
- usage.push(`Run "npm help ${this.name}" for more info`)
-
- return usage.join('\n')
- }
-
- get wrappedParams () {
- let results = ''
- let line = ''
-
- for (const param of this.params) {
- const usage = `[${ConfigDefinitions[param].usage}]`
- if (line.length && line.length + usage.length > this.wrapWidth) {
- results = [results, line].filter(Boolean).join('\n')
- line = ''
- }
- line = [line, usage].filter(Boolean).join(' ')
- }
- results = [results, line].filter(Boolean).join('\n')
- return results
- }
-
- usageError (prefix = '') {
- if (prefix) {
- prefix += '\n\n'
- }
- return Object.assign(new Error(`\n${prefix}${this.usage}`), {
- code: 'EUSAGE',
- })
- }
-
- async cmdExec (args) {
- const { config } = this.npm
-
- if (config.get('usage')) {
- return this.npm.output(this.usage)
- }
-
- const hasWsConfig = config.get('workspaces') || config.get('workspace').length
- // if cwd is a workspace, the default is set to [that workspace]
- const implicitWs = config.get('workspace', 'default').length
-
- // (-ws || -w foo) && (cwd is not a workspace || command is not ignoring implicit workspaces)
- if (hasWsConfig && (!implicitWs || !this.constructor.ignoreImplicitWorkspace)) {
- if (this.npm.global) {
- throw new Error('Workspaces not supported for global packages')
- }
- if (!this.constructor.workspaces) {
- throw Object.assign(new Error('This command does not support workspaces.'), {
- code: 'ENOWORKSPACES',
- })
- }
- return this.execWorkspaces(args)
- }
-
- return this.exec(args)
- }
-
- async setWorkspaces () {
- const includeWorkspaceRoot = this.isArboristCmd
- ? false
- : this.npm.config.get('include-workspace-root')
-
- const prefixInsideCwd = relative(this.npm.localPrefix, process.cwd()).startsWith('..')
- const relativeFrom = prefixInsideCwd ? this.npm.localPrefix : process.cwd()
-
- const filters = this.npm.config.get('workspace')
- const ws = await getWorkspaces(filters, {
- path: this.npm.localPrefix,
- includeWorkspaceRoot,
- relativeFrom,
- })
-
- this.workspaces = ws
- this.workspaceNames = [...ws.keys()]
- this.workspacePaths = [...ws.values()]
- }
-}
-module.exports = BaseCommand
diff --git a/lib/cli.js b/lib/cli.js
index 007778aa4b986..e11729fe3205b 100644
--- a/lib/cli.js
+++ b/lib/cli.js
@@ -1,147 +1,4 @@
-// This is separate to indicate that it should contain code we expect to work in
-// all conceivably runnable versions of node. This is a best effort to catch
-// syntax errors to give users a good error message if they are using a node
-// version that doesn't allow syntax we are using such as private properties, etc
-const createEnginesValidation = () => {
- const node = process.version.replace(/-.*$/, '')
- const pkg = require('../package.json')
- const engines = pkg.engines.node
- const npm = `v${pkg.version}`
+const validateEngines = require('./cli/validate-engines.js')
+const cliEntry = require('node:path').resolve(__dirname, 'cli/entry.js')
- const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80)
- const wrap = (lines) => lines
- .join(' ')
- .split(/[ \n]+/)
- .reduce((left, right) => {
- const last = left.split('\n').pop()
- const join = last.length && last.length + right.length > cols ? '\n' : ' '
- return left + join + right
- })
- .trim()
-
- const unsupportedMessage = wrap([
- `npm ${npm} does not support Node.js ${node}.`,
- `You should probably upgrade to a newer version of node as we can't make any`,
- `promises that npm will work with this version.`,
- `This version of npm supports the following node versions: \`${engines}\`.`,
- 'You can find the latest version at https://nodejs.org/.',
- ])
-
- const brokenMessage = wrap([
- `ERROR: npm ${npm} is known not to run on Node.js ${node}.`,
- `You'll need to upgrade to a newer Node.js version in order to use this version of npm.`,
- `This version of npm supports the following node versions: \`${engines}\`.`,
- 'You can find the latest version at https://nodejs.org/.',
- ])
-
- // coverage ignored because this is only hit in very unsupported node versions
- // and it's a best effort attempt to show something nice in those cases
- /* istanbul ignore next */
- const syntaxErrorHandler = (err) => {
- if (err instanceof SyntaxError) {
- // eslint-disable-next-line no-console
- console.error(`${brokenMessage}\n\nERROR:`)
- // eslint-disable-next-line no-console
- console.error(err)
- return process.exit(1)
- }
- throw err
- }
-
- process.on('uncaughtException', syntaxErrorHandler)
- process.on('unhandledRejection', syntaxErrorHandler)
-
- return {
- node,
- engines,
- unsupportedMessage,
- off: () => {
- process.off('uncaughtException', syntaxErrorHandler)
- process.off('unhandledRejection', syntaxErrorHandler)
- },
- }
-}
-
-// Separated out for easier unit testing
-module.exports = async process => {
- // set it here so that regardless of what happens later, we don't
- // leak any private CLI configs to other programs
- process.title = 'npm'
-
- // if npm is called as "npmg" or "npm_g", then run in global mode.
- if (process.argv[1][process.argv[1].length - 1] === 'g') {
- process.argv.splice(1, 1, 'npm', '-g')
- }
-
- // Nothing should happen before this line if we can't guarantee it will
- // not have syntax errors in some version of node
- const validateEngines = createEnginesValidation()
-
- const satisfies = require('semver/functions/satisfies')
- const exitHandler = require('./utils/exit-handler.js')
- const Npm = require('./npm.js')
- const npm = new Npm()
- exitHandler.setNpm(npm)
-
- // only log node and npm paths in argv initially since argv can contain
- // sensitive info. a cleaned version will be logged later
- const log = require('./utils/log-shim.js')
- log.verbose('cli', process.argv.slice(0, 2).join(' '))
- log.info('using', 'npm@%s', npm.version)
- log.info('using', 'node@%s', process.version)
-
- // At this point we've required a few files and can be pretty sure
- // we dont contain invalid syntax for this version of node. It's
- // possible a lazy require would, but that's unlikely enough that
- // it's not worth catching anymore and we attach the more important
- // exit handlers.
- validateEngines.off()
- process.on('uncaughtException', exitHandler)
- process.on('unhandledRejection', exitHandler)
-
- // It is now safe to log a warning if they are using a version of node
- // that is not going to fail on syntax errors but is still unsupported
- // and untested and might not work reliably. This is safe to use the logger
- // now which we want since this will show up in the error log too.
- if (!satisfies(validateEngines.node, validateEngines.engines)) {
- log.warn('cli', validateEngines.unsupportedMessage)
- }
-
- let cmd
- // now actually fire up npm and run the command.
- // this is how to use npm programmatically:
- try {
- await npm.load()
-
- if (npm.config.get('version', 'cli')) {
- npm.output(npm.version)
- return exitHandler()
- }
-
- // npm --versions=cli
- if (npm.config.get('versions', 'cli')) {
- npm.argv = ['version']
- npm.config.set('usage', false, 'cli')
- }
-
- cmd = npm.argv.shift()
- if (!cmd) {
- npm.output(await npm.usage)
- process.exitCode = 1
- return exitHandler()
- }
-
- await npm.exec(cmd)
- return exitHandler()
- } catch (err) {
- if (err.code === 'EUNKNOWNCOMMAND') {
- const didYouMean = require('./utils/did-you-mean.js')
- const suggestions = await didYouMean(npm, npm.localPrefix, cmd)
- npm.output(`Unknown command: "${cmd}"${suggestions}\n`)
- npm.output('To see a list of supported npm commands, run:\n npm help')
- process.exitCode = 1
- return exitHandler()
- }
- return exitHandler(err)
- }
-}
+module.exports = (process) => validateEngines(process, () => require(cliEntry))
diff --git a/lib/cli/entry.js b/lib/cli/entry.js
new file mode 100644
index 0000000000000..ed73eb89e2d36
--- /dev/null
+++ b/lib/cli/entry.js
@@ -0,0 +1,79 @@
+/* eslint-disable max-len */
+
+// Separated out for easier unit testing
+module.exports = async (process, validateEngines) => {
+ // set it here so that regardless of what happens later, we don't
+ // leak any private CLI configs to other programs
+ process.title = 'npm'
+
+ // if npm is called as "npmg" or "npm_g", then run in global mode.
+ if (process.argv[1][process.argv[1].length - 1] === 'g') {
+ process.argv.splice(1, 1, 'npm', '-g')
+ }
+
+ // Patch the global fs module here at the app level
+ require('graceful-fs').gracefulify(require('node:fs'))
+
+ const satisfies = require('semver/functions/satisfies')
+ const ExitHandler = require('./exit-handler.js')
+ const exitHandler = new ExitHandler({ process })
+ const Npm = require('../npm.js')
+ const npm = new Npm()
+ exitHandler.setNpm(npm)
+
+ // only log node and npm paths in argv initially since argv can contain sensitive info. a cleaned version will be logged later
+ const { log, output } = require('proc-log')
+ log.verbose('cli', process.argv.slice(0, 2).join(' '))
+ log.info('using', 'npm@%s', npm.version)
+ log.info('using', 'node@%s', process.version)
+
+ // At this point we've required a few files and can be pretty sure we dont contain invalid syntax for this version of node. It's possible a lazy require would, but that's unlikely enough that it's not worth catching anymore and we attach the more important exit handlers.
+ validateEngines.off()
+ exitHandler.registerUncaughtHandlers()
+
+ // It is now safe to log a warning if they are using a version of node that is not going to fail on syntax errors but is still unsupported and untested and might not work reliably. This is safe to use the logger now which we want since this will show up in the error log too.
+ if (!satisfies(validateEngines.node, validateEngines.engines)) {
+ log.warn('cli', validateEngines.unsupportedMessage)
+ }
+
+ // Now actually fire up npm and run the command.
+ // This is how to use npm programmatically:
+ try {
+ const { exec, command, args } = await npm.load()
+
+ if (!exec) {
+ return exitHandler.exit()
+ }
+
+ if (!command) {
+ output.standard(npm.usage)
+ process.exitCode = 1
+ return exitHandler.exit()
+ }
+
+ // Options are prefixed by a hyphen-minus (-, \u2d).
+ // Other dash-type chars look similar but are invalid.
+ const nonDashArgs = npm.argv.filter(a => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(a))
+ if (nonDashArgs.length) {
+ log.error(
+ 'arg',
+ 'Argument starts with non-ascii dash, this is probably invalid:',
+ require('@npmcli/redact').redactLog(nonDashArgs.join(', '))
+ )
+ }
+
+ const execPromise = npm.exec(command, args)
+
+ // this is async but we dont await it, since its ok if it doesnt
+ // finish before the command finishes running. it uses command and argv
+ // so it must be initiated here, after the command name is set
+ const updateNotifier = require('./update-notifier.js')
+ // eslint-disable-next-line promise/catch-or-return
+ updateNotifier(npm).then((msg) => (npm.updateNotification = msg))
+
+ await execPromise
+ return exitHandler.exit()
+ } catch (err) {
+ return exitHandler.exit(err)
+ }
+}
diff --git a/lib/cli/exit-handler.js b/lib/cli/exit-handler.js
new file mode 100644
index 0000000000000..fff95165a18de
--- /dev/null
+++ b/lib/cli/exit-handler.js
@@ -0,0 +1,173 @@
+const { log, output, META } = require('proc-log')
+const { errorMessage, getExitCodeFromError } = require('../utils/error-message.js')
+
+class ExitHandler {
+ #npm = null
+ #process = null
+ #exited = false
+ #exitErrorMessage = false
+
+ #noNpmError = false
+
+ get #hasNpm () {
+ return !!this.#npm
+ }
+
+ get #loaded () {
+ return !!this.#npm?.loaded
+ }
+
+ get #showExitErrorMessage () {
+ if (!this.#loaded) {
+ return false
+ }
+ if (!this.#exited) {
+ return true
+ }
+ return this.#exitErrorMessage
+ }
+
+ get #notLoadedOrExited () {
+ return !this.#loaded && !this.#exited
+ }
+
+ setNpm (npm) {
+ this.#npm = npm
+ }
+
+ constructor ({ process }) {
+ this.#process = process
+ this.#process.on('exit', this.#handleProcesExitAndReset)
+ }
+
+ registerUncaughtHandlers () {
+ this.#process.on('uncaughtException', this.#handleExit)
+ this.#process.on('unhandledRejection', this.#handleExit)
+ }
+
+ exit (err) {
+ this.#handleExit(err)
+ }
+
+ #handleProcesExitAndReset = (code) => {
+ this.#handleProcessExit(code)
+
+ // Reset all the state. This is only relevant for tests since
+ // in reality the process fully exits here.
+ this.#process.off('exit', this.#handleProcesExitAndReset)
+ this.#process.off('uncaughtException', this.#handleExit)
+ this.#process.off('unhandledRejection', this.#handleExit)
+ if (this.#loaded) {
+ this.#npm.unload()
+ }
+ this.#npm = null
+ this.#exited = false
+ this.#exitErrorMessage = false
+ }
+
+ #handleProcessExit (code) {
+ // Force exit code to a number if it has not been set
+ const exitCode = typeof code === 'number' ? code : (this.#exited ? 0 : 1)
+ this.#process.exitCode = exitCode
+
+ if (this.#notLoadedOrExited) {
+ // Exit handler was not called and npm was not loaded so we have to log something
+ this.#logConsoleError(new Error(`Process exited unexpectedly with code: ${exitCode}`))
+ return
+ }
+
+ if (this.#logNoNpmError()) {
+ return
+ }
+
+ const os = require('node:os')
+ log.verbose('cwd', this.#process.cwd())
+ log.verbose('os', `${os.type()} ${os.release()}`)
+ log.verbose('node', this.#process.version)
+ log.verbose('npm ', `v${this.#npm.version}`)
+
+ // only show the notification if it finished
+ if (typeof this.#npm.updateNotification === 'string') {
+ log.notice('', this.#npm.updateNotification, { [META]: true, force: true })
+ }
+
+ if (!this.#exited) {
+ log.error('', 'Exit handler never called!')
+ log.error('', 'This is an error with npm itself. Please report this error at:')
+      log.error('', '  <https://github.com/npm/cli/issues>')
+ if (this.#npm.silent) {
+ output.error('')
+ }
+ }
+
+ log.verbose('exit', exitCode)
+
+ if (exitCode) {
+ log.verbose('code', exitCode)
+ } else {
+ log.info('ok')
+ }
+
+ if (this.#showExitErrorMessage) {
+ log.error('', this.#npm.exitErrorMessage())
+ }
+ }
+
+ #logConsoleError (err) {
+ // Run our error message formatters on all errors even if we
+ // have no npm or an unloaded npm. This will clean the error
+    // and possibly return a formatted message about EACCES or something.
+ const { summary, detail } = errorMessage(err, this.#npm)
+ const formatted = [...new Set([...summary, ...detail].flat().filter(Boolean))].join('\n')
+ // If we didn't get anything from the formatted message then just display the full stack
+ // eslint-disable-next-line no-console
+ console.error(formatted === err.message ? err.stack : formatted)
+ }
+
+ #logNoNpmError (err) {
+ if (this.#hasNpm) {
+ return false
+ }
+ // Make sure we only log this error once
+ if (!this.#noNpmError) {
+ this.#noNpmError = true
+ this.#logConsoleError(
+ new Error(`Exit prior to setting npm in exit handler`, err ? { cause: err } : {})
+ )
+ }
+ return true
+ }
+
+ #handleExit = (err) => {
+ this.#exited = true
+
+ // No npm at all
+ if (this.#logNoNpmError(err)) {
+ return this.#process.exit(this.#process.exitCode || getExitCodeFromError(err) || 1)
+ }
+
+ // npm was never loaded but we still might have a config loading error or
+ // something similar that we can run through the error message formatter
+    // to give the user a clue as to what happened.
+ if (!this.#loaded) {
+ this.#logConsoleError(new Error('Exit prior to config file resolving', { cause: err }))
+ return this.#process.exit(this.#process.exitCode || getExitCodeFromError(err) || 1)
+ }
+
+ this.#exitErrorMessage = err?.suppressError === true ? false : !!err
+
+ // Prefer the exit code of the error, then the current process exit code,
+ // then set it to 1 if we still have an error. Otherwise we call process.exit
+ // with undefined so that it can determine the final exit code
+ const exitCode = err?.exitCode ?? this.#process.exitCode ?? (err ? 1 : undefined)
+
+ // explicitly call process.exit now so we don't hang on things like the
+ // update notifier, also flush stdout/err beforehand because process.exit doesn't
+ // wait for that to happen.
+ this.#process.stderr.write('', () => this.#process.stdout.write('', () => {
+ this.#process.exit(exitCode)
+ }))
+ }
+}
+
+module.exports = ExitHandler
diff --git a/lib/cli/update-notifier.js b/lib/cli/update-notifier.js
new file mode 100644
index 0000000000000..32cac18350be9
--- /dev/null
+++ b/lib/cli/update-notifier.js
@@ -0,0 +1,121 @@
+// print a banner telling the user to upgrade npm to latest
+// but not in CI, and not if we're doing that already.
+// Check daily for betas, and weekly otherwise.
+
+const ciInfo = require('ci-info')
+const gt = require('semver/functions/gt')
+const gte = require('semver/functions/gte')
+const parse = require('semver/functions/parse')
+const { stat, writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
+
+// update check frequency
+const DAILY = 1000 * 60 * 60 * 24
+const WEEKLY = DAILY * 7
+
+// don't put it in the _cacache folder, just in npm's cache
+const lastCheckedFile = npm =>
+ resolve(npm.flatOptions.cache, '../_update-notifier-last-checked')
+
+// Actual check for updates. This is a separate function so that we only load
+// this if we are doing the actual update
+const updateCheck = async (npm, spec, version, current) => {
+ const pacote = require('pacote')
+
+ const mani = await pacote.manifest(`npm@${spec}`, {
+ // always prefer latest, even if doing --tag=whatever on the cmd
+ defaultTag: 'latest',
+ ...npm.flatOptions,
+ cache: false,
+ }).catch(() => null)
+
+ // if pacote failed, give up
+ if (!mani) {
+ return null
+ }
+
+ const latest = mani.version
+
+ // if the current version is *greater* than latest, we're on a 'next'
+ // and should get the updates from that release train.
+ // Note that this isn't another http request over the network, because
+ // the packument will be cached by pacote from previous request.
+ if (gt(version, latest) && spec === 'latest') {
+ return updateNotifier(npm, `^${version}`)
+ }
+
+ // if we already have something >= the desired spec, then we're done
+ if (gte(version, latest)) {
+ return null
+ }
+
+ const chalk = npm.logChalk
+
+ // ok! notify the user about this update they should get.
+ // The message is saved for printing at process exit so it will not get
+ // lost in any other messages being printed as part of the command.
+ const update = parse(mani.version)
+ const type = update.major !== current.major ? 'major'
+ : update.minor !== current.minor ? 'minor'
+ : update.patch !== current.patch ? 'patch'
+ : 'prerelease'
+ const typec = type === 'major' ? 'red'
+ : type === 'minor' ? 'yellow'
+ : 'cyan'
+ const cmd = `npm install -g npm@${latest}`
+ const message = `\nNew ${chalk[typec](type)} version of npm available! ` +
+ `${chalk[typec](current)} -> ${chalk.blue(latest)}\n` +
+ `Changelog: ${chalk.blue(`https://github.com/npm/cli/releases/tag/v${latest}`)}\n` +
+ `To update run: ${chalk.underline(cmd)}\n`
+
+ return message
+}
+
+const updateNotifier = async (npm, spec = 'latest') => {
+ // if we're on a prerelease train, then updates are coming fast
+ // check for a new one daily. otherwise, weekly.
+ const { version } = npm
+ const current = parse(version)
+
+ // if we're on a beta train, always get the next beta
+ if (current.prerelease.length) {
+ spec = `^${version}`
+ }
+
+ // while on a beta train, get updates daily
+ const duration = spec !== 'latest' ? DAILY : WEEKLY
+
+ const t = new Date(Date.now() - duration)
+ // if we don't have a file, then definitely check it.
+ const st = await stat(lastCheckedFile(npm)).catch(() => ({ mtime: t - 1 }))
+
+ // if we've already checked within the specified duration, don't check again
+ if (!(t > st.mtime)) {
+ return null
+ }
+
+ // intentional. do not await this. it's a best-effort update. if this
+ // fails, it's ok. might be using /dev/null as the cache or something weird
+ // like that.
+ writeFile(lastCheckedFile(npm), '').catch(() => {})
+
+ return updateCheck(npm, spec, version, current)
+}
+
+// only update the notification timeout if we actually finished checking
+module.exports = npm => {
+ if (
+ // opted out
+ !npm.config.get('update-notifier')
+ // global npm update
+ || (npm.flatOptions.global &&
+ ['install', 'update'].includes(npm.command) &&
+ npm.argv.some(arg => /^npm(@|$)/.test(arg)))
+ // CI
+ || ciInfo.isCI
+ ) {
+ return Promise.resolve(null)
+ }
+
+ return updateNotifier(npm)
+}
diff --git a/lib/cli/validate-engines.js b/lib/cli/validate-engines.js
new file mode 100644
index 0000000000000..cf5315a25dce0
--- /dev/null
+++ b/lib/cli/validate-engines.js
@@ -0,0 +1,49 @@
+// This is separate to indicate that it should contain code we expect to work in
+// all versions of node >= 6. This is a best effort to catch syntax errors to
+// give users a good error message if they are using a node version that doesn't
+// allow syntax we are using such as private properties, etc. This file is
+// linted with ecmaVersion=6 so we don't use invalid syntax, which is set in the
+// .eslintrc.local.js file
+
+const { engines: { node: engines }, version } = require('../../package.json')
+const npm = `v${version}`
+
+module.exports = (process, getCli) => {
+ const node = process.version
+
+ /* eslint-disable-next-line max-len */
+ const unsupportedMessage = `npm ${npm} does not support Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
+
+ /* eslint-disable-next-line max-len */
+ const brokenMessage = `ERROR: npm ${npm} is known not to run on Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
+
+ // coverage ignored because this is only hit in very unsupported node versions
+ // and it's a best effort attempt to show something nice in those cases
+ /* istanbul ignore next */
+ const syntaxErrorHandler = (err) => {
+ if (err instanceof SyntaxError) {
+ // eslint-disable-next-line no-console
+ console.error(`${brokenMessage}\n\nERROR:`)
+ // eslint-disable-next-line no-console
+ console.error(err)
+ return process.exit(1)
+ }
+ throw err
+ }
+
+ process.on('uncaughtException', syntaxErrorHandler)
+ process.on('unhandledRejection', syntaxErrorHandler)
+
+ // require this only after setting up the error handlers
+ const cli = getCli()
+ return cli(process, {
+ node,
+ npm,
+ engines,
+ unsupportedMessage,
+ off: () => {
+ process.off('uncaughtException', syntaxErrorHandler)
+ process.off('unhandledRejection', syntaxErrorHandler)
+ },
+ })
+}
diff --git a/lib/commands/access.js b/lib/commands/access.js
index 23e51f071b112..547fa7af01577 100644
--- a/lib/commands/access.js
+++ b/lib/commands/access.js
@@ -1,13 +1,11 @@
-const path = require('path')
-
const libnpmaccess = require('libnpmaccess')
const npa = require('npm-package-arg')
-const readPackageJson = require('read-package-json-fast')
+const { output } = require('proc-log')
+const pkgJson = require('@npmcli/package-json')
const localeCompare = require('@isaacs/string-locale-compare')('en')
-
-const otplease = require('../utils/otplease.js')
+const { otplease } = require('../utils/auth.js')
const getIdentity = require('../utils/get-identity.js')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
const commands = [
'get',
@@ -38,7 +36,7 @@ class Access extends BaseCommand {
]
static usage = [
- 'list packages [|| []',
+ 'list packages [||] []',
'list collaborators [ []]',
'get status []',
'set status=public|private []',
@@ -47,26 +45,28 @@ class Access extends BaseCommand {
'revoke []',
]
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return commands
}
- switch (argv[2]) {
- case 'grant':
- return ['read-only', 'read-write']
- case 'revoke':
- return []
- case 'list':
- case 'ls':
- return ['packages', 'collaborators']
- case 'get':
- return ['status']
- case 'set':
- return setCommands
- default:
- throw new Error(argv[2] + ' not recognized')
+ if (argv.length === 3) {
+ switch (argv[2]) {
+ case 'grant':
+ return ['read-only', 'read-write']
+ case 'revoke':
+ return []
+ case 'list':
+ case 'ls':
+ return ['packages', 'collaborators']
+ case 'get':
+ return ['status']
+ case 'set':
+ return setCommands
+ default:
+ throw new Error(argv[2] + ' not recognized')
+ }
}
}
@@ -116,11 +116,11 @@ class Access extends BaseCommand {
}
async #grant (permissions, scope, pkg) {
- await libnpmaccess.setPermissions(scope, pkg, permissions)
+ await libnpmaccess.setPermissions(scope, pkg, permissions, this.npm.flatOptions)
}
async #revoke (scope, pkg) {
- await libnpmaccess.removePermissions(scope, pkg)
+ await libnpmaccess.removePermissions(scope, pkg, this.npm.flatOptions)
}
async #listPackages (owner, pkg) {
@@ -176,8 +176,8 @@ class Access extends BaseCommand {
async #getPackage (name, requireScope) {
if (!name) {
try {
- const pkg = await readPackageJson(path.resolve(this.npm.prefix, 'package.json'))
- name = pkg.name
+ const { content } = await pkgJson.normalize(this.npm.prefix)
+ name = content.name
} catch (err) {
if (err.code === 'ENOENT') {
throw Object.assign(new Error('no package name given and no package.json found'), {
@@ -197,7 +197,7 @@ class Access extends BaseCommand {
}
#output (items, limiter) {
- const output = {}
+ const outputs = {}
const lookup = {
__proto__: null,
read: 'read-only',
@@ -205,14 +205,14 @@ class Access extends BaseCommand {
}
for (const item in items) {
const val = items[item]
- output[item] = lookup[val] || val
+ outputs[item] = lookup[val] || val
}
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify(output, null, 2))
+ output.buffer(outputs)
} else {
- for (const item of Object.keys(output).sort(localeCompare)) {
+ for (const item of Object.keys(outputs).sort(localeCompare)) {
if (!limiter || limiter === item) {
- this.npm.output(`${item}: ${output[item]}`)
+ output.standard(`${item}: ${outputs[item]}`)
}
}
}
diff --git a/lib/commands/adduser.js b/lib/commands/adduser.js
index cd4cba60511cb..cf64e7a7e7438 100644
--- a/lib/commands/adduser.js
+++ b/lib/commands/adduser.js
@@ -1,8 +1,7 @@
-const log = require('../utils/log-shim.js')
-const replaceInfo = require('../utils/replace-info.js')
+const { log, output } = require('proc-log')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
-
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class AddUser extends BaseCommand {
static description = 'Add a registry user account'
@@ -13,7 +12,7 @@ class AddUser extends BaseCommand {
'auth-type',
]
- async exec (args) {
+ async exec () {
const scope = this.npm.config.get('scope')
let registry = this.npm.config.get('registry')
@@ -27,7 +26,6 @@ class AddUser extends BaseCommand {
const creds = this.npm.config.getCredentialsByURI(registry)
- log.disableProgress()
log.notice('', `Log in on ${replaceInfo(registry)}`)
const { message, newCreds } = await auth.adduser(this.npm, {
@@ -45,7 +43,8 @@ class AddUser extends BaseCommand {
await this.npm.config.save('user')
- this.npm.output(message)
+ output.standard(message)
}
}
+
module.exports = AddUser
diff --git a/lib/commands/audit.js b/lib/commands/audit.js
index 13886ea6350b6..486bef1bb5dc1 100644
--- a/lib/commands/audit.js
+++ b/lib/commands/audit.js
@@ -1,337 +1,9 @@
-const Arborist = require('@npmcli/arborist')
-const auditReport = require('npm-audit-report')
-const fetch = require('npm-registry-fetch')
-const localeCompare = require('@isaacs/string-locale-compare')('en')
-const npa = require('npm-package-arg')
-const pacote = require('pacote')
-const pMap = require('p-map')
-
+const npmAuditReport = require('npm-audit-report')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const auditError = require('../utils/audit-error.js')
-const log = require('../utils/log-shim.js')
+const { log, output } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
-
-const sortAlphabetically = (a, b) => localeCompare(a.name, b.name)
-
-class VerifySignatures {
- constructor (tree, filterSet, npm, opts) {
- this.tree = tree
- this.filterSet = filterSet
- this.npm = npm
- this.opts = opts
- this.keys = new Map()
- this.invalid = []
- this.missing = []
- this.checkedPackages = new Set()
- this.auditedWithKeysCount = 0
- this.verifiedCount = 0
- this.output = []
- this.exitCode = 0
- }
-
- async run () {
- const start = process.hrtime.bigint()
-
- // Find all deps in tree
- const { edges, registries } = this.getEdgesOut(this.tree.inventory.values(), this.filterSet)
- if (edges.size === 0) {
- throw new Error('found no installed dependencies to audit')
- }
-
- await Promise.all([...registries].map(registry => this.setKeys({ registry })))
-
- const progress = log.newItem('verifying registry signatures', edges.size)
- const mapper = async (edge) => {
- progress.completeWork(1)
- await this.getVerifiedInfo(edge)
- }
- await pMap(edges, mapper, { concurrency: 20, stopOnError: true })
-
- // Didn't find any dependencies that could be verified, e.g. only local
- // deps, missing version, not on a registry etc.
- if (!this.auditedWithKeysCount) {
- throw new Error('found no dependencies to audit that where installed from ' +
- 'a supported registry')
- }
-
- const invalid = this.invalid.sort(sortAlphabetically)
- const missing = this.missing.sort(sortAlphabetically)
-
- const hasNoInvalidOrMissing = invalid.length === 0 && missing.length === 0
-
- if (!hasNoInvalidOrMissing) {
- this.exitCode = 1
- }
-
- if (this.npm.config.get('json')) {
- this.appendOutput(JSON.stringify({
- invalid: this.makeJSON(invalid),
- missing: this.makeJSON(missing),
- }, null, 2))
- return
- }
- const end = process.hrtime.bigint()
- const elapsed = end - start
-
- const auditedPlural = this.auditedWithKeysCount > 1 ? 's' : ''
- const timing = `audited ${this.auditedWithKeysCount} package${auditedPlural} in ` +
- `${Math.floor(Number(elapsed) / 1e9)}s`
- this.appendOutput(`${timing}\n`)
-
- if (this.verifiedCount) {
- const verifiedBold = this.npm.chalk.bold('verified')
- const msg = this.verifiedCount === 1 ?
- `${this.verifiedCount} package has a ${verifiedBold} registry signature\n` :
- `${this.verifiedCount} packages have ${verifiedBold} registry signatures\n`
- this.appendOutput(msg)
- }
-
- if (missing.length) {
- const missingClr = this.npm.chalk.bold(this.npm.chalk.red('missing'))
- const msg = missing.length === 1 ?
- `package has a ${missingClr} registry signature` :
- `packages have ${missingClr} registry signatures`
- this.appendOutput(
- `${missing.length} ${msg} but the registry is ` +
- `providing signing keys:\n`
- )
- this.appendOutput(this.humanOutput(missing))
- }
-
- if (invalid.length) {
- const invalidClr = this.npm.chalk.bold(this.npm.chalk.red('invalid'))
- const msg = invalid.length === 1 ?
- `${invalid.length} package has an ${invalidClr} registry signature:\n` :
- `${invalid.length} packages have ${invalidClr} registry signatures:\n`
- this.appendOutput(
- `${missing.length ? '\n' : ''}${msg}`
- )
- this.appendOutput(this.humanOutput(invalid))
- const tamperMsg = invalid.length === 1 ?
- `\nSomeone might have tampered with this package since it was ` +
- `published on the registry!\n` :
- `\nSomeone might have tampered with these packages since they where ` +
- `published on the registry!\n`
- this.appendOutput(tamperMsg)
- }
- }
-
- appendOutput (...args) {
- this.output.push(...args.flat())
- }
-
- report () {
- return { report: this.output.join('\n'), exitCode: this.exitCode }
- }
-
- getEdgesOut (nodes, filterSet) {
- const edges = new Set()
- const registries = new Set()
- for (const node of nodes) {
- for (const edge of node.edgesOut.values()) {
- const filteredOut =
- edge.from
- && filterSet
- && filterSet.size > 0
- && !filterSet.has(edge.from.target)
-
- if (!filteredOut) {
- const spec = this.getEdgeSpec(edge)
- if (spec) {
- // Prefetch and cache public keys from used registries
- registries.add(this.getSpecRegistry(spec))
- }
- edges.add(edge)
- }
- }
- }
- return { edges, registries }
- }
-
- async setKeys ({ registry }) {
- const keys = await fetch.json('/-/npm/v1/keys', {
- ...this.npm.flatOptions,
- registry,
- }).then(({ keys: ks }) => ks.map((key) => ({
- ...key,
- pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`,
- }))).catch(err => {
- if (err.code === 'E404' || err.code === 'E400') {
- return null
- } else {
- throw err
- }
- })
- if (keys) {
- this.keys.set(registry, keys)
- }
- }
-
- getEdgeType (edge) {
- return edge.optional ? 'optionalDependencies'
- : edge.peer ? 'peerDependencies'
- : edge.dev ? 'devDependencies'
- : 'dependencies'
- }
-
- getEdgeSpec (edge) {
- let name = edge.name
- try {
- name = npa(edge.spec).subSpec.name
- } catch {
- // leave it as edge.name
- }
- try {
- return npa(`${name}@${edge.spec}`)
- } catch {
- // Skip packages with invalid spec
- }
- }
-
- buildRegistryConfig (registry) {
- const keys = this.keys.get(registry) || []
- const parsedRegistry = new URL(registry)
- const regKey = `//${parsedRegistry.host}${parsedRegistry.pathname}`
- return {
- [`${regKey}:_keys`]: keys,
- }
- }
-
- getSpecRegistry (spec) {
- return fetch.pickRegistry(spec, this.npm.flatOptions)
- }
-
- getValidPackageInfo (edge) {
- const type = this.getEdgeType(edge)
- // Skip potentially optional packages that are not on disk, as these could
- // be omitted during install
- if (edge.error === 'MISSING' && type !== 'dependencies') {
- return
- }
-
- const spec = this.getEdgeSpec(edge)
- // Skip invalid version requirements
- if (!spec) {
- return
- }
- const node = edge.to || edge
- const { version } = node.package || {}
-
- if (node.isWorkspace || // Skip local workspaces packages
- !version || // Skip packages that don't have a installed version, e.g. optonal dependencies
- !spec.registry) { // Skip if not from registry, e.g. git package
- return
- }
-
- for (const omitType of this.npm.config.get('omit')) {
- if (node[omitType]) {
- return
- }
- }
-
- return {
- name: spec.name,
- version,
- type,
- location: node.location,
- registry: this.getSpecRegistry(spec),
- }
- }
-
- async verifySignatures (name, version, registry) {
- const {
- _integrity: integrity,
- _signatures,
- _resolved: resolved,
- } = await pacote.manifest(`${name}@${version}`, {
- verifySignatures: true,
- ...this.buildRegistryConfig(registry),
- ...this.npm.flatOptions,
- })
- const signatures = _signatures || []
- return {
- integrity,
- signatures,
- resolved,
- }
- }
-
- async getVerifiedInfo (edge) {
- const info = this.getValidPackageInfo(edge)
- if (!info) {
- return
- }
- const { name, version, location, registry, type } = info
- if (this.checkedPackages.has(location)) {
- // we already did or are doing this one
- return
- }
- this.checkedPackages.add(location)
-
- // We only "audit" or verify the signature, or the presence of it, on
- // packages whose registry returns signing keys
- const keys = this.keys.get(registry) || []
- if (keys.length) {
- this.auditedWithKeysCount += 1
- }
-
- try {
- const { integrity, signatures, resolved } = await this.verifySignatures(
- name, version, registry
- )
-
- // Currently we only care about missing signatures on registries that provide a public key
- // We could make this configurable in the future with a strict/paranoid mode
- if (signatures.length) {
- this.verifiedCount += 1
- } else if (keys.length) {
- this.missing.push({
- name,
- version,
- location,
- resolved,
- integrity,
- registry,
- })
- }
- } catch (e) {
- if (e.code === 'EINTEGRITYSIGNATURE') {
- const { signature, keyid, integrity, resolved } = e
- this.invalid.push({
- name,
- type,
- version,
- resolved,
- location,
- integrity,
- registry,
- signature,
- keyid,
- })
- } else {
- throw e
- }
- }
- }
-
- humanOutput (list) {
- return list.map(v =>
- `${this.npm.chalk.red(`${v.name}@${v.version}`)} (${v.registry})`
- ).join('\n')
- }
-
- makeJSON (deps) {
- return deps.map(d => ({
- name: d.name,
- version: d.version,
- location: d.location,
- resolved: d.resolved,
- integrity: d.integrity,
- signature: d.signature,
- keyid: d.keyid,
- }))
- }
-}
+const VerifySignatures = require('../utils/verify-signatures.js')
class Audit extends ArboristWorkspaceCmd {
static description = 'Run a security audit'
@@ -342,7 +14,9 @@ class Audit extends ArboristWorkspaceCmd {
'force',
'json',
'package-lock-only',
+ 'package-lock',
'omit',
+ 'include',
'foreground-scripts',
'ignore-scripts',
...super.params,
@@ -350,15 +24,16 @@ class Audit extends ArboristWorkspaceCmd {
static usage = ['[fix|signatures]']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
- return ['fix']
+ return ['fix', 'signatures']
}
switch (argv[2]) {
case 'fix':
+ case 'signatures':
return []
default:
throw Object.assign(new Error(argv[2] + ' not recognized'), {
@@ -376,7 +51,12 @@ class Audit extends ArboristWorkspaceCmd {
}
async auditAdvisories (args) {
+ const fix = args[0] === 'fix'
+ if (this.npm.config.get('package-lock') === false && fix) {
+ throw this.usageError('fix can not be used without a package-lock')
+ }
const reporter = this.npm.config.get('json') ? 'json' : 'detail'
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
audit: true,
@@ -386,16 +66,18 @@ class Audit extends ArboristWorkspaceCmd {
}
const arb = new Arborist(opts)
- const fix = args[0] === 'fix'
await arb.audit({ fix })
if (fix) {
await reifyFinish(this.npm, arb)
} else {
// will throw if there's an error, because this is an audit command
auditError(this.npm, arb.auditReport)
- const result = auditReport(arb.auditReport, opts)
+ const result = npmAuditReport(arb.auditReport, {
+ ...opts,
+ chalk: this.npm.chalk,
+ })
process.exitCode = process.exitCode || result.exitCode
- this.npm.output(result.report)
+ output.standard(result.report)
}
}
@@ -408,7 +90,8 @@ class Audit extends ArboristWorkspaceCmd {
)
}
- log.verbose('loading installed dependencies')
+ log.verbose('audit', 'loading installed dependencies')
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
path: this.npm.prefix,
@@ -432,9 +115,6 @@ class Audit extends ArboristWorkspaceCmd {
const verify = new VerifySignatures(tree, filterSet, this.npm, { ...opts })
await verify.run()
- const result = verify.report()
- process.exitCode = process.exitCode || result.exitCode
- this.npm.output(result.report)
}
}
diff --git a/lib/commands/bugs.js b/lib/commands/bugs.js
index 17cbd96649b87..44926afbc9a0a 100644
--- a/lib/commands/bugs.js
+++ b/lib/commands/bugs.js
@@ -21,8 +21,9 @@ class Bugs extends PackageUrlCmd {
// try to get it from the repo, if possible
const info = this.hostedFromMani(mani)
- if (info) {
- return info.bugs()
+ const infoUrl = info?.bugs()
+ if (infoUrl) {
+ return infoUrl
}
// just send them to the website, hopefully that has some info!
diff --git a/lib/commands/cache.js b/lib/commands/cache.js
index 0ab40b9ed44a9..87c70a57dc0ed 100644
--- a/lib/commands/cache.js
+++ b/lib/commands/cache.js
@@ -1,14 +1,13 @@
const cacache = require('cacache')
-const Arborist = require('@npmcli/arborist')
const pacote = require('pacote')
-const fs = require('fs/promises')
-const { join } = require('path')
+const fs = require('node:fs/promises')
+const { join } = require('node:path')
const semver = require('semver')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
const npa = require('npm-package-arg')
const jsonParse = require('json-parse-even-better-errors')
const localeCompare = require('@isaacs/string-locale-compare')('en')
-const log = require('../utils/log-shim')
+const { log, output } = require('proc-log')
const searchCachePackage = async (path, parsed, cacheKeys) => {
/* eslint-disable-next-line max-len */
@@ -74,10 +73,10 @@ class Cache extends BaseCommand {
'verify',
]
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
- return ['add', 'clean', 'verify', 'ls', 'delete']
+ return ['add', 'clean', 'verify', 'ls']
}
// TODO - eventually...
@@ -86,7 +85,6 @@ class Cache extends BaseCommand {
case 'clean':
case 'add':
case 'ls':
- case 'delete':
return []
}
}
@@ -134,10 +132,10 @@ class Cache extends BaseCommand {
try {
entry = await cacache.get(cachePath, key)
} catch (err) {
- log.warn(`Not Found: ${key}`)
+ log.warn('cache', `Not Found: ${key}`)
break
}
- this.npm.output(`Deleted: ${key}`)
+ output.standard(`Deleted: ${key}`)
await cacache.rm.entry(cachePath, key)
// XXX this could leave other entries without content!
await cacache.rm.content(cachePath, entry.integrity)
@@ -154,15 +152,20 @@ class Cache extends BaseCommand {
throw this.usageError('First argument to `add` is required')
}
- return Promise.all(args.map(spec => {
+ await Promise.all(args.map(async spec => {
log.silly('cache add', 'spec', spec)
// we ask pacote for the thing, and then just throw the data
// away so that it tee-pipes it into the cache like it does
// for a normal request.
- return pacote.tarball.stream(spec, stream => {
+ await pacote.tarball.stream(spec, stream => {
stream.resume()
return stream.promise()
- }, { ...this.npm.flatOptions, Arborist })
+ }, { ...this.npm.flatOptions })
+
+ await pacote.manifest(spec, {
+ ...this.npm.flatOptions,
+ fullMetadata: true,
+ })
}))
}
@@ -172,20 +175,20 @@ class Cache extends BaseCommand {
? `~${cache.slice(process.env.HOME.length)}`
: cache
const stats = await cacache.verify(cache)
- this.npm.output(`Cache verified and compressed (${prefix})`)
- this.npm.output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
+ output.standard(`Cache verified and compressed (${prefix})`)
+ output.standard(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
if (stats.badContentCount) {
- this.npm.output(`Corrupted content removed: ${stats.badContentCount}`)
+ output.standard(`Corrupted content removed: ${stats.badContentCount}`)
}
if (stats.reclaimedCount) {
/* eslint-disable-next-line max-len */
- this.npm.output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
+ output.standard(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
}
if (stats.missingContent) {
- this.npm.output(`Missing content: ${stats.missingContent}`)
+ output.standard(`Missing content: ${stats.missingContent}`)
}
- this.npm.output(`Index entries: ${stats.totalEntries}`)
- this.npm.output(`Finished in ${stats.runTime.total / 1000}s`)
+ output.standard(`Index entries: ${stats.totalEntries}`)
+ output.standard(`Finished in ${stats.runTime.total / 1000}s`)
}
// npm cache ls [--package ...]
@@ -205,10 +208,10 @@ class Cache extends BaseCommand {
results.add(key)
}
}
- [...results].sort(localeCompare).forEach(key => this.npm.output(key))
+ [...results].sort(localeCompare).forEach(key => output.standard(key))
return
}
- cacheKeys.sort(localeCompare).forEach(key => this.npm.output(key))
+ cacheKeys.sort(localeCompare).forEach(key => output.standard(key))
}
}
diff --git a/lib/commands/ci.js b/lib/commands/ci.js
index a2c61044eb96e..c190de7e3ea7f 100644
--- a/lib/commands/ci.js
+++ b/lib/commands/ci.js
@@ -1,18 +1,32 @@
-const Arborist = require('@npmcli/arborist')
const reifyFinish = require('../utils/reify-finish.js')
const runScript = require('@npmcli/run-script')
-const fs = require('fs/promises')
-const log = require('../utils/log-shim.js')
+const fs = require('node:fs/promises')
+const path = require('node:path')
+const { log, time } = require('proc-log')
const validateLockfile = require('../utils/validate-lockfile.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
-const Install = require('./install.js')
+const getWorkspaces = require('../utils/get-workspaces.js')
class CI extends ArboristWorkspaceCmd {
static description = 'Clean install a project'
static name = 'ci'
- static params = Install.params
+ // These are in the order they will show up in when running "-h"
+ static params = [
+ 'install-strategy',
+ 'legacy-bundling',
+ 'global-style',
+ 'omit',
+ 'include',
+ 'strict-peer-deps',
+ 'foreground-scripts',
+ 'ignore-scripts',
+ 'audit',
+ 'bin-links',
+ 'fund',
+ 'dry-run',
+ ...super.params,
+ ]
async exec () {
if (this.npm.global) {
@@ -22,6 +36,7 @@ class CI extends ArboristWorkspaceCmd {
}
const where = this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
packageLock: true, // npm ci should never skip lock files
@@ -61,14 +76,26 @@ class CI extends ArboristWorkspaceCmd {
)
}
- // Only remove node_modules after we've successfully loaded the virtual
- // tree and validated the lockfile
- await this.npm.time('npm-ci:rm', async () => {
- const path = `${where}/node_modules`
- // get the list of entries so we can skip the glob for performance
- const entries = await fs.readdir(path, null).catch(er => [])
- return Promise.all(entries.map(f => fs.rm(`${path}/${f}`, { force: true })))
- })
+ const dryRun = this.npm.config.get('dry-run')
+ if (!dryRun) {
+ const workspacePaths = await getWorkspaces([], {
+ path: this.npm.localPrefix,
+ includeWorkspaceRoot: true,
+ })
+
+ // Only remove node_modules after we've successfully loaded the virtual
+ // tree and validated the lockfile
+ await time.start('npm-ci:rm', async () => {
+ return await Promise.all([...workspacePaths.values()].map(async modulePath => {
+ const fullPath = path.join(modulePath, 'node_modules')
+ // get the list of entries so we can skip the glob for performance
+ const entries = await fs.readdir(fullPath, null).catch(() => [])
+ return Promise.all(entries.map(folder => {
+ return fs.rm(path.join(fullPath, folder), { force: true, recursive: true })
+ }))
+ }))
+ })
+ }
await arb.reify(opts)
@@ -91,7 +118,6 @@ class CI extends ArboristWorkspaceCmd {
args: [],
scriptShell,
stdio: 'inherit',
- banner: !this.npm.silent,
event,
})
}
diff --git a/lib/commands/completion.js b/lib/commands/completion.js
index f5604e099f9a2..f8c2e00c6baee 100644
--- a/lib/commands/completion.js
+++ b/lib/commands/completion.js
@@ -27,30 +27,29 @@
// Matches are wrapped with ' to escape them, if necessary, and then printed
// one per line for the shell completion method to consume in IFS=$'\n' mode
// as an array.
-//
-const fs = require('fs/promises')
+const fs = require('node:fs/promises')
const nopt = require('nopt')
-const { resolve } = require('path')
+const { resolve } = require('node:path')
+const { output } = require('proc-log')
+const Npm = require('../npm.js')
+const { definitions, shorthands } = require('@npmcli/config/lib/definitions')
+const { commands, aliases, deref } = require('../utils/cmd-list.js')
+const { isWindowsShell } = require('../utils/is-windows.js')
+const BaseCommand = require('../base-cmd.js')
+
+const fileExists = (file) => fs.stat(file).then(s => s.isFile()).catch(() => false)
-const { definitions, shorthands } = require('../utils/config/index.js')
-const { aliases, commands, plumbing } = require('../utils/cmd-list.js')
-const aliasNames = Object.keys(aliases)
-const fullList = commands.concat(aliasNames).filter(c => !plumbing.includes(c))
const configNames = Object.keys(definitions)
const shorthandNames = Object.keys(shorthands)
const allConfs = configNames.concat(shorthandNames)
-const { isWindowsShell } = require('../utils/is-windows.js')
-const fileExists = (file) => fs.stat(file).then(s => s.isFile()).catch(() => false)
-
-const BaseCommand = require('../base-command.js')
class Completion extends BaseCommand {
static description = 'Tab Completion for npm'
static name = 'completion'
// completion for the completion command
- async completion (opts) {
+ static async completion (opts) {
if (opts.w > 2) {
return
}
@@ -79,12 +78,10 @@ class Completion extends BaseCommand {
})
}
- const { COMP_CWORD, COMP_LINE, COMP_POINT } = process.env
+ const { COMP_CWORD, COMP_LINE, COMP_POINT, COMP_FISH } = process.env
// if the COMP_* isn't in the env, then just dump the script.
- if (COMP_CWORD === undefined ||
- COMP_LINE === undefined ||
- COMP_POINT === undefined) {
+ if (COMP_CWORD === undefined || COMP_LINE === undefined || COMP_POINT === undefined) {
return dumpScript(resolve(this.npm.npmRoot, 'lib', 'utils', 'completion.sh'))
}
@@ -111,6 +108,7 @@ class Completion extends BaseCommand {
partialWords.push(partialWord)
const opts = {
+ isFish: COMP_FISH === 'true',
words,
w,
word,
@@ -159,10 +157,14 @@ class Completion extends BaseCommand {
// at this point, if words[1] is some kind of npm command,
// then complete on it.
// otherwise, do nothing
- const impl = await this.npm.cmd(cmd)
- if (impl.completion) {
- const comps = await impl.completion(opts)
- return this.wrap(opts, comps)
+ try {
+ const { completion } = Npm.cmd(cmd)
+ if (completion) {
+ const comps = await completion(opts, this.npm)
+ return this.wrap(opts, comps)
+ }
+ } catch {
+ // it wasn't a valid command, so do nothing
}
}
@@ -183,7 +185,7 @@ class Completion extends BaseCommand {
}
if (compls.length > 0) {
- this.npm.output(compls.join('\n'))
+ output.standard(compls.join('\n'))
}
}
}
@@ -246,7 +248,7 @@ const configCompl = opts => {
// expand with the valid values of various config values.
// not yet implemented.
-const configValueCompl = opts => []
+const configValueCompl = () => []
// check if the thing is a flag or not.
const isFlag = word => {
@@ -263,18 +265,19 @@ const isFlag = word => {
// complete against the npm commands
// if they all resolve to the same thing, just return the thing it already is
-const cmdCompl = (opts, npm) => {
- const matches = fullList.filter(c => c.startsWith(opts.partialWord))
+const cmdCompl = (opts) => {
+ const allCommands = commands.concat(Object.keys(aliases))
+ const matches = allCommands.filter(c => c.startsWith(opts.partialWord))
if (!matches.length) {
return matches
}
- const derefs = new Set([...matches.map(c => npm.deref(c))])
+ const derefs = new Set([...matches.map(c => deref(c))])
if (derefs.size === 1) {
return [...derefs]
}
- return fullList
+ return allCommands
}
module.exports = Completion
diff --git a/lib/commands/config.js b/lib/commands/config.js
index ac5a74d01f7de..6b1447d7e8426 100644
--- a/lib/commands/config.js
+++ b/lib/commands/config.js
@@ -1,25 +1,42 @@
-// don't expand so that we only assemble the set of defaults when needed
-const configDefs = require('../utils/config/index.js')
-
-const { mkdir, readFile, writeFile } = require('fs/promises')
-const { dirname, resolve } = require('path')
-const { spawn } = require('child_process')
-const { EOL } = require('os')
-const ini = require('ini')
+const { mkdir, readFile, writeFile } = require('node:fs/promises')
+const { dirname, resolve } = require('node:path')
+const { spawn } = require('node:child_process')
+const { EOL } = require('node:os')
const localeCompare = require('@isaacs/string-locale-compare')('en')
-const rpj = require('read-package-json-fast')
-const log = require('../utils/log-shim.js')
+const pkgJson = require('@npmcli/package-json')
+const { defaults, definitions } = require('@npmcli/config/lib/definitions')
+const { log, output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
+const { redact } = require('@npmcli/redact')
// These are the configs that we can nerf-dart. Not all of them currently even
-// *have* config definitions so we have to explicitly validate them here
+// *have* config definitions so we have to explicitly validate them here.
+// This is used to validate during "npm config set"
const nerfDarts = [
'_auth',
'_authToken',
- 'username',
'_password',
+ 'certfile',
'email',
+ 'keyfile',
+ 'username',
+]
+// These are the config values to swap with "protected". It does not catch
+// every single sensitive thing a user may put in the npmrc file but it gets
+// the common ones. This is distinct from nerfDarts because that is used to
+// validate valid configs during "npm config set", and folks may have old
+// invalid entries lying around in a config file that we still want to protect
+// when running "npm config list"
+// This is a more general list of values to consider protected. You can not
+// "npm config get" them, and they will not display during "npm config list"
+const protected = [
+ 'auth',
+ 'authToken',
'certfile',
+ 'email',
'keyfile',
+ 'password',
+ 'username',
]
// take an array of `[key, value, k2=v2, k3, v3, ...]` and turn into
@@ -37,19 +54,35 @@ const keyValues = args => {
return kv
}
-const publicVar = k => {
+const isProtected = (k) => {
// _password
if (k.startsWith('_')) {
- return false
+ return true
+ }
+ if (protected.includes(k)) {
+ return true
}
// //localhost:8080/:_password
- if (k.startsWith('//') && k.includes(':_')) {
- return false
+ if (k.startsWith('//')) {
+ if (k.includes(':_')) {
+ return true
+ }
+ // //registry:_authToken or //registry:authToken
+ for (const p of protected) {
+ if (k.endsWith(`:${p}`) || k.endsWith(`:_${p}`)) {
+ return true
+ }
+ }
}
- return true
+ return false
}
-const BaseCommand = require('../base-command.js')
+// Private fields are either protected or their values can be redacted
+const isPrivate = (k, v) => isProtected(k) || redact(v) !== v
+
+const displayVar = (k, v) =>
+ `${k} = ${isProtected(k, v) ? '(protected)' : JSON.stringify(redact(v))}`
+
class Config extends BaseCommand {
static description = 'Manage the npm configuration files'
static name = 'config'
@@ -74,7 +107,7 @@ class Config extends BaseCommand {
static skipConfigValidation = true
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv[1] !== 'config') {
argv.unshift('config')
@@ -102,7 +135,7 @@ class Config extends BaseCommand {
case 'get':
case 'delete':
case 'rm':
- return Object.keys(configDefs.definitions)
+ return Object.keys(definitions)
case 'edit':
case 'list':
case 'ls':
@@ -113,35 +146,30 @@ class Config extends BaseCommand {
}
async exec ([action, ...args]) {
- log.disableProgress()
- try {
- switch (action) {
- case 'set':
- await this.set(args)
- break
- case 'get':
- await this.get(args)
- break
- case 'delete':
- case 'rm':
- case 'del':
- await this.del(args)
- break
- case 'list':
- case 'ls':
- await (this.npm.flatOptions.json ? this.listJson() : this.list())
- break
- case 'edit':
- await this.edit()
- break
- case 'fix':
- await this.fix()
- break
- default:
- throw this.usageError()
- }
- } finally {
- log.enableProgress()
+ switch (action) {
+ case 'set':
+ await this.set(args)
+ break
+ case 'get':
+ await this.get(args)
+ break
+ case 'delete':
+ case 'rm':
+ case 'del':
+ await this.del(args)
+ break
+ case 'list':
+ case 'ls':
+ await (this.npm.flatOptions.json ? this.listJson() : this.list())
+ break
+ case 'edit':
+ await this.edit()
+ break
+ case 'fix':
+ await this.fix()
+ break
+ default:
+ throw this.usageError()
}
}
@@ -163,7 +191,13 @@ class Config extends BaseCommand {
`The \`${baseKey}\` option is deprecated, and can not be set in this way${deprecated}`
)
}
- this.npm.config.set(key, val || '', where)
+
+ if (val === '') {
+ this.npm.config.delete(key, where)
+ } else {
+ this.npm.config.set(key, val, where)
+ }
+
if (!this.npm.config.validate(where)) {
log.warn('config', 'omitting invalid config values')
}
@@ -179,14 +213,15 @@ class Config extends BaseCommand {
const out = []
for (const key of keys) {
- if (!publicVar(key)) {
+ const val = this.npm.config.get(key)
+ if (isPrivate(key, val)) {
throw new Error(`The ${key} option is protected, and can not be retrieved in this way`)
}
const pref = keys.length > 1 ? `${key}=` : ''
- out.push(pref + this.npm.config.get(key))
+ out.push(pref + val)
}
- this.npm.output(out.join('\n'))
+ output.standard(out.join('\n'))
}
async del (keys) {
@@ -202,6 +237,7 @@ class Config extends BaseCommand {
}
async edit () {
+ const ini = require('ini')
const e = this.npm.flatOptions.editor
const where = this.npm.flatOptions.location
const file = this.npm.config.data.get(where).source
@@ -213,7 +249,7 @@ class Config extends BaseCommand {
const data = (
await readFile(file, 'utf8').catch(() => '')
).replace(/\r\n/g, '\n')
- const entries = Object.entries(configDefs.defaults)
+ const entries = Object.entries(defaults)
const defData = entries.reduce((str, [key, val]) => {
const obj = { [key]: val }
const i = ini.stringify(obj)
@@ -283,7 +319,7 @@ ${defData}
this.npm.config.repair(problems)
const locations = []
- this.npm.output('The following configuration problems have been repaired:\n')
+ output.standard('The following configuration problems have been repaired:\n')
const summary = problems.map(({ action, from, to, key, where }) => {
// coverage disabled for else branch because it is intentionally omitted
// istanbul ignore else
@@ -296,7 +332,7 @@ ${defData}
return `- \`${key}\` deleted from ${where} config`
}
}).join('\n')
- this.npm.output(summary)
+ output.standard(summary)
return await Promise.all(locations.map((location) => this.npm.config.save(location)))
}
@@ -310,18 +346,17 @@ ${defData}
continue
}
- const keys = Object.keys(data).sort(localeCompare)
- if (!keys.length) {
+ const entries = Object.entries(data).sort(([a], [b]) => localeCompare(a, b))
+ if (!entries.length) {
continue
}
msg.push(`; "${where}" config from ${source}`, '')
- for (const k of keys) {
- const v = publicVar(k) ? JSON.stringify(data[k]) : '(protected)'
+ for (const [k, v] of entries) {
+ const display = displayVar(k, v)
const src = this.npm.config.find(k)
- const overridden = src !== where
- msg.push((overridden ? '; ' : '') +
- `${k} = ${v} ${overridden ? `; overridden by ${src}` : ''}`)
+ msg.push(src === where ? display : `; ${display} ; overridden by ${src}`)
+ msg.push()
}
msg.push('')
}
@@ -340,34 +375,35 @@ ${defData}
}
if (!this.npm.global) {
- const pkgPath = resolve(this.npm.prefix, 'package.json')
- const pkg = await rpj(pkgPath).catch(() => ({}))
+ const { content } = await pkgJson.normalize(this.npm.prefix).catch(() => ({ content: {} }))
- if (pkg.publishConfig) {
+ if (content.publishConfig) {
+ const pkgPath = resolve(this.npm.prefix, 'package.json')
msg.push(`; "publishConfig" from ${pkgPath}`)
msg.push('; This set of config values will be used at publish-time.', '')
- const pkgKeys = Object.keys(pkg.publishConfig).sort(localeCompare)
- for (const k of pkgKeys) {
- const v = publicVar(k) ? JSON.stringify(pkg.publishConfig[k]) : '(protected)'
- msg.push(`${k} = ${v}`)
+ const entries = Object.entries(content.publishConfig)
+ .sort(([a], [b]) => localeCompare(a, b))
+ for (const [k, value] of entries) {
+ msg.push(displayVar(k, value))
}
msg.push('')
}
}
- this.npm.output(msg.join('\n').trim())
+ output.standard(msg.join('\n').trim())
}
async listJson () {
const publicConf = {}
for (const key in this.npm.config.list[0]) {
- if (!publicVar(key)) {
+ const value = this.npm.config.get(key)
+ if (isPrivate(key, value)) {
continue
}
- publicConf[key] = this.npm.config.get(key)
+ publicConf[key] = value
}
- this.npm.output(JSON.stringify(publicConf, null, 2))
+ output.buffer(publicConf)
}
}
diff --git a/lib/commands/dedupe.js b/lib/commands/dedupe.js
index 0cc0e80709883..e07bcd31e894b 100644
--- a/lib/commands/dedupe.js
+++ b/lib/commands/dedupe.js
@@ -1,9 +1,7 @@
-// dedupe duplicated packages, or find them in the tree
-const Arborist = require('@npmcli/arborist')
const reifyFinish = require('../utils/reify-finish.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+// dedupe duplicated packages, or find them in the tree
class Dedupe extends ArboristWorkspaceCmd {
static description = 'Reduce duplication in the package tree'
static name = 'dedupe'
@@ -14,6 +12,7 @@ class Dedupe extends ArboristWorkspaceCmd {
'strict-peer-deps',
'package-lock',
'omit',
+ 'include',
'ignore-scripts',
'audit',
'bin-links',
@@ -22,7 +21,7 @@ class Dedupe extends ArboristWorkspaceCmd {
...super.params,
]
- async exec (args) {
+ async exec () {
if (this.npm.global) {
const er = new Error('`npm dedupe` does not work in global mode.')
er.code = 'EDEDUPEGLOBAL'
@@ -31,6 +30,7 @@ class Dedupe extends ArboristWorkspaceCmd {
const dryRun = this.npm.config.get('dry-run')
const where = this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
path: where,
diff --git a/lib/commands/deprecate.js b/lib/commands/deprecate.js
index 1e1b8994c55a3..977fd9fce11da 100644
--- a/lib/commands/deprecate.js
+++ b/lib/commands/deprecate.js
@@ -1,10 +1,11 @@
const fetch = require('npm-registry-fetch')
-const otplease = require('../utils/otplease.js')
+const { otplease } = require('../utils/auth.js')
const npa = require('npm-package-arg')
+const { log } = require('proc-log')
const semver = require('semver')
const getIdentity = require('../utils/get-identity.js')
const libaccess = require('libnpmaccess')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Deprecate extends BaseCommand {
static description = 'Deprecate a version of a package'
@@ -15,15 +16,15 @@ class Deprecate extends BaseCommand {
'otp',
]
- static ignoreImplicitWorkspace = false
+ static ignoreImplicitWorkspace = true
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length > 1) {
return []
}
- const username = await getIdentity(this.npm, this.npm.flatOptions)
- const packages = await libaccess.getPackages(username, this.npm.flatOptions)
+ const username = await getIdentity(npm, npm.flatOptions)
+ const packages = await libaccess.getPackages(username, npm.flatOptions)
return Object.keys(packages)
.filter((name) =>
packages[name] === 'write' &&
@@ -52,19 +53,23 @@ class Deprecate extends BaseCommand {
query: { write: true },
})
- Object.keys(packument.versions)
+ const versions = Object.keys(packument.versions)
.filter(v => semver.satisfies(v, spec, { includePrerelease: true }))
- .forEach(v => {
- packument.versions[v].deprecated = msg
- })
- return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, {
- ...opts,
- spec: p,
- method: 'PUT',
- body: packument,
- ignoreBody: true,
- }))
+ if (versions.length) {
+ for (const v of versions) {
+ packument.versions[v].deprecated = msg
+ }
+ return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, {
+ ...opts,
+ spec: p,
+ method: 'PUT',
+ body: packument,
+ ignoreBody: true,
+ }))
+ } else {
+ log.warn('deprecate', 'No version found for', p.rawSpec)
+ }
}
}
diff --git a/lib/commands/diff.js b/lib/commands/diff.js
index 1f4bfd3eb1151..3fa8090a35046 100644
--- a/lib/commands/diff.js
+++ b/lib/commands/diff.js
@@ -1,13 +1,12 @@
-const { resolve } = require('path')
+const { resolve } = require('node:path')
const semver = require('semver')
const libnpmdiff = require('libnpmdiff')
const npa = require('npm-package-arg')
-const Arborist = require('@npmcli/arborist')
const pacote = require('pacote')
const pickManifest = require('npm-pick-manifest')
-const log = require('../utils/log-shim')
-const readPackage = require('read-package-json-fast')
-const BaseCommand = require('../base-command.js')
+const { log, output } = require('proc-log')
+const pkgJson = require('@npmcli/package-json')
+const BaseCommand = require('../base-cmd.js')
class Diff extends BaseCommand {
static description = 'The registry diff command'
@@ -65,7 +64,7 @@ class Diff extends BaseCommand {
diffFiles: args,
where: this.top,
})
- return this.npm.output(res)
+ return output.standard(res)
}
async execWorkspaces (args) {
@@ -79,10 +78,10 @@ class Diff extends BaseCommand {
// get the package name from the packument at `path`
// throws if no packument is present OR if it does not have `name` attribute
- async packageName (path) {
+ async packageName () {
let name
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
name = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
@@ -104,7 +103,7 @@ class Diff extends BaseCommand {
// no arguments, defaults to comparing cwd
// to its latest published registry version
if (!a) {
- const pkgName = await this.packageName(this.prefix)
+ const pkgName = await this.packageName()
return [
`${pkgName}@${this.npm.config.get('tag')}`,
`file:${this.prefix.replace(/#/g, '%23')}`,
@@ -116,7 +115,7 @@ class Diff extends BaseCommand {
let noPackageJson
let pkgName
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
pkgName = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
@@ -146,6 +145,7 @@ class Diff extends BaseCommand {
if (spec.registry) {
let actualTree
let node
+ const Arborist = require('@npmcli/arborist')
try {
const opts = {
...this.npm.flatOptions,
@@ -228,7 +228,7 @@ class Diff extends BaseCommand {
if (semverA && semverB) {
let pkgName
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
pkgName = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
@@ -257,6 +257,7 @@ class Diff extends BaseCommand {
async findVersionsByPackageName (specs) {
let actualTree
+ const Arborist = require('@npmcli/arborist')
try {
const opts = {
...this.npm.flatOptions,
diff --git a/lib/commands/dist-tag.js b/lib/commands/dist-tag.js
index bc61a4691e55a..663f0eb44a26a 100644
--- a/lib/commands/dist-tag.js
+++ b/lib/commands/dist-tag.js
@@ -1,11 +1,10 @@
const npa = require('npm-package-arg')
-const path = require('path')
const regFetch = require('npm-registry-fetch')
const semver = require('semver')
-const log = require('../utils/log-shim')
-const otplease = require('../utils/otplease.js')
-const readPackage = require('read-package-json-fast')
-const BaseCommand = require('../base-command.js')
+const { log, output } = require('proc-log')
+const { otplease } = require('../utils/auth.js')
+const pkgJson = require('@npmcli/package-json')
+const BaseCommand = require('../base-cmd.js')
class DistTag extends BaseCommand {
static description = 'Modify package distribution tags'
@@ -20,7 +19,7 @@ class DistTag extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return ['add', 'rm', 'ls']
@@ -90,6 +89,9 @@ class DistTag extends BaseCommand {
log.verbose('dist-tag add', defaultTag, 'to', spec.name + '@' + version)
+ // make sure new spec with tag is valid, this will throw if invalid
+ npa(`${spec.name}@${defaultTag}`)
+
if (!spec.name || !version || !defaultTag) {
throw this.usageError('must provide a spec with a name and version, and a tag to add')
}
@@ -118,7 +120,7 @@ class DistTag extends BaseCommand {
spec,
}
await otplease(this.npm, reqOpts, o => regFetch(url, o))
- this.npm.output(`+${t}: ${spec.name}@${version}`)
+ output.standard(`+${t}: ${spec.name}@${version}`)
}
async remove (spec, tag, opts) {
@@ -144,7 +146,7 @@ class DistTag extends BaseCommand {
spec,
}
await otplease(this.npm, reqOpts, o => regFetch(url, o))
- this.npm.output(`-${tag}: ${spec.name}@${version}`)
+ output.standard(`-${tag}: ${spec.name}@${version}`)
}
async list (spec, opts) {
@@ -152,7 +154,7 @@ class DistTag extends BaseCommand {
if (this.npm.global) {
throw this.usageError()
}
- const { name } = await readPackage(path.resolve(this.npm.prefix, 'package.json'))
+ const { content: { name } } = await pkgJson.normalize(this.npm.prefix)
if (!name) {
throw this.usageError()
}
@@ -165,7 +167,7 @@ class DistTag extends BaseCommand {
const tags = await this.fetchTags(spec, opts)
const msg =
Object.keys(tags).map(k => `${k}: ${tags[k]}`).sort().join('\n')
- this.npm.output(msg)
+ output.standard(msg)
return tags
} catch (err) {
log.error('dist-tag ls', "Couldn't get dist-tag data for", spec)
@@ -178,7 +180,7 @@ class DistTag extends BaseCommand {
for (const name of this.workspaceNames) {
try {
- this.npm.output(`${name}:`)
+ output.standard(`${name}:`)
await this.list(npa(name), this.npm.flatOptions)
} catch (err) {
// set the exitCode directly, but ignore the error
@@ -203,4 +205,5 @@ class DistTag extends BaseCommand {
return data
}
}
+
module.exports = DistTag
diff --git a/lib/commands/docs.js b/lib/commands/docs.js
index 5d20215b56a07..2259b49f79617 100644
--- a/lib/commands/docs.js
+++ b/lib/commands/docs.js
@@ -1,4 +1,5 @@
const PackageUrlCmd = require('../package-url-cmd.js')
+
class Docs extends PackageUrlCmd {
static description = 'Open documentation for a package in a web browser'
static name = 'docs'
@@ -16,4 +17,5 @@ class Docs extends PackageUrlCmd {
return `https://www.npmjs.com/package/${mani.name}`
}
}
+
module.exports = Docs
diff --git a/lib/commands/doctor.js b/lib/commands/doctor.js
index 19262e537dbe0..8fbd49b7ca8bf 100644
--- a/lib/commands/doctor.js
+++ b/lib/commands/doctor.js
@@ -1,21 +1,14 @@
const cacache = require('cacache')
-const fs = require('fs')
+const { access, lstat, readdir, constants: { R_OK, W_OK, X_OK } } = require('node:fs/promises')
const fetch = require('make-fetch-happen')
-const Table = require('cli-table3')
const which = require('which')
const pacote = require('pacote')
-const { resolve } = require('path')
+const { resolve } = require('node:path')
const semver = require('semver')
-const { promisify } = require('util')
-const log = require('../utils/log-shim.js')
+const { log, output } = require('proc-log')
const ping = require('../utils/ping.js')
-const {
- registry: { default: defaultRegistry },
-} = require('../utils/config/definitions.js')
-const lstat = promisify(fs.lstat)
-const readdir = promisify(fs.readdir)
-const access = promisify(fs.access)
-const { R_OK, W_OK, X_OK } = fs.constants
+const { defaults } = require('@npmcli/config/lib/definitions')
+const BaseCommand = require('../base-cmd.js')
const maskLabel = mask => {
const label = []
@@ -36,57 +29,59 @@ const maskLabel = mask => {
const subcommands = [
{
- groups: ['ping', 'registry'],
- title: 'npm ping',
+ // Ping is left in as a legacy command but is listed as "connection" to
+ // make more sense to more people
+ groups: ['connection', 'ping', 'registry'],
+ title: 'Connecting to the registry',
cmd: 'checkPing',
}, {
groups: ['versions'],
- title: 'npm -v',
+ title: 'Checking npm version',
cmd: 'getLatestNpmVersion',
}, {
groups: ['versions'],
- title: 'node -v',
+ title: 'Checking node version',
cmd: 'getLatestNodejsVersion',
}, {
groups: ['registry'],
- title: 'npm config get registry',
+ title: 'Checking configured npm registry',
cmd: 'checkNpmRegistry',
}, {
groups: ['environment'],
- title: 'git executable in PATH',
+ title: 'Checking for git executable in PATH',
cmd: 'getGitPath',
}, {
groups: ['environment'],
- title: 'global bin folder in PATH',
+ title: 'Checking for global bin folder in PATH',
cmd: 'getBinPath',
}, {
groups: ['permissions', 'cache'],
- title: 'Perms check on cached files',
+ title: 'Checking permissions on cached files (this may take awhile)',
cmd: 'checkCachePermission',
windows: false,
}, {
groups: ['permissions'],
- title: 'Perms check on local node_modules',
+ title: 'Checking permissions on local node_modules (this may take awhile)',
cmd: 'checkLocalModulesPermission',
windows: false,
}, {
groups: ['permissions'],
- title: 'Perms check on global node_modules',
+ title: 'Checking permissions on global node_modules (this may take awhile)',
cmd: 'checkGlobalModulesPermission',
windows: false,
}, {
groups: ['permissions'],
- title: 'Perms check on local bin folder',
+ title: 'Checking permissions on local bin folder',
cmd: 'checkLocalBinPermission',
windows: false,
}, {
groups: ['permissions'],
- title: 'Perms check on global bin folder',
+ title: 'Checking permissions on global bin folder',
cmd: 'checkGlobalBinPermission',
windows: false,
}, {
groups: ['cache'],
- title: 'Verify cache contents',
+ title: 'Verifying cache contents (this may take awhile)',
cmd: 'verifyCachedFiles',
windows: false,
},
@@ -99,50 +94,35 @@ const subcommands = [
// - verify all local packages have bins linked
// What is the fix for these?
]
-const BaseCommand = require('../base-command.js')
+
class Doctor extends BaseCommand {
- static description = 'Check your npm environment'
+ static description = 'Check the health of your npm environment'
static name = 'doctor'
static params = ['registry']
static ignoreImplicitWorkspace = false
static usage = [`[${subcommands.flatMap(s => s.groups)
- .filter((value, index, self) => self.indexOf(value) === index)
+ .filter((value, index, self) => self.indexOf(value) === index && value !== 'ping')
.join('] [')}]`]
static subcommands = subcommands
- // minimum width of check column, enough for the word `Check`
- #checkWidth = 5
-
async exec (args) {
- log.info('Running checkup')
+ log.info('doctor', 'Running checkup')
let allOk = true
const actions = this.actions(args)
- this.#checkWidth = actions.reduce((length, item) =>
- Math.max(item.title.length, length), this.#checkWidth)
- if (!this.npm.silent) {
- this.output(['Check', 'Value', 'Recommendation/Notes'].map(h => this.npm.chalk.underline(h)))
- }
- // Do the actual work
+ const chalk = this.npm.chalk
for (const { title, cmd } of actions) {
- const item = [title]
+ this.output(title)
+ // TODO when we have an in progress indicator that could go here
+ let result
try {
- item.push(true, await this[cmd]())
+ result = await this[cmd]()
+ this.output(`${chalk.green('Ok')}${result ? `\n${result}` : ''}\n`)
} catch (err) {
- item.push(false, err)
- }
- if (!item[1]) {
allOk = false
- item[0] = this.npm.chalk.red(item[0])
- item[1] = this.npm.chalk.red('not ok')
- item[2] = this.npm.chalk.magenta(String(item[2]))
- } else {
- item[1] = this.npm.chalk.green('ok')
- }
- if (!this.npm.silent) {
- this.output(item)
+ this.output(`${chalk.red('Not ok')}\n${chalk.cyan(err)}\n`)
}
}
@@ -157,8 +137,7 @@ class Doctor extends BaseCommand {
}
async checkPing () {
- const tracker = log.newItem('checkPing', 1)
- tracker.info('checkPing', 'Pinging registry')
+ log.info('doctor', 'Pinging registry')
try {
await ping({ ...this.npm.flatOptions, retry: false })
return ''
@@ -168,23 +147,16 @@ class Doctor extends BaseCommand {
} else {
throw er.message
}
- } finally {
- tracker.finish()
}
}
async getLatestNpmVersion () {
- const tracker = log.newItem('getLatestNpmVersion', 1)
- tracker.info('getLatestNpmVersion', 'Getting npm package information')
- try {
- const latest = (await pacote.manifest('npm@latest', this.npm.flatOptions)).version
- if (semver.gte(this.npm.version, latest)) {
- return `current: v${this.npm.version}, latest: v${latest}`
- } else {
- throw `Use npm v${latest}`
- }
- } finally {
- tracker.finish()
+ log.info('doctor', 'Getting npm package information')
+ const latest = (await pacote.manifest('npm@latest', this.npm.flatOptions)).version
+ if (semver.gte(this.npm.version, latest)) {
+ return `current: v${this.npm.version}, latest: v${latest}`
+ } else {
+ throw `Use npm v${latest}`
}
}
@@ -193,36 +165,30 @@ class Doctor extends BaseCommand {
const current = process.version
const currentRange = `^${current}`
const url = 'https://nodejs.org/dist/index.json'
- const tracker = log.newItem('getLatestNodejsVersion', 1)
- tracker.info('getLatestNodejsVersion', 'Getting Node.js release information')
- try {
- const res = await fetch(url, { method: 'GET', ...this.npm.flatOptions })
- const data = await res.json()
- let maxCurrent = '0.0.0'
- let maxLTS = '0.0.0'
- for (const { lts, version } of data) {
- if (lts && semver.gt(version, maxLTS)) {
- maxLTS = version
- }
-
- if (semver.satisfies(version, currentRange) && semver.gt(version, maxCurrent)) {
- maxCurrent = version
- }
+ log.info('doctor', 'Getting Node.js release information')
+ const res = await fetch(url, { method: 'GET', ...this.npm.flatOptions })
+ const data = await res.json()
+ let maxCurrent = '0.0.0'
+ let maxLTS = '0.0.0'
+ for (const { lts, version } of data) {
+ if (lts && semver.gt(version, maxLTS)) {
+ maxLTS = version
}
- const recommended = semver.gt(maxCurrent, maxLTS) ? maxCurrent : maxLTS
- if (semver.gte(process.version, recommended)) {
- return `current: ${current}, recommended: ${recommended}`
- } else {
- throw `Use node ${recommended} (current: ${current})`
+
+ if (semver.satisfies(version, currentRange) && semver.gt(version, maxCurrent)) {
+ maxCurrent = version
}
- } finally {
- tracker.finish()
+ }
+ const recommended = semver.gt(maxCurrent, maxLTS) ? maxCurrent : maxLTS
+ if (semver.gte(process.version, recommended)) {
+ return `current: ${current}, recommended: ${recommended}`
+ } else {
+ throw `Use node ${recommended} (current: ${current})`
}
}
- async getBinPath (dir) {
- const tracker = log.newItem('getBinPath', 1)
- tracker.info('getBinPath', 'Finding npm global bin in your PATH')
+ async getBinPath () {
+ log.info('doctor', 'getBinPath', 'Finding npm global bin in your PATH')
if (!process.env.PATH.includes(this.npm.globalBin)) {
throw new Error(`Add ${this.npm.globalBin} to your $PATH`)
}
@@ -252,30 +218,25 @@ class Doctor extends BaseCommand {
async checkFilesPermission (root, shouldOwn, mask, missingOk) {
let ok = true
- const tracker = log.newItem(root, 1)
-
try {
const uid = process.getuid()
const gid = process.getgid()
const files = new Set([root])
for (const f of files) {
- tracker.silly('checkFilesPermission', f.slice(root.length + 1))
const st = await lstat(f).catch(er => {
// if it can't be missing, or if it can and the error wasn't that it was missing
if (!missingOk || er.code !== 'ENOENT') {
ok = false
- tracker.warn('checkFilesPermission', 'error getting info for ' + f)
+ log.warn('doctor', 'checkFilesPermission', 'error getting info for ' + f)
}
})
- tracker.completeWork(1)
-
if (!st) {
continue
}
if (shouldOwn && (uid !== st.uid || gid !== st.gid)) {
- tracker.warn('checkFilesPermission', 'should be owner of ' + f)
+ log.warn('doctor', 'checkFilesPermission', 'should be owner of ' + f)
ok = false
}
@@ -288,14 +249,14 @@ class Doctor extends BaseCommand {
} catch (er) {
ok = false
const msg = `Missing permissions on ${f} (expect: ${maskLabel(mask)})`
- tracker.error('checkFilesPermission', msg)
+ log.error('doctor', 'checkFilesPermission', msg)
continue
}
if (st.isDirectory()) {
- const entries = await readdir(f).catch(er => {
+ const entries = await readdir(f).catch(() => {
ok = false
- tracker.warn('checkFilesPermission', 'error reading directory ' + f)
+ log.warn('doctor', 'checkFilesPermission', 'error reading directory ' + f)
return []
})
for (const entry of entries) {
@@ -304,7 +265,6 @@ class Doctor extends BaseCommand {
}
}
} finally {
- tracker.finish()
if (!ok) {
throw (
`Check the permissions of files in ${root}` +
@@ -317,82 +277,58 @@ class Doctor extends BaseCommand {
}
async getGitPath () {
- const tracker = log.newItem('getGitPath', 1)
- tracker.info('getGitPath', 'Finding git in your PATH')
- try {
- return await which('git').catch(er => {
- tracker.warn(er)
- throw new Error("Install git and ensure it's in your PATH.")
- })
- } finally {
- tracker.finish()
- }
+ log.info('doctor', 'Finding git in your PATH')
+ return await which('git').catch(er => {
+ log.warn('doctor', 'getGitPath', er)
+ throw new Error("Install git and ensure it's in your PATH.")
+ })
}
async verifyCachedFiles () {
- const tracker = log.newItem('verifyCachedFiles', 1)
- tracker.info('verifyCachedFiles', 'Verifying the npm cache')
- try {
- const stats = await cacache.verify(this.npm.flatOptions.cache)
- const { badContentCount, reclaimedCount, missingContent, reclaimedSize } = stats
- if (badContentCount || reclaimedCount || missingContent) {
- if (badContentCount) {
- tracker.warn('verifyCachedFiles', `Corrupted content removed: ${badContentCount}`)
- }
+ log.info('doctor', 'verifyCachedFiles', 'Verifying the npm cache')
- if (reclaimedCount) {
- tracker.warn(
- 'verifyCachedFiles',
- `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)`
- )
- }
+ const stats = await cacache.verify(this.npm.flatOptions.cache)
+ const { badContentCount, reclaimedCount, missingContent, reclaimedSize } = stats
+ if (badContentCount || reclaimedCount || missingContent) {
+ if (badContentCount) {
+ log.warn('doctor', 'verifyCachedFiles', `Corrupted content removed: ${badContentCount}`)
+ }
- if (missingContent) {
- tracker.warn('verifyCachedFiles', `Missing content: ${missingContent}`)
- }
+ if (reclaimedCount) {
+ log.warn(
+ 'doctor',
+ 'verifyCachedFiles',
+ `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)`
+ )
+ }
- tracker.warn('verifyCachedFiles', 'Cache issues have been fixed')
+ if (missingContent) {
+ log.warn('doctor', 'verifyCachedFiles', `Missing content: ${missingContent}`)
}
- tracker.info(
- 'verifyCachedFiles',
- `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`
- )
- return `verified ${stats.verifiedContent} tarballs`
- } finally {
- tracker.finish()
+
+ log.warn('doctor', 'verifyCachedFiles', 'Cache issues have been fixed')
}
+ log.info(
+ 'doctor',
+ 'verifyCachedFiles',
+ `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`
+ )
+ return `verified ${stats.verifiedContent} tarballs`
}
async checkNpmRegistry () {
- if (this.npm.flatOptions.registry !== defaultRegistry) {
- throw `Try \`npm config set registry=${defaultRegistry}\``
+ if (this.npm.flatOptions.registry !== defaults.registry) {
+ throw `Try \`npm config set registry=${defaults.registry}\``
} else {
- return `using default registry (${defaultRegistry})`
+ return `using default registry (${defaults.registry})`
}
}
- output (row) {
- const t = new Table({
- chars: { top: '',
- 'top-mid': '',
- 'top-left': '',
- 'top-right': '',
- bottom: '',
- 'bottom-mid': '',
- 'bottom-left': '',
- 'bottom-right': '',
- left: '',
- 'left-mid': '',
- mid: '',
- 'mid-mid': '',
- right: '',
- 'right-mid': '',
- middle: ' ' },
- style: { 'padding-left': 0, 'padding-right': 0 },
- colWidths: [this.#checkWidth, 6],
- })
- t.push(row)
- this.npm.output(t.toString())
+ output (...args) {
+ // TODO display layer should do this
+ if (!this.npm.silent) {
+ output.standard(...args)
+ }
}
actions (params) {
diff --git a/lib/commands/edit.js b/lib/commands/edit.js
index a671a5d6bad5d..b2c2ec8d2a39a 100644
--- a/lib/commands/edit.js
+++ b/lib/commands/edit.js
@@ -1,34 +1,31 @@
-// npm edit
-// open the package folder in the $EDITOR
+const { resolve } = require('node:path')
+const { lstat } = require('node:fs/promises')
+const cp = require('node:child_process')
+const completion = require('../utils/installed-shallow.js')
+const BaseCommand = require('../base-cmd.js')
-const { resolve } = require('path')
-const fs = require('graceful-fs')
-const cp = require('child_process')
-const completion = require('../utils/completion/installed-shallow.js')
-const BaseCommand = require('../base-command.js')
+const splitPackageNames = (path) => path.split('/')
+// combine scoped parts
+ .reduce((parts, part) => {
+ if (parts.length === 0) {
+ return [part]
+ }
-const splitPackageNames = (path) => {
- return path.split('/')
- // combine scoped parts
- .reduce((parts, part) => {
- if (parts.length === 0) {
- return [part]
- }
+ const lastPart = parts[parts.length - 1]
+ // check if previous part is the first part of a scoped package
+ if (lastPart[0] === '@' && !lastPart.includes('/')) {
+ parts[parts.length - 1] += '/' + part
+ } else {
+ parts.push(part)
+ }
- const lastPart = parts[parts.length - 1]
- // check if previous part is the first part of a scoped package
- if (lastPart[0] === '@' && !lastPart.includes('/')) {
- parts[parts.length - 1] += '/' + part
- } else {
- parts.push(part)
- }
-
- return parts
- }, [])
- .join('/node_modules/')
- .replace(/(\/node_modules)+/, '/node_modules')
-}
+ return parts
+ }, [])
+ .join('/node_modules/')
+ .replace(/(\/node_modules)+/, '/node_modules')
+// npm edit
+// open the package folder in the $EDITOR
class Edit extends BaseCommand {
static description = 'Edit an installed package'
static name = 'edit'
@@ -38,8 +35,8 @@ class Edit extends BaseCommand {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
@@ -50,27 +47,18 @@ class Edit extends BaseCommand {
const path = splitPackageNames(args[0])
const dir = resolve(this.npm.dir, path)
- // graceful-fs does not promisify
+ await lstat(dir)
await new Promise((res, rej) => {
- fs.lstat(dir, (err) => {
- if (err) {
- return rej(err)
+ const [bin, ...spawnArgs] = this.npm.config.get('editor').split(/\s+/)
+ const editor = cp.spawn(bin, [...spawnArgs, dir], { stdio: 'inherit' })
+ editor.on('exit', async (code) => {
+ if (code) {
+ return rej(new Error(`editor process exited with code: ${code}`))
}
- const [bin, ...spawnArgs] = this.npm.config.get('editor').split(/\s+/)
- const editor = cp.spawn(bin, [...spawnArgs, dir], { stdio: 'inherit' })
- editor.on('exit', async (code) => {
- if (code) {
- return rej(new Error(`editor process exited with code: ${code}`))
- }
- try {
- await this.npm.exec('rebuild', [dir])
- } catch (execErr) {
- rej(execErr)
- }
- res()
- })
+ await this.npm.exec('rebuild', [dir]).then(res).catch(rej)
})
})
}
}
+
module.exports = Edit
diff --git a/lib/commands/exec.js b/lib/commands/exec.js
index a5235c7845851..57ee8efe2c98f 100644
--- a/lib/commands/exec.js
+++ b/lib/commands/exec.js
@@ -1,6 +1,6 @@
-const { resolve } = require('path')
+const { resolve } = require('node:path')
const libexec = require('libnpmexec')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Exec extends BaseCommand {
static description = 'Run a command from a local or remote npm package'
@@ -34,28 +34,34 @@ class Exec extends BaseCommand {
for (const [name, path] of this.workspaces) {
const locationMsg =
`in workspace ${this.npm.chalk.green(name)} at location:\n${this.npm.chalk.dim(path)}`
- await this.callExec(args, { locationMsg, runPath: path })
+ await this.callExec(args, { name, locationMsg, runPath: path })
}
}
- async callExec (args, { locationMsg, runPath } = {}) {
- // This is where libnpmexec will look for locally installed packages
- const localPrefix = this.npm.localPrefix
+ async callExec (args, { name, locationMsg, runPath } = {}) {
+ let localBin = this.npm.localBin
+ let pkgPath = this.npm.localPrefix
// This is where libnpmexec will actually run the scripts from
if (!runPath) {
runPath = process.cwd()
+ } else {
+ // We have to consider if the workspace has its own separate versions
+ // libnpmexec will walk up to localDir after looking here
+ localBin = resolve(this.npm.localDir, name, 'node_modules', '.bin')
+ // We also need to look for `bin` entries in the workspace package.json
+ // libnpmexec will NOT look in the project root for the bin entry
+ pkgPath = runPath
}
const call = this.npm.config.get('call')
let globalPath
const {
flatOptions,
- localBin,
globalBin,
globalDir,
+ chalk,
} = this.npm
- const output = this.npm.output.bind(this.npm)
const scriptShell = this.npm.config.get('script-shell') || undefined
const packages = this.npm.config.get('package')
const yes = this.npm.config.get('yes')
@@ -75,16 +81,25 @@ class Exec extends BaseCommand {
// we explicitly set packageLockOnly to false because if it's true
// when we try to install a missing package, we won't actually install it
packageLockOnly: false,
- // copy args so they dont get mutated
- args: [...args],
+ // what the user asked to run args[0] is run by default
+ args: [...args], // copy args so they dont get mutated
+ // specify a custom command to be run instead of args[0]
call,
- localBin,
- locationMsg,
+ chalk,
+ // where to look for bins globally, if a file matches call or args[0] it is called
globalBin,
+ // where to look for packages globally, if a package matches call or args[0] it is called
globalPath,
- output,
+ // where to look for bins locally, if a file matches call or args[0] it is called
+ localBin,
+ locationMsg,
+ // packages that need to be installed
packages,
- path: localPrefix,
+ // path where node_modules is
+ path: this.npm.localPrefix,
+ // where to look for package.json#bin entries first
+ pkgPath,
+ // cwd to run from
runPath,
scriptShell,
yes,
diff --git a/lib/commands/explain.js b/lib/commands/explain.js
index a06ad24152a1e..cb0644304d2b5 100644
--- a/lib/commands/explain.js
+++ b/lib/commands/explain.js
@@ -1,10 +1,9 @@
const { explainNode } = require('../utils/explain-dep.js')
-const completion = require('../utils/completion/installed-deep.js')
-const Arborist = require('@npmcli/arborist')
const npa = require('npm-package-arg')
const semver = require('semver')
-const { relative, resolve } = require('path')
+const { relative, resolve } = require('node:path')
const validName = require('validate-npm-package-name')
+const { output } = require('proc-log')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
class Explain extends ArboristWorkspaceCmd {
@@ -20,8 +19,9 @@ class Explain extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ const completion = require('../utils/installed-deep.js')
+ return completion(npm, opts)
}
async exec (args) {
@@ -29,6 +29,7 @@ class Explain extends ArboristWorkspaceCmd {
throw this.usageError()
}
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist({ path: this.npm.prefix, ...this.npm.flatOptions })
const tree = await arb.loadActual()
@@ -75,10 +76,10 @@ class Explain extends ArboristWorkspaceCmd {
}
if (this.npm.flatOptions.json) {
- this.npm.output(JSON.stringify(expls, null, 2))
+ output.buffer(expls)
} else {
- this.npm.output(expls.map(expl => {
- return explainNode(expl, Infinity, this.npm.color)
+ output.standard(expls.map(expl => {
+ return explainNode(expl, Infinity, this.npm.chalk)
}).join('\n\n'))
}
}
@@ -125,4 +126,5 @@ class Explain extends ArboristWorkspaceCmd {
})
}
}
+
module.exports = Explain
diff --git a/lib/commands/explore.js b/lib/commands/explore.js
index 0d915cb4c6958..184af2bdc5a16 100644
--- a/lib/commands/explore.js
+++ b/lib/commands/explore.js
@@ -1,13 +1,12 @@
-// npm explore <pkg>[@<version>]
-// open a subshell to the package folder.
-
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const runScript = require('@npmcli/run-script')
-const { join, resolve, relative } = require('path')
-const log = require('../utils/log-shim.js')
-const completion = require('../utils/completion/installed-shallow.js')
-const BaseCommand = require('../base-command.js')
+const { join, relative } = require('node:path')
+const { log, output } = require('proc-log')
+const completion = require('../utils/installed-shallow.js')
+const BaseCommand = require('../base-cmd.js')
+// npm explore <pkg>[@<version>]
+// open a subshell to the package folder.
class Explore extends BaseCommand {
static description = 'Browse an installed package'
static name = 'explore'
@@ -17,8 +16,8 @@ class Explore extends BaseCommand {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
@@ -38,7 +37,7 @@ class Explore extends BaseCommand {
// the set of arguments, or the shell config, and let @npmcli/run-script
// handle all the escaping and PATH setup stuff.
- const pkg = await rpj(resolve(path, 'package.json')).catch(er => {
+ const { content: pkg } = await pkgJson.normalize(path).catch(er => {
log.error('explore', `It doesn't look like ${pkgname} is installed.`)
throw er
})
@@ -50,30 +49,26 @@ class Explore extends BaseCommand {
}
if (!args.length) {
- this.npm.output(`\nExploring ${path}\nType 'exit' or ^D when finished\n`)
+ output.standard(`\nExploring ${path}\nType 'exit' or ^D when finished\n`)
}
- log.disableProgress()
- try {
- return await runScript({
- ...this.npm.flatOptions,
- pkg,
- banner: false,
- path,
- event: '_explore',
- stdio: 'inherit',
- }).catch(er => {
- process.exitCode = typeof er.code === 'number' && er.code !== 0 ? er.code
- : 1
+
+ return runScript({
+ ...this.npm.flatOptions,
+ pkg,
+ path,
+ event: '_explore',
+ stdio: 'inherit',
+ }).catch(er => {
+ process.exitCode = typeof er.code === 'number' && er.code !== 0 ? er.code
+ : 1
// if it's not an exit error, or non-interactive, throw it
- const isProcExit = er.message === 'command failed' &&
+ const isProcExit = er.message === 'command failed' &&
(typeof er.code === 'number' || /^SIG/.test(er.signal || ''))
- if (args.length || !isProcExit) {
- throw er
- }
- })
- } finally {
- log.enableProgress()
- }
+ if (args.length || !isProcExit) {
+ throw er
+ }
+ })
}
}
+
module.exports = Explore
diff --git a/lib/commands/find-dupes.js b/lib/commands/find-dupes.js
index b1a3120860366..735ac7c4a7ed0 100644
--- a/lib/commands/find-dupes.js
+++ b/lib/commands/find-dupes.js
@@ -1,6 +1,6 @@
-// dedupe duplicated packages, or find them in the tree
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+// dedupe duplicated packages, or find them in the tree
class FindDupes extends ArboristWorkspaceCmd {
static description = 'Find duplication in the package tree'
static name = 'find-dupes'
@@ -11,6 +11,7 @@ class FindDupes extends ArboristWorkspaceCmd {
'strict-peer-deps',
'package-lock',
'omit',
+ 'include',
'ignore-scripts',
'audit',
'bin-links',
@@ -18,9 +19,10 @@ class FindDupes extends ArboristWorkspaceCmd {
...super.params,
]
- async exec (args) {
+ async exec () {
this.npm.config.set('dry-run', true)
return this.npm.exec('dedupe', [])
}
}
+
module.exports = FindDupes
diff --git a/lib/commands/fund.js b/lib/commands/fund.js
index 12762533c123e..8c194dac80b49 100644
--- a/lib/commands/fund.js
+++ b/lib/commands/fund.js
@@ -1,14 +1,11 @@
const archy = require('archy')
-const Arborist = require('@npmcli/arborist')
-const chalk = require('chalk')
const pacote = require('pacote')
const semver = require('semver')
+const { output } = require('proc-log')
const npa = require('npm-package-arg')
const { depth } = require('treeverse')
const { readTree: getFundingInfo, normalizeFunding, isValidFunding } = require('libnpmfund')
-
-const completion = require('../utils/completion/installed-deep.js')
-const openUrl = require('../utils/open-url.js')
+const { openUrl } = require('../utils/open-url.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const getPrintableName = ({ name, version }) => {
@@ -39,8 +36,9 @@ class Fund extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ const completion = require('../utils/installed-deep.js')
+ return completion(npm, opts)
}
async exec (args) {
@@ -65,6 +63,7 @@ class Fund extends ArboristWorkspaceCmd {
}
const where = this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist({ ...this.npm.flatOptions, path: where })
const tree = await arb.loadActual()
@@ -81,22 +80,18 @@ class Fund extends ArboristWorkspaceCmd {
// TODO: add !workspacesEnabled option handling to libnpmfund
const fundingInfo = getFundingInfo(tree, {
...this.flatOptions,
+ Arborist,
workspaces: this.workspaceNames,
})
if (this.npm.config.get('json')) {
- this.npm.output(this.printJSON(fundingInfo))
+ output.buffer(fundingInfo)
} else {
- this.npm.output(this.printHuman(fundingInfo))
+ output.standard(this.printHuman(fundingInfo))
}
}
- printJSON (fundingInfo) {
- return JSON.stringify(fundingInfo, null, 2)
- }
-
printHuman (fundingInfo) {
- const color = this.npm.color
const unicode = this.npm.config.get('unicode')
const seenUrls = new Map()
@@ -111,26 +106,25 @@ class Fund extends ArboristWorkspaceCmd {
const [fundingSource] = [].concat(normalizeFunding(funding)).filter(isValidFunding)
const { url } = fundingSource || {}
const pkgRef = getPrintableName({ name, version })
- let item = {
- label: pkgRef,
- }
- if (url) {
- item.label = tree({
- label: color ? chalk.bgBlack.white(url) : url,
+ if (!url) {
+ return { label: pkgRef }
+ }
+ let item
+ if (seenUrls.has(url)) {
+ item = seenUrls.get(url)
+ item.label += `${this.npm.chalk.dim(',')} ${pkgRef}`
+ return null
+ }
+ item = {
+ label: tree({
+ label: this.npm.chalk.blue(url),
nodes: [pkgRef],
- }).trim()
-
- // stacks all packages together under the same item
- if (seenUrls.has(url)) {
- item = seenUrls.get(url)
- item.label += `, ${pkgRef}`
- return null
- } else {
- seenUrls.set(url, item)
- }
+ }).trim(),
}
+ // stacks all packages together under the same item
+ seenUrls.set(url, item)
return item
},
@@ -154,7 +148,7 @@ class Fund extends ArboristWorkspaceCmd {
})
const res = tree(result)
- return color ? chalk.reset(res) : res
+ return res
}
async openFundingUrl ({ path, tree, spec, fundingSourceNumber }) {
@@ -213,7 +207,7 @@ class Fund extends ArboristWorkspaceCmd {
if (fundingSourceNumber) {
ambiguousUrlMsg.unshift(`--which=${fundingSourceNumber} is not a valid index`)
}
- this.npm.output(ambiguousUrlMsg.join('\n'))
+ output.standard(ambiguousUrlMsg.join('\n'))
}
urlMessage (source) {
@@ -223,4 +217,5 @@ class Fund extends ArboristWorkspaceCmd {
return [url, message]
}
}
+
module.exports = Fund
diff --git a/lib/commands/get.js b/lib/commands/get.js
index 5e92e85a66382..4191f2c973e7d 100644
--- a/lib/commands/get.js
+++ b/lib/commands/get.js
@@ -1,20 +1,23 @@
-const BaseCommand = require('../base-command.js')
+const Npm = require('../npm.js')
+const BaseCommand = require('../base-cmd.js')
class Get extends BaseCommand {
static description = 'Get a value from the npm configuration'
static name = 'get'
static usage = ['[<key> ...] (See `npm config`)']
+ static params = ['long']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- const config = await this.npm.cmd('config')
- return config.completion(opts)
+ static async completion (opts) {
+ const Config = Npm.cmd('config')
+ return Config.completion(opts)
}
async exec (args) {
return this.npm.exec('config', ['get'].concat(args))
}
}
+
module.exports = Get
diff --git a/lib/commands/help-search.js b/lib/commands/help-search.js
index afb82bfaca9ee..72dd03ac7406e 100644
--- a/lib/commands/help-search.js
+++ b/lib/commands/help-search.js
@@ -1,10 +1,8 @@
-const fs = require('fs')
-const path = require('path')
-const chalk = require('chalk')
-const { promisify } = require('util')
-const glob = promisify(require('glob'))
-const readFile = promisify(fs.readFile)
-const BaseCommand = require('../base-command.js')
+const { readFile } = require('node:fs/promises')
+const path = require('node:path')
+const { glob } = require('glob')
+const { output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
@@ -20,14 +18,16 @@ class HelpSearch extends BaseCommand {
}
const docPath = path.resolve(this.npm.npmRoot, 'docs/content')
- const files = await glob(`${globify(docPath)}/*/*.md`)
+ let files = await glob(`${globify(docPath)}/*/*.md`)
+ // preserve glob@8 behavior
+ files = files.sort((a, b) => a.localeCompare(b, 'en'))
const data = await this.readFiles(files)
- const results = await this.searchFiles(args, data, files)
+ const results = await this.searchFiles(args, data)
const formatted = this.formatResults(args, results)
if (!formatted.trim()) {
- this.npm.output(`No matches in help for: ${args.join(' ')}\n`)
+ output.standard(`No matches in help for: ${args.join(' ')}\n`)
} else {
- this.npm.output(formatted)
+ output.standard(formatted)
}
}
@@ -40,7 +40,7 @@ class HelpSearch extends BaseCommand {
return res
}
- async searchFiles (args, data, files) {
+ async searchFiles (args, data) {
const results = []
for (const [file, content] of Object.entries(data)) {
const lowerCase = content.toLowerCase()
@@ -141,7 +141,7 @@ class HelpSearch extends BaseCommand {
formatResults (args, results) {
const cols = Math.min(process.stdout.columns || Infinity, 80) + 1
- const output = results.map(res => {
+ const formattedOutput = results.map(res => {
const out = [res.cmd]
const r = Object.keys(res.hits)
.map(k => `${k}:${res.hits[k]}`)
@@ -163,10 +163,6 @@ class HelpSearch extends BaseCommand {
return
}
- if (!this.npm.color) {
- out.push(line + '\n')
- return
- }
const hilitLine = []
for (const arg of args) {
const finder = line.toLowerCase().split(arg.toLowerCase())
@@ -174,8 +170,7 @@ class HelpSearch extends BaseCommand {
for (const f of finder) {
hilitLine.push(line.slice(p, p + f.length))
const word = line.slice(p + f.length, p + f.length + arg.length)
- const hilit = chalk.bgBlack.red(word)
- hilitLine.push(hilit)
+ hilitLine.push(this.npm.chalk.blue(word))
p += f.length + arg.length
}
}
@@ -188,12 +183,13 @@ class HelpSearch extends BaseCommand {
const finalOut = results.length && !this.npm.config.get('long')
? 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' +
'—'.repeat(cols - 1) + '\n' +
- output + '\n' +
+ formattedOutput + '\n' +
'—'.repeat(cols - 1) + '\n' +
'(run with -l or --long to see more context)'
- : output
+ : formattedOutput
return finalOut.trim()
}
}
+
module.exports = HelpSearch
diff --git a/lib/commands/help.js b/lib/commands/help.js
index 3ab2c56319868..057090da0036c 100644
--- a/lib/commands/help.js
+++ b/lib/commands/help.js
@@ -1,19 +1,18 @@
const spawn = require('@npmcli/promise-spawn')
-const path = require('path')
-const openUrl = require('../utils/open-url.js')
-const { promisify } = require('util')
-const glob = promisify(require('glob'))
+const path = require('node:path')
+const { openUrl } = require('../utils/open-url.js')
+const { glob } = require('glob')
+const { output, input } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
+const { deref } = require('../utils/cmd-list.js')
+const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
-const BaseCommand = require('../base-command.js')
// Strips out the number from foo.7 or foo.7. or foo.7.tgz
// We don't currently compress our man pages but if we ever did this would
-// seemlessly continue supporting it
+// seamlessly continue supporting it
const manNumberRegex = /\.(\d+)(\.[^/\\]*)?$/
-// Searches for the "npm-" prefix in page names, to prefer those.
-const manNpmPrefixRegex = /\/npm-/
// hardcoded names for mansections
// XXX: these are used in the docs workspace and should be exported
// from npm so section names can changed more easily
@@ -29,12 +28,14 @@ class Help extends BaseCommand {
static usage = ['<term> [<terms...>]']
static params = ['viewer']
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length > 2) {
return []
}
- const g = path.resolve(this.npm.npmRoot, 'man/man[0-9]/*.[0-9]')
- const files = await glob(globify(g))
+ const g = path.resolve(npm.npmRoot, 'man/man[0-9]/*.[0-9]')
+ let files = await glob(globify(g))
+ // preserve glob@8 behavior
+ files = files.sort((a, b) => a.localeCompare(b, 'en'))
return Object.keys(files.reduce(function (acc, file) {
file = path.basename(file).replace(/\.[0-9]+$/, '')
@@ -50,7 +51,7 @@ class Help extends BaseCommand {
const manSearch = /^\d+$/.test(args[0]) ? `man${args.shift()}` : 'man*'
if (!args.length) {
- return this.npm.output(await this.npm.usage)
+ return output.standard(this.npm.usage)
}
// npm help foo bar baz: search topics
@@ -59,37 +60,19 @@ class Help extends BaseCommand {
}
// `npm help package.json`
- const arg = (this.npm.deref(args[0]) || args[0]).replace('.json', '-json')
+ const arg = (deref(args[0]) || args[0]).replace('.json', '-json')
// find either section.n or npm-section.n
const f = globify(path.resolve(this.npm.npmRoot, `man/${manSearch}/?(npm-)${arg}.[0-9]*`))
const [man] = await glob(f).then(r => r.sort((a, b) => {
- // Prefer the page with an npm prefix, if there's only one.
- const aHasPrefix = manNpmPrefixRegex.test(a)
- const bHasPrefix = manNpmPrefixRegex.test(b)
- if (aHasPrefix !== bHasPrefix) {
- /* istanbul ignore next */
- return aHasPrefix ? -1 : 1
- }
-
// Because the glob is (subtly) different from manNumberRegex,
// we can't rely on it passing.
- const aManNumberMatch = a.match(manNumberRegex)
- const bManNumberMatch = b.match(manNumberRegex)
- if (aManNumberMatch) {
- /* istanbul ignore next */
- if (!bManNumberMatch) {
- return -1
- }
- // man number sort first so that 1 aka commands are preferred
- if (aManNumberMatch[1] !== bManNumberMatch[1]) {
- return aManNumberMatch[1] - bManNumberMatch[1]
- }
- } else if (bManNumberMatch) {
- return 1
+ const aManNumberMatch = a.match(manNumberRegex)?.[1] || 999
+ const bManNumberMatch = b.match(manNumberRegex)?.[1] || 999
+ if (aManNumberMatch !== bManNumberMatch) {
+ return aManNumberMatch - bManNumberMatch
}
-
return localeCompare(a, b)
}))
@@ -112,13 +95,15 @@ class Help extends BaseCommand {
args = ['emacsclient', ['-e', `(woman-find-file '${man}')`]]
}
- return spawn(...args, { stdio: 'inherit' }).catch(err => {
+ try {
+ await input.start(() => spawn(...args, { stdio: 'inherit' }))
+ } catch (err) {
if (err.code) {
throw new Error(`help process exited with code: ${err.code}`)
} else {
throw err
}
- })
+ }
}
// Returns the path to the html version of the man page
@@ -128,4 +113,5 @@ class Help extends BaseCommand {
return 'file:///' + path.resolve(this.npm.npmRoot, `docs/output/${sect}/${f}.html`)
}
}
+
module.exports = Help
diff --git a/lib/commands/hook.js b/lib/commands/hook.js
index b0f52a801f571..5793b974197c8 100644
--- a/lib/commands/hook.js
+++ b/lib/commands/hook.js
@@ -1,9 +1,9 @@
const hookApi = require('libnpmhook')
-const otplease = require('../utils/otplease.js')
+const { otplease } = require('../utils/auth.js')
const relativeDate = require('tiny-relative-date')
-const Table = require('cli-table3')
+const { output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Hook extends BaseCommand {
static description = 'Manage registry hooks'
static name = 'hook'
@@ -40,86 +40,70 @@ class Hook extends BaseCommand {
async add (pkg, uri, secret, opts) {
const hook = await hookApi.add(pkg, uri, secret, opts)
if (opts.json) {
- this.npm.output(JSON.stringify(hook, null, 2))
+ output.buffer(hook)
} else if (opts.parseable) {
- this.npm.output(Object.keys(hook).join('\t'))
- this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t'))
+ output.standard(Object.keys(hook).join('\t'))
+ output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
} else if (!this.npm.silent) {
- this.npm.output(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
+ output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
}
}
async ls (pkg, opts) {
const hooks = await hookApi.ls({ ...opts, package: pkg })
+
if (opts.json) {
- this.npm.output(JSON.stringify(hooks, null, 2))
+ output.buffer(hooks)
} else if (opts.parseable) {
- this.npm.output(Object.keys(hooks[0]).join('\t'))
+ output.standard(Object.keys(hooks[0]).join('\t'))
hooks.forEach(hook => {
- this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t'))
+ output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
})
} else if (!hooks.length) {
- this.npm.output("You don't have any hooks configured yet.")
+ output.standard("You don't have any hooks configured yet.")
} else if (!this.npm.silent) {
- if (hooks.length === 1) {
- this.npm.output('You have one hook configured.')
- } else {
- this.npm.output(`You have ${hooks.length} hooks configured.`)
- }
+ output.standard(`You have ${hooks.length} hook${hooks.length !== 1 ? 's' : ''} configured.`)
- const table = new Table({ head: ['id', 'target', 'endpoint'] })
- hooks.forEach((hook) => {
- table.push([
- { rowSpan: 2, content: hook.id },
- this.hookName(hook),
- hook.endpoint,
- ])
+ for (const hook of hooks) {
+ output.standard(`Hook ${hook.id}: ${this.hookName(hook)}`)
+ output.standard(`Endpoint: ${hook.endpoint}`)
if (hook.last_delivery) {
- table.push([
- {
- colSpan: 1,
- content: `triggered ${relativeDate(hook.last_delivery)}`,
- },
- hook.response_code,
- ])
+ /* eslint-disable-next-line max-len */
+ output.standard(`Triggered ${relativeDate(hook.last_delivery)}, response code was "${hook.response_code}"\n`)
} else {
- table.push([{ colSpan: 2, content: 'never triggered' }])
+ output.standard('Never triggered\n')
}
- })
- this.npm.output(table.toString())
+ }
}
}
async rm (id, opts) {
const hook = await hookApi.rm(id, opts)
if (opts.json) {
- this.npm.output(JSON.stringify(hook, null, 2))
+ output.buffer(hook)
} else if (opts.parseable) {
- this.npm.output(Object.keys(hook).join('\t'))
- this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t'))
+ output.standard(Object.keys(hook).join('\t'))
+ output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
} else if (!this.npm.silent) {
- this.npm.output(`- ${this.hookName(hook)} ${opts.unicode ? ' ✘ ' : ' X '} ${hook.endpoint}`)
+ output.standard(`- ${this.hookName(hook)} ${opts.unicode ? ' ✘ ' : ' X '} ${hook.endpoint}`)
}
}
async update (id, uri, secret, opts) {
const hook = await hookApi.update(id, uri, secret, opts)
if (opts.json) {
- this.npm.output(JSON.stringify(hook, null, 2))
+ output.buffer(hook)
} else if (opts.parseable) {
- this.npm.output(Object.keys(hook).join('\t'))
- this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t'))
+ output.standard(Object.keys(hook).join('\t'))
+ output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
} else if (!this.npm.silent) {
- this.npm.output(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
+ output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
}
}
hookName (hook) {
- let target = hook.name
- if (hook.type === 'owner') {
- target = '~' + target
- }
- return target
+ return `${hook.type === 'owner' ? '~' : ''}${hook.name}`
}
}
+
module.exports = Hook
diff --git a/lib/commands/init.js b/lib/commands/init.js
index d1bde934374c9..4c68210483598 100644
--- a/lib/commands/init.js
+++ b/lib/commands/init.js
@@ -1,22 +1,25 @@
-const fs = require('fs')
-const { relative, resolve } = require('path')
-const { mkdir } = require('fs/promises')
+const { statSync } = require('node:fs')
+const { relative, resolve } = require('node:path')
+const { mkdir } = require('node:fs/promises')
const initJson = require('init-package-json')
const npa = require('npm-package-arg')
-const rpj = require('read-package-json-fast')
const libexec = require('libnpmexec')
const mapWorkspaces = require('@npmcli/map-workspaces')
const PackageJson = require('@npmcli/package-json')
-const log = require('../utils/log-shim.js')
-const updateWorkspaces = require('../workspaces/update-workspaces.js')
+const { log, output, input } = require('proc-log')
+const updateWorkspaces = require('../utils/update-workspaces.js')
+const BaseCommand = require('../base-cmd.js')
const posixPath = p => p.split('\\').join('/')
-const BaseCommand = require('../base-command.js')
-
class Init extends BaseCommand {
static description = 'Create a package.json file'
static params = [
+ 'init-author-name',
+ 'init-author-url',
+ 'init-license',
+ 'init-module',
+ 'init-version',
'yes',
'force',
'scope',
@@ -28,7 +31,7 @@ class Init extends BaseCommand {
static name = 'init'
static usage = [
- '<package-spec> (same as `npx <package-spec>)',
+ '<package-spec> (same as `npx <package-spec>`)',
'<@scope> (same as `npx <@scope>/create`)',
]
@@ -54,9 +57,9 @@ class Init extends BaseCommand {
// reads package.json for the top-level folder first, by doing this we
// ensure the command throw if no package.json is found before trying
// to create a workspace package.json file or its folders
- const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')).catch((err) => {
+ const { content: pkg } = await PackageJson.normalize(this.npm.localPrefix).catch(err => {
if (err.code === 'ENOENT') {
- log.warn('Missing package.json. Try with `--include-workspace-root`.')
+ log.warn('init', 'Missing package.json. Try with `--include-workspace-root`.')
}
throw err
})
@@ -120,13 +123,12 @@ class Init extends BaseCommand {
}
const newArgs = [packageName, ...otherArgs]
- const { color } = this.npm.flatOptions
const {
flatOptions,
localBin,
globalBin,
+ chalk,
} = this.npm
- const output = this.npm.output.bind(this.npm)
const runPath = path
const scriptShell = this.npm.config.get('script-shell') || undefined
const yes = this.npm.config.get('yes')
@@ -134,10 +136,10 @@ class Init extends BaseCommand {
await libexec({
...flatOptions,
args: newArgs,
- color,
localBin,
globalBin,
output,
+ chalk,
path,
runPath,
scriptShell,
@@ -146,12 +148,9 @@ class Init extends BaseCommand {
}
async template (path = process.cwd()) {
- log.pause()
- log.disableProgress()
-
const initFile = this.npm.config.get('init-module')
if (!this.npm.config.get('yes') && !this.npm.config.get('force')) {
- this.npm.output([
+ output.standard([
'This utility will walk you through creating a package.json file.',
'It only covers the most common items, and tries to guess sensible defaults.',
'',
@@ -165,24 +164,17 @@ class Init extends BaseCommand {
].join('\n'))
}
- // XXX promisify init-package-json
- await new Promise((res, rej) => {
- initJson(path, initFile, this.npm.config, (er, data) => {
- log.resume()
- log.enableProgress()
- log.silly('package data', data)
- if (er && er.message === 'canceled') {
- log.warn('init', 'canceled')
- return res()
- }
- if (er) {
- rej(er)
- } else {
- log.info('init', 'written successfully')
- res(data)
- }
- })
- })
+ try {
+ const data = await input.read(() => initJson(path, initFile, this.npm.config))
+ log.silly('package data', data)
+ return data
+ } catch (er) {
+ if (er.message === 'canceled') {
+ log.warn('init', 'canceled')
+ } else {
+ throw er
+ }
+ }
}
async setWorkspace (pkg, workspacePath) {
@@ -200,7 +192,7 @@ class Init extends BaseCommand {
// mapWorkspaces, so we're just going to avoid touching the
// top-level package.json
try {
- fs.statSync(resolve(workspacePath, 'package.json'))
+ statSync(resolve(workspacePath, 'package.json'))
} catch (err) {
return
}
@@ -221,7 +213,7 @@ class Init extends BaseCommand {
// translate workspaces paths into an array containing workspaces names
const workspaces = []
for (const path of workspacesPaths) {
- const { name } = await rpj(resolve(path, 'package.json')).catch(() => ({}))
+ const { content: { name } } = await PackageJson.normalize(path).catch(() => ({ content: {} }))
if (name) {
workspaces.push(name)
diff --git a/lib/commands/install-ci-test.js b/lib/commands/install-ci-test.js
index f7a357ba6e124..4b9dd269f8c74 100644
--- a/lib/commands/install-ci-test.js
+++ b/lib/commands/install-ci-test.js
@@ -1,8 +1,7 @@
-// npm install-ci-test
-// Runs `npm ci` and then runs `npm test`
-
const CI = require('./ci.js')
+// npm install-ci-test
+// Runs `npm ci` and then runs `npm test`
class InstallCITest extends CI {
static description = 'Install a project with a clean slate and run tests'
static name = 'install-ci-test'
@@ -12,4 +11,5 @@ class InstallCITest extends CI {
return this.npm.exec('test', [])
}
}
+
module.exports = InstallCITest
diff --git a/lib/commands/install-test.js b/lib/commands/install-test.js
index 11f22e535403c..e21ca7c929c55 100644
--- a/lib/commands/install-test.js
+++ b/lib/commands/install-test.js
@@ -1,8 +1,7 @@
-// npm install-test
-// Runs `npm install` and then runs `npm test`
-
const Install = require('./install.js')
+// npm install-test
+// Runs `npm install` and then runs `npm test`
class InstallTest extends Install {
static description = 'Install package(s) and run tests'
static name = 'install-test'
@@ -12,4 +11,5 @@ class InstallTest extends Install {
return this.npm.exec('test', [])
}
}
+
module.exports = InstallTest
diff --git a/lib/commands/install.js b/lib/commands/install.js
index 246d7fdf8f02e..24e5f6819b314 100644
--- a/lib/commands/install.js
+++ b/lib/commands/install.js
@@ -1,21 +1,18 @@
-/* eslint-disable camelcase */
-const fs = require('fs')
-const util = require('util')
-const readdir = util.promisify(fs.readdir)
-const reifyFinish = require('../utils/reify-finish.js')
-const log = require('../utils/log-shim.js')
-const { resolve, join } = require('path')
-const Arborist = require('@npmcli/arborist')
+const { readdir } = require('node:fs/promises')
+const { resolve, join } = require('node:path')
+const { log } = require('proc-log')
const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const checks = require('npm-install-checks')
-
+const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
class Install extends ArboristWorkspaceCmd {
static description = 'Install a package'
static name = 'install'
// These are in the order they will show up in when running "-h"
+ // If adding to this list, consider adding also to ci.js
static params = [
'save',
'save-exact',
@@ -24,20 +21,26 @@ class Install extends ArboristWorkspaceCmd {
'legacy-bundling',
'global-style',
'omit',
+ 'include',
'strict-peer-deps',
+ 'prefer-dedupe',
'package-lock',
+ 'package-lock-only',
'foreground-scripts',
'ignore-scripts',
'audit',
'bin-links',
'fund',
'dry-run',
+ 'cpu',
+ 'os',
+ 'libc',
...super.params,
]
static usage = ['[<package-spec> ...]']
- async completion (opts) {
+ static async completion (opts) {
const { partialWord } = opts
// install can complete to a folder with a package.json, or any package.
// if it has a slash, then it's gotta be a folder
@@ -135,6 +138,7 @@ class Install extends ArboristWorkspaceCmd {
throw this.usageError()
}
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
auditLevel: null,
@@ -161,7 +165,6 @@ class Install extends ArboristWorkspaceCmd {
args: [],
scriptShell,
stdio: 'inherit',
- banner: !this.npm.silent,
event,
})
}
@@ -169,4 +172,5 @@ class Install extends ArboristWorkspaceCmd {
await reifyFinish(this.npm, arb)
}
}
+
module.exports = Install
diff --git a/lib/commands/link.js b/lib/commands/link.js
index 5af2c7c269f8f..8a41548d7f108 100644
--- a/lib/commands/link.js
+++ b/lib/commands/link.js
@@ -1,16 +1,11 @@
-const fs = require('fs')
-const util = require('util')
-const readdir = util.promisify(fs.readdir)
-const { resolve } = require('path')
-
-const Arborist = require('@npmcli/arborist')
+const { readdir } = require('node:fs/promises')
+const { resolve } = require('node:path')
const npa = require('npm-package-arg')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const semver = require('semver')
-
const reifyFinish = require('../utils/reify-finish.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
class Link extends ArboristWorkspaceCmd {
static description = 'Symlink a package folder'
static name = 'link'
@@ -28,6 +23,7 @@ class Link extends ArboristWorkspaceCmd {
'strict-peer-deps',
'package-lock',
'omit',
+ 'include',
'ignore-scripts',
'audit',
'bin-links',
@@ -36,8 +32,8 @@ class Link extends ArboristWorkspaceCmd {
...super.params,
]
- async completion (opts) {
- const dir = this.npm.globalDir
+ static async completion (opts, npm) {
+ const dir = npm.globalDir
const files = await readdir(dir)
return files.filter(f => !/^[._-]/.test(f))
}
@@ -52,7 +48,7 @@ class Link extends ArboristWorkspaceCmd {
{ code: 'ELINKGLOBAL' }
)
}
- // install-links is implicitely false when running `npm link`
+ // install-links is implicitly false when running `npm link`
this.npm.config.set('install-links', false)
// link with no args: symlink the folder to the global location
@@ -67,8 +63,10 @@ class Link extends ArboristWorkspaceCmd {
// load current packages from the global space,
// and then add symlinks installs locally
const globalTop = resolve(this.npm.globalDir, '..')
+ const Arborist = require('@npmcli/arborist')
const globalOpts = {
...this.npm.flatOptions,
+ Arborist,
path: globalTop,
global: true,
prune: false,
@@ -95,24 +93,25 @@ class Link extends ArboristWorkspaceCmd {
const names = []
for (const a of args) {
const arg = npa(a)
- names.push(
- arg.type === 'directory'
- ? (await rpj(resolve(arg.fetchSpec, 'package.json'))).name
- : arg.name
- )
+ if (arg.type === 'directory') {
+ const { content } = await pkgJson.normalize(arg.fetchSpec)
+ names.push(content.name)
+ } else {
+ names.push(arg.name)
+ }
}
// npm link should not save=true by default unless you're
// using any of --save-dev or other types
const save =
Boolean(
- this.npm.config.find('save') !== 'default' ||
+ (this.npm.config.find('save') !== 'default' &&
+ this.npm.config.get('save')) ||
this.npm.config.get('save-optional') ||
this.npm.config.get('save-peer') ||
this.npm.config.get('save-dev') ||
this.npm.config.get('save-prod')
)
-
// create a new arborist instance for the local prefix and
// reify all the pending names as symlinks there
const localArb = new Arborist({
@@ -138,8 +137,10 @@ class Link extends ArboristWorkspaceCmd {
const paths = wsp && wsp.length ? wsp : [this.npm.prefix]
const add = paths.map(path => `file:${path.replace(/#/g, '%23')}`)
const globalTop = resolve(this.npm.globalDir, '..')
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist({
...this.npm.flatOptions,
+ Arborist,
path: globalTop,
global: true,
})
@@ -184,4 +185,5 @@ class Link extends ArboristWorkspaceCmd {
return missing
}
}
+
module.exports = Link
diff --git a/lib/commands/login.js b/lib/commands/login.js
index dc4ed8a67acd9..630abf9ac8e04 100644
--- a/lib/commands/login.js
+++ b/lib/commands/login.js
@@ -1,8 +1,7 @@
-const log = require('../utils/log-shim.js')
-const replaceInfo = require('../utils/replace-info.js')
+const { log, output } = require('proc-log')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
-
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Login extends BaseCommand {
static description = 'Login to a registry user account'
@@ -13,7 +12,7 @@ class Login extends BaseCommand {
'auth-type',
]
- async exec (args) {
+ async exec () {
const scope = this.npm.config.get('scope')
let registry = this.npm.config.get('registry')
@@ -27,7 +26,6 @@ class Login extends BaseCommand {
const creds = this.npm.config.getCredentialsByURI(registry)
- log.disableProgress()
log.notice('', `Log in on ${replaceInfo(registry)}`)
const { message, newCreds } = await auth.login(this.npm, {
@@ -45,7 +43,8 @@ class Login extends BaseCommand {
await this.npm.config.save('user')
- this.npm.output(message)
+ output.standard(message)
}
}
+
module.exports = Login
diff --git a/lib/commands/logout.js b/lib/commands/logout.js
index aea5e93652b0e..dc5a0dfda0e98 100644
--- a/lib/commands/logout.js
+++ b/lib/commands/logout.js
@@ -1,7 +1,7 @@
-const getAuth = require('npm-registry-fetch/lib/auth.js')
const npmFetch = require('npm-registry-fetch')
-const log = require('../utils/log-shim')
-const BaseCommand = require('../base-command.js')
+const { getAuth } = npmFetch
+const { log } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
class Logout extends BaseCommand {
static description = 'Log out of the registry'
@@ -11,7 +11,7 @@ class Logout extends BaseCommand {
'scope',
]
- async exec (args) {
+ async exec () {
const registry = this.npm.config.get('registry')
const scope = this.npm.config.get('scope')
const regRef = scope ? `${scope}:registry` : 'registry'
@@ -19,10 +19,14 @@ class Logout extends BaseCommand {
const auth = getAuth(reg, this.npm.flatOptions)
+ const level = this.npm.config.find(`${auth.regKey}:${auth.authKey}`)
+
+ // find the config level and only delete from there
if (auth.token) {
log.verbose('logout', `clearing token for ${reg}`)
await npmFetch(`/-/user/token/${encodeURIComponent(auth.token)}`, {
...this.npm.flatOptions,
+ registry: reg,
method: 'DELETE',
ignoreBody: true,
})
@@ -34,12 +38,13 @@ class Logout extends BaseCommand {
}
if (scope) {
- this.npm.config.delete(regRef, 'user')
+ this.npm.config.delete(regRef, level)
}
- this.npm.config.clearCredentialsByURI(reg)
+ this.npm.config.clearCredentialsByURI(reg, level)
- await this.npm.config.save('user')
+ await this.npm.config.save(level)
}
}
+
module.exports = Logout
diff --git a/lib/commands/ls.js b/lib/commands/ls.js
index 2213e7937407a..417cb1b40d8c2 100644
--- a/lib/commands/ls.js
+++ b/lib/commands/ls.js
@@ -1,14 +1,12 @@
-const { resolve, relative, sep } = require('path')
-const relativePrefix = `.${sep}`
-const { EOL } = require('os')
-
+const { resolve, relative, sep } = require('node:path')
const archy = require('archy')
-const chalk = require('chalk')
-const Arborist = require('@npmcli/arborist')
const { breadth } = require('treeverse')
const npa = require('npm-package-arg')
+const { output } = require('proc-log')
+const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+const localeCompare = require('@isaacs/string-locale-compare')('en')
-const completion = require('../utils/completion/installed-deep.js')
+const relativePrefix = `.${sep}`
const _depth = Symbol('depth')
const _dedupe = Symbol('dedupe')
@@ -21,8 +19,6 @@ const _parent = Symbol('parent')
const _problems = Symbol('problems')
const _required = Symbol('required')
const _type = Symbol('type')
-const ArboristWorkspaceCmd = require('../arborist-cmd.js')
-const localeCompare = require('@isaacs/string-locale-compare')('en')
class LS extends ArboristWorkspaceCmd {
static description = 'List installed packages'
@@ -36,6 +32,7 @@ class LS extends ArboristWorkspaceCmd {
'global',
'depth',
'omit',
+ 'include',
'link',
'package-lock-only',
'unicode',
@@ -44,13 +41,14 @@ class LS extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ const completion = require('../utils/installed-deep.js')
+ return completion(npm, opts)
}
async exec (args) {
const all = this.npm.config.get('all')
- const color = this.npm.color
+ const chalk = this.npm.chalk
const depth = this.npm.config.get('depth')
const global = this.npm.global
const json = this.npm.config.get('json')
@@ -64,6 +62,8 @@ class LS extends ArboristWorkspaceCmd {
const path = global ? resolve(this.npm.globalDir, '..') : this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
+
const arb = new Arborist({
global,
...this.npm.flatOptions,
@@ -157,7 +157,7 @@ class LS extends ArboristWorkspaceCmd {
? getJsonOutputItem(node, { global, long })
: parseable
? null
- : getHumanOutputItem(node, { args, color, global, long })
+ : getHumanOutputItem(node, { args, chalk, global, long })
// loop through list of node problems to add them to global list
if (node[_include]) {
@@ -177,11 +177,14 @@ class LS extends ArboristWorkspaceCmd {
const [rootError] = tree.errors.filter(e =>
e.code === 'EJSONPARSE' && e.path === resolve(path, 'package.json'))
- this.npm.outputBuffer(
- json ? jsonOutput({ path, problems, result, rootError, seenItems }) :
- parseable ? parseableOutput({ seenNodes, global, long }) :
- humanOutput({ color, result, seenItems, unicode })
- )
+ if (json) {
+ output.buffer(jsonOutput({ path, problems, result, rootError, seenItems }))
+ } else {
+ output.standard(parseable
+ ? parseableOutput({ seenNodes, global, long })
+ : humanOutput({ chalk, result, seenItems, unicode })
+ )
+ }
// if filtering items, should exit with error code on no results
if (result && !result[_include] && args.length) {
@@ -200,7 +203,7 @@ class LS extends ArboristWorkspaceCmd {
if (shouldThrow) {
throw Object.assign(
- new Error([...problems].join(EOL)),
+ new Error([...problems].join('\n')),
{ code: 'ELSPROBLEMS' }
)
}
@@ -219,6 +222,7 @@ class LS extends ArboristWorkspaceCmd {
return tree
}
}
+
module.exports = LS
const isGitNode = (node) => {
@@ -278,9 +282,9 @@ const augmentItemWithIncludeMetadata = (node, item) => {
return item
}
-const getHumanOutputItem = (node, { args, color, global, long }) => {
+const getHumanOutputItem = (node, { args, chalk, global, long }) => {
const { pkgid, path } = node
- const workspacePkgId = color ? chalk.green(pkgid) : pkgid
+ const workspacePkgId = chalk.blueBright(pkgid)
let printable = node.isWorkspace ? workspacePkgId : pkgid
// special formatting for top-level package name
@@ -289,15 +293,16 @@ const getHumanOutputItem = (node, { args, color, global, long }) => {
if (hasNoPackageJson || global) {
printable = path
} else {
- printable += `${long ? EOL : ' '}${path}`
+ printable += `${long ? '\n' : ' '}${path}`
}
}
- const highlightDepName =
- color && args.length && node[_filteredBy]
+ // TODO there is a LOT of overlap with lib/utils/explain-dep.js here
+
+ const highlightDepName = args.length && node[_filteredBy]
const missingColor = isOptional(node)
- ? chalk.yellow.bgBlack
- : chalk.red.bgBlack
+ ? chalk.yellow
+ : chalk.red
const missingMsg = `UNMET ${isOptional(node) ? 'OPTIONAL ' : ''}DEPENDENCY`
const targetLocation = node.root
? relative(node.root.realpath, node.realpath)
@@ -308,33 +313,33 @@ const getHumanOutputItem = (node, { args, color, global, long }) => {
const label =
(
node[_missing]
- ? (color ? missingColor(missingMsg) : missingMsg) + ' '
+ ? missingColor(missingMsg) + ' '
: ''
) +
- `${highlightDepName ? chalk.yellow.bgBlack(printable) : printable}` +
+ `${highlightDepName ? chalk.yellow(printable) : printable}` +
(
node[_dedupe]
- ? ' ' + (color ? chalk.gray('deduped') : 'deduped')
+ ? ' ' + chalk.dim('deduped')
: ''
) +
(
invalid
- ? ' ' + (color ? chalk.red.bgBlack(invalid) : invalid)
+ ? ' ' + chalk.red(invalid)
: ''
) +
(
isExtraneous(node, { global })
- ? ' ' + (color ? chalk.green.bgBlack('extraneous') : 'extraneous')
+ ? ' ' + chalk.red('extraneous')
: ''
) +
(
node.overridden
- ? ' ' + (color ? chalk.gray('overridden') : 'overridden')
+ ? ' ' + chalk.dim('overridden')
: ''
) +
(isGitNode(node) ? ` (${node.resolved})` : '') +
(node.isLink ? ` -> ${relativePrefix}${targetLocation}` : '') +
- (long ? `${EOL}${node.package.description || ''}` : '')
+ (long ? `\n${node.package.description || ''}` : '')
return augmentItemWithIncludeMetadata(node, { label, nodes: [] })
}
@@ -504,7 +509,7 @@ const augmentNodesWithMetadata = ({
const sortAlphabetically = ({ pkgid: a }, { pkgid: b }) => localeCompare(a, b)
-const humanOutput = ({ color, result, seenItems, unicode }) => {
+const humanOutput = ({ chalk, result, seenItems, unicode }) => {
// we need to traverse the entire tree in order to determine which items
// should be included (since a nested transitive included dep will make it
// so that all its ancestors should be displayed)
@@ -520,7 +525,7 @@ const humanOutput = ({ color, result, seenItems, unicode }) => {
}
const archyOutput = archy(result, '', { unicode })
- return color ? chalk.reset(archyOutput) : archyOutput
+ return chalk.reset(archyOutput)
}
const jsonOutput = ({ path, problems, result, rootError, seenItems }) => {
@@ -552,7 +557,7 @@ const jsonOutput = ({ path, problems, result, rootError, seenItems }) => {
}
}
- return JSON.stringify(result, null, 2)
+ return result
}
const parseableOutput = ({ global, long, seenNodes }) => {
@@ -567,7 +572,7 @@ const parseableOutput = ({ global, long, seenNodes }) => {
out += node[_invalid] ? ':INVALID' : ''
out += node.overridden ? ':OVERRIDDEN' : ''
}
- out += EOL
+ out += '\n'
}
}
return out.trim()
diff --git a/lib/commands/org.js b/lib/commands/org.js
index 575ff75e2a6cf..613498056f556 100644
--- a/lib/commands/org.js
+++ b/lib/commands/org.js
@@ -1,7 +1,7 @@
const liborg = require('libnpmorg')
-const otplease = require('../utils/otplease.js')
-const Table = require('cli-table3')
-const BaseCommand = require('../base-command.js')
+const { otplease } = require('../utils/auth.js')
+const BaseCommand = require('../base-cmd.js')
+const { output } = require('proc-log')
class Org extends BaseCommand {
static description = 'Manage orgs'
@@ -14,7 +14,7 @@ class Org extends BaseCommand {
static params = ['registry', 'otp', 'json', 'parseable']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return ['set', 'rm', 'ls']
@@ -68,14 +68,14 @@ class Org extends BaseCommand {
const memDeets = await liborg.set(org, user, role, opts)
if (opts.json) {
- this.npm.output(JSON.stringify(memDeets, null, 2))
+ output.standard(JSON.stringify(memDeets, null, 2))
} else if (opts.parseable) {
- this.npm.output(['org', 'orgsize', 'user', 'role'].join('\t'))
- this.npm.output(
+ output.standard(['org', 'orgsize', 'user', 'role'].join('\t'))
+ output.standard(
[memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t')
)
} else if (!this.npm.silent) {
- this.npm.output(
+ output.standard(
`Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${
memDeets.org.size
} member${memDeets.org.size === 1 ? '' : 's'} in this org.`
@@ -100,19 +100,17 @@ class Org extends BaseCommand {
org = org.replace(/^[~@]?/, '')
const userCount = Object.keys(roster).length
if (opts.json) {
- this.npm.output(
- JSON.stringify({
- user,
- org,
- userCount,
- deleted: true,
- })
- )
+ output.buffer({
+ user,
+ org,
+ userCount,
+ deleted: true,
+ })
} else if (opts.parseable) {
- this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t'))
- this.npm.output([user, org, userCount, true].join('\t'))
+ output.standard(['user', 'org', 'userCount', 'deleted'].join('\t'))
+ output.standard([user, org, userCount, true].join('\t'))
} else if (!this.npm.silent) {
- this.npm.output(
+ output.standard(
`Successfully removed ${user} from ${org}. You now have ${userCount} member${
userCount === 1 ? '' : 's'
} in this org.`
@@ -135,21 +133,19 @@ class Org extends BaseCommand {
roster = newRoster
}
if (opts.json) {
- this.npm.output(JSON.stringify(roster, null, 2))
+ output.buffer(roster)
} else if (opts.parseable) {
- this.npm.output(['user', 'role'].join('\t'))
+ output.standard(['user', 'role'].join('\t'))
Object.keys(roster).forEach(u => {
- this.npm.output([u, roster[u]].join('\t'))
+ output.standard([u, roster[u]].join('\t'))
})
} else if (!this.npm.silent) {
- const table = new Table({ head: ['user', 'role'] })
- Object.keys(roster)
- .sort()
- .forEach(u => {
- table.push([u, roster[u]])
- })
- this.npm.output(table.toString())
+ const chalk = this.npm.chalk
+ for (const u of Object.keys(roster).sort()) {
+ output.standard(`${u} - ${chalk.cyan(roster[u])}`)
+ }
}
}
}
+
module.exports = Org
diff --git a/lib/commands/outdated.js b/lib/commands/outdated.js
index 5e8a4e0d2168c..c401c0d50a5cd 100644
--- a/lib/commands/outdated.js
+++ b/lib/commands/outdated.js
@@ -1,16 +1,23 @@
-const os = require('os')
-const { resolve } = require('path')
+const { resolve } = require('node:path')
+const { stripVTControlCharacters } = require('node:util')
const pacote = require('pacote')
const table = require('text-table')
-const chalk = require('chalk')
const npa = require('npm-package-arg')
const pickManifest = require('npm-pick-manifest')
+const { output } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
+const ArboristWorkspaceCmd = require('../arborist-cmd.js')
-const Arborist = require('@npmcli/arborist')
+const safeNpa = (spec) => {
+ try {
+ return npa(spec)
+ } catch {
+ return null
+ }
+}
-const ansiTrim = require('../utils/ansi-trim.js')
-const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+// This string is load bearing and is shared with Arborist
+const MISSING = 'MISSING'
class Outdated extends ArboristWorkspaceCmd {
static description = 'Check for outdated packages'
@@ -25,183 +32,131 @@ class Outdated extends ArboristWorkspaceCmd {
'workspace',
]
- async exec (args) {
- const global = resolve(this.npm.globalDir, '..')
- const where = this.npm.global
- ? global
- : this.npm.prefix
+ #tree
+ #list = []
+ #edges = new Set()
+ #filterSet
+ async exec (args) {
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist({
...this.npm.flatOptions,
- path: where,
+ path: this.npm.global ? resolve(this.npm.globalDir, '..') : this.npm.prefix,
})
-
- this.edges = new Set()
- this.list = []
- this.tree = await arb.loadActual()
-
- if (this.workspaceNames && this.workspaceNames.length) {
- this.filterSet =
- arb.workspaceDependencySet(
- this.tree,
- this.workspaceNames,
- this.npm.flatOptions.includeWorkspaceRoot
- )
+ this.#tree = await arb.loadActual()
+
+ if (this.workspaceNames?.length) {
+ this.#filterSet = arb.workspaceDependencySet(
+ this.#tree,
+ this.workspaceNames,
+ this.npm.flatOptions.includeWorkspaceRoot
+ )
} else if (!this.npm.flatOptions.workspacesEnabled) {
- this.filterSet =
- arb.excludeWorkspacesDependencySet(this.tree)
+ this.#filterSet = arb.excludeWorkspacesDependencySet(this.#tree)
}
- if (args.length !== 0) {
- // specific deps
- for (let i = 0; i < args.length; i++) {
- const nodes = this.tree.inventory.query('name', args[i])
- this.getEdges(nodes, 'edgesIn')
+ if (args.length) {
+ for (const arg of args) {
+ // specific deps
+ this.#getEdges(this.#tree.inventory.query('name', arg), 'edgesIn')
}
} else {
if (this.npm.config.get('all')) {
// all deps in tree
- const nodes = this.tree.inventory.values()
- this.getEdges(nodes, 'edgesOut')
+ this.#getEdges(this.#tree.inventory.values(), 'edgesOut')
}
// top-level deps
- this.getEdges()
+ this.#getEdges()
}
- await Promise.all(Array.from(this.edges).map((edge) => {
- return this.getOutdatedInfo(edge)
- }))
+ await Promise.all([...this.#edges].map((e) => this.#getOutdatedInfo(e)))
- // sorts list alphabetically
- const outdated = this.list.sort((a, b) => localeCompare(a.name, b.name))
+ // sorts list alphabetically by name and then dependent
+ const outdated = this.#list
+ .sort((a, b) => localeCompare(a.name, b.name) || localeCompare(a.dependent, b.dependent))
- if (outdated.length > 0) {
+ if (outdated.length) {
process.exitCode = 1
}
- // return if no outdated packages
- if (outdated.length === 0 && !this.npm.config.get('json')) {
+ if (this.npm.config.get('json')) {
+ output.buffer(this.#json(outdated))
return
}
- // display results
- if (this.npm.config.get('json')) {
- this.npm.output(this.makeJSON(outdated))
- } else if (this.npm.config.get('parseable')) {
- this.npm.output(this.makeParseable(outdated))
- } else {
- const outList = outdated.map(x => this.makePretty(x))
- const outHead = ['Package',
- 'Current',
- 'Wanted',
- 'Latest',
- 'Location',
- 'Depended by',
- ]
-
- if (this.npm.config.get('long')) {
- outHead.push('Package Type', 'Homepage')
- }
- const outTable = [outHead].concat(outList)
+ const res = this.npm.config.get('parseable')
+ ? this.#parseable(outdated)
+ : this.#pretty(outdated)
- if (this.npm.color) {
- outTable[0] = outTable[0].map(heading => chalk.underline(heading))
- }
-
- const tableOpts = {
- align: ['l', 'r', 'r', 'r', 'l'],
- stringLength: s => ansiTrim(s).length,
- }
- this.npm.output(table(outTable, tableOpts))
+ if (res) {
+ output.standard(res)
}
}
- getEdges (nodes, type) {
+ #getEdges (nodes, type) {
// when no nodes are provided then it should only read direct deps
// from the root node and its workspaces direct dependencies
if (!nodes) {
- this.getEdgesOut(this.tree)
- this.getWorkspacesEdges()
+ this.#getEdgesOut(this.#tree)
+ this.#getWorkspacesEdges()
return
}
for (const node of nodes) {
- type === 'edgesOut'
- ? this.getEdgesOut(node)
- : this.getEdgesIn(node)
+ if (type === 'edgesOut') {
+ this.#getEdgesOut(node)
+ } else {
+ this.#getEdgesIn(node)
+ }
}
}
- getEdgesIn (node) {
+ #getEdgesIn (node) {
for (const edge of node.edgesIn) {
- this.trackEdge(edge)
+ this.#trackEdge(edge)
}
}
- getEdgesOut (node) {
+ #getEdgesOut (node) {
// TODO: normalize usage of edges and avoid looping through nodes here
- if (this.npm.global) {
- for (const child of node.children.values()) {
- this.trackEdge(child)
- }
- } else {
- for (const edge of node.edgesOut.values()) {
- this.trackEdge(edge)
- }
+ const edges = this.npm.global ? node.children.values() : node.edgesOut.values()
+ for (const edge of edges) {
+ this.#trackEdge(edge)
}
}
- trackEdge (edge) {
- const filteredOut =
- edge.from
- && this.filterSet
- && this.filterSet.size > 0
- && !this.filterSet.has(edge.from.target)
-
- if (filteredOut) {
+ #trackEdge (edge) {
+ if (edge.from && this.#filterSet?.size > 0 && !this.#filterSet.has(edge.from.target)) {
return
}
-
- this.edges.add(edge)
+ this.#edges.add(edge)
}
- getWorkspacesEdges (node) {
+ #getWorkspacesEdges () {
if (this.npm.global) {
return
}
- for (const edge of this.tree.edgesOut.values()) {
- const workspace = edge
- && edge.to
- && edge.to.target
- && edge.to.target.isWorkspace
-
- if (workspace) {
- this.getEdgesOut(edge.to.target)
+ for (const edge of this.#tree.edgesOut.values()) {
+ if (edge?.to?.target?.isWorkspace) {
+ this.#getEdgesOut(edge.to.target)
}
}
}
- async getPackument (spec) {
- const packument = await pacote.packument(spec, {
+ async #getPackument (spec) {
+ return pacote.packument(spec, {
...this.npm.flatOptions,
fullMetadata: this.npm.config.get('long'),
preferOnline: true,
})
- return packument
}
- async getOutdatedInfo (edge) {
- let alias = false
- try {
- alias = npa(edge.spec).subSpec
- } catch (err) {
- // ignore errors, no alias
- }
+ async #getOutdatedInfo (edge) {
+ const alias = safeNpa(edge.spec)?.subSpec
const spec = npa(alias ? alias.name : edge.name)
const node = edge.to || edge
- const { path, location } = node
- const { version: current } = node.package || {}
+ const { path, location, package: { version: current } = {} } = node
const type = edge.optional ? 'optionalDependencies'
: edge.peer ? 'peerDependencies'
@@ -216,34 +171,22 @@ class Outdated extends ArboristWorkspaceCmd {
// deps different from prod not currently
// on disk are not included in the output
- if (edge.error === 'MISSING' && type !== 'dependencies') {
+ if (edge.error === MISSING && type !== 'dependencies') {
return
}
+ // if it's not a range, version, or tag, skip it
+ if (!safeNpa(`${edge.name}@${edge.spec}`)?.registry) {
+ return null
+ }
+
try {
- const packument = await this.getPackument(spec)
+ const packument = await this.#getPackument(spec)
const expected = alias ? alias.fetchSpec : edge.spec
- // if it's not a range, version, or tag, skip it
- try {
- if (!npa(`${edge.name}@${edge.spec}`).registry) {
- return null
- }
- } catch (err) {
- return null
- }
const wanted = pickManifest(packument, expected, this.npm.flatOptions)
const latest = pickManifest(packument, '*', this.npm.flatOptions)
-
- if (
- !current ||
- current !== wanted.version ||
- wanted.version !== latest.version
- ) {
- const dependent = edge.from ?
- this.maybeWorkspaceName(edge.from)
- : 'global'
-
- this.list.push({
+ if (!current || current !== wanted.version || wanted.version !== latest.version) {
+ this.#list.push({
name: alias ? edge.spec.replace('npm', edge.name) : edge.name,
path,
type,
@@ -251,125 +194,89 @@ class Outdated extends ArboristWorkspaceCmd {
location,
wanted: wanted.version,
latest: latest.version,
- dependent,
+ workspaceDependent: edge.from?.isWorkspace ? edge.from.pkgid : null,
+ dependent: edge.from?.name ?? 'global',
homepage: packument.homepage,
})
}
} catch (err) {
// silently catch and ignore ETARGET, E403 &
// E404 errors, deps are just skipped
- if (!(
- err.code === 'ETARGET' ||
- err.code === 'E403' ||
- err.code === 'E404')
- ) {
+      if (!['ETARGET', 'E403', 'E404'].includes(err.code)) {
throw err
}
}
}
- maybeWorkspaceName (node) {
- if (!node.isWorkspace) {
- return node.name
- }
-
- const humanOutput =
- !this.npm.config.get('json') && !this.npm.config.get('parseable')
-
- const workspaceName =
- humanOutput
- ? node.pkgid
- : node.name
-
- return this.npm.color && humanOutput
- ? chalk.green(workspaceName)
- : workspaceName
- }
-
// formatting functions
- makePretty (dep) {
- const {
- current = 'MISSING',
- location = '-',
- homepage = '',
- name,
- wanted,
- latest,
- type,
- dependent,
- } = dep
-
- const columns = [name, current, wanted, latest, location, dependent]
-
- if (this.npm.config.get('long')) {
- columns[6] = type
- columns[7] = homepage
- }
- if (this.npm.color) {
- columns[0] = chalk[current === wanted ? 'yellow' : 'red'](columns[0]) // current
- columns[2] = chalk.green(columns[2]) // wanted
- columns[3] = chalk.magenta(columns[3]) // latest
+ #pretty (list) {
+ if (!list.length) {
+ return
}
- return columns
+ const long = this.npm.config.get('long')
+ const { bold, yellow, red, cyan, blue } = this.npm.chalk
+
+ return table([
+ [
+ 'Package',
+ 'Current',
+ 'Wanted',
+ 'Latest',
+ 'Location',
+ 'Depended by',
+ ...long ? ['Package Type', 'Homepage'] : [],
+ ].map(h => bold.underline(h)),
+ ...list.map((d) => [
+ d.current === d.wanted ? yellow(d.name) : red(d.name),
+ d.current ?? 'MISSING',
+ cyan(d.wanted),
+ blue(d.latest),
+ d.location ?? '-',
+ d.workspaceDependent ? blue(d.workspaceDependent) : d.dependent,
+ ...long ? [d.type, blue(d.homepage ?? '')] : [],
+ ]),
+ ], {
+ align: ['l', 'r', 'r', 'r', 'l'],
+ stringLength: s => stripVTControlCharacters(s).length,
+ })
}
// --parseable creates output like this:
 // <fullpath>:<name@wanted>:<name@installed>:<name@latest>:<dependedby>
- makeParseable (list) {
- return list.map(dep => {
- const {
- name,
- current,
- wanted,
- latest,
- path,
- dependent,
- type,
- homepage,
- } = dep
- const out = [
- path,
- name + '@' + wanted,
- current ? (name + '@' + current) : 'MISSING',
- name + '@' + latest,
- dependent,
- ]
- if (this.npm.config.get('long')) {
- out.push(type, homepage)
- }
-
- return out.join(':')
- }).join(os.EOL)
+ #parseable (list) {
+ return list.map(d => [
+ d.path,
+ `${d.name}@${d.wanted}`,
+ d.current ? `${d.name}@${d.current}` : 'MISSING',
+ `${d.name}@${d.latest}`,
+ d.dependent,
+ ...this.npm.config.get('long') ? [d.type, d.homepage] : [],
+ ].join(':')).join('\n')
}
- makeJSON (list) {
- const out = {}
- list.forEach(dep => {
- const {
- name,
- current,
- wanted,
- latest,
- path,
- type,
- dependent,
- homepage,
- } = dep
- out[name] = {
- current,
- wanted,
- latest,
- dependent,
- location: path,
+ #json (list) {
+ // TODO(BREAKING_CHANGE): this should just return an array. It's a list and
+  // turning it into an object with keys is lossy since multiple items in the
+ // list could have the same key. For now we hack that by only changing
+ // top level values into arrays if they have multiple outdated items
+ return list.reduce((acc, d) => {
+ const dep = {
+ current: d.current,
+ wanted: d.wanted,
+ latest: d.latest,
+ dependent: d.dependent,
+ location: d.path,
+ ...this.npm.config.get('long') ? { type: d.type, homepage: d.homepage } : {},
}
- if (this.npm.config.get('long')) {
- out[name].type = type
- out[name].homepage = homepage
- }
- })
- return JSON.stringify(out, null, 2)
+ acc[d.name] = acc[d.name]
+      // If this item already has an outdated dep then we turn it into an array
+ ? (Array.isArray(acc[d.name]) ? acc[d.name] : [acc[d.name]]).concat(dep)
+ : dep
+ return acc
+ }, {})
}
}
+
module.exports = Outdated
diff --git a/lib/commands/owner.js b/lib/commands/owner.js
index 40f16332b2922..0f12cf9293c30 100644
--- a/lib/commands/owner.js
+++ b/lib/commands/owner.js
@@ -1,16 +1,16 @@
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const pacote = require('pacote')
-const log = require('../utils/log-shim')
-const otplease = require('../utils/otplease.js')
-const readPackageJsonFast = require('read-package-json-fast')
-const BaseCommand = require('../base-command.js')
-const { resolve } = require('path')
+const { log, output } = require('proc-log')
+const { otplease } = require('../utils/auth.js')
+const pkgJson = require('@npmcli/package-json')
+const BaseCommand = require('../base-cmd.js')
+const { redact } = require('@npmcli/redact')
-const readJson = async (pkg) => {
+const readJson = async (path) => {
try {
- const json = await readPackageJsonFast(pkg)
- return json
+ const { content } = await pkgJson.normalize(path)
+ return content
} catch {
return {}
}
@@ -35,7 +35,7 @@ class Owner extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts, npm) {
const argv = opts.conf.argv.remain
if (argv.length > 3) {
return []
@@ -51,17 +51,17 @@ class Owner extends BaseCommand {
// reaches registry in order to autocomplete rm
if (argv[2] === 'rm') {
- if (this.npm.global) {
+ if (npm.global) {
return []
}
- const { name } = await readJson(resolve(this.npm.prefix, 'package.json'))
+ const { name } = await readJson(npm.prefix)
if (!name) {
return []
}
const spec = npa(name)
const data = await pacote.packument(spec, {
- ...this.npm.flatOptions,
+ ...npm.flatOptions,
fullMetadata: true,
})
if (data && data.maintainers && data.maintainers.length) {
@@ -115,12 +115,12 @@ class Owner extends BaseCommand {
const packumentOpts = { ...this.npm.flatOptions, fullMetadata: true, preferOnline: true }
const { maintainers } = await pacote.packument(spec, packumentOpts)
if (!maintainers || !maintainers.length) {
- this.npm.output('no admin found')
+ output.standard('no admin found')
} else {
- this.npm.output(maintainers.map(m => `${m.name} <${m.email}>`).join('\n'))
+ output.standard(maintainers.map(m => `${m.name} <${m.email}>`).join('\n'))
}
} catch (err) {
- log.error('owner ls', "Couldn't get owner data", pkg)
+ log.error('owner ls', "Couldn't get owner data", redact(pkg))
throw err
}
}
@@ -130,7 +130,7 @@ class Owner extends BaseCommand {
if (this.npm.global) {
throw this.usageError()
}
- const { name } = await readJson(resolve(prefix, 'package.json'))
+ const { name } = await readJson(prefix)
if (!name) {
throw this.usageError()
}
@@ -216,9 +216,9 @@ class Owner extends BaseCommand {
})
})
if (addOrRm === 'add') {
- this.npm.output(`+ ${user} (${spec.name})`)
+ output.standard(`+ ${user} (${spec.name})`)
} else {
- this.npm.output(`- ${user} (${spec.name})`)
+ output.standard(`- ${user} (${spec.name})`)
}
return res
} catch (err) {
diff --git a/lib/commands/pack.js b/lib/commands/pack.js
index 74e80e573c2e9..79e7f49f819ec 100644
--- a/lib/commands/pack.js
+++ b/lib/commands/pack.js
@@ -1,9 +1,9 @@
const pacote = require('pacote')
const libpack = require('libnpmpack')
const npa = require('npm-package-arg')
-const log = require('../utils/log-shim')
+const { log, output } = require('proc-log')
const { getContents, logTar } = require('../utils/tar.js')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Pack extends BaseCommand {
static description = 'Create a tarball from a package'
@@ -47,21 +47,22 @@ class Pack extends BaseCommand {
for (const { arg, manifest } of manifests) {
const tarballData = await libpack(arg, {
...this.npm.flatOptions,
+ foregroundScripts: this.npm.config.isDefault('foreground-scripts')
+ ? true
+ : this.npm.config.get('foreground-scripts'),
prefix: this.npm.localPrefix,
workspaces: this.workspacePaths,
})
- const pkgContents = await getContents(manifest, tarballData)
- tarballs.push(pkgContents)
+ tarballs.push(await getContents(manifest, tarballData))
}
- if (json) {
- this.npm.output(JSON.stringify(tarballs, null, 2))
- return
- }
-
- for (const tar of tarballs) {
- logTar(tar, { unicode })
- this.npm.output(tar.filename.replace(/^@/, '').replace(/\//, '-'))
+ for (const [index, tar] of Object.entries(tarballs)) {
+ // XXX(BREAKING_CHANGE): publish outputs a json object with package
+ // names as keys. Pack should do the same here instead of an array
+ logTar(tar, { unicode, json, key: index })
+ if (!json) {
+ output.standard(tar.filename.replace(/^@/, '').replace(/\//, '-'))
+ }
}
}
@@ -80,4 +81,5 @@ class Pack extends BaseCommand {
return this.exec([...this.workspacePaths, ...args.filter(a => a !== '.')])
}
}
+
module.exports = Pack
diff --git a/lib/commands/ping.js b/lib/commands/ping.js
index 5a651c4a6ab09..3388ba1aa378e 100644
--- a/lib/commands/ping.js
+++ b/lib/commands/ping.js
@@ -1,27 +1,30 @@
-const log = require('../utils/log-shim')
+const { redact } = require('@npmcli/redact')
+const { log, output } = require('proc-log')
const pingUtil = require('../utils/ping.js')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Ping extends BaseCommand {
static description = 'Ping npm registry'
static params = ['registry']
static name = 'ping'
- async exec (args) {
- log.notice('PING', this.npm.config.get('registry'))
+ async exec () {
+ const cleanRegistry = redact(this.npm.config.get('registry'))
+ log.notice('PING', cleanRegistry)
const start = Date.now()
const details = await pingUtil({ ...this.npm.flatOptions })
const time = Date.now() - start
log.notice('PONG', `${time}ms`)
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify({
- registry: this.npm.config.get('registry'),
+ output.buffer({
+ registry: cleanRegistry,
time,
details,
- }, null, 2))
+ })
} else if (Object.keys(details).length) {
- log.notice('PONG', `${JSON.stringify(details, null, 2)}`)
+ log.notice('PONG', JSON.stringify(details, null, 2))
}
}
}
+
module.exports = Ping
diff --git a/lib/commands/pkg.js b/lib/commands/pkg.js
index 5cdcd207887c9..a011fc10be107 100644
--- a/lib/commands/pkg.js
+++ b/lib/commands/pkg.js
@@ -1,5 +1,6 @@
+const { output } = require('proc-log')
const PackageJson = require('@npmcli/package-json')
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
const Queryable = require('../utils/queryable.js')
class Pkg extends BaseCommand {
@@ -11,6 +12,7 @@ class Pkg extends BaseCommand {
'delete [ ...]',
'set [[].= ...]',
'set [[].= ...]',
+ 'fix',
]
static params = [
@@ -23,13 +25,7 @@ class Pkg extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async exec (args, { prefix } = {}) {
- if (!prefix) {
- this.prefix = this.npm.localPrefix
- } else {
- this.prefix = prefix
- }
-
+ async exec (args, { path = this.npm.localPrefix, workspace } = {}) {
if (this.npm.global) {
throw Object.assign(
new Error(`There's no package.json file to manage on global mode`),
@@ -40,11 +36,13 @@ class Pkg extends BaseCommand {
const [cmd, ..._args] = args
switch (cmd) {
case 'get':
- return this.get(_args)
+ return this.get(_args, { path, workspace })
case 'set':
- return this.set(_args)
+ return this.set(_args, { path, workspace }).then(p => p.save())
case 'delete':
- return this.delete(_args)
+ return this.delete(_args, { path, workspace }).then(p => p.save())
+ case 'fix':
+ return PackageJson.fix(path).then(p => p.save())
default:
throw this.usageError()
}
@@ -52,44 +50,36 @@ class Pkg extends BaseCommand {
async execWorkspaces (args) {
await this.setWorkspaces()
- const result = {}
- for (const [workspaceName, workspacePath] of this.workspaces.entries()) {
- this.prefix = workspacePath
- result[workspaceName] = await this.exec(args, { prefix: workspacePath })
+ for (const [workspace, path] of this.workspaces.entries()) {
+ await this.exec(args, { path, workspace })
}
- // when running in workspaces names, make sure to key by workspace
- // name the results of each value retrieved in each ws
- this.npm.output(JSON.stringify(result, null, 2))
}
- async get (args) {
- const pkgJson = await PackageJson.load(this.prefix)
-
- const { content } = pkgJson
- let result = !args.length && content
+ async get (args, { path, workspace }) {
+ this.npm.config.set('json', true)
+ const pkgJson = await PackageJson.load(path)
- if (!result) {
- const q = new Queryable(content)
- result = q.query(args)
+ let result = pkgJson.content
+ if (args.length) {
+ result = new Queryable(result).query(args)
// in case there's only a single result from the query
// just prints that one element to stdout
+ // TODO(BREAKING_CHANGE): much like other places where we unwrap single
+ // item arrays this should go away. it makes the behavior unknown for users
+ // who don't already know the shape of the data.
if (Object.keys(result).length === 1) {
result = result[args]
}
}
- // only outputs if not running with workspaces config,
- // in case you're retrieving info for workspaces the pkgWorkspaces
- // will handle the output to make sure it get keyed by ws name
- if (!this.npm.config.get('workspaces')) {
- this.npm.output(JSON.stringify(result, null, 2))
- }
-
- return result
+ // The display layer is responsible for calling JSON.stringify on the result
+ // TODO: https://github.com/npm/cli/issues/5508 a raw mode has been requested similar
+ // to jq -r. If that was added then this method should no longer set `json:true` all the time
+ output.buffer(workspace ? { [workspace]: result } : result)
}
- async set (args) {
+ async set (args, { path }) {
const setError = () =>
this.usageError('npm pkg set expects a key=value pair of args.')
@@ -99,7 +89,7 @@ class Pkg extends BaseCommand {
const force = this.npm.config.get('force')
const json = this.npm.config.get('json')
- const pkgJson = await PackageJson.load(this.prefix)
+ const pkgJson = await PackageJson.load(path)
const q = new Queryable(pkgJson.content)
for (const arg of args) {
const [key, ...rest] = arg.split('=')
@@ -111,11 +101,10 @@ class Pkg extends BaseCommand {
q.set(key, json ? JSON.parse(value) : value, { force })
}
- pkgJson.update(q.toJSON())
- await pkgJson.save()
+ return pkgJson.update(q.toJSON())
}
- async delete (args) {
+ async delete (args, { path }) {
const setError = () =>
this.usageError('npm pkg delete expects key args.')
@@ -123,7 +112,7 @@ class Pkg extends BaseCommand {
throw setError()
}
- const pkgJson = await PackageJson.load(this.prefix)
+ const pkgJson = await PackageJson.load(path)
const q = new Queryable(pkgJson.content)
for (const key of args) {
if (!key) {
@@ -133,8 +122,7 @@ class Pkg extends BaseCommand {
q.delete(key)
}
- pkgJson.update(q.toJSON())
- await pkgJson.save()
+ return pkgJson.update(q.toJSON())
}
}
diff --git a/lib/commands/prefix.js b/lib/commands/prefix.js
index 264b819fc7692..da8702cf91caa 100644
--- a/lib/commands/prefix.js
+++ b/lib/commands/prefix.js
@@ -1,4 +1,5 @@
-const BaseCommand = require('../base-command.js')
+const { output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
class Prefix extends BaseCommand {
static description = 'Display prefix'
@@ -6,8 +7,9 @@ class Prefix extends BaseCommand {
static params = ['global']
static usage = ['[-g]']
- async exec (args) {
- return this.npm.output(this.npm.prefix)
+ async exec () {
+ return output.standard(this.npm.prefix)
}
}
+
module.exports = Prefix
diff --git a/lib/commands/profile.js b/lib/commands/profile.js
index e42ebb276d202..965fcbcb8ce29 100644
--- a/lib/commands/profile.js
+++ b/lib/commands/profile.js
@@ -1,14 +1,11 @@
-const inspect = require('util').inspect
-const { URL } = require('url')
-const chalk = require('chalk')
-const log = require('../utils/log-shim.js')
-const npmProfile = require('npm-profile')
+const { inspect } = require('node:util')
+const { URL } = require('node:url')
+const { log, output } = require('proc-log')
+const { get, set, createToken } = require('npm-profile')
const qrcodeTerminal = require('qrcode-terminal')
-const Table = require('cli-table3')
-
-const otplease = require('../utils/otplease.js')
-const pulseTillDone = require('../utils/pulse-till-done.js')
+const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
+const BaseCommand = require('../base-cmd.js')
const qrcode = url =>
new Promise((resolve) => qrcodeTerminal.generate(url, resolve))
@@ -36,7 +33,6 @@ const writableProfileKeys = [
'github',
]
-const BaseCommand = require('../base-command.js')
class Profile extends BaseCommand {
static description = 'Change settings on your registry profile'
static name = 'profile'
@@ -54,7 +50,7 @@ class Profile extends BaseCommand {
'otp',
]
- async completion (opts) {
+ static async completion (opts) {
var argv = opts.conf.argv.remain
if (!argv[2]) {
@@ -81,8 +77,6 @@ class Profile extends BaseCommand {
throw this.usageError()
}
- log.gauge.show('profile')
-
const [subcmd, ...opts] = args
switch (subcmd) {
@@ -107,16 +101,14 @@ class Profile extends BaseCommand {
async get (args) {
const tfa = 'two-factor auth'
- const info = await pulseTillDone.withPromise(
- npmProfile.get({ ...this.npm.flatOptions })
- )
+ const info = await get({ ...this.npm.flatOptions })
if (!info.cidr_whitelist) {
delete info.cidr_whitelist
}
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify(info, null, 2))
+ output.buffer(info)
return
}
@@ -148,23 +140,20 @@ class Profile extends BaseCommand {
.filter((arg) => arg.trim() !== '')
.map((arg) => cleaned[arg])
.join('\t')
- this.npm.output(values)
+ output.standard(values)
} else {
if (this.npm.config.get('parseable')) {
for (const key of Object.keys(info)) {
if (key === 'tfa') {
- this.npm.output(`${key}\t${cleaned[tfa]}`)
+ output.standard(`${key}\t${cleaned[tfa]}`)
} else {
- this.npm.output(`${key}\t${info[key]}`)
+ output.standard(`${key}\t${info[key]}`)
}
}
} else {
- const table = new Table()
- for (const key of Object.keys(cleaned)) {
- table.push({ [chalk.bold(key)]: cleaned[key] })
+ for (const [key, value] of Object.entries(cleaned)) {
+ output.standard(`${key}: ${value}`)
}
-
- this.npm.output(table.toString())
}
}
}
@@ -210,7 +199,7 @@ class Profile extends BaseCommand {
}
// FIXME: Work around to not clear everything other than what we're setting
- const user = await pulseTillDone.withPromise(npmProfile.get(conf))
+ const user = await get(conf)
const newUser = {}
for (const key of writableProfileKeys) {
@@ -219,16 +208,16 @@ class Profile extends BaseCommand {
newUser[prop] = value
- const result = await otplease(this.npm, conf, c => npmProfile.set(newUser, c))
+ const result = await otplease(this.npm, conf, c => set(newUser, c))
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify({ [prop]: result[prop] }, null, 2))
+ output.buffer({ [prop]: result[prop] })
} else if (this.npm.config.get('parseable')) {
- this.npm.output(prop + '\t' + result[prop])
+ output.standard(prop + '\t' + result[prop])
} else if (result[prop] != null) {
- this.npm.output('Set', prop, 'to', result[prop])
+ output.standard('Set', prop, 'to', result[prop])
} else {
- this.npm.output('Set', prop)
+ output.standard('Set', prop)
}
}
@@ -284,7 +273,7 @@ class Profile extends BaseCommand {
if (auth.basic) {
log.info('profile', 'Updating authentication to bearer token')
- const result = await npmProfile.createToken(
+ const result = await createToken(
auth.basic.password, false, [], { ...this.npm.flatOptions }
)
@@ -308,16 +297,12 @@ class Profile extends BaseCommand {
info.tfa.password = password
log.info('profile', 'Determine if tfa is pending')
- const userInfo = await pulseTillDone.withPromise(
- npmProfile.get({ ...this.npm.flatOptions })
- )
+ const userInfo = await get({ ...this.npm.flatOptions })
const conf = { ...this.npm.flatOptions }
if (userInfo && userInfo.tfa && userInfo.tfa.pending) {
log.info('profile', 'Resetting two-factor authentication')
- await pulseTillDone.withPromise(
- npmProfile.set({ tfa: { password, mode: 'disable' } }, conf)
- )
+ await set({ tfa: { password, mode: 'disable' } }, conf)
} else if (userInfo && userInfo.tfa) {
if (!conf.otp) {
conf.otp = await readUserInfo.otp(
@@ -327,12 +312,10 @@ class Profile extends BaseCommand {
}
log.info('profile', 'Setting two-factor authentication to ' + mode)
- const challenge = await pulseTillDone.withPromise(
- npmProfile.set(info, conf)
- )
+ const challenge = await set(info, conf)
if (challenge.tfa === null) {
- this.npm.output('Two factor authentication mode changed to: ' + mode)
+ output.standard('Two factor authentication mode changed to: ' + mode)
return
}
@@ -349,7 +332,7 @@ class Profile extends BaseCommand {
const secret = otpauth.searchParams.get('secret')
const code = await qrcode(challenge.tfa)
- this.npm.output(
+ output.standard(
'Scan into your authenticator app:\n' + code + '\n Or enter code:', secret
)
@@ -358,28 +341,28 @@ class Profile extends BaseCommand {
log.info('profile', 'Finalizing two-factor authentication')
- const result = await npmProfile.set({ tfa: [interactiveOTP] }, conf)
+ const result = await set({ tfa: [interactiveOTP] }, conf)
- this.npm.output(
+ output.standard(
'2FA successfully enabled. Below are your recovery codes, ' +
'please print these out.'
)
- this.npm.output(
+ output.standard(
'You will need these to recover access to your account ' +
'if you lose your authentication device.'
)
for (const tfaCode of result.tfa) {
- this.npm.output('\t' + tfaCode)
+ output.standard('\t' + tfaCode)
}
}
- async disable2fa (args) {
+ async disable2fa () {
const conf = { ...this.npm.flatOptions }
- const info = await pulseTillDone.withPromise(npmProfile.get(conf))
+ const info = await get(conf)
if (!info.tfa || info.tfa.pending) {
- this.npm.output('Two factor authentication not enabled.')
+ output.standard('Two factor authentication not enabled.')
return
}
@@ -392,17 +375,16 @@ class Profile extends BaseCommand {
log.info('profile', 'disabling tfa')
- await pulseTillDone.withPromise(npmProfile.set({
- tfa: { password: password, mode: 'disable' },
- }, conf))
+ await set({ tfa: { password: password, mode: 'disable' } }, conf)
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify({ tfa: false }, null, 2))
+ output.buffer({ tfa: false })
} else if (this.npm.config.get('parseable')) {
- this.npm.output('tfa\tfalse')
+ output.standard('tfa\tfalse')
} else {
- this.npm.output('Two factor authentication disabled.')
+ output.standard('Two factor authentication disabled.')
}
}
}
+
module.exports = Profile
diff --git a/lib/commands/prune.js b/lib/commands/prune.js
index ee2c30553f1c5..1bcf8a9576316 100644
--- a/lib/commands/prune.js
+++ b/lib/commands/prune.js
@@ -1,13 +1,13 @@
-// prune extraneous packages
-const Arborist = require('@npmcli/arborist')
const reifyFinish = require('../utils/reify-finish.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
+// prune extraneous packages
class Prune extends ArboristWorkspaceCmd {
static description = 'Remove extraneous packages'
static name = 'prune'
static params = [
'omit',
+ 'include',
'dry-run',
'json',
'foreground-scripts',
@@ -19,6 +19,7 @@ class Prune extends ArboristWorkspaceCmd {
async exec () {
const where = this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
path: where,
@@ -29,4 +30,5 @@ class Prune extends ArboristWorkspaceCmd {
await reifyFinish(this.npm, arb)
}
}
+
module.exports = Prune
diff --git a/lib/commands/publish.js b/lib/commands/publish.js
index 76faea9457f74..3fe337a7b1c43 100644
--- a/lib/commands/publish.js
+++ b/lib/commands/publish.js
@@ -1,5 +1,4 @@
-const util = require('util')
-const log = require('../utils/log-shim.js')
+const { log, output } = require('proc-log')
const semver = require('semver')
const pack = require('libnpmpack')
const libpub = require('libnpmpublish').publish
@@ -7,23 +6,17 @@ const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
-const replaceInfo = require('../utils/replace-info.js')
-
-const otplease = require('../utils/otplease.js')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
+const { otplease } = require('../utils/auth.js')
const { getContents, logTar } = require('../utils/tar.js')
-
// for historical reasons, publishConfig in package.json can contain ANY config
// keys that npm supports in .npmrc files and elsewhere. We *may* want to
// revisit this at some point, and have a minimal set that's a SemVer-major
// change that ought to get a RFC written on it.
-const { flatten } = require('../utils/config/index.js')
-
-// this is the only case in the CLI where we want to use the old full slow
-// 'read-package-json' module, because we want to pull in all the defaults and
-// metadata, like git sha's and default scripts and all that.
-const readJson = util.promisify(require('read-package-json'))
+const { flatten } = require('@npmcli/config/lib/definitions')
+const pkgJson = require('@npmcli/package-json')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Publish extends BaseCommand {
static description = 'Publish a package'
static name = 'publish'
@@ -35,6 +28,7 @@ class Publish extends BaseCommand {
'workspace',
'workspaces',
'include-workspace-root',
+ 'provenance',
]
static usage = ['']
@@ -49,6 +43,26 @@ class Publish extends BaseCommand {
throw this.usageError()
}
+ await this.#publish(args)
+ }
+
+ async execWorkspaces () {
+ await this.setWorkspaces()
+
+ for (const [name, workspace] of this.workspaces.entries()) {
+ try {
+ await this.#publish([workspace], { workspace: name })
+ } catch (err) {
+ if (err.code !== 'EPRIVATE') {
+ throw err
+ }
+ // eslint-disable-next-line max-len
+ log.warn('publish', `Skipping workspace ${this.npm.chalk.cyan(name)}, marked as ${this.npm.chalk.bold('private')}`)
+ }
+ }
+ }
+
+ async #publish (args, { workspace } = {}) {
log.verbose('publish', replaceInfo(args))
const unicode = this.npm.config.get('unicode')
@@ -63,12 +77,11 @@ class Publish extends BaseCommand {
}
const opts = { ...this.npm.flatOptions, progress: false }
- log.disableProgress()
// you can publish name@version, ./foo.tgz, etc.
// even though the default is the 'file:.' cwd.
const spec = npa(args[0])
- let manifest = await this.getManifest(spec, opts)
+ let manifest = await this.#getManifest(spec, opts)
// only run scripts for directory type publishes
if (spec.type === 'directory' && !ignoreScripts) {
@@ -77,35 +90,52 @@ class Publish extends BaseCommand {
path: spec.fetchSpec,
stdio: 'inherit',
pkg: manifest,
- banner: !silent,
})
}
// we pass dryRun: true to libnpmpack so it doesn't write the file to disk
const tarballData = await pack(spec, {
...opts,
+ foregroundScripts: this.npm.config.isDefault('foreground-scripts')
+ ? true
+ : this.npm.config.get('foreground-scripts'),
dryRun: true,
prefix: this.npm.localPrefix,
workspaces: this.workspacePaths,
})
const pkgContents = await getContents(manifest, tarballData)
+ const logPkg = () => logTar(pkgContents, { unicode, json, key: workspace })
// The purpose of re-reading the manifest is in case it changed,
// so that we send the latest and greatest thing to the registry
// note that publishConfig might have changed as well!
- manifest = await this.getManifest(spec, opts)
+ manifest = await this.#getManifest(spec, opts, true)
- // JSON already has the package contents
+ // If we are not in JSON mode then we show the user the contents of the tarball
+ // before it is published so they can see it while their otp is pending
if (!json) {
- logTar(pkgContents, { unicode })
+ logPkg()
}
const resolved = npa.resolve(manifest.name, manifest.version)
+
+ // make sure tag is valid, this will throw if invalid
+ npa(`${manifest.name}@${defaultTag}`)
+
const registry = npmFetch.pickRegistry(resolved, opts)
const creds = this.npm.config.getCredentialsByURI(registry)
const noCreds = !(creds.token || creds.username || creds.certfile && creds.keyfile)
const outputRegistry = replaceInfo(registry)
+ // if a workspace package is marked private then we skip it
+ if (workspace && manifest.private) {
+ throw Object.assign(
+ new Error(`This package has been marked as private
+ Remove the 'private' field from the package.json to publish it.`),
+ { code: 'EPRIVATE' }
+ )
+ }
+
if (noCreds) {
const msg = `This command requires you to be logged in to ${outputRegistry}`
if (dryRun) {
@@ -127,13 +157,18 @@ class Publish extends BaseCommand {
await otplease(this.npm, opts, o => libpub(manifest, tarballData, o))
}
+ // In json mode we dont log until the publish has completed as this will
+ // add it to the output only if completes successfully
+ if (json) {
+ logPkg()
+ }
+
if (spec.type === 'directory' && !ignoreScripts) {
await runScript({
event: 'publish',
path: spec.fetchSpec,
stdio: 'inherit',
pkg: manifest,
- banner: !silent,
})
await runScript({
@@ -141,69 +176,30 @@ class Publish extends BaseCommand {
path: spec.fetchSpec,
stdio: 'inherit',
pkg: manifest,
- banner: !silent,
})
}
- if (!this.suppressOutput) {
- if (!silent && json) {
- this.npm.output(JSON.stringify(pkgContents, null, 2))
- } else if (!silent) {
- this.npm.output(`+ ${pkgContents.id}`)
- }
- }
-
- return pkgContents
- }
-
- async execWorkspaces (args) {
- // Suppresses JSON output in publish() so we can handle it here
- this.suppressOutput = true
-
- const results = {}
- const json = this.npm.config.get('json')
- const { silent } = this.npm
- await this.setWorkspaces()
-
- for (const [name, workspace] of this.workspaces.entries()) {
- let pkgContents
- try {
- pkgContents = await this.exec([workspace])
- } catch (err) {
- if (err.code === 'EPRIVATE') {
- log.warn(
- 'publish',
- `Skipping workspace ${
- this.npm.chalk.green(name)
- }, marked as ${
- this.npm.chalk.bold('private')
- }`
- )
- continue
- }
- throw err
- }
- // This needs to be in-line w/ the rest of the output that non-JSON
- // publish generates
- if (!silent && !json) {
- this.npm.output(`+ ${pkgContents.id}`)
- } else {
- results[name] = pkgContents
- }
- }
-
- if (!silent && json) {
- this.npm.output(JSON.stringify(results, null, 2))
+ if (!json && !silent) {
+ output.standard(`+ ${pkgContents.id}`)
}
}
// if it's a directory, read it from the file system
// otherwise, get the full metadata from whatever it is
// XXX can't pacote read the manifest from a directory?
- async getManifest (spec, opts) {
+ async #getManifest (spec, opts, logWarnings = false) {
let manifest
if (spec.type === 'directory') {
- manifest = await readJson(`${spec.fetchSpec}/package.json`)
+ const changes = []
+ const pkg = await pkgJson.fix(spec.fetchSpec, { changes })
+ if (changes.length && logWarnings) {
+ /* eslint-disable-next-line max-len */
+ log.warn('publish', 'npm auto-corrected some errors in your package.json when publishing. Please run "npm pkg fix" to address these errors.')
+ log.warn('publish', `errors corrected:\n${changes.join('\n')}`)
+ }
+ // Prepare is the special function for publishing, different than normalize
+ const { content } = await pkg.prepare()
+ manifest = content
} else {
manifest = await pacote.manifest(spec, {
...opts,
@@ -212,9 +208,15 @@ class Publish extends BaseCommand {
})
}
if (manifest.publishConfig) {
- flatten(manifest.publishConfig, opts)
+ const cliFlags = this.npm.config.data.get('cli').raw
+ // Filter out properties set in CLI flags to prioritize them over
+ // corresponding `publishConfig` settings
+ const filteredPublishConfig = Object.fromEntries(
+ Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
+ flatten(filteredPublishConfig, opts)
}
return manifest
}
}
+
module.exports = Publish
diff --git a/lib/commands/query.js b/lib/commands/query.js
index b5f4d8e57ddf5..74a50fc581d43 100644
--- a/lib/commands/query.js
+++ b/lib/commands/query.js
@@ -1,8 +1,6 @@
-'use strict'
-
-const { resolve } = require('path')
-const Arborist = require('@npmcli/arborist')
-const BaseCommand = require('../base-command.js')
+const { resolve } = require('node:path')
+const BaseCommand = require('../base-cmd.js')
+const { log, output } = require('proc-log')
class QuerySelectorItem {
constructor (node) {
@@ -49,57 +47,77 @@ class Query extends BaseCommand {
'workspace',
'workspaces',
'include-workspace-root',
+ 'package-lock-only',
+ 'expect-results',
]
- get parsedResponse () {
- return JSON.stringify(this.#response, null, 2)
+ constructor (...args) {
+ super(...args)
+ this.npm.config.set('json', true)
}
async exec (args) {
- // one dir up from wherever node_modules lives
- const where = resolve(this.npm.dir, '..')
- const opts = {
+ const packageLock = this.npm.config.get('package-lock-only')
+ const Arborist = require('@npmcli/arborist')
+ const arb = new Arborist({
...this.npm.flatOptions,
- path: where,
- forceActual: true,
+ // one dir up from wherever node_modules lives
+ path: resolve(this.npm.dir, '..'),
+ forceActual: !packageLock,
+ })
+ let tree
+ if (packageLock) {
+ try {
+ tree = await arb.loadVirtual()
+ } catch (err) {
+ log.verbose('loadVirtual', err.stack)
+ throw this.usageError(
+ 'A package lock or shrinkwrap file is required in package-lock-only mode'
+ )
+ }
+ } else {
+ tree = await arb.loadActual()
}
- const arb = new Arborist(opts)
- const tree = await arb.loadActual(opts)
- const items = await tree.querySelectorAll(args[0], this.npm.flatOptions)
- this.buildResponse(items)
-
- this.npm.output(this.parsedResponse)
+ await this.#queryTree(tree, args[0])
+ this.#output()
}
async execWorkspaces (args) {
await this.setWorkspaces()
- const opts = {
+ const Arborist = require('@npmcli/arborist')
+ const arb = new Arborist({
...this.npm.flatOptions,
path: this.npm.prefix,
+ })
+ // FIXME: Workspace support in query does not work as expected so this does not
+ // do the same package-lock-only check as this.exec().
+ // https://github.com/npm/cli/pull/6732#issuecomment-1708804921
+ const tree = await arb.loadActual()
+ for (const path of this.workspacePaths) {
+ const wsTree = path === tree.root.path
+ ? tree // --includes-workspace-root
+ : await tree.querySelectorAll(`.workspace:path(${path})`).then(r => r[0].target)
+ await this.#queryTree(wsTree, args[0])
}
- const arb = new Arborist(opts)
- const tree = await arb.loadActual(opts)
- for (const workspacePath of this.workspacePaths) {
- let items
- if (workspacePath === tree.root.path) {
- // include-workspace-root
- items = await tree.querySelectorAll(args[0])
- } else {
- const [workspace] = await tree.querySelectorAll(`.workspace:path(${workspacePath})`)
- items = await workspace.target.querySelectorAll(args[0], this.npm.flatOptions)
- }
- this.buildResponse(items)
- }
- this.npm.output(this.parsedResponse)
+ this.#output()
+ }
+
+ #output () {
+ this.checkExpected(this.#response.length)
+ output.buffer(this.#response)
}
// builds a normalized inventory
- buildResponse (items) {
+ async #queryTree (tree, arg) {
+ const items = await tree.querySelectorAll(arg, this.npm.flatOptions)
for (const node of items) {
- if (!this.#seen.has(node.target.location)) {
+ const { location } = node.target
+ if (!location || !this.#seen.has(location)) {
const item = new QuerySelectorItem(node)
this.#response.push(item)
- this.#seen.add(item.location)
+ if (location) {
+ this.#seen.add(item.location)
+ }
}
}
}
diff --git a/lib/commands/rebuild.js b/lib/commands/rebuild.js
index df791106fdd21..1c19836106e06 100644
--- a/lib/commands/rebuild.js
+++ b/lib/commands/rebuild.js
@@ -1,10 +1,9 @@
-const { resolve } = require('path')
-const Arborist = require('@npmcli/arborist')
+const { resolve } = require('node:path')
+const { output } = require('proc-log')
const npa = require('npm-package-arg')
const semver = require('semver')
-const completion = require('../utils/completion/installed-deep.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
class Rebuild extends ArboristWorkspaceCmd {
static description = 'Rebuild a package'
static name = 'rebuild'
@@ -20,13 +19,15 @@ class Rebuild extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ const completion = require('../utils/installed-deep.js')
+ return completion(npm, opts)
}
async exec (args) {
const globalTop = resolve(this.npm.globalDir, '..')
const where = this.npm.global ? globalTop : this.npm.prefix
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist({
...this.npm.flatOptions,
path: where,
@@ -56,7 +57,7 @@ class Rebuild extends ArboristWorkspaceCmd {
await arb.rebuild()
}
- this.npm.output('rebuilt dependencies successfully')
+ output.standard('rebuilt dependencies successfully')
}
isNode (specs, node) {
@@ -79,4 +80,5 @@ class Rebuild extends ArboristWorkspaceCmd {
})
}
}
+
module.exports = Rebuild
diff --git a/lib/commands/repo.js b/lib/commands/repo.js
index b89b74c0bf1ba..3f120c0a3f59f 100644
--- a/lib/commands/repo.js
+++ b/lib/commands/repo.js
@@ -1,6 +1,6 @@
-const { URL } = require('url')
-
+const { URL } = require('node:url')
const PackageUrlCmd = require('../package-url-cmd.js')
+
class Repo extends PackageUrlCmd {
static description = 'Open package repository page in the browser'
static name = 'repo'
@@ -30,6 +30,7 @@ class Repo extends PackageUrlCmd {
return url
}
}
+
module.exports = Repo
const unknownHostedUrl = url => {
diff --git a/lib/commands/root.js b/lib/commands/root.js
index 7749c602456b7..180f4c4ed0720 100644
--- a/lib/commands/root.js
+++ b/lib/commands/root.js
@@ -1,11 +1,14 @@
-const BaseCommand = require('../base-command.js')
+const { output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
+
class Root extends BaseCommand {
static description = 'Display npm root'
static name = 'root'
static params = ['global']
async exec () {
- this.npm.output(this.npm.dir)
+ output.standard(this.npm.dir)
}
}
+
module.exports = Root
diff --git a/lib/commands/run-script.js b/lib/commands/run-script.js
index 51746c5e5285d..180dfa1cdeac5 100644
--- a/lib/commands/run-script.js
+++ b/lib/commands/run-script.js
@@ -1,32 +1,9 @@
-const { resolve } = require('path')
-const chalk = require('chalk')
-const runScript = require('@npmcli/run-script')
-const { isServerPackage } = runScript
-const rpj = require('read-package-json-fast')
-const log = require('../utils/log-shim.js')
-const didYouMean = require('../utils/did-you-mean.js')
-const { isWindowsShell } = require('../utils/is-windows.js')
+const { output } = require('proc-log')
+const pkgJson = require('@npmcli/package-json')
+const BaseCommand = require('../base-cmd.js')
+const { getError } = require('../utils/error-message.js')
+const { outputError } = require('../utils/output-error.js')
-const cmdList = [
- 'publish',
- 'install',
- 'uninstall',
- 'test',
- 'stop',
- 'start',
- 'restart',
- 'version',
-].reduce((l, p) => l.concat(['pre' + p, p, 'post' + p]), [])
-
-const nocolor = {
- reset: s => s,
- bold: s => s,
- dim: s => s,
- blue: s => s,
- green: s => s,
-}
-
-const BaseCommand = require('../base-command.js')
class RunScript extends BaseCommand {
static description = 'Run arbitrary package scripts'
static params = [
@@ -45,43 +22,74 @@ class RunScript extends BaseCommand {
static ignoreImplicitWorkspace = false
static isShellout = true
- async completion (opts) {
+ static async completion (opts, npm) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
- // find the script name
- const json = resolve(this.npm.localPrefix, 'package.json')
- const { scripts = {} } = await rpj(json).catch(er => ({}))
+ const { content: { scripts = {} } } = await pkgJson.normalize(npm.localPrefix)
+ .catch(() => ({ content: {} }))
+ if (opts.isFish) {
+ return Object.keys(scripts).map(s => `${s}\t${scripts[s].slice(0, 30)}`)
+ }
return Object.keys(scripts)
}
}
async exec (args) {
if (args.length) {
- return this.run(args)
+ await this.#run(args, { path: this.npm.localPrefix })
} else {
- return this.list(args)
+ await this.#list(this.npm.localPrefix)
}
}
async execWorkspaces (args) {
- if (args.length) {
- return this.runWorkspaces(args)
- } else {
- return this.listWorkspaces(args)
+ await this.setWorkspaces()
+
+ const ws = [...this.workspaces.entries()]
+ for (const [workspace, path] of ws) {
+ const last = path === ws.at(-1)[1]
+
+ if (!args.length) {
+ const newline = await this.#list(path, { workspace })
+ if (newline && !last) {
+ output.standard('')
+ }
+ continue
+ }
+
+ const pkg = await pkgJson.normalize(path).then(p => p.content)
+ try {
+ await this.#run(args, { path, pkg, workspace })
+ } catch (e) {
+ const err = getError(e, { npm: this.npm, command: null })
+ outputError({
+ ...err,
+ error: [
+ ['', `Lifecycle script \`${args[0]}\` failed with error:`],
+ ...err.error,
+ ['workspace', pkg._id || pkg.name],
+ ['location', path],
+ ],
+ })
+ process.exitCode = err.exitCode
+ if (!last) {
+ output.error('')
+ }
+ }
}
}
- async run ([event, ...args], { path = this.npm.localPrefix, pkg } = {}) {
- // this || undefined is because runScript will be unhappy with the default
- // null value
- const scriptShell = this.npm.config.get('script-shell') || undefined
+ async #run ([event, ...args], { path, pkg, workspace }) {
+ const runScript = require('@npmcli/run-script')
+
+ pkg ??= await pkgJson.normalize(path).then(p => p.content)
- pkg = pkg || (await rpj(`${path}/package.json`))
const { scripts = {} } = pkg
if (event === 'restart' && !scripts.restart) {
scripts.restart = 'npm stop --if-present && npm start'
} else if (event === 'env' && !scripts.env) {
+ const { isWindowsShell } = require('../utils/is-windows.js')
scripts.env = isWindowsShell ? 'SET' : 'env'
}
@@ -89,16 +97,21 @@ class RunScript extends BaseCommand {
if (
!Object.prototype.hasOwnProperty.call(scripts, event) &&
- !(event === 'start' && (await isServerPackage(path)))
+ !(event === 'start' && (await runScript.isServerPackage(path)))
) {
if (this.npm.config.get('if-present')) {
return
}
- const suggestions = await didYouMean(this.npm, path, event)
- throw new Error(
- `Missing script: "${event}"${suggestions}\n\nTo see a list of scripts, run:\n npm run`
- )
+ const suggestions = require('../utils/did-you-mean.js')(pkg, event)
+ const wsArg = workspace && path !== this.npm.localPrefix
+ ? ` --workspace=${pkg._id || pkg.name}`
+ : ''
+ throw new Error([
+ `Missing script: "${event}"${suggestions}\n`,
+ 'To see a list of scripts, run:',
+ ` npm run${wsArg}`,
+ ].join('\n'))
}
// positional args only added to the main event, not pre/post
@@ -113,156 +126,89 @@ class RunScript extends BaseCommand {
}
}
- const opts = {
- path,
- args,
- scriptShell,
- stdio: 'inherit',
- pkg,
- banner: !this.npm.silent,
- }
-
for (const [ev, evArgs] of events) {
await runScript({
- ...opts,
+ path,
+ // this || undefined is because runScript will be unhappy with the
+ // default null value
+ scriptShell: this.npm.config.get('script-shell') || undefined,
+ stdio: 'inherit',
+ pkg,
event: ev,
args: evArgs,
})
}
}
- async list (args, path) {
- path = path || this.npm.localPrefix
- const { scripts, name, _id } = await rpj(`${path}/package.json`)
- const pkgid = _id || name
- const color = this.npm.color
-
- if (!scripts) {
- return []
- }
+ async #list (path, { workspace } = {}) {
+ const { scripts = {}, name, _id } = await pkgJson.normalize(path).then(p => p.content)
+ const scriptEntries = Object.entries(scripts)
- const allScripts = Object.keys(scripts)
if (this.npm.silent) {
- return allScripts
+ return
}
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify(scripts, null, 2))
- return allScripts
- }
-
- if (this.npm.config.get('parseable')) {
- for (const [script, cmd] of Object.entries(scripts)) {
- this.npm.output(`${script}:${cmd}`)
- }
-
- return allScripts
- }
-
- const indent = '\n '
- const prefix = ' '
- const cmds = []
- const runScripts = []
- for (const script of allScripts) {
- const list = cmdList.includes(script) ? cmds : runScripts
- list.push(script)
- }
- const colorize = color ? chalk : nocolor
-
- if (cmds.length) {
- this.npm.output(
- `${colorize.reset(colorize.bold('Lifecycle scripts'))} included in ${colorize.green(
- pkgid
- )}:`
- )
- }
-
- for (const script of cmds) {
- this.npm.output(prefix + script + indent + colorize.dim(scripts[script]))
- }
-
- if (!cmds.length && runScripts.length) {
- this.npm.output(
- `${colorize.bold('Scripts')} available in ${colorize.green(pkgid)} via \`${colorize.blue(
- 'npm run-script'
- )}\`:`
- )
- } else if (runScripts.length) {
- this.npm.output(`\navailable via \`${colorize.blue('npm run-script')}\`:`)
- }
-
- for (const script of runScripts) {
- this.npm.output(prefix + script + indent + colorize.dim(scripts[script]))
- }
-
- this.npm.output('')
- return allScripts
- }
-
- async runWorkspaces (args, filters) {
- const res = []
- await this.setWorkspaces()
-
- for (const workspacePath of this.workspacePaths) {
- const pkg = await rpj(`${workspacePath}/package.json`)
- const runResult = await this.run(args, {
- path: workspacePath,
- pkg,
- }).catch(err => {
- log.error(`Lifecycle script \`${args[0]}\` failed with error:`)
- log.error(err)
- log.error(` in workspace: ${pkg._id || pkg.name}`)
- log.error(` at location: ${workspacePath}`)
-
- const scriptMissing = err.message.startsWith('Missing script')
-
- // avoids exiting with error code in case there's scripts missing
- // in some workspaces since other scripts might have succeeded
- if (!scriptMissing) {
- process.exitCode = 1
- }
-
- return scriptMissing
- })
- res.push(runResult)
+ output.buffer(workspace ? { [workspace]: scripts } : scripts)
+ return
}
- // in case **all** tests are missing, then it should exit with error code
- if (res.every(Boolean)) {
- throw new Error(`Missing script: ${args[0]}`)
+ if (!scriptEntries.length) {
+ return
}
- }
-
- async listWorkspaces (args, filters) {
- await this.setWorkspaces()
- if (this.npm.silent) {
+ if (this.npm.config.get('parseable')) {
+ output.standard(scriptEntries
+ .map((s) => (workspace ? [workspace, ...s] : s).join(':'))
+ .join('\n')
+ .trim())
return
}
- if (this.npm.config.get('json')) {
- const res = {}
- for (const workspacePath of this.workspacePaths) {
- const { scripts, name } = await rpj(`${workspacePath}/package.json`)
- res[name] = { ...scripts }
+ // TODO this is missing things like prepare, prepublishOnly, and dependencies
+ const cmdList = [
+ 'preinstall', 'install', 'postinstall',
+ 'prepublish', 'publish', 'postpublish',
+ 'prerestart', 'restart', 'postrestart',
+ 'prestart', 'start', 'poststart',
+ 'prestop', 'stop', 'poststop',
+ 'pretest', 'test', 'posttest',
+ 'preuninstall', 'uninstall', 'postuninstall',
+ 'preversion', 'version', 'postversion',
+ ]
+ const [cmds, runScripts] = scriptEntries.reduce((acc, s) => {
+ acc[cmdList.includes(s[0]) ? 0 : 1].push(s)
+ return acc
+ }, [[], []])
+
+ const { reset, bold, cyan, dim, blue } = this.npm.chalk
+ const pkgId = `in ${cyan(_id || name)}`
+ const title = (t) => reset(bold(t))
+
+ if (cmds.length) {
+ output.standard(`${title('Lifecycle scripts')} included ${pkgId}:`)
+ for (const [k, v] of cmds) {
+ output.standard(` ${k}`)
+ output.standard(` ${dim(v)}`)
}
- this.npm.output(JSON.stringify(res, null, 2))
- return
}
- if (this.npm.config.get('parseable')) {
- for (const workspacePath of this.workspacePaths) {
- const { scripts, name } = await rpj(`${workspacePath}/package.json`)
- for (const [script, cmd] of Object.entries(scripts || {})) {
- this.npm.output(`${name}:${script}:${cmd}`)
- }
+ if (runScripts.length) {
+ const via = `via \`${blue('npm run-script')}\`:`
+ if (!cmds.length) {
+ output.standard(`${title('Scripts')} available ${pkgId} ${via}`)
+ } else {
+ output.standard(`available ${via}`)
+ }
+ for (const [k, v] of runScripts) {
+ output.standard(` ${k}`)
+ output.standard(` ${dim(v)}`)
}
- return
}
- for (const workspacePath of this.workspacePaths) {
- await this.list(args, workspacePath)
- }
+ // Return true to indicate that something was output for this path
+ // that should be separated from others
+ return true
}
}
diff --git a/lib/commands/sbom.js b/lib/commands/sbom.js
new file mode 100644
index 0000000000000..278c6d506b42a
--- /dev/null
+++ b/lib/commands/sbom.js
@@ -0,0 +1,134 @@
+const localeCompare = require('@isaacs/string-locale-compare')('en')
+const BaseCommand = require('../base-cmd.js')
+const { log, output } = require('proc-log')
+const { cyclonedxOutput } = require('../utils/sbom-cyclonedx.js')
+const { spdxOutput } = require('../utils/sbom-spdx.js')
+
+const SBOM_FORMATS = ['cyclonedx', 'spdx']
+
+class SBOM extends BaseCommand {
+ #response = {} // response is the sbom response
+
+ static description = 'Generate a Software Bill of Materials (SBOM)'
+ static name = 'sbom'
+ static workspaces = true
+
+ static params = [
+ 'omit',
+ 'package-lock-only',
+ 'sbom-format',
+ 'sbom-type',
+ 'workspace',
+ 'workspaces',
+ ]
+
+ async exec () {
+ const sbomFormat = this.npm.config.get('sbom-format')
+ const packageLockOnly = this.npm.config.get('package-lock-only')
+
+ if (!sbomFormat) {
+ /* eslint-disable-next-line max-len */
+ throw this.usageError(`Must specify --sbom-format flag with one of: ${SBOM_FORMATS.join(', ')}.`)
+ }
+
+ const opts = {
+ ...this.npm.flatOptions,
+ path: this.npm.prefix,
+ forceActual: true,
+ }
+ const Arborist = require('@npmcli/arborist')
+ const arb = new Arborist(opts)
+
+ const tree = packageLockOnly ? await arb.loadVirtual(opts).catch(() => {
+ /* eslint-disable-next-line max-len */
+ throw this.usageError('A package lock or shrinkwrap file is required in package-lock-only mode')
+ }) : await arb.loadActual(opts)
+
+ // Collect the list of selected workspaces in the project
+ const wsNodes = this.workspaceNames?.length
+ ? arb.workspaceNodes(tree, this.workspaceNames)
+ : null
+
+ // Build the selector and query the tree for the list of nodes
+ const selector = this.#buildSelector({ wsNodes })
+ log.info('sbom', `Using dependency selector: ${selector}`)
+ const items = await tree.querySelectorAll(selector)
+
+ const errors = items.flatMap(node => detectErrors(node))
+ if (errors.length) {
+ throw Object.assign(new Error([...new Set(errors)].join('\n')), {
+ code: 'ESBOMPROBLEMS',
+ })
+ }
+
+ // Populate the response with the list of unique nodes (sorted by location)
+ this.#buildResponse(items.sort((a, b) => localeCompare(a.location, b.location)))
+
+ // TODO(BREAKING_CHANGE): all sbom output is in json mode but setting it before
+ // any of the errors will cause those to be thrown in json mode.
+ this.npm.config.set('json', true)
+ output.buffer(this.#response)
+ }
+
+ async execWorkspaces (args) {
+ await this.setWorkspaces()
+ return this.exec(args)
+ }
+
+ // Build the selector from all of the specified filter options
+ #buildSelector ({ wsNodes }) {
+ let selector
+ const omit = this.npm.flatOptions.omit
+ const workspacesEnabled = this.npm.flatOptions.workspacesEnabled
+
+ // If omit is specified, omit all nodes and their children which match the
+ // specified selectors
+ const omits = omit.reduce((acc, o) => `${acc}:not(.${o})`, '')
+
+ if (!workspacesEnabled) {
+ // If workspaces are disabled, omit all workspace nodes and their children
+ selector = `:root > :not(.workspace)${omits},:root > :not(.workspace) *${omits},:extraneous`
+ } else if (wsNodes && wsNodes.length > 0) {
+ // If one or more workspaces are selected, select only those workspaces and their children
+ selector = wsNodes.map(ws => `#${ws.name},#${ws.name} *${omits}`).join(',')
+ } else {
+ selector = `:root *${omits},:extraneous`
+ }
+
+ // Always include the root node
+ return `:root,${selector}`
+ }
+
+ // builds a normalized inventory
+ #buildResponse (items) {
+ const sbomFormat = this.npm.config.get('sbom-format')
+ const packageType = this.npm.config.get('sbom-type')
+ const packageLockOnly = this.npm.config.get('package-lock-only')
+
+ this.#response = sbomFormat === 'cyclonedx'
+ ? cyclonedxOutput({ npm: this.npm, nodes: items, packageType, packageLockOnly })
+ : spdxOutput({ npm: this.npm, nodes: items, packageType })
+ }
+}
+
+const detectErrors = (node) => {
+ const errors = []
+
+ // Look for missing dependencies (that are NOT optional), or invalid dependencies
+ for (const edge of node.edgesOut.values()) {
+ if (edge.missing && !(edge.type === 'optional' || edge.type === 'peerOptional')) {
+ errors.push(`missing: ${edge.name}@${edge.spec}, required by ${edge.from.pkgid}`)
+ }
+
+ if (edge.invalid) {
+ /* istanbul ignore next */
+ const spec = edge.spec || '*'
+ const from = edge.from.pkgid
+ errors.push(`invalid: ${edge.to.pkgid}, ${spec} required by ${from}`)
+ }
+ }
+
+ return errors
+}
+
+module.exports = SBOM
diff --git a/lib/commands/search.js b/lib/commands/search.js
index 7419e97454688..8b6c01e3930d8 100644
--- a/lib/commands/search.js
+++ b/lib/commands/search.js
@@ -1,47 +1,18 @@
-const Minipass = require('minipass')
const Pipeline = require('minipass-pipeline')
const libSearch = require('libnpmsearch')
-const log = require('../utils/log-shim.js')
-
+const { log, output } = require('proc-log')
const formatSearchStream = require('../utils/format-search-stream.js')
+const BaseCommand = require('../base-cmd.js')
-function filter (data, include, exclude) {
- const words = [data.name]
- .concat(data.maintainers.map(m => `=${m.username}`))
- .concat(data.keywords || [])
- .map(f => f && f.trim && f.trim())
- .filter(f => f)
- .join(' ')
- .toLowerCase()
-
- if (exclude.find(e => match(words, e))) {
- return false
- }
-
- return true
-}
-
-function match (words, pattern) {
- if (pattern.startsWith('/')) {
- if (pattern.endsWith('/')) {
- pattern = pattern.slice(0, -1)
- }
- pattern = new RegExp(pattern.slice(1))
- return words.match(pattern)
- }
- return words.indexOf(pattern) !== -1
-}
-
-const BaseCommand = require('../base-command.js')
class Search extends BaseCommand {
static description = 'Search for packages'
static name = 'search'
static params = [
- 'long',
'json',
'color',
'parseable',
'description',
+ 'searchlimit',
'searchopts',
'searchexclude',
'registry',
@@ -50,13 +21,13 @@ class Search extends BaseCommand {
'offline',
]
- static usage = ['[search terms ...]']
+ static usage = [' [ ...]']
async exec (args) {
const opts = {
...this.npm.flatOptions,
...this.npm.flatOptions.search,
- include: args.map(s => s.toLowerCase()).filter(s => s),
+ include: args.map(s => s.toLowerCase()).filter(Boolean),
exclude: this.npm.flatOptions.search.exclude.split(/\s+/),
}
@@ -67,27 +38,16 @@ class Search extends BaseCommand {
// Used later to figure out whether we had any packages go out
let anyOutput = false
- class FilterStream extends Minipass {
- write (pkg) {
- if (filter(pkg, opts.include, opts.exclude)) {
- super.write(pkg)
- }
- }
- }
-
- const filterStream = new FilterStream()
-
- // Grab a configured output stream that will spit out packages in the
- // desired format.
+ // Grab a configured output stream that will spit out packages in the desired format.
const outputStream = formatSearchStream({
args, // --searchinclude options are not highlighted
...opts,
+ npm: this.npm,
})
log.silly('search', 'searching packages')
const p = new Pipeline(
libSearch.stream(opts.include, opts),
- filterStream,
outputStream
)
@@ -95,16 +55,16 @@ class Search extends BaseCommand {
if (!anyOutput) {
anyOutput = true
}
- this.npm.output(chunk.toString('utf8'))
+ output.standard(chunk.toString('utf8'))
})
await p.promise()
if (!anyOutput && !this.npm.config.get('json') && !this.npm.config.get('parseable')) {
- this.npm.output('No matches found for ' + (args.map(JSON.stringify).join(' ')))
+ output.standard('No matches found for ' + (args.map(JSON.stringify).join(' ')))
}
log.silly('search', 'search completed')
- log.clearProgress()
}
}
+
module.exports = Search
diff --git a/lib/commands/set.js b/lib/commands/set.js
index b650026a599a9..2e61762ba9dcd 100644
--- a/lib/commands/set.js
+++ b/lib/commands/set.js
@@ -1,15 +1,18 @@
-const BaseCommand = require('../base-command.js')
+const Npm = require('../npm.js')
+const BaseCommand = require('../base-cmd.js')
class Set extends BaseCommand {
static description = 'Set a value in the npm configuration'
static name = 'set'
static usage = ['= [= ...] (See `npm config`)']
+ static params = ['global', 'location']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return this.npm.cmd('config').completion(opts)
+ static async completion (opts) {
+ const Config = Npm.cmd('config')
+ return Config.completion(opts)
}
async exec (args) {
@@ -19,4 +22,5 @@ class Set extends BaseCommand {
return this.npm.exec('config', ['set'].concat(args))
}
}
+
module.exports = Set
diff --git a/lib/commands/shrinkwrap.js b/lib/commands/shrinkwrap.js
index a240f039356e7..86215c18e62dd 100644
--- a/lib/commands/shrinkwrap.js
+++ b/lib/commands/shrinkwrap.js
@@ -1,8 +1,8 @@
-const { resolve, basename } = require('path')
-const { unlink } = require('fs').promises
-const Arborist = require('@npmcli/arborist')
-const log = require('../utils/log-shim')
-const BaseCommand = require('../base-command.js')
+const { resolve, basename } = require('node:path')
+const { unlink } = require('node:fs/promises')
+const { log } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
+
class Shrinkwrap extends BaseCommand {
static description = 'Lock down dependency versions for publication'
static name = 'shrinkwrap'
@@ -21,6 +21,7 @@ class Shrinkwrap extends BaseCommand {
throw er
}
+ const Arborist = require('@npmcli/arborist')
const path = this.npm.prefix
const sw = resolve(path, 'npm-shrinkwrap.json')
const arb = new Arborist({ ...this.npm.flatOptions, path })
@@ -68,4 +69,5 @@ class Shrinkwrap extends BaseCommand {
}
}
}
+
module.exports = Shrinkwrap
diff --git a/lib/commands/star.js b/lib/commands/star.js
index 20039bf893811..1b76955810c72 100644
--- a/lib/commands/star.js
+++ b/lib/commands/star.js
@@ -1,9 +1,9 @@
const fetch = require('npm-registry-fetch')
const npa = require('npm-package-arg')
-const log = require('../utils/log-shim')
+const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Star extends BaseCommand {
static description = 'Mark your favorite packages'
static name = 'star'
@@ -62,10 +62,11 @@ class Star extends BaseCommand {
body,
})
- this.npm.output(show + ' ' + pkg.name)
+ output.standard(show + ' ' + pkg.name)
log.verbose('star', data)
return data
}
}
}
+
module.exports = Star
diff --git a/lib/commands/stars.js b/lib/commands/stars.js
index 4214134eb5871..1059569979daf 100644
--- a/lib/commands/stars.js
+++ b/lib/commands/stars.js
@@ -1,8 +1,8 @@
const fetch = require('npm-registry-fetch')
-const log = require('../utils/log-shim')
+const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Stars extends BaseCommand {
static description = 'View packages marked as favorites'
static name = 'stars'
@@ -25,7 +25,7 @@ class Stars extends BaseCommand {
}
for (const row of rows) {
- this.npm.output(row.value)
+ output.standard(row.value)
}
} catch (err) {
if (err.code === 'ENEEDAUTH') {
@@ -35,4 +35,5 @@ class Stars extends BaseCommand {
}
}
}
+
module.exports = Stars
diff --git a/lib/commands/team.js b/lib/commands/team.js
index 2d4fc663715e4..089e917909d10 100644
--- a/lib/commands/team.js
+++ b/lib/commands/team.js
@@ -1,9 +1,9 @@
const columns = require('cli-columns')
const libteam = require('libnpmteam')
+const { output } = require('proc-log')
+const { otplease } = require('../utils/auth.js')
-const otplease = require('../utils/otplease.js')
-
-const BaseCommand = require('../base-command.js')
+const BaseCommand = require('../base-cmd.js')
class Team extends BaseCommand {
static description = 'Manage organization teams and team memberships'
static name = 'team'
@@ -24,7 +24,7 @@ class Team extends BaseCommand {
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts) {
const { conf: { argv: { remain: argv } } } = opts
const subcommands = ['create', 'destroy', 'add', 'rm', 'ls']
@@ -68,87 +68,88 @@ class Team extends BaseCommand {
async create (entity, opts) {
await libteam.create(entity, opts)
if (opts.json) {
- this.npm.output(JSON.stringify({
+ output.buffer({
created: true,
team: entity,
- }))
+ })
} else if (opts.parseable) {
- this.npm.output(`${entity}\tcreated`)
+ output.standard(`${entity}\tcreated`)
} else if (!this.npm.silent) {
- this.npm.output(`+@${entity}`)
+ output.standard(`+@${entity}`)
}
}
async destroy (entity, opts) {
await libteam.destroy(entity, opts)
if (opts.json) {
- this.npm.output(JSON.stringify({
+ output.buffer({
deleted: true,
team: entity,
- }))
+ })
} else if (opts.parseable) {
- this.npm.output(`${entity}\tdeleted`)
+ output.standard(`${entity}\tdeleted`)
} else if (!this.npm.silent) {
- this.npm.output(`-@${entity}`)
+ output.standard(`-@${entity}`)
}
}
async add (entity, user, opts) {
await libteam.add(user, entity, opts)
if (opts.json) {
- this.npm.output(JSON.stringify({
+ output.buffer({
added: true,
team: entity,
user,
- }))
+ })
} else if (opts.parseable) {
- this.npm.output(`${user}\t${entity}\tadded`)
+ output.standard(`${user}\t${entity}\tadded`)
} else if (!this.npm.silent) {
- this.npm.output(`${user} added to @${entity}`)
+ output.standard(`${user} added to @${entity}`)
}
}
async rm (entity, user, opts) {
await libteam.rm(user, entity, opts)
if (opts.json) {
- this.npm.output(JSON.stringify({
+ output.buffer({
removed: true,
team: entity,
user,
- }))
+ })
} else if (opts.parseable) {
- this.npm.output(`${user}\t${entity}\tremoved`)
+ output.standard(`${user}\t${entity}\tremoved`)
} else if (!this.npm.silent) {
- this.npm.output(`${user} removed from @${entity}`)
+ output.standard(`${user} removed from @${entity}`)
}
}
async listUsers (entity, opts) {
const users = (await libteam.lsUsers(entity, opts)).sort()
if (opts.json) {
- this.npm.output(JSON.stringify(users, null, 2))
+ output.buffer(users)
} else if (opts.parseable) {
- this.npm.output(users.join('\n'))
+ output.standard(users.join('\n'))
} else if (!this.npm.silent) {
const plural = users.length === 1 ? '' : 's'
const more = users.length === 0 ? '' : ':\n'
- this.npm.output(`\n@${entity} has ${users.length} user${plural}${more}`)
- this.npm.output(columns(users, { padding: 1 }))
+ output.standard(`\n@${entity} has ${users.length} user${plural}${more}`)
+ output.standard(columns(users, { padding: 1 }))
}
}
async listTeams (entity, opts) {
const teams = (await libteam.lsTeams(entity, opts)).sort()
if (opts.json) {
- this.npm.output(JSON.stringify(teams, null, 2))
+ output.buffer(teams)
} else if (opts.parseable) {
- this.npm.output(teams.join('\n'))
+ output.standard(teams.join('\n'))
} else if (!this.npm.silent) {
const plural = teams.length === 1 ? '' : 's'
const more = teams.length === 0 ? '' : ':\n'
- this.npm.output(`\n@${entity} has ${teams.length} team${plural}${more}`)
- this.npm.output(columns(teams.map(t => `@${t}`), { padding: 1 }))
+ output.standard(`\n@${entity} has ${teams.length} team${plural}${more}`)
+ output.standard(columns(teams.map(t => `@${t}`), { padding: 1 }))
}
}
}
+
module.exports = Team
diff --git a/lib/commands/token.js b/lib/commands/token.js
index 8da8311875714..d2e85ffe5a549 100644
--- a/lib/commands/token.js
+++ b/lib/commands/token.js
@@ -1,21 +1,16 @@
-const Table = require('cli-table3')
-const chalk = require('chalk')
-const { v4: isCidrV4, v6: isCidrV6 } = require('is-cidr')
-const log = require('../utils/log-shim.js')
-const profile = require('npm-profile')
-
-const otplease = require('../utils/otplease.js')
-const pulseTillDone = require('../utils/pulse-till-done.js')
+const { log, output } = require('proc-log')
+const { listTokens, createToken, removeToken } = require('npm-profile')
+const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Token extends BaseCommand {
static description = 'Manage your authentication tokens'
static name = 'token'
static usage = ['list', 'revoke ', 'create [--read-only] [--cidr=list]']
static params = ['read-only', 'cidr', 'registry', 'otp']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
const subcommands = ['list', 'revoke', 'create']
if (argv.length === 2) {
@@ -30,7 +25,6 @@ class Token extends BaseCommand {
}
async exec (args) {
- log.gauge.show('token')
if (args.length === 0) {
return this.list()
}
@@ -38,10 +32,10 @@ class Token extends BaseCommand {
case 'list':
case 'ls':
return this.list()
+ case 'rm':
case 'delete':
case 'revoke':
case 'remove':
- case 'rm':
return this.rm(args.slice(1))
case 'create':
return this.create(args.slice(1))
@@ -51,16 +45,18 @@ class Token extends BaseCommand {
}
async list () {
- const conf = this.config()
+ const json = this.npm.config.get('json')
+ const parseable = this.npm.config.get('parseable')
log.info('token', 'getting list')
- const tokens = await pulseTillDone.withPromise(profile.listTokens(conf))
- if (conf.json) {
- this.npm.output(JSON.stringify(tokens, null, 2))
+ const tokens = await listTokens(this.npm.flatOptions)
+ if (json) {
+ output.buffer(tokens)
return
- } else if (conf.parseable) {
- this.npm.output(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'))
+ }
+ if (parseable) {
+ output.standard(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'))
tokens.forEach(token => {
- this.npm.output(
+ output.standard(
[
token.key,
token.token,
@@ -73,21 +69,17 @@ class Token extends BaseCommand {
return
}
this.generateTokenIds(tokens, 6)
- const idWidth = tokens.reduce((acc, token) => Math.max(acc, token.id.length), 0)
- const table = new Table({
- head: ['id', 'token', 'created', 'readonly', 'CIDR whitelist'],
- colWidths: [Math.max(idWidth, 2) + 2, 9, 12, 10],
- })
- tokens.forEach(token => {
- table.push([
- token.id,
- token.token + '…',
- String(token.created).slice(0, 10),
- token.readonly ? 'yes' : 'no',
- token.cidr_whitelist ? token.cidr_whitelist.join(', ') : '',
- ])
- })
- this.npm.output(table.toString())
+ const chalk = this.npm.chalk
+ for (const token of tokens) {
+ const level = token.readonly ? 'Read only token' : 'Publish token'
+ const created = String(token.created).slice(0, 10)
+ /* eslint-disable-next-line max-len */
+ output.standard(`${chalk.blue(level)} ${token.token}… with id ${chalk.cyan(token.id)} created ${created}`)
+ if (token.cidr_whitelist) {
+ output.standard(`with IP whitelist: ${chalk.green(token.cidr_whitelist.join(','))}`)
+ }
+ output.standard()
+ }
}
async rm (args) {
@@ -95,11 +87,12 @@ class Token extends BaseCommand {
throw this.usageError('`` argument is required.')
}
- const conf = this.config()
+ const json = this.npm.config.get('json')
+ const parseable = this.npm.config.get('parseable')
const toRemove = []
- const progress = log.newItem('removing tokens', toRemove.length)
- progress.info('token', 'getting existing list')
- const tokens = await pulseTillDone.withPromise(profile.listTokens(conf))
+ const opts = { ...this.npm.flatOptions }
+ log.info('token', `removing ${toRemove.length} tokens`)
+ const tokens = await listTokens(opts)
args.forEach(id => {
const matches = tokens.filter(token => token.key.indexOf(id) === 0)
if (matches.length === 1) {
@@ -120,80 +113,54 @@ class Token extends BaseCommand {
})
await Promise.all(
toRemove.map(key => {
- return otplease(this.npm, conf, c => profile.removeToken(key, c))
+ return otplease(this.npm, opts, c => removeToken(key, c))
})
)
- if (conf.json) {
- this.npm.output(JSON.stringify(toRemove))
- } else if (conf.parseable) {
- this.npm.output(toRemove.join('\t'))
+ if (json) {
+ output.buffer(toRemove)
+ } else if (parseable) {
+ output.standard(toRemove.join('\t'))
} else {
- this.npm.output('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : ''))
+ output.standard('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : ''))
}
}
- async create (args) {
- const conf = this.config()
- const cidr = conf.cidr
- const readonly = conf.readOnly
+ async create () {
+ const json = this.npm.config.get('json')
+ const parseable = this.npm.config.get('parseable')
+ const cidr = this.npm.config.get('cidr')
+ const readonly = this.npm.config.get('read-only')
+ const validCIDR = await this.validateCIDRList(cidr)
const password = await readUserInfo.password()
- const validCIDR = this.validateCIDRList(cidr)
log.info('token', 'creating')
- const result = await pulseTillDone.withPromise(
- otplease(this.npm, conf, c => profile.createToken(password, readonly, validCIDR, c))
+ const result = await otplease(
+ this.npm,
+ { ...this.npm.flatOptions },
+ c => createToken(password, readonly, validCIDR, c)
)
delete result.key
delete result.updated
- if (conf.json) {
- this.npm.output(JSON.stringify(result))
- } else if (conf.parseable) {
- Object.keys(result).forEach(k => this.npm.output(k + '\t' + result[k]))
+ if (json) {
+ output.buffer(result)
+ } else if (parseable) {
+ Object.keys(result).forEach(k => output.standard(k + '\t' + result[k]))
} else {
- const table = new Table()
- for (const k of Object.keys(result)) {
- table.push({ [chalk.bold(k)]: String(result[k]) })
+ const chalk = this.npm.chalk
+ // Identical to list
+ const level = result.readonly ? 'read only' : 'publish'
+ output.standard(`Created ${chalk.blue(level)} token ${result.token}`)
+ if (result.cidr_whitelist?.length) {
+ output.standard(`with IP whitelist: ${chalk.green(result.cidr_whitelist.join(','))}`)
}
- this.npm.output(table.toString())
}
}
- config () {
- const conf = { ...this.npm.flatOptions }
- const creds = this.npm.config.getCredentialsByURI(conf.registry)
- if (creds.token) {
- conf.auth = { token: creds.token }
- } else if (creds.username) {
- conf.auth = {
- basic: {
- username: creds.username,
- password: creds.password,
- },
- }
- } else if (creds.auth) {
- const auth = Buffer.from(creds.auth, 'base64').toString().split(':', 2)
- conf.auth = {
- basic: {
- username: auth[0],
- password: auth[1],
- },
- }
- } else {
- conf.auth = {}
- }
-
- if (conf.otp) {
- conf.auth.otp = conf.otp
- }
- return conf
- }
-
invalidCIDRError (msg) {
return Object.assign(new Error(msg), { code: 'EINVALIDCIDR' })
}
generateTokenIds (tokens, minLength) {
- const byId = {}
for (const token of tokens) {
token.id = token.key
for (let ii = minLength; ii < token.key.length; ++ii) {
@@ -205,26 +172,26 @@ class Token extends BaseCommand {
break
}
}
- byId[token.id] = token
}
- return byId
}
- validateCIDRList (cidrs) {
+ async validateCIDRList (cidrs) {
+ const { v4: isCidrV4, v6: isCidrV6 } = await import('is-cidr')
const maybeList = [].concat(cidrs).filter(Boolean)
const list = maybeList.length === 1 ? maybeList[0].split(/,\s*/) : maybeList
for (const cidr of list) {
if (isCidrV6(cidr)) {
throw this.invalidCIDRError(
- 'CIDR whitelist can only contain IPv4 addresses, ' + cidr + ' is IPv6'
+ `CIDR whitelist can only contain IPv4 addresses${cidr} is IPv6`
)
}
if (!isCidrV4(cidr)) {
- throw this.invalidCIDRError('CIDR whitelist contains invalid CIDR entry: ' + cidr)
+ throw this.invalidCIDRError(`CIDR whitelist contains invalid CIDR entry: ${cidr}`)
}
}
return list
}
}
+
module.exports = Token
diff --git a/lib/commands/uninstall.js b/lib/commands/uninstall.js
index 8c44f2e32106c..f9baebe3bc2e2 100644
--- a/lib/commands/uninstall.js
+++ b/lib/commands/uninstall.js
@@ -1,22 +1,20 @@
-const { resolve } = require('path')
-const Arborist = require('@npmcli/arborist')
-const rpj = require('read-package-json-fast')
-
+const { resolve } = require('node:path')
+const pkgJson = require('@npmcli/package-json')
const reifyFinish = require('../utils/reify-finish.js')
-const completion = require('../utils/completion/installed-shallow.js')
-
+const completion = require('../utils/installed-shallow.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
class Uninstall extends ArboristWorkspaceCmd {
static description = 'Remove a package'
static name = 'uninstall'
- static params = ['save', ...super.params]
+ static params = ['save', 'global', ...super.params]
static usage = ['[<@scope>/]...']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
@@ -25,7 +23,7 @@ class Uninstall extends ArboristWorkspaceCmd {
throw new Error('Must provide a package name to remove')
} else {
try {
- const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.npm.localPrefix)
args.push(pkg.name)
} catch (er) {
if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') {
@@ -42,6 +40,7 @@ class Uninstall extends ArboristWorkspaceCmd {
? resolve(this.npm.globalDir, '..')
: this.npm.localPrefix
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
path,
@@ -53,4 +52,5 @@ class Uninstall extends ArboristWorkspaceCmd {
await reifyFinish(this.npm, arb)
}
}
+
module.exports = Uninstall
diff --git a/lib/commands/unpublish.js b/lib/commands/unpublish.js
index 9985e2e39f140..4944888fe5aca 100644
--- a/lib/commands/unpublish.js
+++ b/lib/commands/unpublish.js
@@ -1,21 +1,18 @@
const libaccess = require('libnpmaccess')
const libunpub = require('libnpmpublish').unpublish
const npa = require('npm-package-arg')
-const npmFetch = require('npm-registry-fetch')
-const path = require('path')
-const util = require('util')
-const readJson = util.promisify(require('read-package-json'))
-
-const { flatten } = require('../utils/config/index.js')
+const pacote = require('pacote')
+const { output, log } = require('proc-log')
+const pkgJson = require('@npmcli/package-json')
+const { flatten } = require('@npmcli/config/lib/definitions')
const getIdentity = require('../utils/get-identity.js')
-const log = require('../utils/log-shim')
-const otplease = require('../utils/otplease.js')
+const { otplease } = require('../utils/auth.js')
+const BaseCommand = require('../base-cmd.js')
const LAST_REMAINING_VERSION_ERROR = 'Refusing to delete the last version of the package. ' +
'It will block from republishing a new version for 24 hours.\n' +
'Run with --force to do this.'
-const BaseCommand = require('../base-command.js')
class Unpublish extends BaseCommand {
static description = 'Remove a package from the registry'
static name = 'unpublish'
@@ -24,21 +21,24 @@ class Unpublish extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async getKeysOfVersions (name, opts) {
- const pkgUri = npa(name).escapedName
- const json = await npmFetch.json(`${pkgUri}?write=true`, opts)
- return Object.keys(json.versions)
+ static async getKeysOfVersions (name, opts) {
+ const packument = await pacote.packument(name, {
+ ...opts,
+ spec: name,
+ query: { write: true },
+ })
+ return Object.keys(packument.versions)
}
- async completion (args) {
+ static async completion (args, npm) {
const { partialWord, conf } = args
if (conf.argv.remain.length >= 3) {
return []
}
- const opts = { ...this.npm.flatOptions }
- const username = await getIdentity(this.npm, { ...opts }).catch(() => null)
+ const opts = { ...npm.flatOptions }
+ const username = await getIdentity(npm, { ...opts }).catch(() => null)
if (!username) {
return []
}
@@ -58,7 +58,7 @@ class Unpublish extends BaseCommand {
return pkgs
}
- const versions = await this.getKeysOfVersions(pkgs[0], opts)
+ const versions = await Unpublish.getKeysOfVersions(pkgs[0], opts)
if (!versions.length) {
return pkgs
} else {
@@ -66,20 +66,35 @@ class Unpublish extends BaseCommand {
}
}
- async exec (args) {
+ async exec (args, { localPrefix } = {}) {
if (args.length > 1) {
throw this.usageError()
}
- let spec = args.length && npa(args[0])
+ // workspace mode
+ if (!localPrefix) {
+ localPrefix = this.npm.localPrefix
+ }
+
const force = this.npm.config.get('force')
const { silent } = this.npm
const dryRun = this.npm.config.get('dry-run')
+ let spec
+ if (args.length) {
+ spec = npa(args[0])
+ if (spec.type !== 'version' && spec.rawSpec !== '*') {
+ throw this.usageError(
+ 'Can only unpublish a single version, or the entire project.\n' +
+ 'Tags and ranges are not supported.'
+ )
+ }
+ }
+
log.silly('unpublish', 'args[0]', args[0])
log.silly('unpublish', 'spec', spec)
- if ((!spec || !spec.rawSpec) && !force) {
+ if (spec?.rawSpec === '*' && !force) {
throw this.usageError(
'Refusing to delete entire project.\n' +
'Run with --force to do this.'
@@ -88,70 +103,74 @@ class Unpublish extends BaseCommand {
const opts = { ...this.npm.flatOptions }
- let pkgName
- let pkgVersion
let manifest
- let manifestErr
try {
- const pkgJson = path.join(this.npm.localPrefix, 'package.json')
- manifest = await readJson(pkgJson)
+ const { content } = await pkgJson.prepare(localPrefix)
+ manifest = content
} catch (err) {
- manifestErr = err
- }
- if (spec) {
- // If cwd has a package.json with a name that matches the package being
- // unpublished, load up the publishConfig
- if (manifest && manifest.name === spec.name && manifest.publishConfig) {
- flatten(manifest.publishConfig, opts)
- }
- const versions = await this.getKeysOfVersions(spec.name, opts)
- if (versions.length === 1 && !force) {
- throw this.usageError(LAST_REMAINING_VERSION_ERROR)
- }
- pkgName = spec.name
- pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
- } else {
- if (manifestErr) {
- if (manifestErr.code === 'ENOENT' || manifestErr.code === 'ENOTDIR') {
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+ if (!spec) {
+ // We needed a local package.json to figure out what package to
+ // unpublish
throw this.usageError()
- } else {
- throw manifestErr
}
+ } else {
+ // folks should know if ANY local package.json had a parsing error.
+ // They may be relying on `publishConfig` to be loading and we don't
+ // want to ignore errors in that case.
+ throw err
}
+ }
- log.verbose('unpublish', manifest)
-
+ let pkgVersion // for cli output
+ if (spec) {
+ pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
+ } else {
spec = npa.resolve(manifest.name, manifest.version)
- if (manifest.publishConfig) {
- flatten(manifest.publishConfig, opts)
+ log.verbose('unpublish', manifest)
+ pkgVersion = manifest.version ? `@${manifest.version}` : ''
+ if (!manifest.version && !force) {
+ throw this.usageError(
+ 'Refusing to delete entire project.\n' +
+ 'Run with --force to do this.'
+ )
}
+ }
- pkgName = manifest.name
- pkgVersion = manifest.version ? `@${manifest.version}` : ''
+ // If localPrefix has a package.json with a name that matches the package
+ // being unpublished, load up the publishConfig
+ if (manifest?.name === spec.name && manifest.publishConfig) {
+ const cliFlags = this.npm.config.data.get('cli').raw
+ // Filter out properties set in CLI flags to prioritize them over
+ // corresponding `publishConfig` settings
+ const filteredPublishConfig = Object.fromEntries(
+ Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
+ flatten(filteredPublishConfig, opts)
+ }
+
+ const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
+ if (versions.length === 1 && spec.rawSpec === versions[0] && !force) {
+ throw this.usageError(LAST_REMAINING_VERSION_ERROR)
+ }
+ if (versions.length === 1) {
+ pkgVersion = ''
}
if (!dryRun) {
await otplease(this.npm, opts, o => libunpub(spec, o))
}
if (!silent) {
- this.npm.output(`- ${pkgName}${pkgVersion}`)
+ output.standard(`- ${spec.name}${pkgVersion}`)
}
}
async execWorkspaces (args) {
await this.setWorkspaces()
- const force = this.npm.config.get('force')
- if (!force) {
- throw this.usageError(
- 'Refusing to delete entire project(s).\n' +
- 'Run with --force to do this.'
- )
- }
-
- for (const name of this.workspaceNames) {
- await this.exec([name])
+ for (const path of this.workspacePaths) {
+ await this.exec(args, { localPrefix: path })
}
}
}
+
module.exports = Unpublish
diff --git a/lib/commands/unstar.js b/lib/commands/unstar.js
index cbcb73636c638..c72966866669a 100644
--- a/lib/commands/unstar.js
+++ b/lib/commands/unstar.js
@@ -4,4 +4,5 @@ class Unstar extends Star {
static description = 'Remove an item from your favorite packages'
static name = 'unstar'
}
+
module.exports = Unstar
diff --git a/lib/commands/update.js b/lib/commands/update.js
index fd30bcb41e2b3..235a9a41177df 100644
--- a/lib/commands/update.js
+++ b/lib/commands/update.js
@@ -1,12 +1,8 @@
-const path = require('path')
-
-const Arborist = require('@npmcli/arborist')
-const log = require('../utils/log-shim.js')
-
+const path = require('node:path')
+const { log } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
-const completion = require('../utils/completion/installed-deep.js')
-
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
+
class Update extends ArboristWorkspaceCmd {
static description = 'Update packages'
static name = 'update'
@@ -18,6 +14,7 @@ class Update extends ArboristWorkspaceCmd {
'legacy-bundling',
'global-style',
'omit',
+ 'include',
'strict-peer-deps',
'package-lock',
'foreground-scripts',
@@ -33,8 +30,9 @@ class Update extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ const completion = require('../utils/installed-deep.js')
+ return completion(npm, opts)
}
async exec (args) {
@@ -53,6 +51,7 @@ class Update extends ArboristWorkspaceCmd {
'https://github.com/npm/rfcs/blob/latest/implemented/0019-remove-update-depth-option.md')
}
+ const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
path: where,
@@ -65,4 +64,5 @@ class Update extends ArboristWorkspaceCmd {
await reifyFinish(this.npm, arb)
}
}
+
module.exports = Update
diff --git a/lib/commands/version.js b/lib/commands/version.js
index a523283671791..d6c2dd4caed75 100644
--- a/lib/commands/version.js
+++ b/lib/commands/version.js
@@ -1,10 +1,7 @@
-const libnpmversion = require('libnpmversion')
-const { resolve } = require('path')
-const { promisify } = require('util')
-const readFile = promisify(require('fs').readFile)
-
-const updateWorkspaces = require('../workspaces/update-workspaces.js')
-const BaseCommand = require('../base-command.js')
+const { resolve } = require('node:path')
+const { readFile } = require('node:fs/promises')
+const { output } = require('proc-log')
+const BaseCommand = require('../base-cmd.js')
class Version extends BaseCommand {
static description = 'Bump a package version'
@@ -28,7 +25,7 @@ class Version extends BaseCommand {
/* eslint-disable-next-line max-len */
static usage = ['[ | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]']
- async completion (opts) {
+ static async completion (opts) {
const {
conf: {
argv: { remain },
@@ -73,29 +70,43 @@ class Version extends BaseCommand {
}
async change (args) {
+ const libnpmversion = require('libnpmversion')
const prefix = this.npm.config.get('tag-version-prefix')
const version = await libnpmversion(args[0], {
...this.npm.flatOptions,
path: this.npm.prefix,
})
- return this.npm.output(`${prefix}${version}`)
+ return output.standard(`${prefix}${version}`)
}
async changeWorkspaces (args) {
+ const updateWorkspaces = require('../utils/update-workspaces.js')
+ const libnpmversion = require('libnpmversion')
const prefix = this.npm.config.get('tag-version-prefix')
+ const {
+ config,
+ flatOptions,
+ localPrefix,
+ } = this.npm
await this.setWorkspaces()
const updatedWorkspaces = []
for (const [name, path] of this.workspaces) {
- this.npm.output(name)
+ output.standard(name)
const version = await libnpmversion(args[0], {
- ...this.npm.flatOptions,
+ ...flatOptions,
'git-tag-version': false,
path,
})
updatedWorkspaces.push(name)
- this.npm.output(`${prefix}${version}`)
+ output.standard(`${prefix}${version}`)
}
- return this.update(updatedWorkspaces)
+ return updateWorkspaces({
+ config,
+ flatOptions,
+ localPrefix,
+ npm: this.npm,
+ workspaces: updatedWorkspaces,
+ })
}
async list (results = {}) {
@@ -115,9 +126,9 @@ class Version extends BaseCommand {
}
if (this.npm.config.get('json')) {
- this.npm.output(JSON.stringify(results, null, 2))
+ output.buffer(results)
} else {
- this.npm.output(results)
+ output.standard(results)
}
}
@@ -135,22 +146,6 @@ class Version extends BaseCommand {
}
return this.list(results)
}
-
- async update (workspaces) {
- const {
- config,
- flatOptions,
- localPrefix,
- } = this.npm
-
- await updateWorkspaces({
- config,
- flatOptions,
- localPrefix,
- npm: this.npm,
- workspaces,
- })
- }
}
module.exports = Version
diff --git a/lib/commands/view.js b/lib/commands/view.js
index 855b37b81d42f..cf7292a2f3b81 100644
--- a/lib/commands/view.js
+++ b/lib/commands/view.js
@@ -1,21 +1,21 @@
-const chalk = require('chalk')
const columns = require('cli-columns')
-const fs = require('fs')
+const { readFile } = require('node:fs/promises')
const jsonParse = require('json-parse-even-better-errors')
-const log = require('../utils/log-shim.js')
+const { log, output, META } = require('proc-log')
const npa = require('npm-package-arg')
-const { resolve } = require('path')
+const { resolve } = require('node:path')
const formatBytes = require('../utils/format-bytes.js')
const relativeDate = require('tiny-relative-date')
const semver = require('semver')
-const { inspect, promisify } = require('util')
+const { inspect } = require('node:util')
const { packument } = require('pacote')
+const Queryable = require('../utils/queryable.js')
+const BaseCommand = require('../base-cmd.js')
+const { getError } = require('../utils/error-message.js')
+const { jsonError, outputError } = require('../utils/output-error.js')
-const readFile = promisify(fs.readFile)
-const readJson = async file => jsonParse(await readFile(file, 'utf8'))
+const readJson = file => readFile(file, 'utf8').then(jsonParse)
-const Queryable = require('../utils/queryable.js')
-const BaseCommand = require('../base-command.js')
class View extends BaseCommand {
static description = 'View registry info'
static name = 'view'
@@ -30,7 +30,7 @@ class View extends BaseCommand {
static ignoreImplicitWorkspace = false
static usage = ['[] [[.subfield]...]']
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length <= 2) {
// There used to be registry completion here, but it stopped
// making sense somewhere around 50,000 packages on the registry
@@ -38,52 +38,21 @@ class View extends BaseCommand {
}
// have the package, get the fields
const config = {
- ...this.npm.flatOptions,
+ ...npm.flatOptions,
fullMetadata: true,
preferOnline: true,
}
const spec = npa(opts.conf.argv.remain[2])
const pckmnt = await packument(spec, config)
- const defaultTag = this.npm.config.get('tag')
+ const defaultTag = npm.config.get('tag')
const dv = pckmnt.versions[pckmnt['dist-tags'][defaultTag]]
pckmnt.versions = Object.keys(pckmnt.versions).sort(semver.compareLoose)
- return getFields(pckmnt).concat(getFields(dv))
-
- function getFields (d, f, pref) {
- f = f || []
- pref = pref || []
- Object.keys(d).forEach((k) => {
- if (k.charAt(0) === '_' || k.indexOf('.') !== -1) {
- return
- }
- const p = pref.concat(k).join('.')
- f.push(p)
- if (Array.isArray(d[k])) {
- d[k].forEach((val, i) => {
- const pi = p + '[' + i + ']'
- if (val && typeof val === 'object') {
- getFields(val, f, [p])
- } else {
- f.push(pi)
- }
- })
- return
- }
- if (typeof d[k] === 'object') {
- getFields(d[k], f, [p])
- }
- })
- return f
- }
+ return getCompletionFields(pckmnt).concat(getCompletionFields(dv))
}
async exec (args) {
- if (!args.length) {
- args = ['.']
- }
- let pkg = args.shift()
- const local = /^\.@/.test(pkg) || pkg === '.'
+ let { pkg, local, rest } = parseArgs(args)
if (local) {
if (this.npm.global) {
@@ -98,95 +67,76 @@ class View extends BaseCommand {
pkg = `${manifest.name}${pkg.slice(1)}`
}
- let wholePackument = false
- if (!args.length) {
- args = ['']
- wholePackument = true
+ await this.#viewPackage(pkg, rest)
+ }
+
+ async execWorkspaces (args) {
+ const { pkg, local, rest } = parseArgs(args)
+
+ if (!local) {
+ log.warn('Ignoring workspaces for specified package(s)')
+ return this.exec([pkg, ...rest])
}
- const [pckmnt, data] = await this.getData(pkg, args)
- if (!this.npm.config.get('json') && wholePackument) {
- // pretty view (entire packument)
- data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]]['']))
- } else {
- // JSON formatted output (JSON or specific attributes from packument)
- let reducedData = data.reduce(reducer, {})
- if (wholePackument) {
- // No attributes
- reducedData = cleanBlanks(reducedData)
- log.silly('view', reducedData)
- }
- // disable the progress bar entirely, as we can't meaningfully update it
- // if we may have partial lines printed.
- log.disableProgress()
+ const json = this.npm.config.get('json')
+ await this.setWorkspaces()
- const msg = await this.jsonData(reducedData, pckmnt._id)
- if (msg !== '') {
- this.npm.output(msg)
+ for (const name of this.workspaceNames) {
+ try {
+ await this.#viewPackage(`${name}${pkg.slice(1)}`, rest, { workspace: true })
+ } catch (e) {
+ const err = getError(e, { npm: this.npm, command: this })
+ if (err.code !== 'E404') {
+ throw e
+ }
+ if (json) {
+ output.buffer({ [META]: true, jsonError: { [name]: jsonError(err, this.npm) } })
+ } else {
+ outputError(err)
+ }
+ process.exitCode = err.exitCode
}
}
}
- async execWorkspaces (args) {
- if (!args.length) {
- args = ['.']
+ async #viewPackage (name, args, { workspace } = {}) {
+ const wholePackument = !args.length
+ const json = this.npm.config.get('json')
+
+ // If we are viewing many packages and outputting individual fields then
+ // output the name before doing any async activity
+ if (!json && !wholePackument && workspace) {
+ output.standard(`${name}:`)
}
- const pkg = args.shift()
+ const [pckmnt, data] = await this.#getData(name, args, wholePackument)
- const local = /^\.@/.test(pkg) || pkg === '.'
- if (!local) {
- log.warn('Ignoring workspaces for specified package(s)')
- return this.exec([pkg, ...args])
- }
- let wholePackument = false
- if (!args.length) {
- wholePackument = true
- args = [''] // getData relies on this
- }
- const results = {}
- await this.setWorkspaces()
- for (const name of this.workspaceNames) {
- const wsPkg = `${name}${pkg.slice(1)}`
- const [pckmnt, data] = await this.getData(wsPkg, args)
-
- let reducedData = data.reduce(reducer, {})
- if (wholePackument) {
- // No attributes
- reducedData = cleanBlanks(reducedData)
- log.silly('view', reducedData)
+ if (!json && wholePackument) {
+ // pretty view (entire packument)
+ for (const v of data) {
+ output.standard(this.#prettyView(pckmnt, Object.values(v)[0][Queryable.ALL]))
}
+ return
+ }
- if (!this.npm.config.get('json')) {
- if (wholePackument) {
- data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]]['']))
- } else {
- this.npm.output(`${name}:`)
- const msg = await this.jsonData(reducedData, pckmnt._id)
- if (msg !== '') {
- this.npm.output(msg)
- }
- }
+ const res = this.#packageOutput(cleanData(data, wholePackument), pckmnt._id)
+ if (res) {
+ if (json) {
+ output.buffer(workspace ? { [name]: res } : res)
} else {
- const msg = await this.jsonData(reducedData, pckmnt._id)
- if (msg !== '') {
- results[name] = JSON.parse(msg)
- }
+ output.standard(res)
}
}
- if (Object.keys(results).length > 0) {
- this.npm.output(JSON.stringify(results, null, 2))
- }
}
- async getData (pkg, args) {
- const opts = {
+ async #getData (pkg, args) {
+ const spec = npa(pkg)
+
+ const pckmnt = await packument(spec, {
...this.npm.flatOptions,
preferOnline: true,
fullMetadata: true,
- }
-
- const spec = npa(pkg)
+ })
// get the data about this package
let version = this.npm.config.get('tag')
@@ -195,291 +145,281 @@ class View extends BaseCommand {
version = spec.rawSpec
}
- const pckmnt = await packument(spec, opts)
-
if (pckmnt['dist-tags']?.[version]) {
version = pckmnt['dist-tags'][version]
}
- if (pckmnt.time && pckmnt.time.unpublished) {
+ if (pckmnt.time?.unpublished) {
const u = pckmnt.time.unpublished
- const er = new Error(`Unpublished on ${u.time}`)
- er.statusCode = 404
- er.code = 'E404'
- er.pkgid = pckmnt._id
- throw er
+ throw Object.assign(new Error(`Unpublished on ${u.time}`), {
+ statusCode: 404,
+ code: 'E404',
+ pkgid: pckmnt._id,
+ })
}
- const data = []
const versions = pckmnt.versions || {}
- pckmnt.versions = Object.keys(versions).sort(semver.compareLoose)
+ pckmnt.versions = Object.keys(versions).filter(v => {
+ if (semver.valid(v)) {
+ return true
+ }
+ log.info('view', `Ignoring invalid version: ${v}`)
+ return false
+ }).sort(semver.compareLoose)
// remove readme unless we asked for it
if (args.indexOf('readme') === -1) {
delete pckmnt.readme
}
- Object.keys(versions).forEach((v) => {
- if (semver.satisfies(v, version, true)) {
- args.forEach(arg => {
- // remove readme unless we asked for it
- if (args.indexOf('readme') !== -1) {
- delete versions[v].readme
- }
-
- data.push(showFields(pckmnt, versions[v], arg))
+ const data = Object.entries(versions)
+ .filter(([v]) => semver.satisfies(v, version, true))
+ .flatMap(([, v]) => {
+ // remove readme unless we asked for it
+ if (args.indexOf('readme') !== -1) {
+ delete v.readme
+ }
+ return showFields({
+ data: pckmnt,
+ version: v,
+ fields: args,
+ json: this.npm.config.get('json'),
})
- }
- })
+ })
// No data has been pushed because no data is matching the specified version
- if (data.length === 0 && version !== 'latest') {
- const er = new Error(`No match found for version ${version}`)
- er.statusCode = 404
- er.code = 'E404'
- er.pkgid = `${pckmnt._id}@${version}`
- throw er
- }
-
- if (
- !this.npm.config.get('json') &&
- args.length === 1 &&
- args[0] === ''
- ) {
- pckmnt.version = version
+ if (!data.length && version !== 'latest') {
+ throw Object.assign(new Error(`No match found for version ${version}`), {
+ statusCode: 404,
+ code: 'E404',
+ pkgid: `${pckmnt._id}@${version}`,
+ })
}
return [pckmnt, data]
}
- async jsonData (data, name) {
+ #packageOutput (data, name) {
+ const json = this.npm.config.get('json')
const versions = Object.keys(data)
- let msg = ''
- let msgJson = []
const includeVersions = versions.length > 1
+
let includeFields
- const json = this.npm.config.get('json')
+ const res = versions.flatMap((v) => {
+ const fields = Object.entries(data[v])
- versions.forEach((v) => {
- const fields = Object.keys(data[v])
- includeFields = includeFields || (fields.length > 1)
- if (json) {
- msgJson.push({})
- }
- fields.forEach((f) => {
- let d = cleanup(data[v][f])
- if (fields.length === 1 && json) {
- msgJson[msgJson.length - 1][f] = d
+ includeFields ||= (fields.length > 1)
+
+ const msg = json ? {} : []
+
+ for (let [f, d] of fields) {
+ d = cleanup(d)
+
+ if (json) {
+ msg[f] = d
+ continue
}
if (includeVersions || includeFields || typeof d !== 'string') {
- if (json) {
- msgJson[msgJson.length - 1][f] = d
- } else {
- d = inspect(d, {
- showHidden: false,
- depth: 5,
- colors: this.npm.color,
- maxArrayLength: null,
- })
- }
- } else if (typeof d === 'string' && json) {
- d = JSON.stringify(d)
+ d = inspect(d, {
+ showHidden: false,
+ depth: 5,
+ colors: this.npm.color,
+ maxArrayLength: null,
+ })
}
- if (!json) {
- if (f && includeFields) {
- f += ' = '
- }
- msg += (includeVersions ? name + '@' + v + ' ' : '') +
- (includeFields ? f : '') + d + '\n'
+ if (f && includeFields) {
+ f += ' = '
}
- })
+
+ msg.push(`${includeVersions ? `${name}@${v} ` : ''}${includeFields ? f : ''}${d}`)
+ }
+
+ return msg
})
if (json) {
- if (msgJson.length && Object.keys(msgJson[0]).length === 1) {
- const k = Object.keys(msgJson[0])[0]
- msgJson = msgJson.map(m => m[k])
+ // TODO(BREAKING_CHANGE): all unwrapping should be removed. Users should know
+ // based on their arguments if they can expect an array or an object. And this
+ // unwrapping can break that assumption. Eg `npm view abbrev@^2` should always
+ // return an array, but currently since there is only one version matching `^2`
+ // this will return a single object instead.
+ const first = Object.keys(res[0] || {})
+ const jsonRes = first.length === 1 ? res.map(m => m[first[0]]) : res
+ if (jsonRes.length === 0) {
+ return
}
- if (msgJson.length === 1) {
- msg = JSON.stringify(msgJson[0], null, 2) + '\n'
- } else if (msgJson.length > 1) {
- msg = JSON.stringify(msgJson, null, 2) + '\n'
+ if (jsonRes.length === 1) {
+ return jsonRes[0]
}
+ return jsonRes
}
- return msg.trim()
+ return res.join('\n').trim()
}
- prettyView (packu, manifest) {
+ #prettyView (packu, manifest) {
// More modern, pretty printing of default view
const unicode = this.npm.config.get('unicode')
- const tags = []
-
- Object.keys(packu['dist-tags']).forEach((t) => {
- const version = packu['dist-tags'][t]
- tags.push(`${chalk.bold.green(t)}: ${version}`)
- })
- const unpackedSize = manifest.dist.unpackedSize &&
- formatBytes(manifest.dist.unpackedSize, true)
+ const chalk = this.npm.chalk
+ const deps = Object.entries(manifest.dependencies || {}).map(([k, dep]) =>
+ `${chalk.blue(k)}: ${dep}`
+ )
+ const site = manifest.homepage?.url || manifest.homepage
+ const bins = Object.keys(manifest.bin || {})
const licenseField = manifest.license || 'Proprietary'
- const info = {
- name: chalk.green(manifest.name),
- version: chalk.green(manifest.version),
- bins: Object.keys(manifest.bin || {}),
- versions: chalk.yellow(packu.versions.length + ''),
- description: manifest.description,
- deprecated: manifest.deprecated,
- keywords: packu.keywords || [],
- license: typeof licenseField === 'string'
- ? licenseField
- : (licenseField.type || 'Proprietary'),
- deps: Object.keys(manifest.dependencies || {}).map((dep) => {
- return `${chalk.yellow(dep)}: ${manifest.dependencies[dep]}`
- }),
- publisher: manifest._npmUser && unparsePerson({
- name: chalk.yellow(manifest._npmUser.name),
- email: chalk.cyan(manifest._npmUser.email),
- }),
- modified: !packu.time ? undefined
- : chalk.yellow(relativeDate(packu.time[manifest.version])),
- maintainers: (packu.maintainers || []).map((u) => unparsePerson({
- name: chalk.yellow(u.name),
- email: chalk.cyan(u.email),
- })),
- repo: (
- manifest.bugs && (manifest.bugs.url || manifest.bugs)
- ) || (
- manifest.repository && (manifest.repository.url || manifest.repository)
- ),
- site: (
- manifest.homepage && (manifest.homepage.url || manifest.homepage)
- ),
- tags,
- tarball: chalk.cyan(manifest.dist.tarball),
- shasum: chalk.yellow(manifest.dist.shasum),
- integrity:
- manifest.dist.integrity && chalk.yellow(manifest.dist.integrity),
- fileCount:
- manifest.dist.fileCount && chalk.yellow(manifest.dist.fileCount),
- unpackedSize: unpackedSize && chalk.yellow(unpackedSize),
- }
- if (info.license.toLowerCase().trim() === 'proprietary') {
- info.license = chalk.bold.red(info.license)
- } else {
- info.license = chalk.green(info.license)
+ const license = typeof licenseField === 'string'
+ ? licenseField
+ : (licenseField.type || 'Proprietary')
+
+ const res = []
+
+ res.push('')
+ res.push([
+ chalk.underline.cyan(`${manifest.name}@${manifest.version}`),
+ license.toLowerCase().trim() === 'proprietary'
+ ? chalk.red(license)
+ : chalk.green(license),
+ `deps: ${deps.length ? chalk.cyan(deps.length) : chalk.cyan('none')}`,
+ `versions: ${chalk.cyan(packu.versions.length + '')}`,
+ ].join(' | '))
+
+ manifest.description && res.push(manifest.description)
+ if (site) {
+ res.push(chalk.blue(site))
}
- this.npm.output('')
- this.npm.output(
- chalk.underline.bold(`${info.name}@${info.version}`) +
- ' | ' + info.license +
- ' | deps: ' + (info.deps.length ? chalk.cyan(info.deps.length) : chalk.green('none')) +
- ' | versions: ' + info.versions
+ manifest.deprecated && res.push(
+ `\n${chalk.redBright('DEPRECATED')}${unicode ? ' ⚠️ ' : '!!'} - ${manifest.deprecated}`
)
- info.description && this.npm.output(info.description)
- if (info.repo || info.site) {
- info.site && this.npm.output(chalk.cyan(info.site))
- }
- const warningSign = unicode ? ' ⚠️ ' : '!!'
- info.deprecated && this.npm.output(
- `\n${chalk.bold.red('DEPRECATED')}${
- warningSign
- } - ${info.deprecated}`
- )
+ if (packu.keywords?.length) {
+ res.push(`\nkeywords: ${
+ packu.keywords.map(k => chalk.cyan(k)).join(', ')
+ }`)
+ }
- if (info.keywords.length) {
- this.npm.output('')
- this.npm.output('keywords:', chalk.yellow(info.keywords.join(', ')))
+ if (bins.length) {
+ res.push(`\nbin: ${chalk.cyan(bins.join(', '))}`)
}
- if (info.bins.length) {
- this.npm.output('')
- this.npm.output('bin:', chalk.yellow(info.bins.join(', ')))
+ res.push('\ndist')
+ res.push(`.tarball: ${chalk.blue(manifest.dist.tarball)}`)
+ res.push(`.shasum: ${chalk.green(manifest.dist.shasum)}`)
+ if (manifest.dist.integrity) {
+ res.push(`.integrity: ${chalk.green(manifest.dist.integrity)}`)
+ }
+ if (manifest.dist.unpackedSize) {
+ res.push(`.unpackedSize: ${chalk.blue(formatBytes(manifest.dist.unpackedSize, true))}`)
}
- this.npm.output('')
- this.npm.output('dist')
- this.npm.output('.tarball:', info.tarball)
- this.npm.output('.shasum:', info.shasum)
- info.integrity && this.npm.output('.integrity:', info.integrity)
- info.unpackedSize && this.npm.output('.unpackedSize:', info.unpackedSize)
-
- const maxDeps = 24
- if (info.deps.length) {
- this.npm.output('')
- this.npm.output('dependencies:')
- this.npm.output(columns(info.deps.slice(0, maxDeps), { padding: 1 }))
- if (info.deps.length > maxDeps) {
- this.npm.output(`(...and ${info.deps.length - maxDeps} more.)`)
+ if (deps.length) {
+ const maxDeps = 24
+ res.push('\ndependencies:')
+ res.push(columns(deps.slice(0, maxDeps), { padding: 1 }))
+ if (deps.length > maxDeps) {
+ res.push(chalk.dim(`(...and ${deps.length - maxDeps} more.)`))
}
}
- if (info.maintainers && info.maintainers.length) {
- this.npm.output('')
- this.npm.output('maintainers:')
- info.maintainers.forEach((u) => this.npm.output('-', u))
+ if (packu.maintainers?.length) {
+ res.push('\nmaintainers:')
+ packu.maintainers.forEach(u =>
+ res.push(`- ${unparsePerson({
+ name: chalk.blue(u.name),
+ email: chalk.dim(u.email) })}`)
+ )
}
- this.npm.output('')
- this.npm.output('dist-tags:')
- this.npm.output(columns(info.tags))
+ res.push('\ndist-tags:')
+ res.push(columns(Object.entries(packu['dist-tags']).map(([k, t]) =>
+ `${chalk.blue(k)}: ${t}`
+ )))
- if (info.publisher || info.modified) {
+ const publisher = manifest._npmUser && unparsePerson({
+ name: chalk.blue(manifest._npmUser.name),
+ email: chalk.dim(manifest._npmUser.email),
+ })
+ if (publisher || packu.time) {
let publishInfo = 'published'
- if (info.modified) {
- publishInfo += ` ${info.modified}`
+ if (packu.time) {
+ publishInfo += ` ${chalk.cyan(relativeDate(packu.time[manifest.version]))}`
}
- if (info.publisher) {
- publishInfo += ` by ${info.publisher}`
+ if (publisher) {
+ publishInfo += ` by ${publisher}`
}
- this.npm.output('')
- this.npm.output(publishInfo)
+ res.push('')
+ res.push(publishInfo)
}
+
+ return res.join('\n')
}
}
+
module.exports = View
-function cleanBlanks (obj) {
- const clean = {}
- Object.keys(obj).forEach((version) => {
- clean[version] = obj[version]['']
- })
- return clean
+function parseArgs (args) {
+ if (!args.length) {
+ args = ['.']
+ }
+
+ const pkg = args.shift()
+
+ return {
+ pkg,
+ local: /^\.@/.test(pkg) || pkg === '.',
+ rest: args,
+ }
}
-// takes an array of objects and merges them into one object
-function reducer (acc, cur) {
- if (cur) {
- Object.keys(cur).forEach((v) => {
- acc[v] = acc[v] || {}
- Object.keys(cur[v]).forEach((t) => {
- acc[v][t] = cur[v][t]
+function cleanData (obj, wholePackument) {
+ // JSON formatted output (JSON or specific attributes from packument)
+ const data = obj.reduce((acc, cur) => {
+ if (cur) {
+ Object.entries(cur).forEach(([k, v]) => {
+ acc[k] ||= {}
+ Object.keys(v).forEach((t) => {
+ acc[k][t] = cur[k][t]
+ })
})
- })
+ }
+ return acc
+ }, {})
+
+ if (wholePackument) {
+ const cleaned = Object.entries(data).reduce((acc, [k, v]) => {
+ acc[k] = v[Queryable.ALL]
+ return acc
+ }, {})
+ log.silly('view', cleaned)
+ return cleaned
}
- return acc
+ return data
}
// return whatever was printed
-function showFields (data, version, fields) {
- const o = {}
- ;[data, version].forEach((s) => {
- Object.keys(s).forEach((k) => {
- o[k] = s[k]
+function showFields ({ data, version, fields, json }) {
+ const o = [data, version].reduce((acc, s) => {
+ Object.entries(s).forEach(([k, v]) => {
+ acc[k] = v
})
- })
+ return acc
+ }, {})
const queryable = new Queryable(o)
- const s = queryable.query(fields)
- const res = { [version.version]: s }
- if (s) {
- return res
+ if (!fields.length) {
+ return { [version.version]: queryable.query(Queryable.ALL) }
}
+
+ return fields.map((field) => {
+ const s = queryable.query(field, { unwrapSingleItemArrays: !json })
+ if (s) {
+ return { [version.version]: s }
+ }
+ })
}
function cleanup (data) {
@@ -492,19 +432,41 @@ function cleanup (data) {
}
const keys = Object.keys(data)
- if (keys.length <= 3 &&
- data.name &&
- (keys.length === 1 ||
- (keys.length === 3 && data.email && data.url) ||
- (keys.length === 2 && (data.email || data.url)))) {
+ if (keys.length <= 3 && data.name && (
+ (keys.length === 1) ||
+ (keys.length === 3 && data.email && data.url) ||
+ (keys.length === 2 && (data.email || data.url))
+ )) {
data = unparsePerson(data)
}
return data
}
-function unparsePerson (d) {
- return d.name +
- (d.email ? ' <' + d.email + '>' : '') +
- (d.url ? ' (' + d.url + ')' : '')
+const unparsePerson = (d) =>
+ `${d.name}${d.email ? ` <${d.email}>` : ''}${d.url ? ` (${d.url})` : ''}`
+
+function getCompletionFields (d, f = [], pref = []) {
+ Object.entries(d).forEach(([k, v]) => {
+ if (k.charAt(0) === '_' || k.indexOf('.') !== -1) {
+ return
+ }
+ const p = pref.concat(k).join('.')
+ f.push(p)
+ if (Array.isArray(v)) {
+ v.forEach((val, i) => {
+ const pi = p + '[' + i + ']'
+ if (val && typeof val === 'object') {
+ getCompletionFields(val, f, [p])
+ } else {
+ f.push(pi)
+ }
+ })
+ return
+ }
+ if (typeof v === 'object') {
+ getCompletionFields(v, f, [p])
+ }
+ })
+ return f
}
diff --git a/lib/commands/whoami.js b/lib/commands/whoami.js
index 154cc870391ba..6b6e93ce7f885 100644
--- a/lib/commands/whoami.js
+++ b/lib/commands/whoami.js
@@ -1,16 +1,20 @@
+const { output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
+const BaseCommand = require('../base-cmd.js')
-const BaseCommand = require('../base-command.js')
class Whoami extends BaseCommand {
static description = 'Display npm username'
static name = 'whoami'
static params = ['registry']
- async exec (args) {
+ async exec () {
const username = await getIdentity(this.npm, { ...this.npm.flatOptions })
- this.npm.output(
- this.npm.config.get('json') ? JSON.stringify(username) : username
- )
+ if (this.npm.config.get('json')) {
+ output.buffer(username)
+ } else {
+ output.standard(username)
+ }
}
}
+
module.exports = Whoami
diff --git a/lib/lifecycle-cmd.js b/lib/lifecycle-cmd.js
index 848771a38355e..a509a9380f668 100644
--- a/lib/lifecycle-cmd.js
+++ b/lib/lifecycle-cmd.js
@@ -1,7 +1,7 @@
+const BaseCommand = require('./base-cmd.js')
+
// The implementation of commands that are just "run a script"
// restart, start, stop, test
-
-const BaseCommand = require('./base-command.js')
class LifecycleCmd extends BaseCommand {
static usage = ['[-- ]']
static isShellout = true
@@ -16,4 +16,5 @@ class LifecycleCmd extends BaseCommand {
return this.npm.exec('run-script', [this.constructor.name, ...args])
}
}
+
module.exports = LifecycleCmd
diff --git a/lib/npm.js b/lib/npm.js
index 841d145ddcbad..5563cec21ba4d 100644
--- a/lib/npm.js
+++ b/lib/npm.js
@@ -1,55 +1,47 @@
-const Arborist = require('@npmcli/arborist')
-const EventEmitter = require('events')
-const { resolve, dirname, join } = require('path')
+const { resolve, dirname, join } = require('node:path')
const Config = require('@npmcli/config')
-const chalk = require('chalk')
const which = require('which')
-const fs = require('fs/promises')
-
-// Patch the global fs module here at the app level
-require('graceful-fs').gracefulify(require('fs'))
-
-const { definitions, flatten, shorthands } = require('./utils/config/index.js')
+const fs = require('node:fs/promises')
+const { definitions, flatten, shorthands } = require('@npmcli/config/lib/definitions')
const usage = require('./utils/npm-usage.js')
const LogFile = require('./utils/log-file.js')
const Timers = require('./utils/timers.js')
const Display = require('./utils/display.js')
-const log = require('./utils/log-shim')
-const replaceInfo = require('./utils/replace-info.js')
-const updateNotifier = require('./utils/update-notifier.js')
+const { log, time, output, META } = require('proc-log')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
const pkg = require('../package.json')
-const cmdList = require('./utils/cmd-list.js')
+const { deref } = require('./utils/cmd-list.js')
+const { jsonError, outputError } = require('./utils/output-error.js')
-class Npm extends EventEmitter {
+class Npm {
static get version () {
return pkg.version
}
+ static cmd (c) {
+ const command = deref(c)
+ if (!command) {
+ throw Object.assign(new Error(`Unknown command ${c}`), {
+ code: 'EUNKNOWNCOMMAND',
+ command: c,
+ })
+ }
+ return require(`./commands/${command}.js`)
+ }
+
+ unrefPromises = []
updateNotification = null
- loadErr = null
argv = []
#command = null
#runId = new Date().toISOString().replace(/[.:]/g, '_')
- #loadPromise = null
- #tmpFolder = null
#title = 'npm'
#argvClean = []
- #chalk = null
#npmRoot = null
- #warnedNonDashArg = false
- #outputBuffer = []
+ #display = null
#logFile = new LogFile()
- #display = new Display()
- #timers = new Timers({
- start: 'npm',
- listener: (name, ms) => {
- const args = ['timing', name, `Completed in ${ms}ms`]
- this.#logFile.log(...args)
- this.#display.log(...args)
- },
- })
+ #timers = new Timers()
// all these options are only used by tests in order to make testing more
// closely resemble real world usage. for now, npm has no programmatic API so
@@ -63,8 +55,14 @@ class Npm extends EventEmitter {
// allows tests created by tap inside this repo to not set the local
// prefix to `npmRoot` since that is the first dir it would encounter when
// doing implicit detection
- constructor ({ npmRoot = dirname(__dirname), argv = [], excludeNpmCwd = false } = {}) {
- super()
+ constructor ({
+ stdout = process.stdout,
+ stderr = process.stderr,
+ npmRoot = dirname(__dirname),
+ argv = [],
+ excludeNpmCwd = false,
+ } = {}) {
+ this.#display = new Display({ stdout, stderr })
this.#npmRoot = npmRoot
this.config = new Config({
npmPath: this.#npmRoot,
@@ -76,45 +74,150 @@ class Npm extends EventEmitter {
})
}
- get version () {
- return this.constructor.version
+ async load () {
+ let err
+ try {
+ return await time.start('npm:load', () => this.#load())
+ } catch (e) {
+ err = e
+ }
+ return this.#handleError(err)
}
- deref (c) {
- if (!c) {
- return
+ async #load () {
+ await time.start('npm:load:whichnode', async () => {
+ // TODO should we throw here?
+ const node = await which(process.argv[0]).catch(() => {})
+ if (node && node.toUpperCase() !== process.execPath.toUpperCase()) {
+ log.verbose('node symlink', node)
+ process.execPath = node
+ this.config.execPath = node
+ }
+ })
+
+ await time.start('npm:load:configload', () => this.config.load())
+
+ // npm --versions
+ if (this.config.get('versions', 'cli')) {
+ this.argv = ['version']
+ this.config.set('usage', false, 'cli')
+ } else {
+ this.argv = [...this.config.parsedArgv.remain]
}
- if (c.match(/[A-Z]/)) {
- c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
+
+ // Remove first argv since that is our command as typed
+ // Note that this might not be the actual name of the command
+ // due to aliases, etc. But we use the raw form of it later
+ // in user output so it must be preserved as is.
+ const commandArg = this.argv.shift()
+
+ // This is the actual name of the command that will be run or
+ // undefined if deref could not find a match
+ const command = deref(commandArg)
+
+ await this.#display.load({
+ command,
+ loglevel: this.config.get('loglevel'),
+ stdoutColor: this.color,
+ stderrColor: this.logColor,
+ timing: this.config.get('timing'),
+ unicode: this.config.get('unicode'),
+ progress: this.flatOptions.progress,
+ json: this.config.get('json'),
+ heading: this.config.get('heading'),
+ })
+ process.env.COLOR = this.color ? '1' : '0'
+
+ // npm -v
+  // return from here early so we don't create any caches/logfiles/timers etc
+ if (this.config.get('version', 'cli')) {
+ output.standard(this.version)
+ return { exec: false }
+ }
+
+ // mkdir this separately since the logs dir can be set to
+ // a different location. if this fails, then we don't have
+ // a cache dir, but we don't want to fail immediately since
+ // the command might not need a cache dir (like `npm --version`)
+ await time.start('npm:load:mkdirpcache', () =>
+ fs.mkdir(this.cache, { recursive: true })
+ .catch((e) => log.verbose('cache', `could not create cache: ${e}`)))
+
+ // it's ok if this fails. user might have specified an invalid dir
+ // which we will tell them about at the end
+ if (this.config.get('logs-max') > 0) {
+ await time.start('npm:load:mkdirplogs', () =>
+ fs.mkdir(this.#logsDir, { recursive: true })
+ .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`)))
}
- if (cmdList.plumbing.indexOf(c) !== -1) {
- return c
+
+ // note: this MUST be shorter than the actual argv length, because it
+ // uses the same memory, so node will truncate it if it's too long.
+ // We time this because setting process.title is slow sometimes but we
+ // have to do it for security reasons. But still helpful to know how slow it is.
+ time.start('npm:load:setTitle', () => {
+ const { parsedArgv: { cooked, remain } } = this.config
+ // Secrets are mostly in configs, so title is set using only the positional args
+ // to keep those from being leaked. We still do a best effort replaceInfo.
+ this.#title = ['npm'].concat(replaceInfo(remain)).join(' ').trim()
+ process.title = this.#title
+ // The cooked argv is also logged separately for debugging purposes. It is
+ // cleaned as a best effort by replacing known secrets like basic auth
+ // password and strings that look like npm tokens. XXX: for this to be
+ // safer the config should create a sanitized version of the argv as it
+ // has the full context of what each option contains.
+ this.#argvClean = replaceInfo(cooked)
+ log.verbose('title', this.title)
+ log.verbose('argv', this.#argvClean.map(JSON.stringify).join(' '))
+ })
+
+ // logFile.load returns a promise that resolves when old logs are done being cleaned.
+ // We save this promise to an array so that we can await it in tests to ensure more
+ // deterministic logging behavior. The process will also hang open if this were to
+ // take a long time to resolve, but that is why process.exit is called explicitly
+ // in the exit-handler.
+ this.unrefPromises.push(this.#logFile.load({
+ command,
+ path: this.logPath,
+ logsMax: this.config.get('logs-max'),
+ timing: this.config.get('timing'),
+ }))
+
+ this.#timers.load({
+ path: this.logPath,
+ timing: this.config.get('timing'),
+ })
+
+ const configScope = this.config.get('scope')
+ if (configScope && !/^@/.test(configScope)) {
+ this.config.set('scope', `@${configScope}`, this.config.find('scope'))
}
- // first deref the abbrev, if there is one
- // then resolve any aliases
- // so `npm install-cl` will resolve to `install-clean` then to `ci`
- let a = cmdList.abbrevs[c]
- while (cmdList.aliases[a]) {
- a = cmdList.aliases[a]
+
+ if (this.config.get('force')) {
+ log.warn('using --force', 'Recommended protections disabled.')
}
- return a
- }
- // Get an instantiated npm command
- // npm.command is already taken as the currently running command, a refactor
- // would be needed to change this
- async cmd (cmd) {
- await this.load()
+ return { exec: true, command: commandArg, args: this.argv }
+ }
- const cmdId = this.deref(cmd)
- if (!cmdId) {
- throw Object.assign(new Error(`Unknown command ${cmd}`), {
- code: 'EUNKNOWNCOMMAND',
- })
+ async exec (cmd, args = this.argv) {
+ if (!this.#command) {
+ let err
+ try {
+ await this.#exec(cmd, args)
+ } catch (e) {
+ err = e
+ }
+ return this.#handleError(err)
+ } else {
+ return this.#exec(cmd, args)
}
+ }
- const Impl = require(`./commands/${cmdId}.js`)
- const command = new Impl(this)
+ // Call an npm command
+ async #exec (cmd, args) {
+ const Command = this.constructor.cmd(cmd)
+ const command = new Command(this)
// since 'test', 'start', 'stop', etc. commands re-enter this function
// to call the run-script command, we need to only set it one time.
@@ -123,49 +226,29 @@ class Npm extends EventEmitter {
process.env.npm_command = this.command
}
- return command
- }
-
- // Call an npm command
- async exec (cmd, args = this.argv) {
- const command = await this.cmd(cmd)
- const timeEnd = this.time(`command:${cmd}`)
-
- // this is async but we dont await it, since its ok if it doesnt
- // finish before the command finishes running. it uses command and argv
- // so it must be initiated here, after the command name is set
- // eslint-disable-next-line promise/catch-or-return
- updateNotifier(this).then((msg) => (this.updateNotification = msg))
-
- // Options are prefixed by a hyphen-minus (-, \u2d).
- // Other dash-type chars look similar but are invalid.
- if (!this.#warnedNonDashArg) {
- const nonDashArgs = args.filter(a => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(a))
- if (nonDashArgs.length) {
- this.#warnedNonDashArg = true
- log.error(
- 'arg',
- 'Argument starts with non-ascii dash, this is probably invalid:',
- nonDashArgs.join(', ')
- )
- }
+ if (this.config.get('usage')) {
+ return output.standard(command.usage)
}
- return command.cmdExec(args).finally(timeEnd)
- }
-
- async load () {
- if (!this.#loadPromise) {
- this.#loadPromise = this.time('npm:load', () => this.#load().catch((er) => {
- this.loadErr = er
- throw er
- }))
+ let execWorkspaces = false
+ const hasWsConfig = this.config.get('workspaces') || this.config.get('workspace').length
+ // if cwd is a workspace, the default is set to [that workspace]
+ const implicitWs = this.config.get('workspace', 'default').length
+ // (-ws || -w foo) && (cwd is not a workspace || command is not ignoring implicit workspaces)
+ if (hasWsConfig && (!implicitWs || !Command.ignoreImplicitWorkspace)) {
+ if (this.global) {
+ throw new Error('Workspaces not supported for global packages')
+ }
+ if (!Command.workspaces) {
+ throw Object.assign(new Error('This command does not support workspaces.'), {
+ code: 'ENOWORKSPACES',
+ })
+ }
+ execWorkspaces = true
}
- return this.#loadPromise
- }
- get loaded () {
- return this.config.loaded
+ return time.start(`command:${cmd}`, () =>
+ execWorkspaces ? command.execWorkspaces(args) : command.exec(args))
}
// This gets called at the end of the exit handler and
@@ -177,115 +260,91 @@ class Npm extends EventEmitter {
this.#logFile.off()
}
- time (name, fn) {
- return this.#timers.time(name, fn)
- }
-
- writeTimingFile () {
- this.#timers.writeFile({
+ finish (err) {
+ // Finish all our timer work, this will write the file if requested, end timers, etc
+ this.#timers.finish({
id: this.#runId,
command: this.#argvClean,
logfiles: this.logFiles,
version: this.version,
})
- }
- get title () {
- return this.#title
+ output.flush({
+ [META]: true,
+ // json can be set during a command so we send the
+ // final value of it to the display layer here
+ json: this.loaded && this.config.get('json'),
+ jsonError: jsonError(err, this),
+ })
}
- set title (t) {
- process.title = t
- this.#title = t
- }
+ exitErrorMessage () {
+ if (this.logFiles.length) {
+ return `A complete log of this run can be found in: ${this.logFiles}`
+ }
- async #load () {
- await this.time('npm:load:whichnode', async () => {
- // TODO should we throw here?
- const node = await which(process.argv[0]).catch(() => {})
- if (node && node.toUpperCase() !== process.execPath.toUpperCase()) {
- log.verbose('node symlink', node)
- process.execPath = node
- this.config.execPath = node
- }
- })
+ const logsMax = this.config.get('logs-max')
+ if (logsMax <= 0) {
+ // user specified no log file
+ return `Log files were not written due to the config logs-max=${logsMax}`
+ }
- await this.time('npm:load:configload', () => this.config.load())
+ // could be an error writing to the directory
+ return `Log files were not written due to an error writing to the directory: ${this.#logsDir}` +
+ '\nYou can rerun the command with `--loglevel=verbose` to see the logs in your terminal'
+ }
- // mkdir this separately since the logs dir can be set to
- // a different location. if this fails, then we don't have
- // a cache dir, but we don't want to fail immediately since
- // the command might not need a cache dir (like `npm --version`)
- await this.time('npm:load:mkdirpcache', () =>
- fs.mkdir(this.cache, { recursive: true })
- .catch((e) => log.verbose('cache', `could not create cache: ${e}`)))
+ async #handleError (err) {
+ if (err) {
+ // Get the local package if it exists for a more helpful error message
+ const localPkg = await require('@npmcli/package-json')
+ .normalize(this.localPrefix)
+ .then(p => p.content)
+ .catch(() => null)
+ Object.assign(err, this.#getError(err, { pkg: localPkg }))
+ }
- // its ok if this fails. user might have specified an invalid dir
- // which we will tell them about at the end
- await this.time('npm:load:mkdirplogs', () =>
- fs.mkdir(this.logsDir, { recursive: true })
- .catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`)))
+ this.finish(err)
- // note: this MUST be shorter than the actual argv length, because it
- // uses the same memory, so node will truncate it if it's too long.
- this.time('npm:load:setTitle', () => {
- const { parsedArgv: { cooked, remain } } = this.config
- this.argv = remain
- // Secrets are mostly in configs, so title is set using only the positional args
- // to keep those from being leaked.
- this.title = ['npm'].concat(replaceInfo(remain)).join(' ').trim()
- // The cooked argv is also logged separately for debugging purposes. It is
- // cleaned as a best effort by replacing known secrets like basic auth
- // password and strings that look like npm tokens. XXX: for this to be
- // safer the config should create a sanitized version of the argv as it
- // has the full context of what each option contains.
- this.#argvClean = replaceInfo(cooked)
- log.verbose('title', this.title)
- log.verbose('argv', this.#argvClean.map(JSON.stringify).join(' '))
- })
+ if (err) {
+ throw err
+ }
+ }
- this.time('npm:load:display', () => {
- this.#display.load({
- // Use logColor since that is based on stderr
- color: this.logColor,
- progress: this.flatOptions.progress,
- silent: this.silent,
- timing: this.config.get('timing'),
- loglevel: this.config.get('loglevel'),
- unicode: this.config.get('unicode'),
- heading: this.config.get('heading'),
- })
- process.env.COLOR = this.color ? '1' : '0'
+ #getError (rawErr, opts) {
+ const { files = [], ...error } = require('./utils/error-message.js').getError(rawErr, {
+ npm: this,
+ command: this.#command,
+ ...opts,
})
- this.time('npm:load:logFile', () => {
- this.#logFile.load({
- path: this.logPath,
- logsMax: this.config.get('logs-max'),
- })
- log.verbose('logfile', this.#logFile.files[0] || 'no logfile created')
- })
+ const { writeFileSync } = require('node:fs')
+ for (const [file, content] of files) {
+ const filePath = `${this.logPath}${file}`
+ const fileContent = `'Log files:\n${this.logFiles.join('\n')}\n\n${content.trim()}\n`
+ try {
+ writeFileSync(filePath, fileContent)
+ error.detail.push(['', `\n\nFor a full report see:\n${filePath}`])
+ } catch (fileErr) {
+ log.warn('', `Could not write error message to ${file} due to ${fileErr}`)
+ }
+ }
- this.time('npm:load:timers', () =>
- this.#timers.load({
- path: this.config.get('timing') ? this.logPath : null,
- })
- )
+ outputError(error)
- this.time('npm:load:configScope', () => {
- const configScope = this.config.get('scope')
- if (configScope && !/^@/.test(configScope)) {
- this.config.set('scope', `@${configScope}`, this.config.find('scope'))
- }
- })
+ return error
+ }
- if (this.config.get('force')) {
- log.warn('using --force', 'Recommended protections disabled.')
- }
+ get title () {
+ return this.#title
}
- get isShellout () {
- return this.#command?.constructor?.isShellout
+ get loaded () {
+ return this.config.loaded
+ }
+
+ get version () {
+ return this.constructor.version
}
get command () {
@@ -294,10 +353,6 @@ class Npm extends EventEmitter {
get flatOptions () {
const { flat } = this.config
- // the Arborist constructor is used almost everywhere we call pacote, it's
- // easiest to attach it to flatOptions so it goes everywhere without having
- // to touch every call
- flat.Arborist = Arborist
flat.nodeVersion = process.version
flat.npmVersion = pkg.version
if (this.command) {
@@ -317,15 +372,16 @@ class Npm extends EventEmitter {
return this.flatOptions.logColor
}
+ get noColorChalk () {
+ return this.#display.chalk.noColor
+ }
+
get chalk () {
- if (!this.#chalk) {
- let level = chalk.level
- if (!this.color) {
- level = 0
- }
- this.#chalk = new chalk.Instance({ level })
- }
- return this.#chalk
+ return this.#display.chalk.stdout
+ }
+
+ get logChalk () {
+ return this.#display.chalk.stderr
}
get global () {
@@ -340,14 +396,6 @@ class Npm extends EventEmitter {
return 2
}
- get unfinishedTimers () {
- return this.#timers.unfinished
- }
-
- get finishedTimers () {
- return this.#timers.finished
- }
-
get started () {
return this.#timers.started
}
@@ -356,16 +404,12 @@ class Npm extends EventEmitter {
return this.#logFile.files
}
- get logsDir () {
+ get #logsDir () {
return this.config.get('logs-dir') || join(this.cache, '_logs')
}
get logPath () {
- return resolve(this.logsDir, `${this.#runId}-`)
- }
-
- get timingFile () {
- return this.#timers.file
+ return resolve(this.#logsDir, `${this.#runId}-`)
}
get npmRoot () {
@@ -376,26 +420,14 @@ class Npm extends EventEmitter {
return this.config.get('cache')
}
- set cache (r) {
- this.config.set('cache', r)
- }
-
get globalPrefix () {
return this.config.globalPrefix
}
- set globalPrefix (r) {
- this.config.globalPrefix = r
- }
-
get localPrefix () {
return this.config.localPrefix
}
- set localPrefix (r) {
- this.config.localPrefix = r
- }
-
get localPackage () {
return this.config.localPackage
}
@@ -431,68 +463,9 @@ class Npm extends EventEmitter {
return this.global ? this.globalPrefix : this.localPrefix
}
- set prefix (r) {
- const k = this.global ? 'globalPrefix' : 'localPrefix'
- this[k] = r
- }
-
get usage () {
return usage(this)
}
-
- // XXX add logging to see if we actually use this
- get tmp () {
- if (!this.#tmpFolder) {
- const rand = require('crypto').randomBytes(4).toString('hex')
- this.#tmpFolder = `npm-${process.pid}-${rand}`
- }
- return resolve(this.config.get('tmp'), this.#tmpFolder)
- }
-
- // output to stdout in a progress bar compatible way
- output (...msg) {
- log.clearProgress()
- // eslint-disable-next-line no-console
- console.log(...msg)
- log.showProgress()
- }
-
- outputBuffer (item) {
- this.#outputBuffer.push(item)
- }
-
- flushOutput (jsonError) {
- if (!jsonError && !this.#outputBuffer.length) {
- return
- }
-
- if (this.config.get('json')) {
- const jsonOutput = this.#outputBuffer.reduce((acc, item) => {
- if (typeof item === 'string') {
- // try to parse it as json in case its a string
- try {
- item = JSON.parse(item)
- } catch {
- return acc
- }
- }
- return { ...acc, ...item }
- }, {})
- this.output(JSON.stringify({ ...jsonOutput, ...jsonError }, null, 2))
- } else {
- for (const item of this.#outputBuffer) {
- this.output(item)
- }
- }
-
- this.#outputBuffer.length = 0
- }
-
- outputError (...msg) {
- log.clearProgress()
- // eslint-disable-next-line no-console
- console.error(...msg)
- log.showProgress()
- }
}
+
module.exports = Npm
diff --git a/lib/package-url-cmd.js b/lib/package-url-cmd.js
index 20e6a16fe1523..c7ae32174fcb6 100644
--- a/lib/package-url-cmd.js
+++ b/lib/package-url-cmd.js
@@ -1,13 +1,9 @@
-// Base command for opening urls from a package manifest (bugs, docs, repo)
-
const pacote = require('pacote')
-const hostedGitInfo = require('hosted-git-info')
-const Arborist = require('@npmcli/arborist')
-
-const openUrl = require('./utils/open-url.js')
-const log = require('./utils/log-shim')
+const { openUrl } = require('./utils/open-url.js')
+const { log } = require('proc-log')
+const BaseCommand = require('./base-cmd.js')
-const BaseCommand = require('./base-command.js')
+// Base command for opening urls from a package manifest (bugs, docs, repo)
class PackageUrlCommand extends BaseCommand {
static params = [
'browser',
@@ -33,7 +29,6 @@ class PackageUrlCommand extends BaseCommand {
...this.npm.flatOptions,
where: this.npm.localPrefix,
fullMetadata: true,
- Arborist,
}
const mani = await pacote.manifest(arg, opts)
const url = this.getUrl(arg, mani)
@@ -54,6 +49,7 @@ class PackageUrlCommand extends BaseCommand {
// repository (if a string) or repository.url (if an object) returns null
// if it's not a valid repo, or not a known hosted repo
hostedFromMani (mani) {
+ const hostedGitInfo = require('hosted-git-info')
const r = mani.repository
const rurl = !r ? null
: typeof r === 'string' ? r
@@ -64,4 +60,5 @@ class PackageUrlCommand extends BaseCommand {
return (rurl && hostedGitInfo.fromUrl(rurl.replace(/^git\+/, ''))) || null
}
}
+
module.exports = PackageUrlCommand
diff --git a/lib/utils/ansi-trim.js b/lib/utils/ansi-trim.js
deleted file mode 100644
index e35a1baf63335..0000000000000
--- a/lib/utils/ansi-trim.js
+++ /dev/null
@@ -1,3 +0,0 @@
-const r = new RegExp('\x1b(?:\\[(?:\\d+[ABCDEFGJKSTm]|\\d+;\\d+[Hfm]|' +
- '\\d+;\\d+;\\d+m|6n|s|u|\\?25[lh])|\\w)', 'g')
-module.exports = str => str.replace(r, '')
diff --git a/lib/utils/audit-error.js b/lib/utils/audit-error.js
index 7feccc739b6a9..c56ec9ba86f18 100644
--- a/lib/utils/audit-error.js
+++ b/lib/utils/audit-error.js
@@ -1,4 +1,5 @@
-const log = require('./log-shim')
+const { log, output } = require('proc-log')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
// print an error or just nothing if the audit report has an error
// this is called by the audit command, and by the reify-output util
@@ -21,16 +22,16 @@ const auditError = (npm, report) => {
const { body: errBody } = error
const body = Buffer.isBuffer(errBody) ? errBody.toString() : errBody
if (npm.flatOptions.json) {
- npm.output(JSON.stringify({
+ output.buffer({
message: error.message,
method: error.method,
- uri: error.uri,
+ uri: replaceInfo(error.uri),
headers: error.headers,
statusCode: error.statusCode,
body,
- }, null, 2))
+ })
} else {
- npm.output(body)
+ output.standard(body)
}
throw 'audit endpoint returned an error'
diff --git a/lib/utils/auth.js b/lib/utils/auth.js
index 8b9125a1c3ef0..747271169124b 100644
--- a/lib/utils/auth.js
+++ b/lib/utils/auth.js
@@ -1,32 +1,61 @@
-const profile = require('npm-profile')
-const log = require('../utils/log-shim')
-const openUrlPrompt = require('../utils/open-url-prompt.js')
+const { webAuthOpener, adduserWeb, loginWeb, loginCouch, adduserCouch } = require('npm-profile')
+const { log } = require('proc-log')
+const { createOpener } = require('../utils/open-url.js')
const read = require('../utils/read-user-info.js')
-const otplease = require('../utils/otplease.js')
+
+const otplease = async (npm, opts, fn) => {
+ try {
+ return await fn(opts)
+ } catch (err) {
+ if (!process.stdin.isTTY || !process.stdout.isTTY) {
+ throw err
+ }
+
+ // web otp
+ if (err.code === 'EOTP' && err.body?.authUrl && err.body?.doneUrl) {
+ const { token: otp } = await webAuthOpener(
+ createOpener(npm, 'Authenticate your account at'),
+ err.body.authUrl,
+ err.body.doneUrl,
+ opts
+ )
+ return await fn({ ...opts, otp })
+ }
+
+ // classic otp
+ if (err.code === 'EOTP' || (err.code === 'E401' && /one-time pass/.test(err.body))) {
+ const otp = await read.otp('This operation requires a one-time password.\nEnter OTP:')
+ return await fn({ ...opts, otp })
+ }
+
+ throw err
+ }
+}
const adduser = async (npm, { creds, ...opts }) => {
const authType = npm.config.get('auth-type')
let res
if (authType === 'web') {
- res = await profile.adduserWeb((url, emitter) => {
- openUrlPrompt(
- npm,
- url,
- 'Create your account at',
- 'Press ENTER to open in the browser...',
- emitter
- )
- }, opts)
- } else {
+ try {
+ res = await adduserWeb(createOpener(npm, 'Create your account at'), opts)
+ } catch (err) {
+ if (err.code === 'ENYI') {
+ log.verbose('web add user not supported, trying couch')
+ } else {
+ throw err
+ }
+ }
+ }
+
+ // auth type !== web or ENYI error w/ web adduser
+ if (!res) {
const username = await read.username('Username:', creds.username)
const password = await read.password('Password:', creds.password)
const email = await read.email('Email: (this IS public) ', creds.email)
// npm registry quirk: If you "add" an existing user with their current
// password, it's effectively a login, and if that account has otp you'll
// be prompted for it.
- res = await otplease(npm, opts, (reqOpts) =>
- profile.adduserCouch(username, email, password, opts)
- )
+ res = await otplease(npm, opts, (reqOpts) => adduserCouch(username, email, password, reqOpts))
}
// We don't know the username if it was a web login, all we can reliably log is scope and registry
@@ -44,21 +73,22 @@ const login = async (npm, { creds, ...opts }) => {
const authType = npm.config.get('auth-type')
let res
if (authType === 'web') {
- res = await profile.loginWeb((url, emitter) => {
- openUrlPrompt(
- npm,
- url,
- 'Login at',
- 'Press ENTER to open in the browser...',
- emitter
- )
- }, opts)
- } else {
+ try {
+ res = await loginWeb(createOpener(npm, 'Login at'), opts)
+ } catch (err) {
+ if (err.code === 'ENYI') {
+ log.verbose('web login not supported, trying couch')
+ } else {
+ throw err
+ }
+ }
+ }
+
+ // auth type !== web or ENYI error w/ web login
+ if (!res) {
const username = await read.username('Username:', creds.username)
const password = await read.password('Password:', creds.password)
- res = await otplease(npm, opts, (reqOpts) =>
- profile.loginCouch(username, password, reqOpts)
- )
+ res = await otplease(npm, opts, (reqOpts) => loginCouch(username, password, reqOpts))
}
// We don't know the username if it was a web login, all we can reliably log is scope and registry
@@ -75,4 +105,5 @@ const login = async (npm, { creds, ...opts }) => {
module.exports = {
adduser,
login,
+ otplease,
}
diff --git a/lib/utils/cmd-list.js b/lib/utils/cmd-list.js
index 03fe8ed07c930..9017b2b80ce52 100644
--- a/lib/utils/cmd-list.js
+++ b/lib/utils/cmd-list.js
@@ -1,74 +1,7 @@
const abbrev = require('abbrev')
-const localeCompare = require('@isaacs/string-locale-compare')('en')
-// plumbing should not have any aliases
-const aliases = {
-
- // aliases
- author: 'owner',
- home: 'docs',
- issues: 'bugs',
- info: 'view',
- show: 'view',
- find: 'search',
- add: 'install',
- unlink: 'uninstall',
- remove: 'uninstall',
- rm: 'uninstall',
- r: 'uninstall',
-
- // short names for common things
- un: 'uninstall',
- rb: 'rebuild',
- list: 'ls',
- ln: 'link',
- create: 'init',
- i: 'install',
- it: 'install-test',
- cit: 'install-ci-test',
- up: 'update',
- c: 'config',
- s: 'search',
- se: 'search',
- tst: 'test',
- t: 'test',
- ddp: 'dedupe',
- v: 'view',
- run: 'run-script',
- 'clean-install': 'ci',
- 'clean-install-test': 'cit',
- x: 'exec',
- why: 'explain',
- la: 'll',
- verison: 'version',
- ic: 'ci',
-
- // typos
- innit: 'init',
- // manually abbrev so that install-test doesn't make insta stop working
- in: 'install',
- ins: 'install',
- inst: 'install',
- insta: 'install',
- instal: 'install',
- isnt: 'install',
- isnta: 'install',
- isntal: 'install',
- isntall: 'install',
- 'install-clean': 'ci',
- 'isntall-clean': 'ci',
- hlep: 'help',
- 'dist-tags': 'dist-tag',
- upgrade: 'update',
- udpate: 'update',
- rum: 'run-script',
- sit: 'cit',
- urn: 'run-script',
- ogr: 'org',
- 'add-user': 'adduser',
-}
-
-// these are filenames in .
+// These correspond to filenames in lib/commands
+// Please keep this list sorted alphabetically
const commands = [
'access',
'adduser',
@@ -92,6 +25,7 @@ const commands = [
'fund',
'get',
'help',
+ 'help-search',
'hook',
'init',
'install',
@@ -99,7 +33,7 @@ const commands = [
'install-test',
'link',
'll',
- 'login', // This is an alias for `adduser` but it can be confusing
+ 'login',
'logout',
'ls',
'org',
@@ -118,6 +52,7 @@ const commands = [
'restart',
'root',
'run-script',
+ 'sbom',
'search',
'set',
'shrinkwrap',
@@ -135,16 +70,109 @@ const commands = [
'version',
'view',
'whoami',
-].sort(localeCompare)
+]
+
+// These must resolve to an entry in commands
+const aliases = {
+
+ // aliases
+ author: 'owner',
+ home: 'docs',
+ issues: 'bugs',
+ info: 'view',
+ show: 'view',
+ find: 'search',
+ add: 'install',
+ unlink: 'uninstall',
+ remove: 'uninstall',
+ rm: 'uninstall',
+ r: 'uninstall',
-const plumbing = ['help-search']
-const allCommands = [...commands, ...plumbing].sort(localeCompare)
-const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
+ // short names for common things
+ un: 'uninstall',
+ rb: 'rebuild',
+ list: 'ls',
+ ln: 'link',
+ create: 'init',
+ i: 'install',
+ it: 'install-test',
+ cit: 'install-ci-test',
+ up: 'update',
+ c: 'config',
+ s: 'search',
+ se: 'search',
+ tst: 'test',
+ t: 'test',
+ ddp: 'dedupe',
+ v: 'view',
+ run: 'run-script',
+ 'clean-install': 'ci',
+ 'clean-install-test': 'install-ci-test',
+ x: 'exec',
+ why: 'explain',
+ la: 'll',
+ verison: 'version',
+ ic: 'ci',
+
+ // typos
+ innit: 'init',
+ // manually abbrev so that install-test doesn't make insta stop working
+ in: 'install',
+ ins: 'install',
+ inst: 'install',
+ insta: 'install',
+ instal: 'install',
+ isnt: 'install',
+ isnta: 'install',
+ isntal: 'install',
+ isntall: 'install',
+ 'install-clean': 'ci',
+ 'isntall-clean': 'ci',
+ hlep: 'help',
+ 'dist-tags': 'dist-tag',
+ upgrade: 'update',
+ udpate: 'update',
+ rum: 'run-script',
+ sit: 'install-ci-test',
+ urn: 'run-script',
+ ogr: 'org',
+ 'add-user': 'adduser',
+}
+
+const deref = (c) => {
+ if (!c) {
+ return
+ }
+
+ // Translate camelCase to kebab-case (e.g. installTest to install-test)
+ if (c.match(/[A-Z]/)) {
+ c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
+ }
+
+ // if they asked for something exactly we are done
+ if (commands.includes(c)) {
+ return c
+ }
+
+ // if they asked for a direct alias
+ if (aliases[c]) {
+ return aliases[c]
+ }
+
+ const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
+
+ // first deref the abbrev, if there is one
+ // then resolve any aliases
+ // so `npm install-cl` will resolve to `install-clean` then to `ci`
+ let a = abbrevs[c]
+ while (aliases[a]) {
+ a = aliases[a]
+ }
+ return a
+}
module.exports = {
- abbrevs,
aliases,
commands,
- plumbing,
- allCommands,
+ deref,
}
diff --git a/lib/utils/completion.fish b/lib/utils/completion.fish
new file mode 100644
index 0000000000000..5e274ad77e5fd
--- /dev/null
+++ b/lib/utils/completion.fish
@@ -0,0 +1,40 @@
+# npm completions for Fish shell
+# This script is a work in progress and does not fall under the same semver contract as the rest of npm.
+
+# __fish_npm_needs_command taken from:
+# https://stackoverflow.com/questions/16657803/creating-autocomplete-script-with-sub-commands
+function __fish_npm_needs_command
+ set -l cmd (commandline -opc)
+
+ if test (count $cmd) -eq 1
+ return 0
+ end
+
+ return 1
+end
+
+# Taken from https://github.com/fish-shell/fish-shell/blob/HEAD/share/completions/npm.fish
+function __fish_complete_npm -d "Complete the commandline using npm's 'completion' tool"
+ # tell npm we are fish shell
+ set -lx COMP_FISH true
+ if command -sq npm
+ # npm completion is bash-centric, so we need to translate fish's "commandline" stuff to bash's $COMP_* stuff
+ # COMP_LINE is an array with the words in the commandline
+ set -lx COMP_LINE (commandline -opc)
+ # COMP_CWORD is the index of the current word in COMP_LINE
+ # bash starts arrays with 0, so subtract 1
+ set -lx COMP_CWORD (math (count $COMP_LINE) - 1)
+ # COMP_POINT is the index of point/cursor when the commandline is viewed as a string
+ set -lx COMP_POINT (commandline -C)
+ # If the cursor is after the last word, the empty token will disappear in the expansion
+ # Readd it
+ if test (commandline -ct) = ""
+ set COMP_CWORD (math $COMP_CWORD + 1)
+ set COMP_LINE $COMP_LINE ""
+ end
+ command npm completion -- $COMP_LINE 2>/dev/null
+ end
+end
+
+# flush out what ships with fish
+complete -e npm
diff --git a/lib/utils/completion/installed-deep.js b/lib/utils/completion/installed-deep.js
deleted file mode 100644
index 7098d81fe7b49..0000000000000
--- a/lib/utils/completion/installed-deep.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const { resolve } = require('path')
-const Arborist = require('@npmcli/arborist')
-const localeCompare = require('@isaacs/string-locale-compare')('en')
-
-const installedDeep = async (npm) => {
- const {
- depth,
- global,
- prefix,
- workspacesEnabled,
- } = npm.flatOptions
-
- const getValues = (tree) =>
- [...tree.inventory.values()]
- .filter(i => i.location !== '' && !i.isRoot)
- .map(i => {
- return i
- })
- .filter(i => (i.depth - 1) <= depth)
- .sort((a, b) => (a.depth - b.depth) || localeCompare(a.name, b.name))
-
- const res = new Set()
- const gArb = new Arborist({
- global: true,
- path: resolve(npm.globalDir, '..'),
- workspacesEnabled,
- })
- const gTree = await gArb.loadActual({ global: true })
-
- for (const node of getValues(gTree)) {
- res.add(global ? node.name : [node.name, '-g'])
- }
-
- if (!global) {
- const arb = new Arborist({ global: false, path: prefix, workspacesEnabled })
- const tree = await arb.loadActual()
- for (const node of getValues(tree)) {
- res.add(node.name)
- }
- }
-
- return [...res]
-}
-
-module.exports = installedDeep
diff --git a/lib/utils/config/definition.js b/lib/utils/config/definition.js
deleted file mode 100644
index f88d8334cf01f..0000000000000
--- a/lib/utils/config/definition.js
+++ /dev/null
@@ -1,251 +0,0 @@
-// class that describes a config key we know about
-// this keeps us from defining a config key and not
-// providing a default, description, etc.
-//
-// TODO: some kind of categorization system, so we can
-// say "these are for registry access", "these are for
-// version resolution" etc.
-
-const required = ['type', 'description', 'default', 'key']
-
-const allowed = [
- 'default',
- 'defaultDescription',
- 'deprecated',
- 'description',
- 'flatten',
- 'hint',
- 'key',
- 'short',
- 'type',
- 'typeDescription',
- 'usage',
- 'envExport',
-]
-
-const {
- typeDefs: {
- semver: { type: semver },
- Umask: { type: Umask },
- url: { type: url },
- path: { type: path },
- },
-} = require('@npmcli/config')
-
-class Definition {
- constructor (key, def) {
- this.key = key
- // if it's set falsey, don't export it, otherwise we do by default
- this.envExport = true
- Object.assign(this, def)
- this.validate()
- if (!this.defaultDescription) {
- this.defaultDescription = describeValue(this.default)
- }
- if (!this.typeDescription) {
- this.typeDescription = describeType(this.type)
- }
- // hint is only used for non-boolean values
- if (!this.hint) {
- if (this.type === Number) {
- this.hint = ''
- } else {
- this.hint = `<${this.key}>`
- }
- }
- if (!this.usage) {
- this.usage = describeUsage(this)
- }
- }
-
- validate () {
- for (const req of required) {
- if (!Object.prototype.hasOwnProperty.call(this, req)) {
- throw new Error(`config lacks ${req}: ${this.key}`)
- }
- }
- if (!this.key) {
- throw new Error(`config lacks key: ${this.key}`)
- }
- for (const field of Object.keys(this)) {
- if (!allowed.includes(field)) {
- throw new Error(`config defines unknown field ${field}: ${this.key}`)
- }
- }
- }
-
- // a textual description of this config, suitable for help output
- describe () {
- const description = unindent(this.description)
- const noEnvExport = this.envExport
- ? ''
- : `
-This value is not exported to the environment for child processes.
-`
- const deprecated = !this.deprecated ? '' : `* DEPRECATED: ${unindent(this.deprecated)}\n`
- return wrapAll(`#### \`${this.key}\`
-
-* Default: ${unindent(this.defaultDescription)}
-* Type: ${unindent(this.typeDescription)}
-${deprecated}
-${description}
-${noEnvExport}`)
- }
-}
-
-const describeUsage = def => {
- let key = ''
-
- // Single type
- if (!Array.isArray(def.type)) {
- if (def.short) {
- key = `-${def.short}|`
- }
-
- if (def.type === Boolean && def.default !== false) {
- key = `${key}--no-${def.key}`
- } else {
- key = `${key}--${def.key}`
- }
-
- if (def.type !== Boolean) {
- key = `${key} ${def.hint}`
- }
-
- return key
- }
-
- key = `--${def.key}`
- if (def.short) {
- key = `-${def.short}|--${def.key}`
- }
-
- // Multiple types
- let types = def.type
- const multiple = types.includes(Array)
- const bool = types.includes(Boolean)
-
- // null type means optional and doesn't currently affect usage output since
- // all non-optional params have defaults so we render everything as optional
- types = types.filter(t => t !== null && t !== Array && t !== Boolean)
-
- if (!types.length) {
- return key
- }
-
- let description
- if (!types.some(t => typeof t !== 'string')) {
- // Specific values, use specifics given
- description = `<${types.filter(d => d).join('|')}>`
- } else {
- // Generic values, use hint
- description = def.hint
- }
-
- if (bool) {
- // Currently none of our multi-type configs with boolean values default to
- // false so all their hints should show `--no-`, if we ever add ones that
- // default to false we can branch the logic here
- key = `--no-${def.key}|${key}`
- }
-
- const usage = `${key} ${description}`
- if (multiple) {
- return `${usage} [${usage} ...]`
- } else {
- return usage
- }
-}
-
-const describeType = type => {
- if (Array.isArray(type)) {
- const descriptions = type.filter(t => t !== Array).map(t => describeType(t))
-
- // [a] => "a"
- // [a, b] => "a or b"
- // [a, b, c] => "a, b, or c"
- // [a, Array] => "a (can be set multiple times)"
- // [a, Array, b] => "a or b (can be set multiple times)"
- const last = descriptions.length > 1 ? [descriptions.pop()] : []
- const oxford = descriptions.length > 1 ? ', or ' : ' or '
- const words = [descriptions.join(', ')].concat(last).join(oxford)
- const multiple = type.includes(Array) ? ' (can be set multiple times)' : ''
- return `${words}${multiple}`
- }
-
- // Note: these are not quite the same as the description printed
- // when validation fails. In that case, we want to give the user
- // a bit more information to help them figure out what's wrong.
- switch (type) {
- case String:
- return 'String'
- case Number:
- return 'Number'
- case Umask:
- return 'Octal numeric string in range 0000..0777 (0..511)'
- case Boolean:
- return 'Boolean'
- case Date:
- return 'Date'
- case path:
- return 'Path'
- case semver:
- return 'SemVer string'
- case url:
- return 'URL'
- default:
- return describeValue(type)
- }
-}
-
-// if it's a string, quote it. otherwise, just cast to string.
-const describeValue = val => (typeof val === 'string' ? JSON.stringify(val) : String(val))
-
-const unindent = s => {
- // get the first \n followed by a bunch of spaces, and pluck off
- // that many spaces from the start of every line.
- const match = s.match(/\n +/)
- return !match ? s.trim() : s.split(match[0]).join('\n').trim()
-}
-
-const wrap = s => {
- const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80) - 5
- return unindent(s)
- .split(/[ \n]+/)
- .reduce((left, right) => {
- const last = left.split('\n').pop()
- const join = last.length && last.length + right.length > cols ? '\n' : ' '
- return left + join + right
- })
-}
-
-const wrapAll = s => {
- let inCodeBlock = false
- return s
- .split('\n\n')
- .map(block => {
- if (inCodeBlock || block.startsWith('```')) {
- inCodeBlock = !block.endsWith('```')
- return block
- }
-
- if (block.charAt(0) === '*') {
- return (
- '* ' +
- block
- .slice(1)
- .trim()
- .split('\n* ')
- .map(li => {
- return wrap(li).replace(/\n/g, '\n ')
- })
- .join('\n* ')
- )
- } else {
- return wrap(block)
- }
- })
- .join('\n\n')
-}
-
-module.exports = Definition
diff --git a/lib/utils/config/definitions.js b/lib/utils/config/definitions.js
deleted file mode 100644
index dd3d9946af819..0000000000000
--- a/lib/utils/config/definitions.js
+++ /dev/null
@@ -1,2347 +0,0 @@
-const definitions = {}
-module.exports = definitions
-
-const Definition = require('./definition.js')
-
-const { version: npmVersion } = require('../../../package.json')
-const ciInfo = require('ci-info')
-const querystring = require('querystring')
-const { isWindows } = require('../is-windows.js')
-const { join } = require('path')
-
-// used by cafile flattening to flatOptions.ca
-const fs = require('fs')
-const maybeReadFile = file => {
- try {
- return fs.readFileSync(file, 'utf8')
- } catch (er) {
- if (er.code !== 'ENOENT') {
- throw er
- }
- return null
- }
-}
-
-const buildOmitList = obj => {
- const include = obj.include || []
- const omit = obj.omit || []
-
- const only = obj.only
- if (/^prod(uction)?$/.test(only) || obj.production) {
- omit.push('dev')
- } else if (obj.production === false) {
- include.push('dev')
- }
-
- if (/^dev/.test(obj.also)) {
- include.push('dev')
- }
-
- if (obj.dev) {
- include.push('dev')
- }
-
- if (obj.optional === false) {
- omit.push('optional')
- } else if (obj.optional === true) {
- include.push('optional')
- }
-
- obj.omit = [...new Set(omit)].filter(type => !include.includes(type))
- obj.include = [...new Set(include)]
-
- if (obj.omit.includes('dev')) {
- process.env.NODE_ENV = 'production'
- }
-
- return obj.omit
-}
-
-const editor = process.env.EDITOR ||
- process.env.VISUAL ||
- (isWindows ? `${process.env.SYSTEMROOT}\\notepad.exe` : 'vi')
-
-const shell = isWindows ? process.env.ComSpec || 'cmd'
- : process.env.SHELL || 'sh'
-
-const { tmpdir, networkInterfaces } = require('os')
-const getLocalAddresses = () => {
- try {
- return Object.values(networkInterfaces()).map(
- int => int.map(({ address }) => address)
- ).reduce((set, addrs) => set.concat(addrs), [null])
- } catch (e) {
- return [null]
- }
-}
-
-const unicode = /UTF-?8$/i.test(
- process.env.LC_ALL ||
- process.env.LC_CTYPE ||
- process.env.LANG
-)
-
-// use LOCALAPPDATA on Windows, if set
-// https://github.com/npm/cli/pull/899
-const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~'
-const cacheExtra = isWindows ? 'npm-cache' : '.npm'
-const cache = `${cacheRoot}/${cacheExtra}`
-
-const Config = require('@npmcli/config')
-
-// TODO: refactor these type definitions so that they are less
-// weird to pull out of the config module.
-// TODO: use better type definition/validation API, nopt's is so weird.
-const {
- typeDefs: {
- semver: { type: semver },
- Umask: { type: Umask },
- url: { type: url },
- path: { type: path },
- },
-} = Config
-
-const define = (key, def) => {
- /* istanbul ignore if - this should never happen, prevents mistakes below */
- if (definitions[key]) {
- throw new Error(`defining key more than once: ${key}`)
- }
- definitions[key] = new Definition(key, def)
-}
-
-// basic flattening function, just copy it over camelCase
-const flatten = (key, obj, flatOptions) => {
- const camel = key.replace(/-([a-z])/g, (_0, _1) => _1.toUpperCase())
- flatOptions[camel] = obj[key]
-}
-
-// TODO:
-// Instead of having each definition provide a flatten method,
-// provide the (?list of?) flat option field(s?) that it impacts.
-// When that config is set, we mark the relevant flatOption fields
-// dirty. Then, a getter for that field defines how we actually
-// set it.
-//
-// So, `save-dev`, `save-optional`, `save-prod`, et al would indicate
-// that they affect the `saveType` flat option. Then the config.flat
-// object has a `get saveType () { ... }` that looks at the "real"
-// config settings from files etc and returns the appropriate value.
-//
-// Getters will also (maybe?) give us a hook to audit flat option
-// usage, so we can document and group these more appropriately.
-//
-// This will be a problem with cases where we currently do:
-// const opts = { ...npm.flatOptions, foo: 'bar' }, but we can maybe
-// instead do `npm.config.set('foo', 'bar')` prior to passing the
-// config object down where it needs to go.
-//
-// This way, when we go hunting for "where does saveType come from anyway!?"
-// while fixing some Arborist bug, we won't have to hunt through too
-// many places.
-
-// Define all config keys we know about
-
-define('_auth', {
- default: null,
- type: [null, String],
- description: `
- A basic-auth string to use when authenticating against the npm registry.
- This will ONLY be used to authenticate against the npm registry. For other
- registries you will need to scope it like "//other-registry.tld/:_auth"
-
- Warning: This should generally not be set via a command-line option. It
- is safer to use a registry-provided authentication bearer token stored in
- the ~/.npmrc file by running \`npm login\`.
- `,
- flatten,
-})
-
-define('access', {
- default: null,
- defaultDescription: `
- 'public' for new packages, existing packages it will not change the current level
- `,
- type: [null, 'restricted', 'public'],
- description: `
- If do not want your scoped package to be publicly viewable (and
- installable) set \`--access=restricted\`.
-
- Unscoped packages can not be set to \`restricted\`.
-
- Note: This defaults to not changing the current access level for existing
- packages. Specifying a value of \`restricted\` or \`public\` during
- publish will change the access for an existing package the same way that
- \`npm access set status\` would.
- `,
- flatten,
-})
-
-define('all', {
- default: false,
- type: Boolean,
- short: 'a',
- description: `
- When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show
- all outdated or installed packages, rather than only those directly
- depended upon by the current project.
- `,
- flatten,
-})
-
-define('allow-same-version', {
- default: false,
- type: Boolean,
- description: `
- Prevents throwing an error when \`npm version\` is used to set the new
- version to the same value as the current version.
- `,
- flatten,
-})
-
-define('also', {
- default: null,
- type: [null, 'dev', 'development'],
- description: `
- When set to \`dev\` or \`development\`, this is an alias for
- \`--include=dev\`.
- `,
- deprecated: 'Please use --include=dev instead.',
- flatten (key, obj, flatOptions) {
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('audit', {
- default: true,
- type: Boolean,
- description: `
- When "true" submit audit reports alongside the current npm command to the
- default registry and all registries configured for scopes. See the
- documentation for [\`npm audit\`](/commands/npm-audit) for details on what
- is submitted.
- `,
- flatten,
-})
-
-define('audit-level', {
- default: null,
- type: [null, 'info', 'low', 'moderate', 'high', 'critical', 'none'],
- description: `
- The minimum level of vulnerability for \`npm audit\` to exit with
- a non-zero exit code.
- `,
- flatten,
-})
-
-define('auth-type', {
- default: 'web',
- type: ['legacy', 'web'],
- description: `
- What authentication strategy to use with \`login\`.
- `,
- flatten,
-})
-
-define('before', {
- default: null,
- type: [null, Date],
- description: `
- If passed to \`npm install\`, will rebuild the npm tree such that only
- versions that were available **on or before** the \`--before\` time get
- installed. If there's no versions available for the current set of
- direct dependencies, the command will error.
-
- If the requested version is a \`dist-tag\` and the given tag does not
- pass the \`--before\` filter, the most recent version less than or equal
- to that tag will be used. For example, \`foo@latest\` might install
- \`foo@1.2\` even though \`latest\` is \`2.0\`.
- `,
- flatten,
-})
-
-define('bin-links', {
- default: true,
- type: Boolean,
- description: `
- Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package
- executables.
-
- Set to false to have it not do this. This can be used to work around the
- fact that some file systems don't support symlinks, even on ostensibly
- Unix systems.
- `,
- flatten,
-})
-
-define('browser', {
- default: null,
- defaultDescription: `
- OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\`
- `,
- type: [null, Boolean, String],
- description: `
- The browser that is called by npm commands to open websites.
-
- Set to \`false\` to suppress browser behavior and instead print urls to
- terminal.
-
- Set to \`true\` to use default system URL opener.
- `,
- flatten,
-})
-
-define('ca', {
- default: null,
- type: [null, String, Array],
- description: `
- The Certificate Authority signing certificate that is trusted for SSL
- connections to the registry. Values should be in PEM format (Windows
- calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the
- string "\\n". For example:
-
- \`\`\`ini
- ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----"
- \`\`\`
-
- Set to \`null\` to only allow "known" registrars, or to a specific CA
- cert to trust only that specific signing authority.
-
- Multiple CAs can be trusted by specifying an array of certificates:
-
- \`\`\`ini
- ca[]="..."
- ca[]="..."
- \`\`\`
-
- See also the \`strict-ssl\` config.
- `,
- flatten,
-})
-
-define('cache', {
- default: cache,
- defaultDescription: `
- Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\`
- `,
- type: path,
- description: `
- The location of npm's cache directory.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.cache = join(obj.cache, '_cacache')
- flatOptions.npxCache = join(obj.cache, '_npx')
- },
-})
-
-define('cache-max', {
- default: Infinity,
- type: Number,
- description: `
- \`--cache-max=0\` is an alias for \`--prefer-online\`
- `,
- deprecated: `
- This option has been deprecated in favor of \`--prefer-online\`
- `,
- flatten (key, obj, flatOptions) {
- if (obj[key] <= 0) {
- flatOptions.preferOnline = true
- }
- },
-})
-
-define('cache-min', {
- default: 0,
- type: Number,
- description: `
- \`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`.
- `,
- deprecated: `
- This option has been deprecated in favor of \`--prefer-offline\`.
- `,
- flatten (key, obj, flatOptions) {
- if (obj[key] >= 9999) {
- flatOptions.preferOffline = true
- }
- },
-})
-
-define('cafile', {
- default: null,
- type: path,
- description: `
- A path to a file containing one or multiple Certificate Authority signing
- certificates. Similar to the \`ca\` setting, but allows for multiple
- CA's, as well as for the CA information to be stored in a file on disk.
- `,
- flatten (key, obj, flatOptions) {
- // always set to null in defaults
- if (!obj.cafile) {
- return
- }
-
- const raw = maybeReadFile(obj.cafile)
- if (!raw) {
- return
- }
-
- const delim = '-----END CERTIFICATE-----'
- flatOptions.ca = raw.replace(/\r\n/g, '\n').split(delim)
- .filter(section => section.trim())
- .map(section => section.trimLeft() + delim)
- },
-})
-
-define('call', {
- default: '',
- type: String,
- short: 'c',
- description: `
- Optional companion option for \`npm exec\`, \`npx\` that allows for
- specifying a custom command to be run along with the installed packages.
-
- \`\`\`bash
- npm exec --package yo --package generator-node --call "yo node"
- \`\`\`
- `,
- flatten,
-})
-
-define('cert', {
- default: null,
- type: [null, String],
- description: `
- A client certificate to pass when accessing the registry. Values should
- be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with
- newlines replaced by the string "\\n". For example:
-
- \`\`\`ini
- cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----"
- \`\`\`
-
- It is _not_ the path to a certificate file, though you can set a registry-scoped
- "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem".
- `,
- deprecated: `
- \`key\` and \`cert\` are no longer used for most registry operations.
- Use registry scoped \`keyfile\` and \`certfile\` instead.
- Example:
- //other-registry.tld/:keyfile=/path/to/key.pem
- //other-registry.tld/:certfile=/path/to/cert.crt
- `,
- flatten,
-})
-
-define('ci-name', {
- default: ciInfo.name ? ciInfo.name.toLowerCase().split(' ').join('-') : null,
- defaultDescription: `
- The name of the current CI system, or \`null\` when not on a known CI
- platform.
- `,
- type: [null, String],
- description: `
- The name of a continuous integration system. If not set explicitly, npm
- will detect the current CI environment using the
- [\`ci-info\`](http://npm.im/ci-info) module.
- `,
- flatten,
-})
-
-define('cidr', {
- default: null,
- type: [null, String, Array],
- description: `
- This is a list of CIDR address to be used when configuring limited access
- tokens with the \`npm token create\` command.
- `,
- flatten,
-})
-
-// This should never be directly used, the flattened value is the derived value
-// and is sent to other modules, and is also exposed as `npm.color` for use
-// inside npm itself.
-define('color', {
- default: !process.env.NO_COLOR || process.env.NO_COLOR === '0',
- usage: '--color|--no-color|--color always',
- defaultDescription: `
- true unless the NO_COLOR environ is set to something other than '0'
- `,
- type: ['always', Boolean],
- description: `
- If false, never shows colors. If \`"always"\` then always shows colors.
- If true, then only prints color codes for tty file descriptors.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.color = !obj.color ? false
- : obj.color === 'always' ? true
- : !!process.stdout.isTTY
- flatOptions.logColor = !obj.color ? false
- : obj.color === 'always' ? true
- : !!process.stderr.isTTY
- },
-})
-
-define('commit-hooks', {
- default: true,
- type: Boolean,
- description: `
- Run git commit hooks when using the \`npm version\` command.
- `,
- flatten,
-})
-
-define('depth', {
- default: null,
- defaultDescription: `
- \`Infinity\` if \`--all\` is set, otherwise \`1\`
- `,
- type: [null, Number],
- description: `
- The depth to go when recursing packages for \`npm ls\`.
-
- If not set, \`npm ls\` will show only the immediate dependencies of the
- root project. If \`--all\` is set, then npm will show all dependencies
- by default.
- `,
- flatten,
-})
-
-define('description', {
- default: true,
- type: Boolean,
- usage: '--no-description',
- description: `
- Show the description in \`npm search\`
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.search = flatOptions.search || { limit: 20 }
- flatOptions.search[key] = obj[key]
- },
-})
-
-define('dev', {
- default: false,
- type: Boolean,
- description: `
- Alias for \`--include=dev\`.
- `,
- deprecated: 'Please use --include=dev instead.',
- flatten (key, obj, flatOptions) {
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('diff', {
- default: [],
- hint: '',
- type: [String, Array],
- description: `
- Define arguments to compare in \`npm diff\`.
- `,
- flatten,
-})
-
-define('diff-ignore-all-space', {
- default: false,
- type: Boolean,
- description: `
- Ignore whitespace when comparing lines in \`npm diff\`.
- `,
- flatten,
-})
-
-define('diff-name-only', {
- default: false,
- type: Boolean,
- description: `
- Prints only filenames when using \`npm diff\`.
- `,
- flatten,
-})
-
-define('diff-no-prefix', {
- default: false,
- type: Boolean,
- description: `
- Do not show any source or destination prefix in \`npm diff\` output.
-
- Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and
- \`--diff-dst-prefix\` configs.
- `,
- flatten,
-})
-
-define('diff-dst-prefix', {
- default: 'b/',
- hint: '',
- type: String,
- description: `
- Destination prefix to be used in \`npm diff\` output.
- `,
- flatten,
-})
-
-define('diff-src-prefix', {
- default: 'a/',
- hint: '',
- type: String,
- description: `
- Source prefix to be used in \`npm diff\` output.
- `,
- flatten,
-})
-
-define('diff-text', {
- default: false,
- type: Boolean,
- description: `
- Treat all files as text in \`npm diff\`.
- `,
- flatten,
-})
-
-define('diff-unified', {
- default: 3,
- type: Number,
- description: `
- The number of lines of context to print in \`npm diff\`.
- `,
- flatten,
-})
-
-define('dry-run', {
- default: false,
- type: Boolean,
- description: `
- Indicates that you don't want npm to make any changes and that it should
- only report what it would have done. This can be passed into any of the
- commands that modify your local installation, eg, \`install\`,
- \`update\`, \`dedupe\`, \`uninstall\`, as well as \`pack\` and
- \`publish\`.
-
- Note: This is NOT honored by other network related commands, eg
- \`dist-tags\`, \`owner\`, etc.
- `,
- flatten,
-})
-
-define('editor', {
- default: editor,
- defaultDescription: `
- The EDITOR or VISUAL environment variables, or '%SYSTEMROOT%\\notepad.exe' on Windows,
- or 'vi' on Unix systems
- `,
- type: String,
- description: `
- The command to run for \`npm edit\` and \`npm config edit\`.
- `,
- flatten,
-})
-
-define('engine-strict', {
- default: false,
- type: Boolean,
- description: `
- If set to true, then npm will stubbornly refuse to install (or even
- consider installing) any package that claims to not be compatible with
- the current Node.js version.
-
- This can be overridden by setting the \`--force\` flag.
- `,
- flatten,
-})
-
-define('fetch-retries', {
- default: 2,
- type: Number,
- description: `
- The "retries" config for the \`retry\` module to use when fetching
- packages from the registry.
-
- npm will retry idempotent read requests to the registry in the case
- of network failures or 5xx HTTP errors.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.retry = flatOptions.retry || {}
- flatOptions.retry.retries = obj[key]
- },
-})
-
-define('fetch-retry-factor', {
- default: 10,
- type: Number,
- description: `
- The "factor" config for the \`retry\` module to use when fetching
- packages.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.retry = flatOptions.retry || {}
- flatOptions.retry.factor = obj[key]
- },
-})
-
-define('fetch-retry-maxtimeout', {
- default: 60000,
- defaultDescription: '60000 (1 minute)',
- type: Number,
- description: `
- The "maxTimeout" config for the \`retry\` module to use when fetching
- packages.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.retry = flatOptions.retry || {}
- flatOptions.retry.maxTimeout = obj[key]
- },
-})
-
-define('fetch-retry-mintimeout', {
- default: 10000,
- defaultDescription: '10000 (10 seconds)',
- type: Number,
- description: `
- The "minTimeout" config for the \`retry\` module to use when fetching
- packages.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.retry = flatOptions.retry || {}
- flatOptions.retry.minTimeout = obj[key]
- },
-})
-
-define('fetch-timeout', {
- default: 5 * 60 * 1000,
- defaultDescription: `${5 * 60 * 1000} (5 minutes)`,
- type: Number,
- description: `
- The maximum amount of time to wait for HTTP requests to complete.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.timeout = obj[key]
- },
-})
-
-define('force', {
- default: false,
- type: Boolean,
- short: 'f',
- description: `
- Removes various protections against unfortunate side effects, common
- mistakes, unnecessary performance degradation, and malicious input.
-
- * Allow clobbering non-npm files in global installs.
- * Allow the \`npm version\` command to work on an unclean git repository.
- * Allow deleting the cache folder with \`npm cache clean\`.
- * Allow installing packages that have an \`engines\` declaration
- requiring a different version of npm.
- * Allow installing packages that have an \`engines\` declaration
- requiring a different version of \`node\`, even if \`--engine-strict\`
- is enabled.
- * Allow \`npm audit fix\` to install modules outside your stated
- dependency range (including SemVer-major changes).
- * Allow unpublishing all versions of a published package.
- * Allow conflicting peerDependencies to be installed in the root project.
- * Implicitly set \`--yes\` during \`npm init\`.
- * Allow clobbering existing values in \`npm pkg\`
- * Allow unpublishing of entire packages (not just a single version).
-
- If you don't have a clear idea of what you want to do, it is strongly
- recommended that you do not use this option!
- `,
- flatten,
-})
-
-define('foreground-scripts', {
- default: false,
- type: Boolean,
- description: `
- Run all build scripts (ie, \`preinstall\`, \`install\`, and
- \`postinstall\`) scripts for installed packages in the foreground
- process, sharing standard input, output, and error with the main npm
- process.
-
- Note that this will generally make installs run slower, and be much
- noisier, but can be useful for debugging.
- `,
- flatten,
-})
-
-define('format-package-lock', {
- default: true,
- type: Boolean,
- description: `
- Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human
- readable file.
- `,
- flatten,
-})
-
-define('fund', {
- default: true,
- type: Boolean,
- description: `
- When "true" displays the message at the end of each \`npm install\`
- acknowledging the number of dependencies looking for funding.
- See [\`npm fund\`](/commands/npm-fund) for details.
- `,
- flatten,
-})
-
-define('git', {
- default: 'git',
- type: String,
- description: `
- The command to use for git commands. If git is installed on the
- computer, but is not in the \`PATH\`, then set this to the full path to
- the git binary.
- `,
- flatten,
-})
-
-define('git-tag-version', {
- default: true,
- type: Boolean,
- description: `
- Tag the commit when using the \`npm version\` command. Setting this to
- false results in no commit being made at all.
- `,
- flatten,
-})
-
-define('global', {
- default: false,
- type: Boolean,
- short: 'g',
- description: `
- Operates in "global" mode, so that packages are installed into the
- \`prefix\` folder instead of the current working directory. See
- [folders](/configuring-npm/folders) for more on the differences in
- behavior.
-
- * packages are installed into the \`{prefix}/lib/node_modules\` folder,
- instead of the current working directory.
- * bin files are linked to \`{prefix}/bin\`
- * man pages are linked to \`{prefix}/share/man\`
- `,
- flatten: (key, obj, flatOptions) => {
- flatten(key, obj, flatOptions)
- if (flatOptions.global) {
- flatOptions.location = 'global'
- }
- },
-})
-
-// the globalconfig has its default defined outside of this module
-define('globalconfig', {
- type: path,
- default: '',
- defaultDescription: `
- The global --prefix setting plus 'etc/npmrc'. For example,
- '/usr/local/etc/npmrc'
- `,
- description: `
- The config file to read for global config options.
- `,
- flatten,
-})
-
-define('global-style', {
- default: false,
- type: Boolean,
- description: `
- Only install direct dependencies in the top level \`node_modules\`,
- but hoist on deeper dependendencies.
- Sets \`--install-strategy=shallow\`.
- `,
- deprecated: `
- This option has been deprecated in favor of \`--install-strategy=shallow\`
- `,
- flatten (key, obj, flatOptions) {
- if (obj[key]) {
- obj['install-strategy'] = 'shallow'
- flatOptions.installStrategy = 'shallow'
- }
- },
-})
-
-define('heading', {
- default: 'npm',
- type: String,
- description: `
- The string that starts all the debugging log output.
- `,
- flatten,
-})
-
-define('https-proxy', {
- default: null,
- type: [null, url],
- description: `
- A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or
- \`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables
- are set, proxy settings will be honored by the underlying
- \`make-fetch-happen\` library.
- `,
- flatten,
-})
-
-define('if-present', {
- default: false,
- type: Boolean,
- envExport: false,
- description: `
- If true, npm will not exit with an error code when \`run-script\` is
- invoked for a script that isn't defined in the \`scripts\` section of
- \`package.json\`. This option can be used when it's desirable to
- optionally run a script when it's present and fail if the script fails.
- This is useful, for example, when running scripts that may only apply for
- some builds in an otherwise generic CI setup.
- `,
- flatten,
-})
-
-define('ignore-scripts', {
- default: false,
- type: Boolean,
- description: `
- If true, npm does not run scripts specified in package.json files.
-
- Note that commands explicitly intended to run a particular script, such
- as \`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm
- run-script\` will still run their intended script if \`ignore-scripts\` is
- set, but they will *not* run any pre- or post-scripts.
- `,
- flatten,
-})
-
-define('include', {
- default: [],
- type: [Array, 'prod', 'dev', 'optional', 'peer'],
- description: `
- Option that allows for defining which types of dependencies to install.
-
- This is the inverse of \`--omit=\`.
-
- Dependency types specified in \`--include\` will not be omitted,
- regardless of the order in which omit/include are specified on the
- command-line.
- `,
- flatten (key, obj, flatOptions) {
- // just call the omit flattener, it reads from obj.include
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('include-staged', {
- default: false,
- type: Boolean,
- description: `
- Allow installing "staged" published packages, as defined by [npm RFC PR
- #92](https://github.com/npm/rfcs/pull/92).
-
- This is experimental, and not implemented by the npm public registry.
- `,
- flatten,
-})
-
-define('include-workspace-root', {
- default: false,
- type: Boolean,
- envExport: false,
- description: `
- Include the workspace root when workspaces are enabled for a command.
-
- When false, specifying individual workspaces via the \`workspace\` config,
- or all workspaces via the \`workspaces\` flag, will cause npm to operate only
- on the specified workspaces, and not on the root project.
- `,
- flatten,
-})
-
-define('init-author-email', {
- default: '',
- type: String,
- description: `
- The value \`npm init\` should use by default for the package author's
- email.
- `,
-})
-
-define('init-author-name', {
- default: '',
- type: String,
- description: `
- The value \`npm init\` should use by default for the package author's name.
- `,
-})
-
-define('init-author-url', {
- default: '',
- type: ['', url],
- description: `
- The value \`npm init\` should use by default for the package author's homepage.
- `,
-})
-
-define('init-license', {
- default: 'ISC',
- type: String,
- description: `
- The value \`npm init\` should use by default for the package license.
- `,
-})
-
-define('init-module', {
- default: '~/.npm-init.js',
- type: path,
- description: `
- A module that will be loaded by the \`npm init\` command. See the
- documentation for the
- [init-package-json](https://github.com/npm/init-package-json) module for
- more information, or [npm init](/commands/npm-init).
- `,
-})
-
-define('init-version', {
- default: '1.0.0',
- type: semver,
- description: `
- The value that \`npm init\` should use by default for the package
- version number, if not already set in package.json.
- `,
-})
-
-// these "aliases" are historically supported in .npmrc files, unfortunately
-// They should be removed in a future npm version.
-define('init.author.email', {
- default: '',
- type: String,
- deprecated: `
- Use \`--init-author-email\` instead.`,
- description: `
- Alias for \`--init-author-email\`
- `,
-})
-
-define('init.author.name', {
- default: '',
- type: String,
- deprecated: `
- Use \`--init-author-name\` instead.
- `,
- description: `
- Alias for \`--init-author-name\`
- `,
-})
-
-define('init.author.url', {
- default: '',
- type: ['', url],
- deprecated: `
- Use \`--init-author-url\` instead.
- `,
- description: `
- Alias for \`--init-author-url\`
- `,
-})
-
-define('init.license', {
- default: 'ISC',
- type: String,
- deprecated: `
- Use \`--init-license\` instead.
- `,
- description: `
- Alias for \`--init-license\`
- `,
-})
-
-define('init.module', {
- default: '~/.npm-init.js',
- type: path,
- deprecated: `
- Use \`--init-module\` instead.
- `,
- description: `
- Alias for \`--init-module\`
- `,
-})
-
-define('init.version', {
- default: '1.0.0',
- type: semver,
- deprecated: `
- Use \`--init-version\` instead.
- `,
- description: `
- Alias for \`--init-version\`
- `,
-})
-
-define('install-links', {
- default: true,
- type: Boolean,
- description: `
- When set file: protocol dependencies will be packed and installed as
- regular dependencies instead of creating a symlink. This option has
- no effect on workspaces.
- `,
- flatten,
-})
-
-define('install-strategy', {
- default: 'hoisted',
- type: ['hoisted', 'nested', 'shallow'],
- description: `
- Sets the strategy for installing packages in node_modules.
- hoisted (default): Install non-duplicated in top-level, and duplicated as
- necessary within directory structure.
- nested: (formerly --legacy-bundling) install in place, no hoisting.
- shallow (formerly --global-style) only install direct deps at top-level.
- linked: (coming soon) install in node_modules/.store, link in place,
- unhoisted.
- `,
- flatten,
-})
-
-define('json', {
- default: false,
- type: Boolean,
- description: `
- Whether or not to output JSON data, rather than the normal output.
-
- * In \`npm pkg set\` it enables parsing set values with JSON.parse()
- before saving them to your \`package.json\`.
-
- Not supported by all npm commands.
- `,
- flatten,
-})
-
-define('key', {
- default: null,
- type: [null, String],
- description: `
- A client key to pass when accessing the registry. Values should be in
- PEM format with newlines replaced by the string "\\n". For example:
-
- \`\`\`ini
- key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----"
- \`\`\`
-
- It is _not_ the path to a key file, though you can set a registry-scoped
- "keyfile" path like "//other-registry.tld/:keyfile=/path/to/key.pem".
- `,
- deprecated: `
- \`key\` and \`cert\` are no longer used for most registry operations.
- Use registry scoped \`keyfile\` and \`certfile\` instead.
- Example:
- //other-registry.tld/:keyfile=/path/to/key.pem
- //other-registry.tld/:certfile=/path/to/cert.crt
- `,
- flatten,
-})
-
-define('legacy-bundling', {
- default: false,
- type: Boolean,
- description: `
- Instead of hoisting package installs in \`node_modules\`, install packages
- in the same manner that they are depended on. This may cause very deep
- directory structures and duplicate package installs as there is no
- de-duplicating.
- Sets \`--install-strategy=nested\`.
- `,
- deprecated: `
- This option has been deprecated in favor of \`--install-strategy=nested\`
- `,
- flatten (key, obj, flatOptions) {
- if (obj[key]) {
- obj['install-strategy'] = 'nested'
- flatOptions.installStrategy = 'nested'
- }
- },
-})
-
-define('legacy-peer-deps', {
- default: false,
- type: Boolean,
- description: `
- Causes npm to completely ignore \`peerDependencies\` when building a
- package tree, as in npm versions 3 through 6.
-
- If a package cannot be installed because of overly strict
- \`peerDependencies\` that collide, it provides a way to move forward
- resolving the situation.
-
- This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid
- unpacking \`peerDependencies\` on disk, but will still design a tree such
- that \`peerDependencies\` _could_ be unpacked in a correct place.
-
- Use of \`legacy-peer-deps\` is not recommended, as it will not enforce
- the \`peerDependencies\` contract that meta-dependencies may rely on.
- `,
- flatten,
-})
-
-define('link', {
- default: false,
- type: Boolean,
- description: `
- Used with \`npm ls\`, limiting output to only those packages that are
- linked.
- `,
-})
-
-define('local-address', {
- default: null,
- type: getLocalAddresses(),
- typeDescription: 'IP Address',
- description: `
- The IP address of the local interface to use when making connections to
- the npm registry. Must be IPv4 in versions of Node prior to 0.12.
- `,
- flatten,
-})
-
-define('location', {
- default: 'user',
- short: 'L',
- type: [
- 'global',
- 'user',
- 'project',
- ],
- defaultDescription: `
- "user" unless \`--global\` is passed, which will also set this value to "global"
- `,
- description: `
- When passed to \`npm config\` this refers to which config file to use.
-
- When set to "global" mode, packages are installed into the \`prefix\` folder
- instead of the current working directory. See
- [folders](/configuring-npm/folders) for more on the differences in behavior.
-
- * packages are installed into the \`{prefix}/lib/node_modules\` folder,
- instead of the current working directory.
- * bin files are linked to \`{prefix}/bin\`
- * man pages are linked to \`{prefix}/share/man\`
- `,
- flatten: (key, obj, flatOptions) => {
- flatten(key, obj, flatOptions)
- if (flatOptions.global) {
- flatOptions.location = 'global'
- }
- if (obj.location === 'global') {
- flatOptions.global = true
- }
- },
-})
-
-define('lockfile-version', {
- default: null,
- type: [null, 1, 2, 3, '1', '2', '3'],
- defaultDescription: `
- Version 3 if no lockfile, auto-converting v1 lockfiles to v3, otherwise
- maintain current lockfile version.`,
- description: `
- Set the lockfile format version to be used in package-lock.json and
- npm-shrinkwrap-json files. Possible options are:
-
- 1: The lockfile version used by npm versions 5 and 6. Lacks some data that
- is used during the install, resulting in slower and possibly less
- deterministic installs. Prevents lockfile churn when interoperating with
- older npm versions.
-
- 2: The default lockfile version used by npm version 7 and 8. Includes both
- the version 1 lockfile data and version 3 lockfile data, for maximum
- determinism and interoperability, at the expense of more bytes on disk.
-
- 3: Only the new lockfile information introduced in npm version 7. Smaller
- on disk than lockfile version 2, but not interoperable with older npm
- versions. Ideal if all users are on npm version 7 and higher.
- `,
- flatten: (key, obj, flatOptions) => {
- flatOptions.lockfileVersion = obj[key] && parseInt(obj[key], 10)
- },
-})
-
-define('loglevel', {
- default: 'notice',
- type: [
- 'silent',
- 'error',
- 'warn',
- 'notice',
- 'http',
- 'info',
- 'verbose',
- 'silly',
- ],
- description: `
- What level of logs to report. All logs are written to a debug log,
- with the path to that file printed if the execution of a command fails.
-
- Any logs of a higher level than the setting are shown. The default is
- "notice".
-
- See also the \`foreground-scripts\` config.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.silent = obj[key] === 'silent'
- },
-})
-
-define('logs-dir', {
- default: null,
- type: [null, path],
- defaultDescription: `
- A directory named \`_logs\` inside the cache
-`,
- description: `
- The location of npm's log directory. See [\`npm
- logging\`](/using-npm/logging) for more information.
- `,
-})
-
-define('logs-max', {
- default: 10,
- type: Number,
- description: `
- The maximum number of log files to store.
-
- If set to 0, no log files will be written for the current run.
- `,
-})
-
-define('long', {
- default: false,
- type: Boolean,
- short: 'l',
- description: `
- Show extended information in \`ls\`, \`search\`, and \`help-search\`.
- `,
-})
-
-define('maxsockets', {
- default: 15,
- type: Number,
- description: `
- The maximum number of connections to use per origin (protocol/host/port
- combination).
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.maxSockets = obj[key]
- },
-})
-
-define('message', {
- default: '%s',
- type: String,
- short: 'm',
- description: `
- Commit message which is used by \`npm version\` when creating version commit.
-
- Any "%s" in the message will be replaced with the version number.
- `,
- flatten,
-})
-
-define('node-options', {
- default: null,
- type: [null, String],
- description: `
- Options to pass through to Node.js via the \`NODE_OPTIONS\` environment
- variable. This does not impact how npm itself is executed but it does
- impact how lifecycle scripts are called.
- `,
-})
-
-define('noproxy', {
- default: '',
- defaultDescription: `
- The value of the NO_PROXY environment variable
- `,
- type: [String, Array],
- description: `
- Domain extensions that should bypass any proxies.
-
- Also accepts a comma-delimited string.
- `,
- flatten (key, obj, flatOptions) {
- if (Array.isArray(obj[key])) {
- flatOptions.noProxy = obj[key].join(',')
- } else {
- flatOptions.noProxy = obj[key]
- }
- },
-})
-
-define('offline', {
- default: false,
- type: Boolean,
- description: `
- Force offline mode: no network requests will be done during install. To allow
- the CLI to fill in missing cache data, see \`--prefer-offline\`.
- `,
- flatten,
-})
-
-define('omit', {
- default: process.env.NODE_ENV === 'production' ? ['dev'] : [],
- defaultDescription: `
- 'dev' if the \`NODE_ENV\` environment variable is set to 'production',
- otherwise empty.
- `,
- type: [Array, 'dev', 'optional', 'peer'],
- description: `
- Dependency types to omit from the installation tree on disk.
-
- Note that these dependencies _are_ still resolved and added to the
- \`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just
- not physically installed on disk.
-
- If a package type appears in both the \`--include\` and \`--omit\`
- lists, then it will be included.
-
- If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\`
- environment variable will be set to \`'production'\` for all lifecycle
- scripts.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.omit = buildOmitList(obj)
- },
-})
-
-define('omit-lockfile-registry-resolved', {
- default: false,
- type: Boolean,
- description: `
- This option causes npm to create lock files without a \`resolved\` key for
- registry dependencies. Subsequent installs will need to resolve tarball
- endpoints with the configured registry, likely resulting in a longer install
- time.
- `,
- flatten,
-})
-
-define('only', {
- default: null,
- type: [null, 'prod', 'production'],
- deprecated: `
- Use \`--omit=dev\` to omit dev dependencies from the install.
- `,
- description: `
- When set to \`prod\` or \`production\`, this is an alias for
- \`--omit=dev\`.
- `,
- flatten (key, obj, flatOptions) {
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('optional', {
- default: null,
- type: [null, Boolean],
- deprecated: `
- Use \`--omit=optional\` to exclude optional dependencies, or
- \`--include=optional\` to include them.
-
- Default value does install optional deps unless otherwise omitted.
- `,
- description: `
- Alias for --include=optional or --omit=optional
- `,
- flatten (key, obj, flatOptions) {
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('otp', {
- default: null,
- type: [null, String],
- description: `
- This is a one-time password from a two-factor authenticator. It's needed
- when publishing or changing package permissions with \`npm access\`.
-
- If not set, and a registry response fails with a challenge for a one-time
- password, npm will prompt on the command line for one.
- `,
- flatten,
-})
-
-define('package', {
- default: [],
- hint: '',
- type: [String, Array],
- description: `
- The package or packages to install for [\`npm exec\`](/commands/npm-exec)
- `,
- flatten,
-})
-
-define('package-lock', {
- default: true,
- type: Boolean,
- description: `
- If set to false, then ignore \`package-lock.json\` files when installing.
- This will also prevent _writing_ \`package-lock.json\` if \`save\` is
- true.
-
- This configuration does not affect \`npm ci\`.
- `,
- flatten: (key, obj, flatOptions) => {
- flatten(key, obj, flatOptions)
- if (flatOptions.packageLockOnly) {
- flatOptions.packageLock = true
- }
- },
-})
-
-define('package-lock-only', {
- default: false,
- type: Boolean,
- description: `
- If set to true, the current operation will only use the \`package-lock.json\`,
- ignoring \`node_modules\`.
-
- For \`update\` this means only the \`package-lock.json\` will be updated,
- instead of checking \`node_modules\` and downloading dependencies.
-
- For \`list\` this means the output will be based on the tree described by the
- \`package-lock.json\`, rather than the contents of \`node_modules\`.
- `,
- flatten: (key, obj, flatOptions) => {
- flatten(key, obj, flatOptions)
- if (flatOptions.packageLockOnly) {
- flatOptions.packageLock = true
- }
- },
-})
-
-define('pack-destination', {
- default: '.',
- type: String,
- description: `
- Directory in which \`npm pack\` will save tarballs.
- `,
- flatten,
-})
-
-define('parseable', {
- default: false,
- type: Boolean,
- short: 'p',
- description: `
- Output parseable results from commands that write to standard output. For
- \`npm search\`, this will be tab-separated table format.
- `,
- flatten,
-})
-
-define('prefer-offline', {
- default: false,
- type: Boolean,
- description: `
- If true, staleness checks for cached data will be bypassed, but missing
- data will be requested from the server. To force full offline mode, use
- \`--offline\`.
- `,
- flatten,
-})
-
-define('prefer-online', {
- default: false,
- type: Boolean,
- description: `
- If true, staleness checks for cached data will be forced, making the CLI
- look for updates immediately even for fresh package data.
- `,
- flatten,
-})
-
-// `prefix` has its default defined outside of this module
-define('prefix', {
- type: path,
- short: 'C',
- default: '',
- defaultDescription: `
- In global mode, the folder where the node executable is installed.
- Otherwise, the nearest parent folder containing either a package.json
- file or a node_modules folder.
- `,
- description: `
- The location to install global items. If set on the command line, then
- it forces non-global commands to run in the specified folder.
- `,
-})
-
-define('preid', {
- default: '',
- hint: 'prerelease-id',
- type: String,
- description: `
- The "prerelease identifier" to use as a prefix for the "prerelease" part
- of a semver. Like the \`rc\` in \`1.2.0-rc.8\`.
- `,
- flatten,
-})
-
-define('production', {
- default: null,
- type: [null, Boolean],
- deprecated: 'Use `--omit=dev` instead.',
- description: 'Alias for `--omit=dev`',
- flatten (key, obj, flatOptions) {
- definitions.omit.flatten('omit', obj, flatOptions)
- },
-})
-
-define('progress', {
- default: !ciInfo.isCI,
- defaultDescription: `
- \`true\` unless running in a known CI system
- `,
- type: Boolean,
- description: `
- When set to \`true\`, npm will display a progress bar during time
- intensive operations, if \`process.stderr\` is a TTY.
-
- Set to \`false\` to suppress the progress bar.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.progress = !obj.progress ? false
- : !!process.stderr.isTTY && process.env.TERM !== 'dumb'
- },
-})
-
-define('proxy', {
- default: null,
- type: [null, false, url], // allow proxy to be disabled explicitly
- description: `
- A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or
- \`http_proxy\` environment variables are set, proxy settings will be
- honored by the underlying \`request\` library.
- `,
- flatten,
-})
-
-define('read-only', {
- default: false,
- type: Boolean,
- description: `
- This is used to mark a token as unable to publish when configuring
- limited access tokens with the \`npm token create\` command.
- `,
- flatten,
-})
-
-define('rebuild-bundle', {
- default: true,
- type: Boolean,
- description: `
- Rebuild bundled dependencies after installation.
- `,
- flatten,
-})
-
-define('registry', {
- default: 'https://registry.npmjs.org/',
- type: url,
- description: `
- The base URL of the npm registry.
- `,
- flatten,
-})
-
-define('replace-registry-host', {
- default: 'npmjs',
- hint: ' | hostname',
- type: ['npmjs', 'never', 'always', String],
- description: `
- Defines behavior for replacing the registry host in a lockfile with the
- configured registry.
-
- The default behavior is to replace package dist URLs from the default
- registry (https://registry.npmjs.org) to the configured registry. If set to
- "never", then use the registry value. If set to "always", then replace the
- registry host with the configured host every time.
-
- You may also specify a bare hostname (e.g., "registry.npmjs.org").
- `,
- flatten,
-})
-
-define('save', {
- default: true,
- defaultDescription: `\`true\` unless when using \`npm update\` where it
- defaults to \`false\``,
- usage: '-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle',
- type: Boolean,
- short: 'S',
- description: `
- Save installed packages to a \`package.json\` file as dependencies.
-
- When used with the \`npm rm\` command, removes the dependency from
- \`package.json\`.
-
- Will also prevent writing to \`package-lock.json\` if set to \`false\`.
- `,
- flatten,
-})
-
-define('save-bundle', {
- default: false,
- type: Boolean,
- short: 'B',
- description: `
- If a package would be saved at install time by the use of \`--save\`,
- \`--save-dev\`, or \`--save-optional\`, then also put it in the
- \`bundleDependencies\` list.
-
- Ignored if \`--save-peer\` is set, since peerDependencies cannot be bundled.
- `,
- flatten (key, obj, flatOptions) {
- // XXX update arborist to just ignore it if resulting saveType is peer
- // otherwise this won't have the expected effect:
- //
- // npm config set save-peer true
- // npm i foo --save-bundle --save-prod <-- should bundle
- flatOptions.saveBundle = obj['save-bundle'] && !obj['save-peer']
- },
-})
-
-// XXX: We should really deprecate all these `--save-blah` switches
-// in favor of a single `--save-type` option. The unfortunate shortcut
-// we took for `--save-peer --save-optional` being `--save-type=peerOptional`
-// makes this tricky, and likely a breaking change.
-
-define('save-dev', {
- default: false,
- type: Boolean,
- short: 'D',
- description: `
- Save installed packages to a package.json file as \`devDependencies\`.
- `,
- flatten (key, obj, flatOptions) {
- if (!obj[key]) {
- if (flatOptions.saveType === 'dev') {
- delete flatOptions.saveType
- }
- return
- }
-
- flatOptions.saveType = 'dev'
- },
-})
-
-define('save-exact', {
- default: false,
- type: Boolean,
- short: 'E',
- description: `
- Dependencies saved to package.json will be configured with an exact
- version rather than using npm's default semver range operator.
- `,
- flatten (key, obj, flatOptions) {
- // just call the save-prefix flattener, it reads from obj['save-exact']
- definitions['save-prefix'].flatten('save-prefix', obj, flatOptions)
- },
-})
-
-define('save-optional', {
- default: false,
- type: Boolean,
- short: 'O',
- description: `
- Save installed packages to a package.json file as
- \`optionalDependencies\`.
- `,
- flatten (key, obj, flatOptions) {
- if (!obj[key]) {
- if (flatOptions.saveType === 'optional') {
- delete flatOptions.saveType
- } else if (flatOptions.saveType === 'peerOptional') {
- flatOptions.saveType = 'peer'
- }
- return
- }
-
- if (flatOptions.saveType === 'peerOptional') {
- return
- }
-
- if (flatOptions.saveType === 'peer') {
- flatOptions.saveType = 'peerOptional'
- } else {
- flatOptions.saveType = 'optional'
- }
- },
-})
-
-define('save-peer', {
- default: false,
- type: Boolean,
- description: `
- Save installed packages to a package.json file as \`peerDependencies\`
- `,
- flatten (key, obj, flatOptions) {
- if (!obj[key]) {
- if (flatOptions.saveType === 'peer') {
- delete flatOptions.saveType
- } else if (flatOptions.saveType === 'peerOptional') {
- flatOptions.saveType = 'optional'
- }
- return
- }
-
- if (flatOptions.saveType === 'peerOptional') {
- return
- }
-
- if (flatOptions.saveType === 'optional') {
- flatOptions.saveType = 'peerOptional'
- } else {
- flatOptions.saveType = 'peer'
- }
- },
-})
-
-define('save-prefix', {
- default: '^',
- type: String,
- description: `
- Configure how versions of packages installed to a package.json file via
- \`--save\` or \`--save-dev\` get prefixed.
-
- For example if a package has version \`1.2.3\`, by default its version is
- set to \`^1.2.3\` which allows minor upgrades for that package, but after
- \`npm config set save-prefix='~'\` it would be set to \`~1.2.3\` which
- only allows patch upgrades.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.savePrefix = obj['save-exact'] ? '' : obj['save-prefix']
- obj['save-prefix'] = flatOptions.savePrefix
- },
-})
-
-define('save-prod', {
- default: false,
- type: Boolean,
- short: 'P',
- description: `
- Save installed packages into \`dependencies\` specifically. This is
- useful if a package already exists in \`devDependencies\` or
- \`optionalDependencies\`, but you want to move it to be a non-optional
- production dependency.
-
- This is the default behavior if \`--save\` is true, and neither
- \`--save-dev\` or \`--save-optional\` are true.
- `,
- flatten (key, obj, flatOptions) {
- if (!obj[key]) {
- if (flatOptions.saveType === 'prod') {
- delete flatOptions.saveType
- }
- return
- }
-
- flatOptions.saveType = 'prod'
- },
-})
-
-define('scope', {
- default: '',
- defaultDescription: `
- the scope of the current project, if any, or ""
- `,
- type: String,
- hint: '<@scope>',
- description: `
- Associate an operation with a scope for a scoped registry.
-
- Useful when logging in to or out of a private registry:
-
- \`\`\`
- # log in, linking the scope to the custom registry
- npm login --scope=@mycorp --registry=https://registry.mycorp.com
-
- # log out, removing the link and the auth token
- npm logout --scope=@mycorp
- \`\`\`
-
- This will cause \`@mycorp\` to be mapped to the registry for future
- installation of packages specified according to the pattern
- \`@mycorp/package\`.
-
- This will also cause \`npm init\` to create a scoped package.
-
- \`\`\`
- # accept all defaults, and create a package named "@foo/whatever",
- # instead of just named "whatever"
- npm init --scope=@foo --yes
- \`\`\`
- `,
- flatten (key, obj, flatOptions) {
- const value = obj[key]
- const scope = value && !/^@/.test(value) ? `@${value}` : value
- flatOptions.scope = scope
- // projectScope is kept for compatibility with npm-registry-fetch
- flatOptions.projectScope = scope
- },
-})
-
-define('script-shell', {
- default: null,
- defaultDescription: `
- '/bin/sh' on POSIX systems, 'cmd.exe' on Windows
- `,
- type: [null, String],
- description: `
- The shell to use for scripts run with the \`npm exec\`,
- \`npm run\` and \`npm init \` commands.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.scriptShell = obj[key] || undefined
- },
-})
-
-define('searchexclude', {
- default: '',
- type: String,
- description: `
- Space-separated options that limit the results from search.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.search = flatOptions.search || { limit: 20 }
- flatOptions.search.exclude = obj[key].toLowerCase()
- },
-})
-
-define('searchlimit', {
- default: 20,
- type: Number,
- description: `
- Number of items to limit search results to. Will not apply at all to
- legacy searches.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.search = flatOptions.search || {}
- flatOptions.search.limit = obj[key]
- },
-})
-
-define('searchopts', {
- default: '',
- type: String,
- description: `
- Space-separated options that are always passed to search.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.search = flatOptions.search || { limit: 20 }
- flatOptions.search.opts = querystring.parse(obj[key])
- },
-})
-
-define('searchstaleness', {
- default: 15 * 60,
- type: Number,
- description: `
- The age of the cache, in seconds, before another registry request is made
- if using legacy search endpoint.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.search = flatOptions.search || { limit: 20 }
- flatOptions.search.staleness = obj[key]
- },
-})
-
-define('shell', {
- default: shell,
- defaultDescription: `
- SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows
- `,
- type: String,
- description: `
- The shell to run for the \`npm explore\` command.
- `,
- flatten,
-})
-
-define('shrinkwrap', {
- default: true,
- type: Boolean,
- deprecated: `
- Use the --package-lock setting instead.
- `,
- description: `
- Alias for --package-lock
- `,
- flatten (key, obj, flatOptions) {
- obj['package-lock'] = obj.shrinkwrap
- definitions['package-lock'].flatten('package-lock', obj, flatOptions)
- },
-})
-
-define('sign-git-commit', {
- default: false,
- type: Boolean,
- description: `
- If set to true, then the \`npm version\` command will commit the new
- package version using \`-S\` to add a signature.
-
- Note that git requires you to have set up GPG keys in your git configs
- for this to work properly.
- `,
- flatten,
-})
-
-define('sign-git-tag', {
- default: false,
- type: Boolean,
- description: `
- If set to true, then the \`npm version\` command will tag the version
- using \`-s\` to add a signature.
-
- Note that git requires you to have set up GPG keys in your git configs
- for this to work properly.
- `,
- flatten,
-})
-
-define('strict-peer-deps', {
- default: false,
- type: Boolean,
- description: `
- If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_
- conflicting \`peerDependencies\` will be treated as an install failure,
- even if npm could reasonably guess the appropriate resolution based on
- non-peer dependency relationships.
-
- By default, conflicting \`peerDependencies\` deep in the dependency graph
- will be resolved using the nearest non-peer dependency specification,
- even if doing so will result in some packages receiving a peer dependency
- outside the range set in their package's \`peerDependencies\` object.
-
- When such and override is performed, a warning is printed, explaining the
- conflict and the packages involved. If \`--strict-peer-deps\` is set,
- then this warning is treated as a failure.
- `,
- flatten,
-})
-
-define('strict-ssl', {
- default: true,
- type: Boolean,
- description: `
- Whether or not to do SSL key validation when making requests to the
- registry via https.
-
- See also the \`ca\` config.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.strictSSL = obj[key]
- },
-})
-
-define('tag', {
- default: 'latest',
- type: String,
- description: `
- If you ask npm to install a package and don't tell it a specific version,
- then it will install the specified tag.
-
- Also the tag that is added to the package@version specified by the \`npm
- tag\` command, if no explicit tag is given.
-
- When used by the \`npm diff\` command, this is the tag used to fetch the
- tarball that will be compared with the local files by default.
- `,
- flatten (key, obj, flatOptions) {
- flatOptions.defaultTag = obj[key]
- },
-})
-
-define('tag-version-prefix', {
- default: 'v',
- type: String,
- description: `
- If set, alters the prefix used when tagging a new version when performing
- a version increment using \`npm version\`. To remove the prefix
- altogether, set it to the empty string: \`""\`.
-
- Because other tools may rely on the convention that npm version tags look
- like \`v1.0.0\`, _only use this property if it is absolutely necessary_.
- In particular, use care when overriding this setting for public packages.
- `,
- flatten,
-})
-
-define('timing', {
- default: false,
- type: Boolean,
- description: `
- If true, writes timing information to a process specific json file in
- the cache or \`logs-dir\`. The file name ends with \`-timing.json\`.
-
- You can quickly view it with this [json](https://npm.im/json) command
- line: \`cat ~/.npm/_logs/*-timing.json | npm exec -- json -g\`.
-
- Timing information will also be reported in the terminal. To suppress this
- while still writing the timing file, use \`--silent\`.
- `,
-})
-
-define('tmp', {
- default: tmpdir(),
- defaultDescription: `
- The value returned by the Node.js \`os.tmpdir()\` method
-
- `,
- type: path,
- deprecated: `
- This setting is no longer used. npm stores temporary files in a special
- location in the cache, and they are managed by
- [\`cacache\`](http://npm.im/cacache).
- `,
- description: `
- Historically, the location where temporary files were stored. No longer
- relevant.
- `,
-})
-
-define('umask', {
- default: 0,
- type: Umask,
- description: `
- The "umask" value to use when setting the file creation mode on files and
- folders.
-
- Folders and executables are given a mode which is \`0o777\` masked
- against this value. Other files are given a mode which is \`0o666\`
- masked against this value.
-
- Note that the underlying system will _also_ apply its own umask value to
- files and folders that are created, and npm does not circumvent this, but
- rather adds the \`--umask\` config to it.
-
- Thus, the effective default umask value on most POSIX systems is 0o22,
- meaning that folders and executables are created with a mode of 0o755 and
- other files are created with a mode of 0o644.
- `,
- flatten,
-})
-
-define('unicode', {
- default: unicode,
- defaultDescription: `
- false on windows, true on mac/unix systems with a unicode locale, as
- defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables.
- `,
- type: Boolean,
- description: `
- When set to true, npm uses unicode characters in the tree output. When
- false, it uses ascii characters instead of unicode glyphs.
- `,
- flatten,
-})
-
-define('update-notifier', {
- default: true,
- type: Boolean,
- description: `
- Set to false to suppress the update notification when using an older
- version of npm than the latest.
- `,
-})
-
-define('usage', {
- default: false,
- type: Boolean,
- short: ['?', 'H', 'h'],
- description: `
- Show short usage output about the command specified.
- `,
-})
-
-define('user-agent', {
- default: 'npm/{npm-version} ' +
- 'node/{node-version} ' +
- '{platform} ' +
- '{arch} ' +
- 'workspaces/{workspaces} ' +
- '{ci}',
- type: String,
- description: `
- Sets the User-Agent request header. The following fields are replaced
- with their actual counterparts:
-
- * \`{npm-version}\` - The npm version in use
- * \`{node-version}\` - The Node.js version in use
- * \`{platform}\` - The value of \`process.platform\`
- * \`{arch}\` - The value of \`process.arch\`
- * \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\`
- options are set.
- * \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with
- \`ci/\`, or an empty string if \`ci-name\` is empty.
- `,
- flatten (key, obj, flatOptions) {
- const value = obj[key]
- const ciName = obj['ci-name']
- let inWorkspaces = false
- if (obj.workspaces || obj.workspace && obj.workspace.length) {
- inWorkspaces = true
- }
- flatOptions.userAgent =
- value.replace(/\{node-version\}/gi, process.version)
- .replace(/\{npm-version\}/gi, npmVersion)
- .replace(/\{platform\}/gi, process.platform)
- .replace(/\{arch\}/gi, process.arch)
- .replace(/\{workspaces\}/gi, inWorkspaces)
- .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '')
- .trim()
-
- // We can't clobber the original or else subsequent flattening will fail
- // (i.e. when we change the underlying config values)
- // obj[key] = flatOptions.userAgent
-
- // user-agent is a unique kind of config item that gets set from a template
- // and ends up translated. Because of this, the normal "should we set this
- // to process.env also doesn't work
- process.env.npm_config_user_agent = flatOptions.userAgent
- },
-})
-
-define('userconfig', {
- default: '~/.npmrc',
- type: path,
- description: `
- The location of user-level configuration settings.
-
- This may be overridden by the \`npm_config_userconfig\` environment
- variable or the \`--userconfig\` command line option, but may _not_
- be overridden by settings in the \`globalconfig\` file.
- `,
-})
-
-define('version', {
- default: false,
- type: Boolean,
- short: 'v',
- description: `
- If true, output the npm version and exit successfully.
-
- Only relevant when specified explicitly on the command line.
- `,
-})
-
-define('versions', {
- default: false,
- type: Boolean,
- description: `
- If true, output the npm version as well as node's \`process.versions\`
- map and the version in the current working directory's \`package.json\`
- file if one exists, and exit successfully.
-
- Only relevant when specified explicitly on the command line.
- `,
-})
-
-define('viewer', {
- default: isWindows ? 'browser' : 'man',
- defaultDescription: `
- "man" on Posix, "browser" on Windows
- `,
- type: String,
- description: `
- The program to use to view help content.
-
- Set to \`"browser"\` to view html help content in the default web browser.
- `,
-})
-
-define('which', {
- default: null,
- hint: '',
- type: [null, Number],
- description: `
- If there are multiple funding sources, which 1-indexed source URL to open.
- `,
-})
-
-define('workspace', {
- default: [],
- type: [String, Array],
- hint: '',
- short: 'w',
- envExport: false,
- description: `
- Enable running a command in the context of the configured workspaces of the
- current project while filtering by running only the workspaces defined by
- this configuration option.
-
- Valid values for the \`workspace\` config are either:
-
- * Workspace names
- * Path to a workspace directory
- * Path to a parent workspace directory (will result in selecting all
- workspaces within that folder)
-
- When set for the \`npm init\` command, this may be set to the folder of
- a workspace which does not yet exist, to create the folder and set it
- up as a brand new workspace within the project.
- `,
- flatten: (key, obj, flatOptions) => {
- definitions['user-agent'].flatten('user-agent', obj, flatOptions)
- },
-})
-
-define('workspaces', {
- default: null,
- type: [null, Boolean],
- short: 'ws',
- envExport: false,
- description: `
- Set to true to run the command in the context of **all** configured
- workspaces.
-
- Explicitly setting this to false will cause commands like \`install\` to
- ignore workspaces altogether.
- When not set explicitly:
-
- - Commands that operate on the \`node_modules\` tree (install, update,
- etc.) will link workspaces into the \`node_modules\` folder.
- - Commands that do other things (test, exec, publish, etc.) will operate
- on the root project, _unless_ one or more workspaces are specified in
- the \`workspace\` config.
- `,
- flatten: (key, obj, flatOptions) => {
- definitions['user-agent'].flatten('user-agent', obj, flatOptions)
-
- // TODO: this is a derived value, and should be reworked when we have a
- // pattern for derived value
-
- // workspacesEnabled is true whether workspaces is null or true
- // commands contextually work with workspaces or not regardless of
- // configuration, so we need an option specifically to disable workspaces
- flatOptions.workspacesEnabled = obj[key] !== false
- },
-})
-
-define('workspaces-update', {
- default: true,
- type: Boolean,
- description: `
- If set to true, the npm cli will run an update after operations that may
- possibly change the workspaces installed to the \`node_modules\` folder.
- `,
- flatten,
-})
-
-define('yes', {
- default: null,
- type: [null, Boolean],
- short: 'y',
- description: `
- Automatically answer "yes" to any prompts that npm might print on
- the command line.
- `,
-})
diff --git a/lib/utils/config/index.js b/lib/utils/config/index.js
deleted file mode 100644
index d393aec2297d2..0000000000000
--- a/lib/utils/config/index.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const definitions = require('./definitions.js')
-
-// use the defined flattening function, and copy over any scoped
-// registries and registry-specific "nerfdart" configs verbatim
-//
-// TODO: make these getters so that we only have to make dirty
-// the thing that changed, and then flatten the fields that
-// could have changed when a config.set is called.
-//
-// TODO: move nerfdart auth stuff into a nested object that
-// is only passed along to paths that end up calling npm-registry-fetch.
-const flatten = (obj, flat = {}) => {
- for (const [key, val] of Object.entries(obj)) {
- const def = definitions[key]
- if (def && def.flatten) {
- def.flatten(key, obj, flat)
- } else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) {
- flat[key] = val
- }
- }
-
- // XXX make this the bin/npm-cli.js file explicitly instead
- // otherwise using npm programmatically is a bit of a pain.
- flat.npmBin = require.main ? require.main.filename
- : /* istanbul ignore next - not configurable property */ undefined
- flat.nodeBin = process.env.NODE || process.execPath
-
- // XXX should this be sha512? is it even relevant?
- flat.hashAlgorithm = 'sha1'
-
- return flat
-}
-
-// aliases where they get expanded into a completely different thing
-// these are NOT supported in the environment or npmrc files, only
-// expanded on the CLI.
-// TODO: when we switch off of nopt, use an arg parser that supports
-// more reasonable aliasing and short opts right in the definitions set.
-const shorthands = {
- 'enjoy-by': ['--before'],
- d: ['--loglevel', 'info'],
- dd: ['--loglevel', 'verbose'],
- ddd: ['--loglevel', 'silly'],
- quiet: ['--loglevel', 'warn'],
- q: ['--loglevel', 'warn'],
- s: ['--loglevel', 'silent'],
- silent: ['--loglevel', 'silent'],
- verbose: ['--loglevel', 'verbose'],
- desc: ['--description'],
- help: ['--usage'],
- local: ['--no-global'],
- n: ['--no-yes'],
- no: ['--no-yes'],
- porcelain: ['--parseable'],
- readonly: ['--read-only'],
- reg: ['--registry'],
- iwr: ['--include-workspace-root'],
- ...Object.entries(definitions).reduce((acc, [key, { short = [] }]) => {
- // can be either an array or string
- for (const s of [].concat(short)) {
- acc[s] = [`--${key}`]
- }
- return acc
- }, {}),
-}
-
-module.exports = {
- get defaults () {
- // NB: 'default' is a reserved word
- return Object.entries(definitions).reduce((acc, [key, { default: d }]) => {
- acc[key] = d
- return acc
- }, {})
- },
- definitions,
- flatten,
- shorthands,
-}
diff --git a/lib/utils/did-you-mean.js b/lib/utils/did-you-mean.js
index 10b33d5f83a08..7428ed5df85e9 100644
--- a/lib/utils/did-you-mean.js
+++ b/lib/utils/did-you-mean.js
@@ -1,40 +1,34 @@
+const Npm = require('../npm')
const { distance } = require('fastest-levenshtein')
-const readJson = require('read-package-json-fast')
const { commands } = require('./cmd-list.js')
-const didYouMean = async (npm, path, scmd) => {
- // const cmd = await npm.cmd(str)
- const close = commands.filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd)
- let best = []
- for (const str of close) {
- const cmd = await npm.cmd(str)
- best.push(` npm ${str} # ${cmd.description}`)
- }
- // We would already be suggesting this in `npm x` so omit them here
- const runScripts = ['stop', 'start', 'test', 'restart']
- try {
- const { bin, scripts } = await readJson(`${path}/package.json`)
- best = best.concat(
- Object.keys(scripts || {})
- .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && !runScripts.includes(cmd))
- .map(str => ` npm run ${str} # run the "${str}" package script`),
- Object.keys(bin || {})
- .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4)
- /* eslint-disable-next-line max-len */
- .map(str => ` npm exec ${str} # run the "${str}" command from either this or a remote npm package`)
- )
- } catch (_) {
- // gracefully ignore not being in a folder w/ a package.json
- }
+const runScripts = ['stop', 'start', 'test', 'restart']
+
+const isClose = (scmd, cmd) => distance(scmd, cmd) < scmd.length * 0.4
+
+const didYouMean = (pkg, scmd) => {
+ const { scripts = {}, bin = {} } = pkg || {}
+
+ const best = [
+ ...commands
+ .filter(cmd => isClose(scmd, cmd) && scmd !== cmd)
+ .map(str => [str, Npm.cmd(str).description]),
+ ...Object.keys(scripts)
+ // We would already be suggesting this in `npm x` so omit them here
+ .filter(cmd => isClose(scmd, cmd) && !runScripts.includes(cmd))
+ .map(str => [`run ${str}`, `run the "${str}" package script`]),
+ ...Object.keys(bin)
+ .filter(cmd => isClose(scmd, cmd))
+ /* eslint-disable-next-line max-len */
+ .map(str => [`exec ${str}`, `run the "${str}" command from either this or a remote npm package`]),
+ ]
if (best.length === 0) {
return ''
}
- const suggestion =
- best.length === 1
- ? `\n\nDid you mean this?\n${best[0]}`
- : `\n\nDid you mean one of these?\n${best.slice(0, 3).join('\n')}`
- return suggestion
+ return `\n\nDid you mean ${best.length === 1 ? 'this' : 'one of these'}?\n` +
+ best.slice(0, 3).map(([msg, comment]) => ` npm ${msg} # ${comment}`).join('\n')
}
+
module.exports = didYouMean
diff --git a/lib/utils/display.js b/lib/utils/display.js
index 35d221c09cae8..67a3b98c0417a 100644
--- a/lib/utils/display.js
+++ b/lib/utils/display.js
@@ -1,120 +1,538 @@
-const { inspect } = require('util')
-const npmlog = require('npmlog')
-const log = require('./log-shim.js')
+const { log, output, input, META } = require('proc-log')
const { explain } = require('./explain-eresolve.js')
+const { formatWithOptions } = require('./format')
+
+// This is the general approach to color:
+// Eventually this will be exposed somewhere we can refer to these by name.
+// Foreground colors only. Never set the background color.
+/*
+ * Black # (Don't use)
+ * Red # Danger
+ * Green # Success
+ * Yellow # Warning
+ * Blue # Accent
+ * Magenta # Done
+ * Cyan # Emphasis
+ * White # (Don't use)
+ */
+
+// Translates log levels to chalk colors
+const COLOR_PALETTE = ({ chalk: c }) => ({
+ heading: c.bold,
+ title: c.blueBright,
+ timing: c.magentaBright,
+ // loglevels
+ error: c.red,
+ warn: c.yellow,
+ notice: c.cyanBright,
+ http: c.green,
+ info: c.cyan,
+ verbose: c.blue,
+ silly: c.blue.dim,
+})
+
+const LEVEL_OPTIONS = {
+ silent: {
+ index: 0,
+ },
+ error: {
+ index: 1,
+ },
+ warn: {
+ index: 2,
+ },
+ notice: {
+ index: 3,
+ },
+ http: {
+ index: 4,
+ },
+ info: {
+ index: 5,
+ },
+ verbose: {
+ index: 6,
+ },
+ silly: {
+ index: 7,
+ },
+}
+
+const LEVEL_METHODS = {
+ ...LEVEL_OPTIONS,
+ [log.KEYS.timing]: {
+ show: ({ timing, index }) => !!timing && index !== 0,
+ },
+}
+
+const setBlocking = (stream) => {
+ // Copied from https://github.com/yargs/set-blocking
+ // https://raw.githubusercontent.com/yargs/set-blocking/master/LICENSE.txt
+ /* istanbul ignore next - we trust that this works */
+ if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') {
+ stream._handle.setBlocking(true)
+ }
+ return stream
+}
+
+// These are important
+// This is the key that is returned to the user for errors
+const ERROR_KEY = 'error'
+// This is the key producers use to indicate that there
+// is a json error that should be merged into the finished output
+const JSON_ERROR_KEY = 'jsonError'
+
+const isPlainObject = (v) => v && typeof v === 'object' && !Array.isArray(v)
+
+const getArrayOrObject = (items) => {
+ if (items.length) {
+ const foundNonObject = items.find(o => !isPlainObject(o))
+    // Non-objects and arrays can't be merged, so just return the first item
+ if (foundNonObject) {
+ return foundNonObject
+ }
+ // We use objects with 0,1,2,etc keys to merge array
+ if (items.every((o, i) => Object.hasOwn(o, i))) {
+ return Object.assign([], ...items)
+ }
+ }
+  // Otherwise it's an object with all object items merged together
+ return Object.assign({}, ...items.filter(o => isPlainObject(o)))
+}
+
+const getJsonBuffer = ({ [JSON_ERROR_KEY]: metaError }, buffer) => {
+ const items = []
+ // meta also contains the meta object passed to flush
+ const errors = metaError ? [metaError] : []
+ // index 1 is the meta, 2 is the logged argument
+ for (const [, { [JSON_ERROR_KEY]: error }, obj] of buffer) {
+ if (obj) {
+ items.push(obj)
+ }
+ if (error) {
+ errors.push(error)
+ }
+ }
+
+ if (!items.length && !errors.length) {
+ return null
+ }
+
+ const res = getArrayOrObject(items)
+
+ // This skips any error checking since we can only set an error property
+ // on an object that can be stringified
+ // XXX(BREAKING_CHANGE): remove this in favor of always returning an object with result and error keys
+ if (isPlainObject(res) && errors.length) {
+ // This is not ideal. JSON output has always been keyed at the root with an `error`
+ // key, so we cant change that without it being a breaking change. At the same time
+ // some commands output arbitrary keys at the top level of the output, such as package
+ // names. So the output could already have the same key. The choice here is to overwrite
+ // it with our error since that is (probably?) more important.
+ // XXX(BREAKING_CHANGE): all json output should be keyed under well known keys, eg `result` and `error`
+ if (res[ERROR_KEY]) {
+ log.warn('', `overwriting existing ${ERROR_KEY} on json output`)
+ }
+ res[ERROR_KEY] = getArrayOrObject(errors)
+ }
+
+ return res
+}
+
+const withMeta = (handler) => (level, ...args) => {
+ let meta = {}
+ const last = args.at(-1)
+ if (last && typeof last === 'object' && Object.hasOwn(last, META)) {
+ meta = args.pop()
+ }
+ return handler(level, meta, ...args)
+}
class Display {
- constructor () {
- // pause by default until config is loaded
- this.on()
- log.pause()
+ #logState = {
+ buffering: true,
+ buffer: [],
+ }
+
+ #outputState = {
+ buffering: true,
+ buffer: [],
}
- on () {
+ // colors
+ #noColorChalk
+ #stdoutChalk
+ #stdoutColor
+ #stderrChalk
+ #stderrColor
+ #logColors
+
+ // progress
+ #progress
+
+ // options
+ #command
+ #levelIndex
+ #timing
+ #json
+ #heading
+ #silent
+
+ // display streams
+ #stdout
+ #stderr
+
+ constructor ({ stdout, stderr }) {
+ this.#stdout = setBlocking(stdout)
+ this.#stderr = setBlocking(stderr)
+
+ // Handlers are set immediately so they can buffer all events
process.on('log', this.#logHandler)
+ process.on('output', this.#outputHandler)
+ process.on('input', this.#inputHandler)
+ this.#progress = new Progress({ stream: stderr })
}
off () {
process.off('log', this.#logHandler)
- // Unbalanced calls to enable/disable progress
- // will leave change listeners on the tracker
- // This pretty much only happens in tests but
- // this removes the event emitter listener warnings
- log.tracker.removeAllListeners()
- }
-
- load (config) {
- const {
- color,
- timing,
- loglevel,
+ this.#logState.buffer.length = 0
+ process.off('output', this.#outputHandler)
+ this.#outputState.buffer.length = 0
+ process.off('input', this.#inputHandler)
+ this.#progress.off()
+ }
+
+ get chalk () {
+ return {
+ noColor: this.#noColorChalk,
+ stdout: this.#stdoutChalk,
+ stderr: this.#stderrChalk,
+ }
+ }
+
+ async load ({
+ command,
+ heading,
+ json,
+ loglevel,
+ progress,
+ stderrColor,
+ stdoutColor,
+ timing,
+ unicode,
+ }) {
+ // get createSupportsColor from chalk directly if this lands
+ // https://github.com/chalk/chalk/pull/600
+ const [{ Chalk }, { createSupportsColor }] = await Promise.all([
+ import('chalk'),
+ import('supports-color'),
+ ])
+ // we get the chalk level based on a null stream meaning chalk will only use
+ // what it knows about the environment to get color support since we already
+ // determined in our definitions that we want to show colors.
+ const level = Math.max(createSupportsColor(null).level, 1)
+ this.#noColorChalk = new Chalk({ level: 0 })
+ this.#stdoutColor = stdoutColor
+ this.#stdoutChalk = stdoutColor ? new Chalk({ level }) : this.#noColorChalk
+ this.#stderrColor = stderrColor
+ this.#stderrChalk = stderrColor ? new Chalk({ level }) : this.#noColorChalk
+ this.#logColors = COLOR_PALETTE({ chalk: this.#stderrChalk })
+
+ this.#command = command
+ this.#levelIndex = LEVEL_OPTIONS[loglevel].index
+ this.#timing = timing
+ this.#json = json
+ this.#heading = heading
+ this.#silent = this.#levelIndex <= 0
+
+ // Emit resume event on the logs which will flush output
+ log.resume()
+ output.flush()
+ this.#progress.load({
unicode,
- progress,
- silent,
- heading = 'npm',
- } = config
-
- // npmlog is still going away someday, so this is a hack to dynamically
- // set the loglevel of timing based on the timing flag, instead of making
- // a breaking change to npmlog. The result is that timing logs are never
- // shown except when the --timing flag is set. We also need to change
- // the index of the silly level since otherwise it is set to -Infinity
- // and we can't go any lower than that. silent is still set to Infinify
- // because we DO want silent to hide timing levels. This allows for the
- // special case of getting timing information while hiding all CLI output
- // in order to get perf information that might be affected by writing to
- // a terminal. XXX(npmlog): this will be removed along with npmlog
- log.levels.silly = -10000
- log.levels.timing = log.levels[loglevel] + (timing ? 1 : -1)
-
- log.level = loglevel
- log.heading = heading
-
- if (color) {
- log.enableColor()
- } else {
- log.disableColor()
+ enabled: !!progress && !this.#silent,
+ })
+ }
+
+ // STREAM WRITES
+
+ // Write formatted and (non-)colorized output to streams
+ #write (stream, options, ...args) {
+ const colors = stream === this.#stdout ? this.#stdoutColor : this.#stderrColor
+ const value = formatWithOptions({ colors, ...options }, ...args)
+ this.#progress.write(() => stream.write(value))
+ }
+
+ // HANDLERS
+
+ // Arrow function assigned to a private class field so it can be passed
+ // directly as a listener and still reference "this"
+ #logHandler = withMeta((level, meta, ...args) => {
+ switch (level) {
+ case log.KEYS.resume:
+ this.#logState.buffering = false
+ this.#logState.buffer.forEach((item) => this.#tryWriteLog(...item))
+ this.#logState.buffer.length = 0
+ break
+
+ case log.KEYS.pause:
+ this.#logState.buffering = true
+ break
+
+ default:
+ if (this.#logState.buffering) {
+ this.#logState.buffer.push([level, meta, ...args])
+ } else {
+ this.#tryWriteLog(level, meta, ...args)
+ }
+ break
}
+ })
- if (unicode) {
- log.enableUnicode()
- } else {
- log.disableUnicode()
+ // Arrow function assigned to a private class field so it can be passed
+ // directly as a listener and still reference "this"
+ #outputHandler = withMeta((level, meta, ...args) => {
+ this.#json = typeof meta.json === 'boolean' ? meta.json : this.#json
+ switch (level) {
+ case output.KEYS.flush: {
+ this.#outputState.buffering = false
+ if (this.#json) {
+ const json = getJsonBuffer(meta, this.#outputState.buffer)
+ if (json) {
+ this.#writeOutput(output.KEYS.standard, meta, JSON.stringify(json, null, 2))
+ }
+ } else {
+ this.#outputState.buffer.forEach((item) => this.#writeOutput(...item))
+ }
+ this.#outputState.buffer.length = 0
+ break
+ }
+
+ case output.KEYS.buffer:
+ this.#outputState.buffer.push([output.KEYS.standard, meta, ...args])
+ break
+
+ default:
+ if (this.#outputState.buffering) {
+ this.#outputState.buffer.push([level, meta, ...args])
+ } else {
+ // HACK: Check if the argument looks like a run-script banner. This can be
+ // replaced with proc-log.META in @npmcli/run-script
+ if (typeof args[0] === 'string' && args[0].startsWith('\n> ') && args[0].endsWith('\n')) {
+ if (this.#silent || ['exec', 'explore'].includes(this.#command)) {
+ // Silent mode and some specific commands always hide run script banners
+ break
+ } else if (this.#json) {
+              // In json mode, change output to stderr since we don't want to break json
+ // parsing on stdout if the user is piping to jq or something.
+ // XXX: in a future (breaking?) change it might make sense for run-script to
+ // always output these banners with proc-log.output.error if we think they
+ // align closer with "logging" instead of "output"
+ level = output.KEYS.error
+ }
+ }
+ this.#writeOutput(level, meta, ...args)
+ }
+ break
}
+ })
- // if it's silent, don't show progress
- if (progress && !silent) {
- log.enableProgress()
- } else {
- log.disableProgress()
+ #inputHandler = withMeta((level, meta, ...args) => {
+ switch (level) {
+ case input.KEYS.start:
+ log.pause()
+ this.#outputState.buffering = true
+ this.#progress.off()
+ break
+
+ case input.KEYS.end:
+ log.resume()
+ output.flush()
+ this.#progress.resume()
+ break
+
+ case input.KEYS.read: {
+ // The convention when calling input.read is to pass in a single fn that returns
+ // the promise to await. resolve and reject are provided by proc-log
+ const [res, rej, p] = args
+ return input.start(() => p()
+ .then(res)
+ .catch(rej)
+ // Any call to procLog.input.read will render a prompt to the user, so we always
+ // add a single newline of output to stdout to move the cursor to the next line
+ .finally(() => output.standard('')))
+ }
}
+ })
- // Resume displaying logs now that we have config
- log.resume()
- }
+ // OUTPUT
+
+ #writeOutput (level, meta, ...args) {
+ switch (level) {
+ case output.KEYS.standard:
+ this.#write(this.#stdout, {}, ...args)
+ break
- log (...args) {
- this.#logHandler(...args)
+ case output.KEYS.error:
+ this.#write(this.#stderr, {}, ...args)
+ break
+ }
}
- #logHandler = (level, ...args) => {
+ // LOGS
+
+ #tryWriteLog (level, meta, ...args) {
try {
- this.#log(level, ...args)
+ // Also (and this is a really inexcusable kludge), we patch the
+ // log.warn() method so that when we see a peerDep override
+ // explanation from Arborist, we can replace the object with a
+ // highly abbreviated explanation of what's being overridden.
+ // TODO: this could probably be moved to arborist now that display is refactored
+ const [heading, message, expl] = args
+ if (level === log.KEYS.warn && heading === 'ERESOLVE' && expl && typeof expl === 'object') {
+ this.#writeLog(level, meta, heading, message)
+ this.#writeLog(level, meta, '', explain(expl, this.#stderrChalk, 2))
+ return
+ }
+ this.#writeLog(level, meta, ...args)
} catch (ex) {
try {
// if it crashed once, it might again!
- this.#npmlog('verbose', `attempt to log ${inspect(args)} crashed`, ex)
+ this.#writeLog(log.KEYS.verbose, meta, '', `attempt to log crashed`, ...args, ex)
} catch (ex2) {
+ // This happens if the object has an inspect method that crashes so just console.error
+ // with the errors but don't do anything else that might error again.
// eslint-disable-next-line no-console
- console.error(`attempt to log ${inspect(args)} crashed`, ex, ex2)
+ console.error(`attempt to log crashed`, ex, ex2)
}
}
}
- #log (...args) {
- return this.#eresolveWarn(...args) || this.#npmlog(...args)
+ #writeLog (level, meta, ...args) {
+ const levelOpts = LEVEL_METHODS[level]
+ const show = levelOpts.show ?? (({ index }) => levelOpts.index <= index)
+ const force = meta.force && !this.#silent
+
+ if (force || show({ index: this.#levelIndex, timing: this.#timing })) {
+ // this mutates the array so we can pass args directly to format later
+ const title = args.shift()
+ const prefix = [
+ this.#logColors.heading(this.#heading),
+ this.#logColors[level](level),
+ title ? this.#logColors.title(title) : null,
+ ]
+ this.#write(this.#stderr, { prefix }, ...args)
+ }
+ }
+}
+
+class Progress {
+ // Taken from https://github.com/sindresorhus/cli-spinners
+ // MIT License
+ // Copyright (c) Sindre Sorhus (https://sindresorhus.com)
+ static dots = { duration: 80, frames: ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'] }
+ static lines = { duration: 130, frames: ['-', '\\', '|', '/'] }
+
+ #stream
+ #spinner
+ #enabled = false
+
+ #frameIndex = 0
+ #lastUpdate = 0
+ #interval
+ #timeout
+
+ // We are rendering if the enabled option is set and we are not waiting for the render timeout
+ get #rendering () {
+ return this.#enabled && !this.#timeout
+ }
+
+ // We are spinning if the enabled option is set and the render interval has been set
+ get #spinning () {
+ return this.#enabled && this.#interval
+ }
+
+ constructor ({ stream }) {
+ this.#stream = stream
+ }
+
+ load ({ enabled, unicode }) {
+ this.#enabled = enabled
+ this.#spinner = unicode ? Progress.dots : Progress.lines
+ // Don't render the spinner for short durations
+ this.#render(200)
+ }
+
+ off () {
+ if (!this.#enabled) {
+ return
+ }
+ clearTimeout(this.#timeout)
+ this.#timeout = null
+ clearInterval(this.#interval)
+ this.#interval = null
+ this.#frameIndex = 0
+ this.#lastUpdate = 0
+ this.#clearSpinner()
+ }
+
+ resume () {
+ this.#render()
}
- // Explicitly call these on npmlog and not log shim
- // This is the final place we should call npmlog before removing it.
- #npmlog (level, ...args) {
- npmlog[level](...args)
+ // If we are currently rendering the spinner we clear it
+ // before writing our line and then re-render the spinner after.
+ // If not then all we need to do is write the line
+ write (write) {
+ if (this.#spinning) {
+ this.#clearSpinner()
+ }
+ write()
+ if (this.#spinning) {
+ this.#render()
+ }
+ }
+
+ #render (ms) {
+ if (ms) {
+ this.#timeout = setTimeout(() => {
+ this.#timeout = null
+ this.#renderSpinner()
+ }, ms)
+ // Make sure this timeout does not keep the process open
+ this.#timeout.unref()
+ } else {
+ this.#renderSpinner()
+ }
}
- // Also (and this is a really inexcusable kludge), we patch the
- // log.warn() method so that when we see a peerDep override
- // explanation from Arborist, we can replace the object with a
- // highly abbreviated explanation of what's being overridden.
- #eresolveWarn (level, heading, message, expl) {
- if (level === 'warn' &&
- heading === 'ERESOLVE' &&
- expl && typeof expl === 'object'
- ) {
- this.#npmlog(level, heading, message)
- this.#npmlog(level, '', explain(expl, log.useColor(), 2))
- // Return true to short circuit other log in chain
- return true
+ #renderSpinner () {
+ if (!this.#rendering) {
+ return
}
+ // We always attempt to render immediately but we only request to move to the next
+ // frame if it has been longer than our spinner frame duration since our last update
+ this.#renderFrame(Date.now() - this.#lastUpdate >= this.#spinner.duration)
+ clearInterval(this.#interval)
+ this.#interval = setInterval(() => this.#renderFrame(true), this.#spinner.duration)
+ }
+
+ #renderFrame (next) {
+ if (next) {
+ this.#lastUpdate = Date.now()
+ this.#frameIndex++
+ if (this.#frameIndex >= this.#spinner.frames.length) {
+ this.#frameIndex = 0
+ }
+ }
+ this.#clearSpinner()
+ this.#stream.write(this.#spinner.frames[this.#frameIndex])
+ }
+
+ #clearSpinner () {
+ // Move to the start of the line and clear the rest of the line
+ this.#stream.cursorTo(0)
+ this.#stream.clearLine(1)
}
}
diff --git a/lib/utils/error-message.js b/lib/utils/error-message.js
index 72c7b9fe4553f..fc47c909069f0 100644
--- a/lib/utils/error-message.js
+++ b/lib/utils/error-message.js
@@ -1,44 +1,25 @@
-const { format } = require('util')
-const { resolve } = require('path')
-const nameValidator = require('validate-npm-package-name')
-const replaceInfo = require('./replace-info.js')
-const { report } = require('./explain-eresolve.js')
-const log = require('./log-shim')
-
-const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n')
-
-const jsonError = (er, npm, { summary, detail }) => {
- if (npm?.config.loaded && npm.config.get('json')) {
- return {
- error: {
- code: er.code,
- summary: messageText(summary),
- detail: messageText(detail),
- },
- }
- }
-}
+const { format } = require('node:util')
+const { resolve } = require('node:path')
+const { redactLog: replaceInfo } = require('@npmcli/redact')
+const { log } = require('proc-log')
const errorMessage = (er, npm) => {
- const short = []
+ const summary = []
const detail = []
const files = []
- if (er.message) {
- er.message = replaceInfo(er.message)
- }
- if (er.stack) {
- er.stack = replaceInfo(er.stack)
- }
+ er.message &&= replaceInfo(er.message)
+ er.stack &&= replaceInfo(er.stack)
switch (er.code) {
case 'ERESOLVE': {
- short.push(['ERESOLVE', er.message])
+ const { report } = require('./explain-eresolve.js')
+ summary.push(['ERESOLVE', er.message])
detail.push(['', ''])
// XXX(display): error messages are logged so we use the logColor since that is based
// on stderr. This should be handled solely by the display layer so it could also be
// printed to stdout if necessary.
- const { explanation, file } = report(er, !!npm.logColor)
+ const { explanation, file } = report(er, npm.logChalk, npm.noColorChalk)
detail.push(['', explanation])
files.push(['eresolve-report.txt', file])
break
@@ -46,175 +27,147 @@ const errorMessage = (er, npm) => {
case 'ENOLOCK': {
const cmd = npm.command || ''
- short.push([cmd, 'This command requires an existing lockfile.'])
+ summary.push([cmd, 'This command requires an existing lockfile.'])
detail.push([cmd, 'Try creating one first with: npm i --package-lock-only'])
detail.push([cmd, `Original error: ${er.message}`])
break
}
case 'ENOAUDIT':
- short.push(['audit', er.message])
+ summary.push(['audit', er.message])
break
case 'ECONNREFUSED':
- short.push(['', er])
- detail.push([
+ summary.push(['', er])
+ detail.push(['', [
'',
- [
- '\nIf you are behind a proxy, please make sure that the',
- "'proxy' config is set properly. See: 'npm help config'",
- ].join('\n'),
- ])
+ 'If you are behind a proxy, please make sure that the',
+ "'proxy' config is set properly. See: 'npm help config'",
+ ].join('\n')])
break
case 'EACCES':
case 'EPERM': {
const isCachePath =
- typeof er.path === 'string' &&
- npm.config.loaded &&
- er.path.startsWith(npm.config.get('cache'))
+ typeof er.path === 'string' && npm.loaded && er.path.startsWith(npm.config.get('cache'))
const isCacheDest =
- typeof er.dest === 'string' &&
- npm.config.loaded &&
- er.dest.startsWith(npm.config.get('cache'))
+ typeof er.dest === 'string' && npm.loaded && er.dest.startsWith(npm.config.get('cache'))
- const { isWindows } = require('./is-windows.js')
-
- if (!isWindows && (isCachePath || isCacheDest)) {
+ if (process.platform !== 'win32' && (isCachePath || isCacheDest)) {
// user probably doesn't need this, but still add it to the debug log
log.verbose(er.stack)
- short.push([
+ summary.push(['', [
'',
- [
- '',
- 'Your cache folder contains root-owned files, due to a bug in',
- 'previous versions of npm which has since been addressed.',
- '',
- 'To permanently fix this problem, please run:',
- ` sudo chown -R ${process.getuid()}:${process.getgid()} ${JSON.stringify(
- npm.config.get('cache')
- )}`,
- ].join('\n'),
- ])
+ 'Your cache folder contains root-owned files, due to a bug in',
+ 'previous versions of npm which has since been addressed.',
+ '',
+ 'To permanently fix this problem, please run:',
+ ` sudo chown -R ${process.getuid()}:${process.getgid()} "${npm.config.get('cache')}"`,
+ ].join('\n')])
} else {
- short.push(['', er])
- detail.push([
+ summary.push(['', er])
+ detail.push(['', [
'',
- [
- '\nThe operation was rejected by your operating system.',
- isWindows
- /* eslint-disable-next-line max-len */
- ? "It's possible that the file was already in use (by a text editor or antivirus),\n" +
- 'or that you lack permissions to access it.'
- /* eslint-disable-next-line max-len */
- : 'It is likely you do not have the permissions to access this file as the current user',
- '\nIf you believe this might be a permissions issue, please double-check the',
- 'permissions of the file and its containing directories, or try running',
- 'the command again as root/Administrator.',
- ].join('\n'),
- ])
+ 'The operation was rejected by your operating system.',
+ ...process.platform === 'win32' ? [
+ "It's possible that the file was already in use (by a text editor or antivirus),",
+ 'or that you lack permissions to access it.',
+ ] : [
+ 'It is likely you do not have the permissions to access this file as the current user',
+ ],
+ '',
+ 'If you believe this might be a permissions issue, please double-check the',
+ 'permissions of the file and its containing directories, or try running',
+ 'the command again as root/Administrator.',
+ ].join('\n')])
}
break
}
case 'ENOGIT':
- short.push(['', er.message])
- detail.push([
+ summary.push(['', er.message])
+ detail.push(['', [
'',
- ['', 'Failed using git.', 'Please check if you have git installed and in your PATH.'].join(
- '\n'
- ),
- ])
+ 'Failed using git.',
+ 'Please check if you have git installed and in your PATH.',
+ ].join('\n')])
break
case 'EJSONPARSE':
// Check whether we ran into a conflict in our own package.json
if (er.path === resolve(npm.prefix, 'package.json')) {
const { isDiff } = require('parse-conflict-json')
- const txt = require('fs').readFileSync(er.path, 'utf8').replace(/\r\n/g, '\n')
+ const txt = require('node:fs').readFileSync(er.path, 'utf8').replace(/\r\n/g, '\n')
if (isDiff(txt)) {
- detail.push([
+ detail.push(['', [
+ 'Merge conflict detected in your package.json.',
'',
- [
- 'Merge conflict detected in your package.json.',
- '',
- 'Please resolve the package.json conflict and retry.',
- ].join('\n'),
- ])
+ 'Please resolve the package.json conflict and retry.',
+ ].join('\n')])
break
}
}
- short.push(['JSON.parse', er.message])
- detail.push([
- 'JSON.parse',
- [
- 'Failed to parse JSON data.',
- 'Note: package.json must be actual JSON, not just JavaScript.',
- ].join('\n'),
- ])
+ summary.push(['JSON.parse', er.message])
+ detail.push(['JSON.parse', [
+ 'Failed to parse JSON data.',
+ 'Note: package.json must be actual JSON, not just JavaScript.',
+ ].join('\n')])
break
case 'EOTP':
case 'E401':
// E401 is for places where we accidentally neglect OTP stuff
if (er.code === 'EOTP' || /one-time pass/.test(er.message)) {
- short.push(['', 'This operation requires a one-time password from your authenticator.'])
- detail.push([
- '',
- [
- 'You can provide a one-time password by passing --otp= to the command you ran.',
- 'If you already provided a one-time password then it is likely that you either typoed',
- 'it, or it timed out. Please try again.',
- ].join('\n'),
- ])
+ summary.push(['', 'This operation requires a one-time password from your authenticator.'])
+ detail.push(['', [
+ 'You can provide a one-time password by passing --otp= to the command you ran.',
+ 'If you already provided a one-time password then it is likely that you either typoed',
+ 'it, or it timed out. Please try again.',
+ ].join('\n')])
} else {
// npm ERR! code E401
// npm ERR! Unable to authenticate, need: Basic
- const auth =
- !er.headers || !er.headers['www-authenticate']
- ? []
- : er.headers['www-authenticate'].map(au => au.split(/[,\s]+/))[0]
+ const auth = !er.headers || !er.headers['www-authenticate']
+ ? []
+ : er.headers['www-authenticate'].map(au => au.split(/[,\s]+/))[0]
if (auth.includes('Bearer')) {
- short.push([
- '',
+ summary.push(['',
'Unable to authenticate, your authentication token seems to be invalid.',
])
- detail.push([
- '',
- ['To correct this please trying logging in again with:', ' npm login'].join('\n'),
- ])
+ detail.push(['', [
+ 'To correct this please try logging in again with:',
+ ' npm login',
+ ].join('\n')])
} else if (auth.includes('Basic')) {
- short.push(['', 'Incorrect or missing password.'])
- detail.push([
+ summary.push(['', 'Incorrect or missing password.'])
+ detail.push(['', [
+ 'If you were trying to login, change your password, create an',
+ 'authentication token or enable two-factor authentication then',
+ 'that means you likely typed your password in incorrectly.',
+ 'Please try again, or recover your password at:',
+ ' https://www.npmjs.com/forgot',
'',
- [
- 'If you were trying to login, change your password, create an',
- 'authentication token or enable two-factor authentication then',
- 'that means you likely typed your password in incorrectly.',
- 'Please try again, or recover your password at:',
- ' https://www.npmjs.com/forgot',
- '',
- 'If you were doing some other operation then your saved credentials are',
- 'probably out of date. To correct this please try logging in again with:',
- ' npm login',
- ].join('\n'),
- ])
+ 'If you were doing some other operation then your saved credentials are',
+ 'probably out of date. To correct this please try logging in again with:',
+ ' npm login',
+ ].join('\n')])
} else {
- short.push(['', er.message || er])
+ summary.push(['', er.message || er])
}
}
break
case 'E404':
// There's no need to have 404 in the message as well.
- short.push(['404', er.message.replace(/^404\s+/, '')])
+ summary.push(['404', er.message.replace(/^404\s+/, '')])
if (er.pkgid && er.pkgid !== '-') {
const pkg = er.pkgid.replace(/(?!^)@.*$/, '')
detail.push(['404', ''])
detail.push(['404', '', `'${replaceInfo(er.pkgid)}' is not in this registry.`])
+ const nameValidator = require('validate-npm-package-name')
const valResult = nameValidator(pkg)
if (!valResult.validForNewPackages) {
@@ -224,71 +177,77 @@ const errorMessage = (er, npm) => {
errorsArray.forEach((item, idx) => detail.push(['404', ' ' + (idx + 1) + '. ' + item]))
}
- detail.push(['404', '\nNote that you can also install from a'])
+ detail.push(['404', ''])
+ detail.push(['404', 'Note that you can also install from a'])
detail.push(['404', 'tarball, folder, http url, or git url.'])
}
break
case 'EPUBLISHCONFLICT':
- short.push(['publish fail', 'Cannot publish over existing version.'])
+ summary.push(['publish fail', 'Cannot publish over existing version.'])
detail.push(['publish fail', "Update the 'version' field in package.json and try again."])
detail.push(['publish fail', ''])
detail.push(['publish fail', 'To automatically increment version numbers, see:'])
- detail.push(['publish fail', ' npm help version'])
+ detail.push(['publish fail', ' npm help version'])
break
case 'EISGIT':
- short.push(['git', er.message])
- short.push(['git', ' ' + er.path])
- detail.push([
- 'git',
- ['Refusing to remove it. Update manually,', 'or move it out of the way first.'].join('\n'),
- ])
+ summary.push(['git', er.message])
+ summary.push(['git', ` ${er.path}`])
+ detail.push(['git', [
+ 'Refusing to remove it. Update manually,',
+ 'or move it out of the way first.',
+ ].join('\n')])
break
case 'EBADPLATFORM': {
- const validOs =
- er.required && er.required.os && er.required.os.join
- ? er.required.os.join(',')
- : er.required.os
- const validArch =
- er.required && er.required.cpu && er.required.cpu.join
- ? er.required.cpu.join(',')
- : er.required.cpu
- const expected = { os: validOs, arch: validArch }
- const actual = { os: process.platform, arch: process.arch }
- short.push([
- 'notsup',
- [
- format(
- 'Unsupported platform for %s: wanted %j (current: %j)',
- er.pkgid,
- expected,
- actual
- ),
- ].join('\n'),
- ])
- detail.push([
- 'notsup',
- [
- 'Valid OS: ' + validOs,
- 'Valid Arch: ' + validArch,
- 'Actual OS: ' + process.platform,
- 'Actual Arch: ' + process.arch,
- ].join('\n'),
- ])
+ const actual = er.current
+ const expected = { ...er.required }
+ const checkedKeys = []
+ for (const key in expected) {
+ if (Array.isArray(expected[key]) && expected[key].length > 0) {
+ expected[key] = expected[key].join(',')
+ checkedKeys.push(key)
+ } else if (expected[key] === undefined ||
+ Array.isArray(expected[key]) && expected[key].length === 0) {
+ delete expected[key]
+ delete actual[key]
+ } else {
+ checkedKeys.push(key)
+ }
+ }
+
+ const longestKey = Math.max(...checkedKeys.map((key) => key.length))
+ const detailEntry = []
+ for (const key of checkedKeys) {
+ const padding = key.length === longestKey
+ ? 1
+ : 1 + (longestKey - key.length)
+
+ // padding + 1 because 'actual' is longer than 'valid'
+ detailEntry.push(`Valid ${key}:${' '.repeat(padding + 1)}${expected[key]}`)
+ detailEntry.push(`Actual ${key}:${' '.repeat(padding)}${actual[key]}`)
+ }
+
+ summary.push(['notsup', format(
+ 'Unsupported platform for %s: wanted %j (current: %j)',
+ er.pkgid,
+ expected,
+ actual
+ )])
+ detail.push(['notsup', detailEntry.join('\n')])
break
}
case 'EEXIST':
- short.push(['', er.message])
- short.push(['', 'File exists: ' + (er.dest || er.path)])
+ summary.push(['', er.message])
+ summary.push(['', 'File exists: ' + (er.dest || er.path)])
detail.push(['', 'Remove the existing file and try again, or run npm'])
detail.push(['', 'with --force to overwrite files recklessly.'])
break
case 'ENEEDAUTH':
- short.push(['need auth', er.message])
+ summary.push(['need auth', er.message])
detail.push(['need auth', 'You need to authorize this machine using `npm adduser`'])
break
@@ -297,126 +256,185 @@ const errorMessage = (er, npm) => {
case 'ETIMEDOUT':
case 'ERR_SOCKET_TIMEOUT':
case 'EAI_FAIL':
- short.push(['network', er.message])
- detail.push([
- 'network',
- [
- 'This is a problem related to network connectivity.',
- 'In most cases you are behind a proxy or have bad network settings.',
- '\nIf you are behind a proxy, please make sure that the',
- "'proxy' config is set properly. See: 'npm help config'",
- ].join('\n'),
- ])
+ summary.push(['network', er.message])
+ detail.push(['network', [
+ 'This is a problem related to network connectivity.',
+ 'In most cases you are behind a proxy or have bad network settings.',
+ '',
+ 'If you are behind a proxy, please make sure that the',
+ "'proxy' config is set properly. See: 'npm help config'",
+ ].join('\n')])
break
case 'ETARGET':
- short.push(['notarget', er.message])
- detail.push([
- 'notarget',
- [
- 'In most cases you or one of your dependencies are requesting',
- "a package version that doesn't exist.",
- ].join('\n'),
- ])
+ summary.push(['notarget', er.message])
+ detail.push(['notarget', [
+ 'In most cases you or one of your dependencies are requesting',
+ "a package version that doesn't exist.",
+ ].join('\n')])
break
case 'E403':
- short.push(['403', er.message])
- detail.push([
- '403',
- [
- 'In most cases, you or one of your dependencies are requesting',
- 'a package version that is forbidden by your security policy, or',
- 'on a server you do not have access to.',
- ].join('\n'),
- ])
+ summary.push(['403', er.message])
+ detail.push(['403', [
+ 'In most cases, you or one of your dependencies are requesting',
+ 'a package version that is forbidden by your security policy, or',
+ 'on a server you do not have access to.',
+ ].join('\n')])
break
case 'EBADENGINE':
- short.push(['engine', er.message])
- short.push(['engine', 'Not compatible with your version of node/npm: ' + er.pkgid])
- detail.push([
- 'notsup',
- [
- 'Not compatible with your version of node/npm: ' + er.pkgid,
- 'Required: ' + JSON.stringify(er.required),
- 'Actual: ' +
- JSON.stringify({
- npm: npm.version,
- node: process.version,
- }),
- ].join('\n'),
- ])
+ summary.push(['engine', er.message])
+ summary.push(['engine', 'Not compatible with your version of node/npm: ' + er.pkgid])
+ detail.push(['notsup', [
+ 'Not compatible with your version of node/npm: ' + er.pkgid,
+ 'Required: ' + JSON.stringify(er.required),
+ 'Actual: ' +
+ JSON.stringify({ npm: npm.version, node: process.version }),
+ ].join('\n')])
break
case 'ENOSPC':
- short.push(['nospc', er.message])
- detail.push([
- 'nospc',
- [
- 'There appears to be insufficient space on your system to finish.',
- 'Clear up some disk space and try again.',
- ].join('\n'),
- ])
+ summary.push(['nospc', er.message])
+ detail.push(['nospc', [
+ 'There appears to be insufficient space on your system to finish.',
+ 'Clear up some disk space and try again.',
+ ].join('\n')])
break
case 'EROFS':
- short.push(['rofs', er.message])
- detail.push([
- 'rofs',
- [
- 'Often virtualized file systems, or other file systems',
- "that don't support symlinks, give this error.",
- ].join('\n'),
- ])
+ summary.push(['rofs', er.message])
+ detail.push(['rofs', [
+ 'Often virtualized file systems, or other file systems',
+ "that don't support symlinks, give this error.",
+ ].join('\n')])
break
case 'ENOENT':
- short.push(['enoent', er.message])
- detail.push([
- 'enoent',
- [
- 'This is related to npm not being able to find a file.',
- er.file ? "\nCheck if the file '" + er.file + "' is present." : '',
- ].join('\n'),
- ])
+ summary.push(['enoent', er.message])
+ detail.push(['enoent', [
+ 'This is related to npm not being able to find a file.',
+ er.file ? `\nCheck if the file '${er.file}' is present.` : '',
+ ].join('\n')])
break
case 'EMISSINGARG':
case 'EUNKNOWNTYPE':
case 'EINVALIDTYPE':
case 'ETOOMANYARGS':
- short.push(['typeerror', er.stack])
- detail.push([
- 'typeerror',
- [
- 'This is an error with npm itself. Please report this error at:',
- ' https://github.com/npm/cli/issues',
- ].join('\n'),
- ])
+ summary.push(['typeerror', er.stack])
+ detail.push(['typeerror', [
+ 'This is an error with npm itself. Please report this error at:',
+ ' https://github.com/npm/cli/issues',
+ ].join('\n')])
break
default:
- short.push(['', er.message || er])
+ summary.push(['', er.message || er])
+ if (er.cause) {
+ detail.push(['cause', er.cause.message])
+ }
if (er.signal) {
detail.push(['signal', er.signal])
}
-
if (er.cmd && Array.isArray(er.args)) {
detail.push(['command', ...[er.cmd, ...er.args.map(replaceInfo)]])
}
-
if (er.stdout) {
detail.push(['', er.stdout.trim()])
}
-
if (er.stderr) {
detail.push(['', er.stderr.trim()])
}
-
break
}
- return { summary: short, detail, files, json: jsonError(er, npm, { summary: short, detail }) }
+
+ return {
+ summary,
+ detail,
+ files,
+ }
}
-module.exports = errorMessage
+const getExitCodeFromError = (err) => {
+ if (typeof err?.errno === 'number') {
+ return err.errno
+ } else if (typeof err?.code === 'number') {
+ return err.code
+ }
+}
+
+const getError = (err, { npm, command, pkg }) => {
+ // if we got a command that just shells out to something else, then it
+ // will presumably print its own errors and exit with a proper status
+ // code if there's a problem. If we got an error with a code=0, then...
+ // something else went wrong along the way, so maybe an npm problem?
+ if (command?.constructor?.isShellout && typeof err.code === 'number' && err.code) {
+ return {
+ exitCode: err.code,
+ suppressError: true,
+ }
+ }
+
+ // XXX: we should stop throwing strings
+ if (typeof err === 'string') {
+ return {
+ exitCode: 1,
+ suppressError: true,
+ summary: [['', err]],
+ }
+ }
+
+ // XXX: we should stop throwing other non-errors
+ if (!(err instanceof Error)) {
+ return {
+ exitCode: 1,
+ suppressError: true,
+ summary: [['weird error', err]],
+ }
+ }
+
+ if (err.code === 'EUNKNOWNCOMMAND') {
+ const suggestions = require('./did-you-mean.js')(pkg, err.command)
+ return {
+ exitCode: 1,
+ suppressError: true,
+ standard: [
+ `Unknown command: "${err.command}"`,
+ suggestions,
+ 'To see a list of supported npm commands, run:',
+ ' npm help',
+ ],
+ }
+ }
+
+ // Anything after this is not suppressed and get more logged information
+
+ // add a code to the error if it doesn't have one and mutate some properties
+ // so they have redacted information
+ err.code ??= err.message.match(/^(?:Error: )?(E[A-Z]+)/)?.[1]
+ // this mutates the error and redacts stack/message
+ const { summary, detail, files } = errorMessage(err, npm)
+
+ return {
+ err,
+ code: err.code,
+ exitCode: getExitCodeFromError(err) || 1,
+ suppressError: false,
+ summary,
+ detail,
+ files,
+ verbose: ['type', 'stack', 'statusCode', 'pkgid']
+ .filter(k => err[k])
+ .map(k => [k, replaceInfo(err[k])]),
+ error: ['code', 'syscall', 'file', 'path', 'dest', 'errno']
+ .filter(k => err[k])
+ .map(k => [k, err[k]]),
+ }
+}
+
+module.exports = {
+ getExitCodeFromError,
+ errorMessage,
+ getError,
+}
diff --git a/lib/utils/exit-handler.js b/lib/utils/exit-handler.js
deleted file mode 100644
index b5fc7042bd020..0000000000000
--- a/lib/utils/exit-handler.js
+++ /dev/null
@@ -1,229 +0,0 @@
-const os = require('os')
-const fs = require('fs')
-
-const log = require('./log-shim.js')
-const errorMessage = require('./error-message.js')
-const replaceInfo = require('./replace-info.js')
-
-const indent = (val) => Array.isArray(val) ? val.map(v => indent(v)) : ` ${val}`
-
-let npm = null // set by the cli
-let exitHandlerCalled = false
-let showLogFileError = false
-
-process.on('exit', code => {
- log.disableProgress()
-
- // process.emit is synchronous, so the timeEnd handler will run before the
- // unfinished timer check below
- process.emit('timeEnd', 'npm')
-
- const hasLoadedNpm = npm?.config.loaded
-
- // Unfinished timers can be read before config load
- if (npm) {
- for (const [name, timer] of npm.unfinishedTimers) {
- log.verbose('unfinished npm timer', name, timer)
- }
- }
-
- if (!code) {
- log.info('ok')
- } else {
- log.verbose('code', code)
- }
-
- if (!exitHandlerCalled) {
- process.exitCode = code || 1
- log.error('', 'Exit handler never called!')
- // eslint-disable-next-line no-console
- console.error('')
- log.error('', 'This is an error with npm itself. Please report this error at:')
- log.error('', ' ')
- showLogFileError = true
- }
-
- // npm must be loaded to know where the log file was written
- if (hasLoadedNpm) {
- // write the timing file now, this might do nothing based on the configs set.
- // we need to call it here in case it errors so we dont tell the user
- // about a timing file that doesn't exist
- npm.writeTimingFile()
-
- const logsDir = npm.logsDir
- const logFiles = npm.logFiles
-
- const timingDir = npm.timingDir
- const timingFile = npm.timingFile
-
- const timing = npm.config.get('timing')
- const logsMax = npm.config.get('logs-max')
-
- // Determine whether to show log file message and why it is
- // being shown since in timing mode we always show the log file message
- const logMethod = showLogFileError ? 'error' : timing ? 'info' : null
-
- if (logMethod) {
- if (!npm.silent) {
- // just a line break if not in silent mode
- // eslint-disable-next-line no-console
- console.error('')
- }
-
- const message = []
-
- if (timingFile) {
- message.push('Timing info written to:', indent(timingFile))
- } else if (timing) {
- message.push(
- `The timing file was not written due to an error writing to the directory: ${timingDir}`
- )
- }
-
- if (logFiles.length) {
- message.push('A complete log of this run can be found in:', ...indent(logFiles))
- } else if (logsMax <= 0) {
- // user specified no log file
- message.push(`Log files were not written due to the config logs-max=${logsMax}`)
- } else {
- // could be an error writing to the directory
- message.push(
- `Log files were not written due to an error writing to the directory: ${logsDir}`,
- 'You can rerun the command with `--loglevel=verbose` to see the logs in your terminal'
- )
- }
-
- log[logMethod]('', message.join('\n'))
- }
-
- // This removes any listeners npm setup, mostly for tests to avoid max listener warnings
- npm.unload()
- }
-
- // these are needed for the tests to have a clean slate in each test case
- exitHandlerCalled = false
- showLogFileError = false
-})
-
-const exitHandler = err => {
- exitHandlerCalled = true
-
- log.disableProgress()
-
- const hasLoadedNpm = npm?.config.loaded
-
- if (!npm) {
- err = err || new Error('Exit prior to setting npm in exit handler')
- // eslint-disable-next-line no-console
- console.error(err.stack || err.message)
- return process.exit(1)
- }
-
- if (!hasLoadedNpm) {
- err = err || new Error('Exit prior to config file resolving.')
- // eslint-disable-next-line no-console
- console.error(err.stack || err.message)
- }
-
- // only show the notification if it finished.
- if (typeof npm.updateNotification === 'string') {
- const { level } = log
- log.level = 'notice'
- log.notice('', npm.updateNotification)
- log.level = level
- }
-
- let exitCode
- let noLogMessage
- let jsonError
-
- if (err) {
- exitCode = 1
- // if we got a command that just shells out to something else, then it
- // will presumably print its own errors and exit with a proper status
- // code if there's a problem. If we got an error with a code=0, then...
- // something else went wrong along the way, so maybe an npm problem?
- const isShellout = npm.isShellout
- const quietShellout = isShellout && typeof err.code === 'number' && err.code
- if (quietShellout) {
- exitCode = err.code
- noLogMessage = true
- } else if (typeof err === 'string') {
- // XXX: we should stop throwing strings
- log.error('', err)
- noLogMessage = true
- } else if (!(err instanceof Error)) {
- log.error('weird error', err)
- noLogMessage = true
- } else {
- if (!err.code) {
- const matchErrorCode = err.message.match(/^(?:Error: )?(E[A-Z]+)/)
- err.code = matchErrorCode && matchErrorCode[1]
- }
-
- for (const k of ['type', 'stack', 'statusCode', 'pkgid']) {
- const v = err[k]
- if (v) {
- log.verbose(k, replaceInfo(v))
- }
- }
-
- log.verbose('cwd', process.cwd())
- log.verbose('', os.type() + ' ' + os.release())
- log.verbose('node', process.version)
- log.verbose('npm ', 'v' + npm.version)
-
- for (const k of ['code', 'syscall', 'file', 'path', 'dest', 'errno']) {
- const v = err[k]
- if (v) {
- log.error(k, v)
- }
- }
-
- const { summary, detail, json, files = [] } = errorMessage(err, npm)
- jsonError = json
-
- for (let [file, content] of files) {
- file = `${npm.logPath}${file}`
- content = `'Log files:\n${npm.logFiles.join('\n')}\n\n${content.trim()}\n`
- try {
- fs.writeFileSync(file, content)
- detail.push(['', `\n\nFor a full report see:\n${file}`])
- } catch (logFileErr) {
- log.warn('', `Could not write error message to ${file} due to ${logFileErr}`)
- }
- }
-
- for (const errline of [...summary, ...detail]) {
- log.error(...errline)
- }
-
- if (typeof err.errno === 'number') {
- exitCode = err.errno
- } else if (typeof err.code === 'number') {
- exitCode = err.code
- }
- }
- }
-
- if (hasLoadedNpm) {
- npm.flushOutput(jsonError)
- }
-
- log.verbose('exit', exitCode || 0)
-
- showLogFileError = (hasLoadedNpm && npm.silent) || noLogMessage
- ? false
- : !!exitCode
-
- // explicitly call process.exit now so we don't hang on things like the
- // update notifier, also flush stdout/err beforehand because process.exit doesn't
- // wait for that to happen.
- let flushed = 0
- const flush = [process.stderr, process.stdout]
- const exit = () => ++flushed === flush.length && process.exit(exitCode)
- flush.forEach((f) => f.write('', exit))
-}
-
-module.exports = exitHandler
-module.exports.setNpm = n => (npm = n)
diff --git a/lib/utils/explain-dep.js b/lib/utils/explain-dep.js
index 58258026491dc..4e9e93454e8a2 100644
--- a/lib/utils/explain-dep.js
+++ b/lib/utils/explain-dep.js
@@ -1,101 +1,57 @@
-const chalk = require('chalk')
-const nocolor = {
- bold: s => s,
- dim: s => s,
- red: s => s,
- yellow: s => s,
- cyan: s => s,
- magenta: s => s,
- blue: s => s,
- green: s => s,
- gray: s => s,
-}
-
-const { relative } = require('path')
-
-const explainNode = (node, depth, color) =>
- printNode(node, color) +
- explainDependents(node, depth, color) +
- explainLinksIn(node, depth, color)
-
-const colorType = (type, color) => {
- const { red, yellow, cyan, magenta, blue, green, gray } = color ? chalk : nocolor
- const style = type === 'extraneous' ? red
- : type === 'dev' ? yellow
- : type === 'optional' ? cyan
- : type === 'peer' ? magenta
- : type === 'bundled' ? blue
- : type === 'workspace' ? green
- : type === 'overridden' ? gray
+const { relative } = require('node:path')
+
+const explainNode = (node, depth, chalk) =>
+ printNode(node, chalk) +
+ explainDependents(node, depth, chalk) +
+ explainLinksIn(node, depth, chalk)
+
+const colorType = (type, chalk) => {
+ const style = type === 'extraneous' ? chalk.red
+ : type === 'dev' ? chalk.blue
+ : type === 'optional' ? chalk.magenta
+ : type === 'peer' ? chalk.magentaBright
+ : type === 'bundled' ? chalk.underline.cyan
+ : type === 'workspace' ? chalk.blueBright
+ : type === 'overridden' ? chalk.dim
: /* istanbul ignore next */ s => s
return style(type)
}
-const printNode = (node, color) => {
- const {
- name,
- version,
- location,
- extraneous,
- dev,
- optional,
- peer,
- bundled,
- isWorkspace,
- overridden,
- } = node
- const { bold, dim, green } = color ? chalk : nocolor
+const printNode = (node, chalk) => {
const extra = []
- if (extraneous) {
- extra.push(' ' + bold(colorType('extraneous', color)))
- }
-
- if (dev) {
- extra.push(' ' + bold(colorType('dev', color)))
- }
-
- if (optional) {
- extra.push(' ' + bold(colorType('optional', color)))
- }
- if (peer) {
- extra.push(' ' + bold(colorType('peer', color)))
- }
-
- if (bundled) {
- extra.push(' ' + bold(colorType('bundled', color)))
- }
-
- if (overridden) {
- extra.push(' ' + bold(colorType('overridden', color)))
+ for (const meta of ['extraneous', 'dev', 'optional', 'peer', 'bundled', 'overridden']) {
+ if (node[meta]) {
+ extra.push(` ${colorType(meta, chalk)}`)
+ }
}
- const pkgid = isWorkspace
- ? green(`${name}@${version}`)
- : `${bold(name)}@${bold(version)}`
+ const pkgid = node.isWorkspace
+ ? chalk.blueBright(`${node.name}@${node.version}`)
+ : `${node.name}@${node.version}`
return `${pkgid}${extra.join('')}` +
- (location ? dim(`\n${location}`) : '')
+ (node.location ? chalk.dim(`\n${node.location}`) : '')
}
-const explainLinksIn = ({ linksIn }, depth, color) => {
+const explainLinksIn = ({ linksIn }, depth, chalk) => {
if (!linksIn || !linksIn.length || depth <= 0) {
return ''
}
- const messages = linksIn.map(link => explainNode(link, depth - 1, color))
+ const messages = linksIn.map(link => explainNode(link, depth - 1, chalk))
const str = '\n' + messages.join('\n')
return str.split('\n').join('\n ')
}
-const explainDependents = ({ name, dependents }, depth, color) => {
+const explainDependents = ({ dependents }, depth, chalk) => {
if (!dependents || !dependents.length || depth <= 0) {
return ''
}
const max = Math.ceil(depth / 2)
const messages = dependents.slice(0, max)
- .map(edge => explainEdge(edge, depth, color))
+ .map(edge => explainEdge(edge, depth, chalk))
// show just the names of the first 5 deps that overflowed the list
if (dependents.length > max) {
@@ -119,30 +75,29 @@ const explainDependents = ({ name, dependents }, depth, color) => {
return str.split('\n').join('\n ')
}
-const explainEdge = ({ name, type, bundled, from, spec, rawSpec, overridden }, depth, color) => {
- const { bold } = color ? chalk : nocolor
+const explainEdge = ({ name, type, bundled, from, spec, rawSpec, overridden }, depth, chalk) => {
let dep = type === 'workspace'
- ? bold(relative(from.location, spec.slice('file:'.length)))
- : `${bold(name)}@"${bold(spec)}"`
+ ? chalk.bold(relative(from.location, spec.slice('file:'.length)))
+ : `${name}@"${spec}"`
if (overridden) {
- dep = `${colorType('overridden', color)} ${dep} (was "${rawSpec}")`
+ dep = `${colorType('overridden', chalk)} ${dep} (was "${rawSpec}")`
}
- const fromMsg = ` from ${explainFrom(from, depth, color)}`
+ const fromMsg = ` from ${explainFrom(from, depth, chalk)}`
- return (type === 'prod' ? '' : `${colorType(type, color)} `) +
- (bundled ? `${colorType('bundled', color)} ` : '') +
+ return (type === 'prod' ? '' : `${colorType(type, chalk)} `) +
+ (bundled ? `${colorType('bundled', chalk)} ` : '') +
`${dep}${fromMsg}`
}
-const explainFrom = (from, depth, color) => {
+const explainFrom = (from, depth, chalk) => {
if (!from.name && !from.version) {
return 'the root project'
}
- return printNode(from, color) +
- explainDependents(from, depth - 1, color) +
- explainLinksIn(from, depth - 1, color)
+ return printNode(from, chalk) +
+ explainDependents(from, depth - 1, chalk) +
+ explainLinksIn(from, depth - 1, chalk)
}
module.exports = { explainNode, printNode, explainEdge }
diff --git a/lib/utils/explain-eresolve.js b/lib/utils/explain-eresolve.js
index 480cd8e5cd4e6..f3c6ae23a479d 100644
--- a/lib/utils/explain-eresolve.js
+++ b/lib/utils/explain-eresolve.js
@@ -7,7 +7,7 @@ const { explainEdge, explainNode, printNode } = require('./explain-dep.js')
// Depth is how far we want to want to descend into the object making a report.
// The full report (ie, depth=Infinity) is always written to the cache folder
// at ${cache}/eresolve-report.txt along with full json.
-const explain = (expl, color, depth) => {
+const explain = (expl, chalk, depth) => {
const { edge, dep, current, peerConflict, currentEdge } = expl
const out = []
@@ -15,28 +15,28 @@ const explain = (expl, color, depth) => {
current && current.whileInstalling ||
edge && edge.from && edge.from.whileInstalling
if (whileInstalling) {
- out.push('While resolving: ' + printNode(whileInstalling, color))
+ out.push('While resolving: ' + printNode(whileInstalling, chalk))
}
// it "should" be impossible for an ERESOLVE explanation to lack both
// current and currentEdge, but better to have a less helpful error
// than a crashing failure.
if (current) {
- out.push('Found: ' + explainNode(current, depth, color))
+ out.push('Found: ' + explainNode(current, depth, chalk))
} else if (peerConflict && peerConflict.current) {
- out.push('Found: ' + explainNode(peerConflict.current, depth, color))
+ out.push('Found: ' + explainNode(peerConflict.current, depth, chalk))
} else if (currentEdge) {
- out.push('Found: ' + explainEdge(currentEdge, depth, color))
+ out.push('Found: ' + explainEdge(currentEdge, depth, chalk))
} else /* istanbul ignore else - should always have one */ if (edge) {
- out.push('Found: ' + explainEdge(edge, depth, color))
+ out.push('Found: ' + explainEdge(edge, depth, chalk))
}
out.push('\nCould not resolve dependency:\n' +
- explainEdge(edge, depth, color))
+ explainEdge(edge, depth, chalk))
if (peerConflict) {
const heading = '\nConflicting peer dependency:'
- const pc = explainNode(peerConflict.peer, depth, color)
+ const pc = explainNode(peerConflict.peer, depth, chalk)
out.push(heading + ' ' + pc)
}
@@ -44,7 +44,7 @@ const explain = (expl, color, depth) => {
}
// generate a full verbose report and tell the user how to fix it
-const report = (expl, color) => {
+const report = (expl, chalk, noColorChalk) => {
const flags = [
expl.strictPeerDeps ? '--no-strict-peer-deps' : '',
'--force',
@@ -60,8 +60,8 @@ this command with ${or(flags)}
to accept an incorrect (and potentially broken) dependency resolution.`
return {
- explanation: `${explain(expl, color, 4)}\n\n${fix}`,
- file: `# npm resolution error report\n\n${explain(expl, false, Infinity)}\n\n${fix}`,
+ explanation: `${explain(expl, chalk, 4)}\n\n${fix}`,
+ file: `# npm resolution error report\n\n${explain(expl, noColorChalk, Infinity)}\n\n${fix}`,
}
}
diff --git a/lib/utils/format-search-stream.js b/lib/utils/format-search-stream.js
index 2a2dadd5c3434..b70bd915123da 100644
--- a/lib/utils/format-search-stream.js
+++ b/lib/utils/format-search-stream.js
@@ -1,5 +1,6 @@
-const Minipass = require('minipass')
-const columnify = require('columnify')
+/* eslint-disable max-len */
+const { stripVTControlCharacters: strip } = require('node:util')
+const { Minipass } = require('minipass')
// This module consumes package data in the following format:
//
@@ -15,14 +16,48 @@ const columnify = require('columnify')
// The returned stream will format this package data
// into a byte stream of formatted, displayable output.
+function filter (data, exclude) {
+ const words = [data.name]
+ .concat(data.maintainers.map(m => m.username))
+ .concat(data.keywords || [])
+ .map(f => f?.trim?.())
+ .filter(Boolean)
+ .join(' ')
+ .toLowerCase()
+
+ if (exclude.find(pattern => {
+ // Treats both /foo and /foo/ as regex searches
+ if (pattern.startsWith('/')) {
+ if (pattern.endsWith('/')) {
+ pattern = pattern.slice(0, -1)
+ }
+ return words.match(new RegExp(pattern.slice(1)))
+ }
+ return words.includes(pattern)
+ })) {
+ return false
+ }
+
+ return true
+}
+
module.exports = (opts) => {
- return opts.json ? new JSONOutputStream() : new TextOutputStream(opts)
+ return opts.json ? new JSONOutputStream(opts) : new TextOutputStream(opts)
}
class JSONOutputStream extends Minipass {
#didFirst = false
+ #exclude
+
+ constructor (opts) {
+ super()
+ this.#exclude = opts.exclude
+ }
write (obj) {
+ if (!filter(obj, this.#exclude)) {
+ return
+ }
if (!this.#didFirst) {
super.write('[\n')
this.#didFirst = true
@@ -40,121 +75,100 @@ class JSONOutputStream extends Minipass {
}
class TextOutputStream extends Minipass {
+ #args
+ #chalk
+ #exclude
+ #parseable
+
constructor (opts) {
super()
- this._opts = opts
- this._line = 0
+ this.#args = opts.args.map(s => s.toLowerCase()).filter(Boolean)
+ this.#chalk = opts.npm.chalk
+ this.#exclude = opts.exclude
+ this.#parseable = opts.parseable
}
- write (pkg) {
- return super.write(prettify(pkg, ++this._line, this._opts))
- }
-}
-
-function prettify (data, num, opts) {
- var truncate = !opts.long
-
- var pkg = normalizePackage(data, opts)
-
- var columns = ['name', 'description', 'author', 'date', 'version', 'keywords']
-
- if (opts.parseable) {
- return columns.map(function (col) {
- return pkg[col] && ('' + pkg[col]).replace(/\t/g, ' ')
- }).join('\t')
- }
-
- // stdout in tap is never a tty
- /* istanbul ignore next */
- const maxWidth = process.stdout.isTTY ? process.stdout.getWindowSize()[0] : Infinity
- let output = columnify(
- [pkg],
- {
- include: columns,
- showHeaders: num <= 1,
- columnSplitter: ' | ',
- truncate: truncate,
- config: {
- name: { minWidth: 25, maxWidth: 25, truncate: false, truncateMarker: '' },
- description: { minWidth: 20, maxWidth: 20 },
- author: { minWidth: 15, maxWidth: 15 },
- date: { maxWidth: 11 },
- version: { minWidth: 8, maxWidth: 8 },
- keywords: { maxWidth: Infinity },
- },
+ write (data) {
+ if (!filter(data, this.#exclude)) {
+ return
+ }
+ // Normalize
+ const pkg = {
+ authors: data.maintainers.map((m) => `${strip(m.username)}`).join(' '),
+ publisher: strip(data.publisher?.username || ''),
+ date: data.date ? data.date.toISOString().slice(0, 10) : 'prehistoric',
+ description: strip(data.description ?? ''),
+ keywords: [],
+ name: strip(data.name),
+ version: data.version,
+ }
+ if (Array.isArray(data.keywords)) {
+ pkg.keywords = data.keywords.map(strip)
+ } else if (typeof data.keywords === 'string') {
+ pkg.keywords = strip(data.keywords.replace(/[,\s]+/, ' ')).split(' ')
}
- ).split('\n').map(line => line.slice(0, maxWidth)).join('\n')
-
- if (opts.color) {
- output = highlightSearchTerms(output, opts.args)
- }
-
- return output
-}
-
-var colors = [31, 33, 32, 36, 34, 35]
-var cl = colors.length
-
-function addColorMarker (str, arg, i) {
- var m = i % cl + 1
- var markStart = String.fromCharCode(m)
- var markEnd = String.fromCharCode(0)
-
- if (arg.charAt(0) === '/') {
- return str.replace(
- new RegExp(arg.slice(1, -1), 'gi'),
- bit => markStart + bit + markEnd
- )
- }
-
- // just a normal string, do the split/map thing
- var pieces = str.toLowerCase().split(arg.toLowerCase())
- var p = 0
-
- return pieces.map(function (piece) {
- piece = str.slice(p, p + piece.length)
- var mark = markStart +
- str.slice(p + piece.length, p + piece.length + arg.length) +
- markEnd
- p += piece.length + arg.length
- return piece + mark
- }).join('')
-}
-
-function colorize (line) {
- for (var i = 0; i < cl; i++) {
- var m = i + 1
- var color = '\u001B[' + colors[i] + 'm'
- line = line.split(String.fromCharCode(m)).join(color)
- }
- var uncolor = '\u001B[0m'
- return line.split('\u0000').join(uncolor)
-}
-function highlightSearchTerms (str, terms) {
- terms.forEach(function (arg, i) {
- str = addColorMarker(str, arg, i)
- })
+ let output
+ if (this.#parseable) {
+ output = [pkg.name, pkg.description, pkg.author, pkg.date, pkg.version, pkg.keywords]
+ .filter(Boolean)
+ .map(col => ('' + col).replace(/\t/g, ' ')).join('\t')
+ return super.write(output)
+ }
- return colorize(str).trim()
-}
+ const keywords = pkg.keywords.map(k => {
+ if (this.#args.includes(k)) {
+ return this.#chalk.cyan(k)
+ } else {
+ return k
+ }
+ }).join(' ')
+
+ let description = []
+ for (const arg of this.#args) {
+ const finder = pkg.description.toLowerCase().split(arg.toLowerCase())
+ let p = 0
+ for (const f of finder) {
+ description.push(pkg.description.slice(p, p + f.length))
+ const word = pkg.description.slice(p + f.length, p + f.length + arg.length)
+ description.push(this.#chalk.cyan(word))
+ p += f.length + arg.length
+ }
+ }
+ description = description.filter(Boolean)
+ let name = pkg.name
+ if (this.#args.includes(pkg.name)) {
+ name = this.#chalk.cyan(pkg.name)
+ } else {
+ name = []
+ for (const arg of this.#args) {
+ const finder = pkg.name.toLowerCase().split(arg.toLowerCase())
+ let p = 0
+ for (const f of finder) {
+ name.push(pkg.name.slice(p, p + f.length))
+ const word = pkg.name.slice(p + f.length, p + f.length + arg.length)
+ name.push(this.#chalk.cyan(word))
+ p += f.length + arg.length
+ }
+ }
+ name = this.#chalk.blue(name.join(''))
+ }
-function normalizePackage (data, opts) {
- return {
- name: data.name,
- description: data.description,
- author: data.maintainers.map((m) => `=${m.username}`).join(' '),
- keywords: Array.isArray(data.keywords)
- ? data.keywords.join(' ')
- : typeof data.keywords === 'string'
- ? data.keywords.replace(/[,\s]+/, ' ')
- : '',
- version: data.version,
- date: (data.date &&
- (data.date.toISOString() // remove time
- .split('T').join(' ')
- .replace(/:[0-9]{2}\.[0-9]{3}Z$/, ''))
- .slice(0, -5)) ||
- 'prehistoric',
+ if (description.length) {
+ output = `${name}\n${description.join('')}\n`
+ } else {
+ output = `${name}\n`
+ }
+ if (pkg.publisher) {
+ output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.blue(pkg.publisher)}\n`
+ } else {
+ output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.yellow('???')}\n`
+ }
+ output += `Maintainers: ${pkg.authors}\n`
+ if (keywords) {
+ output += `Keywords: ${keywords}\n`
+ }
+ output += `${this.#chalk.blue(`https://npm.im/${pkg.name}`)}\n`
+ return super.write(output)
}
}
diff --git a/lib/utils/format.js b/lib/utils/format.js
new file mode 100644
index 0000000000000..aaecfe1ba0e7a
--- /dev/null
+++ b/lib/utils/format.js
@@ -0,0 +1,50 @@
+const { formatWithOptions: baseFormatWithOptions } = require('node:util')
+
+// These are most assuredly not a mistake
+// https://eslint.org/docs/latest/rules/no-control-regex
+// \x00 through \x1f, \x7f through \x9f, not including \x09 \x0a \x0b \x0d
+/* eslint-disable-next-line no-control-regex */
+const HAS_C01 = /[\x00-\x08\x0c\x0e-\x1f\x7f-\x9f]/
+
+// Allows everything up to '[38;5;255m' in 8 bit notation
+const ALLOWED_SGR = /^\[[0-9;]{0,8}m/
+
+// '[38;5;255m'.length
+const SGR_MAX_LEN = 10
+
+// Strips all ANSI C0 and C1 control characters (except for SGR up to 8 bit)
+function STRIP_C01 (str) {
+ if (!HAS_C01.test(str)) {
+ return str
+ }
+ let result = ''
+ for (let i = 0; i < str.length; i++) {
+ const char = str[i]
+ const code = char.charCodeAt(0)
+ if (!HAS_C01.test(char)) {
+ // Most characters are in this set so continue early if we can
+ result = `${result}${char}`
+ } else if (code === 27 && ALLOWED_SGR.test(str.slice(i + 1, i + SGR_MAX_LEN + 1))) {
+ // \x1b with allowed SGR
+ result = `${result}\x1b`
+ } else if (code <= 31) {
+ // escape all other C0 control characters besides \x7f
+ result = `${result}^${String.fromCharCode(code + 64)}`
+ } else {
+ // hasC01 ensures this is now a C1 control character or \x7f
+ result = `${result}^${String.fromCharCode(code - 64)}`
+ }
+ }
+ return result
+}
+
+const formatWithOptions = ({ prefix: prefixes = [], eol = '\n', ...options }, ...args) => {
+ const prefix = prefixes.filter(p => p != null).join(' ')
+ const formatted = STRIP_C01(baseFormatWithOptions(options, ...args))
+ // Splitting could be changed to only `\n` once we are sure we only emit unix newlines.
+ // The eol param to this function will put the correct newlines in place for the returned string.
+ const lines = formatted.split(/\r?\n/)
+ return lines.reduce((acc, l) => `${acc}${prefix}${prefix && l ? ' ' : ''}${l}${eol}`, '')
+}
+
+module.exports = { formatWithOptions }
diff --git a/lib/utils/get-workspaces.js b/lib/utils/get-workspaces.js
new file mode 100644
index 0000000000000..48c26779bb137
--- /dev/null
+++ b/lib/utils/get-workspaces.js
@@ -0,0 +1,54 @@
+const { resolve, relative } = require('node:path')
+const mapWorkspaces = require('@npmcli/map-workspaces')
+const { minimatch } = require('minimatch')
+const pkgJson = require('@npmcli/package-json')
+
+// minimatch wants forward slashes only for glob patterns
+const globify = pattern => pattern.split('\\').join('/')
+
+// Returns a Map of paths to workspaces indexed by workspace name
+// { foo => '/path/to/foo' }
+const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom }) => {
+ // TODO we need a better error to be bubbled up here if this call fails
+ const { content: pkg } = await pkgJson.normalize(path)
+ const workspaces = await mapWorkspaces({ cwd: path, pkg })
+ let res = new Map()
+ if (includeWorkspaceRoot) {
+ res.set(pkg.name, path)
+ }
+
+ if (!filters.length) {
+ res = new Map([...res, ...workspaces])
+ }
+
+ for (const filterArg of filters) {
+ for (const [workspaceName, workspacePath] of workspaces.entries()) {
+ let relativePath = relative(relativeFrom, workspacePath)
+ if (filterArg.startsWith('./')) {
+ relativePath = `./${relativePath}`
+ }
+ const relativeFilter = relative(path, filterArg)
+ if (filterArg === workspaceName
+ || resolve(relativeFrom, filterArg) === workspacePath
+ || minimatch(relativePath, `${globify(relativeFilter)}/*`)
+ || minimatch(relativePath, `${globify(filterArg)}/*`)
+ ) {
+ res.set(workspaceName, workspacePath)
+ }
+ }
+ }
+
+ if (!res.size) {
+ let msg = '!'
+ if (filters.length) {
+ msg = `:\n ${filters.reduce(
+ (acc, filterArg) => `${acc} --workspace=${filterArg}`, '')}`
+ }
+
+ throw new Error(`No workspaces found${msg}`)
+ }
+
+ return res
+}
+
+module.exports = getWorkspaces
diff --git a/lib/utils/installed-deep.js b/lib/utils/installed-deep.js
new file mode 100644
index 0000000000000..3c56c5d036f25
--- /dev/null
+++ b/lib/utils/installed-deep.js
@@ -0,0 +1,45 @@
+const { resolve } = require('node:path')
+const localeCompare = require('@isaacs/string-locale-compare')('en')
+
+const installedDeep = async (npm) => {
+ const Arborist = require('@npmcli/arborist')
+ const {
+ depth,
+ global,
+ prefix,
+ workspacesEnabled,
+ } = npm.flatOptions
+
+ const getValues = (tree) =>
+ [...tree.inventory.values()]
+ .filter(i => i.location !== '' && !i.isRoot)
+ .map(i => {
+ return i
+ })
+ .filter(i => (i.depth - 1) <= depth)
+ .sort((a, b) => (a.depth - b.depth) || localeCompare(a.name, b.name))
+
+ const res = new Set()
+ const gArb = new Arborist({
+ global: true,
+ path: resolve(npm.globalDir, '..'),
+ workspacesEnabled,
+ })
+ const gTree = await gArb.loadActual({ global: true })
+
+ for (const node of getValues(gTree)) {
+ res.add(global ? node.name : [node.name, '-g'])
+ }
+
+ if (!global) {
+ const arb = new Arborist({ global: false, path: prefix, workspacesEnabled })
+ const tree = await arb.loadActual()
+ for (const node of getValues(tree)) {
+ res.add(node.name)
+ }
+ }
+
+ return [...res]
+}
+
+module.exports = installedDeep
diff --git a/lib/utils/completion/installed-shallow.js b/lib/utils/installed-shallow.js
similarity index 100%
rename from lib/utils/completion/installed-shallow.js
rename to lib/utils/installed-shallow.js
diff --git a/lib/utils/is-windows.js b/lib/utils/is-windows.js
index 57f6599b6ae19..63c5671d8400e 100644
--- a/lib/utils/is-windows.js
+++ b/lib/utils/is-windows.js
@@ -1,6 +1,4 @@
-const isWindows = process.platform === 'win32'
-const isWindowsShell = isWindows &&
+const isWindowsShell = (process.platform === 'win32') &&
!/^MINGW(32|64)$/.test(process.env.MSYSTEM) && process.env.TERM !== 'cygwin'
-exports.isWindows = isWindows
exports.isWindowsShell = isWindowsShell
diff --git a/lib/utils/log-file.js b/lib/utils/log-file.js
index f663997308ed6..6c9bcd7ff8d86 100644
--- a/lib/utils/log-file.js
+++ b/lib/utils/log-file.js
@@ -1,19 +1,16 @@
-const os = require('os')
-const { join, dirname, basename } = require('path')
-const { format, promisify } = require('util')
-const glob = promisify(require('glob'))
-const MiniPass = require('minipass')
+const os = require('node:os')
+const { join, dirname, basename } = require('node:path')
const fsMiniPass = require('fs-minipass')
-const fs = require('fs/promises')
-const log = require('./log-shim')
+const fs = require('node:fs/promises')
+const { log } = require('proc-log')
+const { formatWithOptions } = require('./format')
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
-const globify = pattern => pattern.split('\\').join('/')
class LogFiles {
- // Default to a plain minipass stream so we can buffer
+ // Default to an array so we can buffer
// initial writes before we know the cache location
- #logStream = null
+ #logStream = []
// We cap log files at a certain number of log events per file.
// Note that each log event can write more than one line to the
@@ -31,6 +28,7 @@ class LogFiles {
#path = null
#logsMax = null
#files = []
+ #timing = false
constructor ({
maxLogsPerFile = 50_000,
@@ -41,22 +39,7 @@ class LogFiles {
this.on()
}
- static format (count, level, title, ...args) {
- let prefix = `${count} ${level}`
- if (title) {
- prefix += ` ${title}`
- }
-
- return format(...args)
- .split(/\r?\n/)
- .reduce((lines, line) =>
- lines += prefix + (line ? ' ' : '') + line + os.EOL,
- ''
- )
- }
-
on () {
- this.#logStream = new MiniPass()
process.on('log', this.#logHandler)
}
@@ -65,11 +48,16 @@ class LogFiles {
this.#endStream()
}
- load ({ path, logsMax = Infinity } = {}) {
+ load ({ command, path, logsMax = Infinity, timing } = {}) {
+ if (['completion'].includes(command)) {
+ return
+ }
+
// dir is user configurable and is required to exist so
// this can error if the dir is missing or not configured correctly
this.#path = path
this.#logsMax = logsMax
+ this.#timing = timing
// Log stream has already ended
if (!this.#logStream) {
@@ -78,36 +66,40 @@ class LogFiles {
log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)
- // Pipe our initial stream to our new file stream and
+ // Write the contents of our array buffer to our new file stream and
// set that as the new log logstream for future writes
// if logs max is 0 then the user does not want a log file
if (this.#logsMax > 0) {
const initialFile = this.#openLogFile()
if (initialFile) {
- this.#logStream = this.#logStream.pipe(initialFile)
+ for (const item of this.#logStream) {
+ const formatted = this.#formatLogItem(...item)
+ if (formatted !== null) {
+ initialFile.write(formatted)
+ }
+ }
+ this.#logStream = initialFile
}
}
+ log.verbose('logfile', this.files[0] || 'no logfile created')
+
// Kickoff cleaning process, even if we aren't writing a logfile.
// This is async but it will always ignore the current logfile
// Return the result so it can be awaited in tests
return this.#cleanLogs()
}
- log (...args) {
- this.#logHandler(...args)
- }
-
get files () {
return this.#files
}
get #isBuffered () {
- return this.#logStream instanceof MiniPass
+ return Array.isArray(this.#logStream)
}
#endStream (output) {
- if (this.#logStream) {
+ if (this.#logStream && !this.#isBuffered) {
this.#logStream.end(output)
this.#logStream = null
}
@@ -125,12 +117,15 @@ class LogFiles {
return
}
- const logOutput = this.#formatLogItem(level, ...args)
-
if (this.#isBuffered) {
// Cant do anything but buffer the output if we dont
// have a file stream yet
- this.#logStream.write(logOutput)
+ this.#logStream.push([level, ...args])
+ return
+ }
+
+ const logOutput = this.#formatLogItem(level, ...args)
+ if (logOutput === null) {
return
}
@@ -150,9 +145,15 @@ class LogFiles {
}
}
- #formatLogItem (...args) {
+ #formatLogItem (level, title, ...args) {
+ // Only write timing logs to logfile if explicitly requested
+ if (level === log.KEYS.timing && !this.#timing) {
+ return null
+ }
+
this.#fileLogCount += 1
- return LogFiles.format(this.#totalLogCount++, ...args)
+ const prefix = [this.#totalLogCount++, level, title || null]
+ return formatWithOptions({ prefix, eol: os.EOL, colors: false }, ...args)
}
#getLogFilePath (count = '') {
@@ -197,17 +198,41 @@ class LogFiles {
try {
const logPath = this.#getLogFilePath()
- const logGlob = join(dirname(logPath), basename(logPath)
+ const patternFileName = basename(logPath)
// tell glob to only match digits
- .replace(/\d/g, '[0123456789]')
+ .replace(/\d/g, 'd')
// Handle the old (prior to 8.2.0) log file names which did not have a
// counter suffix
- .replace(/-\.log$/, '*.log')
- )
+ .replace('-.log', '')
+
+ let files = await fs.readdir(
+ dirname(logPath), {
+ withFileTypes: true,
+ encoding: 'utf-8',
+ })
+ files = files.sort((a, b) => basename(a.name).localeCompare(basename(b.name), 'en'))
+
+ const logFiles = []
- // Always ignore the currently written files
- const files = await glob(globify(logGlob), { ignore: this.#files.map(globify), silent: true })
- const toDelete = files.length - this.#logsMax
+ for (const file of files) {
+ if (!file.isFile()) {
+ continue
+ }
+
+ const genericFileName = file.name.replace(/\d/g, 'd')
+ const filePath = join(dirname(logPath), basename(file.name))
+
+ // Always ignore the currently written files
+ if (
+ genericFileName.includes(patternFileName)
+ && genericFileName.endsWith('.log')
+ && !this.#files.includes(filePath)
+ ) {
+ logFiles.push(filePath)
+ }
+ }
+
+ const toDelete = logFiles.length - this.#logsMax
if (toDelete <= 0) {
return
@@ -215,7 +240,7 @@ class LogFiles {
log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)
- for (const file of files.slice(0, toDelete)) {
+ for (const file of logFiles.slice(0, toDelete)) {
try {
await fs.rm(file, { force: true })
} catch (e) {
@@ -223,7 +248,10 @@ class LogFiles {
}
}
} catch (e) {
- log.warn('logfile', 'error cleaning log files', e)
+ // Disable cleanup failure warnings when log writing is disabled
+ if (this.#logsMax > 0) {
+ log.verbose('logfile', 'error cleaning log files', e)
+ }
} finally {
log.silly('logfile', 'done cleaning log files')
}
diff --git a/lib/utils/log-shim.js b/lib/utils/log-shim.js
deleted file mode 100644
index 9d5a36d967413..0000000000000
--- a/lib/utils/log-shim.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const NPMLOG = require('npmlog')
-const PROCLOG = require('proc-log')
-
-// Sets getter and optionally a setter
-// otherwise setting should throw
-const accessors = (obj, set) => (k) => ({
- get: () => obj[k],
- set: set ? (v) => (obj[k] = v) : () => {
- throw new Error(`Cant set ${k}`)
- },
-})
-
-// Set the value to a bound function on the object
-const value = (obj) => (k) => ({
- value: (...args) => obj[k].apply(obj, args),
-})
-
-const properties = {
- // npmlog getters/setters
- level: accessors(NPMLOG, true),
- heading: accessors(NPMLOG, true),
- levels: accessors(NPMLOG),
- gauge: accessors(NPMLOG),
- stream: accessors(NPMLOG),
- tracker: accessors(NPMLOG),
- progressEnabled: accessors(NPMLOG),
- // npmlog methods
- useColor: value(NPMLOG),
- enableColor: value(NPMLOG),
- disableColor: value(NPMLOG),
- enableUnicode: value(NPMLOG),
- disableUnicode: value(NPMLOG),
- enableProgress: value(NPMLOG),
- disableProgress: value(NPMLOG),
- clearProgress: value(NPMLOG),
- showProgress: value(NPMLOG),
- newItem: value(NPMLOG),
- newGroup: value(NPMLOG),
- // proclog methods
- notice: value(PROCLOG),
- error: value(PROCLOG),
- warn: value(PROCLOG),
- info: value(PROCLOG),
- verbose: value(PROCLOG),
- http: value(PROCLOG),
- silly: value(PROCLOG),
- pause: value(PROCLOG),
- resume: value(PROCLOG),
-}
-
-const descriptors = Object.entries(properties).reduce((acc, [k, v]) => {
- acc[k] = { enumerable: true, ...v(k) }
- return acc
-}, {})
-
-// Create an object with the allowed properties rom npm log and all
-// the logging methods from proc log
-// XXX: this should go away and requires of this should be replaced with proc-log + new display
-module.exports = Object.freeze(Object.defineProperties({}, descriptors))
diff --git a/lib/utils/npm-usage.js b/lib/utils/npm-usage.js
index b04ad33f9dd79..1bd790ca601bc 100644
--- a/lib/utils/npm-usage.js
+++ b/lib/utils/npm-usage.js
@@ -8,9 +8,9 @@ const INDENT = 4
const indent = (repeat = INDENT) => ' '.repeat(repeat)
const indentNewline = (repeat) => `\n${indent(repeat)}`
-module.exports = async (npm) => {
+module.exports = (npm) => {
const browser = npm.config.get('viewer') === 'browser' ? ' (in a browser)' : ''
- const allCommands = npm.config.get('long') ? await cmdUsages(npm) : cmdNames()
+ const allCommands = npm.config.get('long') ? cmdUsages(npm.constructor) : cmdNames()
return `npm
@@ -57,13 +57,12 @@ const cmdNames = () => {
return indentNewline() + out.join(indentNewline()).slice(2)
}
-const cmdUsages = async (npm) => {
+const cmdUsages = (Npm) => {
// return a string of :
let maxLen = 0
const set = []
for (const c of commands) {
- const { usage } = await npm.cmd(c)
- set.push([c, usage.split('\n')])
+ set.push([c, Npm.cmd(c).describeUsage.split('\n')])
maxLen = Math.max(maxLen, c.length)
}
diff --git a/lib/utils/open-url-prompt.js b/lib/utils/open-url-prompt.js
deleted file mode 100644
index df0c9709c0774..0000000000000
--- a/lib/utils/open-url-prompt.js
+++ /dev/null
@@ -1,70 +0,0 @@
-const readline = require('readline')
-const promiseSpawn = require('@npmcli/promise-spawn')
-
-function print (npm, title, url) {
- const json = npm.config.get('json')
-
- const message = json ? JSON.stringify({ title, url }) : `${title}:\n${url}`
-
- npm.output(message)
-}
-
-// Prompt to open URL in browser if possible
-const promptOpen = async (npm, url, title, prompt, emitter) => {
- const browser = npm.config.get('browser')
- const isInteractive = process.stdin.isTTY === true && process.stdout.isTTY === true
-
- try {
- if (!/^https?:$/.test(new URL(url).protocol)) {
- throw new Error()
- }
- } catch (_) {
- throw new Error('Invalid URL: ' + url)
- }
-
- print(npm, title, url)
-
- if (browser === false || !isInteractive) {
- return
- }
-
- const rl = readline.createInterface({
- input: process.stdin,
- output: process.stdout,
- })
-
- const tryOpen = await new Promise(resolve => {
- rl.on('SIGINT', () => {
- rl.close()
- resolve('SIGINT')
- })
-
- rl.question(prompt, () => {
- resolve(true)
- })
-
- if (emitter && emitter.addListener) {
- emitter.addListener('abort', () => {
- rl.close()
-
- // clear the prompt line
- npm.output('')
-
- resolve(false)
- })
- }
- })
-
- if (tryOpen === 'SIGINT') {
- throw new Error('canceled')
- }
-
- if (!tryOpen) {
- return
- }
-
- const command = browser === true ? null : browser
- await promiseSpawn.open(url, { command })
-}
-
-module.exports = promptOpen
diff --git a/lib/utils/open-url.js b/lib/utils/open-url.js
index f882d0c9d3934..632dcc79949d6 100644
--- a/lib/utils/open-url.js
+++ b/lib/utils/open-url.js
@@ -1,50 +1,98 @@
-const promiseSpawn = require('@npmcli/promise-spawn')
+const { open } = require('@npmcli/promise-spawn')
+const { output, input } = require('proc-log')
+const { URL } = require('node:url')
+const readline = require('node:readline/promises')
+const { once } = require('node:events')
-const { URL } = require('url')
+const assertValidUrl = (url) => {
+ try {
+ if (!/^https?:$/.test(new URL(url).protocol)) {
+ throw new Error()
+ }
+ } catch {
+ throw new Error('Invalid URL: ' + url)
+ }
+}
+
+const outputMsg = (json, title, url) => {
+ if (json) {
+ output.buffer({ title, url })
+ } else {
+ output.standard(`${title}:\n${url}`)
+ }
+}
// attempt to open URL in web-browser, print address otherwise:
-const open = async (npm, url, errMsg, isFile) => {
+const openUrl = async (npm, url, title, isFile) => {
url = encodeURI(url)
const browser = npm.config.get('browser')
-
- function printAlternateMsg () {
- const json = npm.config.get('json')
- const alternateMsg = json
- ? JSON.stringify({
- title: errMsg,
- url,
- }, null, 2)
- : `${errMsg}:\n ${url}\n`
-
- npm.output(alternateMsg)
- }
+ const json = npm.config.get('json')
if (browser === false) {
- printAlternateMsg()
+ outputMsg(json, title, url)
return
}
// We pass this in as true from the help command so we know we don't have to
// check the protocol
if (!isFile) {
- try {
- if (!/^https?:$/.test(new URL(url).protocol)) {
- throw new Error()
- }
- } catch {
- throw new Error('Invalid URL: ' + url)
+ assertValidUrl(url)
+ }
+
+ try {
+ await input.start(() => open(url, {
+ command: browser === true ? null : browser,
+ }))
+ } catch (err) {
+ if (err.code !== 127) {
+ throw err
}
+ outputMsg(json, title, url)
}
+}
+
+// Prompt to open URL in browser if possible
+const openUrlPrompt = async (npm, url, title, prompt, { signal }) => {
+ const browser = npm.config.get('browser')
+ const json = npm.config.get('json')
- const command = browser === true ? null : browser
- await promiseSpawn.open(url, { command })
- .catch((err) => {
- if (err.code !== 'ENOENT') {
- throw err
- }
+ assertValidUrl(url)
+ outputMsg(json, title, url)
+
+ if (browser === false || !process.stdin.isTTY || !process.stdout.isTTY) {
+ return
+ }
- printAlternateMsg()
- })
+ const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout,
+ })
+
+ try {
+ await input.read(() => Promise.race([
+ rl.question(prompt, { signal }),
+ once(rl, 'error'),
+ once(rl, 'SIGINT').then(() => {
+ throw new Error('canceled')
+ }),
+ ]))
+ rl.close()
+ await openUrl(npm, url, 'Browser unavailable. Please open the URL manually')
+ } catch (err) {
+ rl.close()
+ if (err.name !== 'AbortError') {
+ throw err
+ }
+ }
}
-module.exports = open
+// Rearrange arguments and return a function that takes the two arguments
+// returned from the npm-profile methods that take an opener
+const createOpener = (npm, title, prompt = 'Press ENTER to open in the browser...') =>
+ (url, opts) => openUrlPrompt(npm, url, title, prompt, opts)
+
+module.exports = {
+ openUrl,
+ openUrlPrompt,
+ createOpener,
+}
diff --git a/lib/utils/otplease.js b/lib/utils/otplease.js
deleted file mode 100644
index b4aa167469255..0000000000000
--- a/lib/utils/otplease.js
+++ /dev/null
@@ -1,48 +0,0 @@
-const log = require('./log-shim')
-async function otplease (npm, opts, fn) {
- try {
- return await fn(opts)
- } catch (err) {
- if (!process.stdin.isTTY || !process.stdout.isTTY) {
- throw err
- }
-
- if (isWebOTP(err)) {
- log.disableProgress()
- const webAuth = require('./web-auth')
- const openUrlPrompt = require('./open-url-prompt')
-
- const openerPromise = (url, emitter) =>
- openUrlPrompt(
- npm,
- url,
- 'Authenticate your account at',
- 'Press ENTER to open in the browser...',
- emitter
- )
- const otp = await webAuth(openerPromise, err.body.authUrl, err.body.doneUrl, opts)
- return await fn({ ...opts, otp })
- }
-
- if (isClassicOTP(err)) {
- const readUserInfo = require('./read-user-info.js')
- const otp = await readUserInfo.otp('This operation requires a one-time password.\nEnter OTP:')
- return await fn({ ...opts, otp })
- }
-
- throw err
- }
-}
-
-function isWebOTP (err) {
- if (err.code === 'EOTP' && err.body) {
- return err.body.authUrl && err.body.doneUrl
- }
- return false
-}
-
-function isClassicOTP (err) {
- return err.code === 'EOTP' || (err.code === 'E401' && /one-time pass/.test(err.body))
-}
-
-module.exports = otplease
diff --git a/lib/utils/output-error.js b/lib/utils/output-error.js
new file mode 100644
index 0000000000000..27128e9f03a8c
--- /dev/null
+++ b/lib/utils/output-error.js
@@ -0,0 +1,29 @@
+const { log, output } = require('proc-log')
+
+const outputError = ({ standard = [], verbose = [], error = [], summary = [], detail = [] }) => {
+ for (const line of standard) {
+ // Each output line is just a single string
+ output.standard(line)
+ }
+ for (const line of verbose) {
+ log.verbose(...line)
+ }
+ for (const line of [...error, ...summary, ...detail]) {
+ log.error(...line)
+ }
+}
+
+const jsonError = (error, npm) => {
+ if (error && npm?.loaded && npm?.config.get('json')) {
+ return {
+ code: error.code,
+ summary: (error.summary || []).map(l => l.slice(1).join(' ')).join('\n').trim(),
+ detail: (error.detail || []).map(l => l.slice(1).join(' ')).join('\n').trim(),
+ }
+ }
+}
+
+module.exports = {
+ outputError,
+ jsonError,
+}
diff --git a/lib/utils/pulse-till-done.js b/lib/utils/pulse-till-done.js
deleted file mode 100644
index 2229414147483..0000000000000
--- a/lib/utils/pulse-till-done.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const log = require('./log-shim.js')
-
-let pulseTimer = null
-const withPromise = async (promise) => {
- pulseStart()
- try {
- return await promise
- } finally {
- pulseStop()
- }
-}
-
-const pulseStart = () => {
- pulseTimer = pulseTimer || setInterval(() => {
- log.gauge.pulse('')
- }, 150)
-}
-
-const pulseStop = () => {
- clearInterval(pulseTimer)
- pulseTimer = null
-}
-
-module.exports = {
- withPromise,
-}
diff --git a/lib/utils/queryable.js b/lib/utils/queryable.js
index 6acc1758ceea7..a5fb25a845eaf 100644
--- a/lib/utils/queryable.js
+++ b/lib/utils/queryable.js
@@ -1,10 +1,10 @@
-const util = require('util')
+const util = require('node:util')
const _delete = Symbol('delete')
const _append = Symbol('append')
const sqBracketsMatcher = str => str.match(/(.+)\[([^\]]+)\]\.?(.*)$/)
-// replaces any occurence of an empty-brackets (e.g: []) with a special
+// replaces any occurrence of an empty-brackets (e.g: []) with a special
// Symbol(append) to represent it, this is going to be useful for the setter
// method that will push values to the end of the array when finding these
const replaceAppendSymbols = str => {
@@ -29,7 +29,7 @@ const parseKeys = key => {
const preSqBracketPortion = index[1]
// we want to have a `new String` wrapper here in order to differentiate
- // between multiple occurences of the same string, e.g:
+ // between multiple occurrences of the same string, e.g:
// foo.bar[foo.bar] should split into { foo: { bar: { 'foo.bar': {} } }
/* eslint-disable-next-line no-new-wrappers */
const foundKey = new String(index[2])
@@ -41,7 +41,7 @@ const parseKeys = key => {
sqBracketItems.add(foundKey)
// returns an array that contains either dot-separate items (that will
- // be splitted appart during the next step OR the fully parsed keys
+ // be split apart during the next step OR the fully parsed keys
// read from square brackets, e.g:
// foo.bar[1.0.0].a.b -> ['foo.bar', '1.0.0', 'a.b']
return [
@@ -83,7 +83,7 @@ const parseKeys = key => {
return res
}
-const getter = ({ data, key }) => {
+const getter = ({ data, key }, { unwrapSingleItemArrays = true } = {}) => {
// keys are a list in which each entry represents the name of
// a property that should be walked through the object in order to
// return the final found value
@@ -111,13 +111,9 @@ const getter = ({ data, key }) => {
}, {})
return _data
} else {
- // if can't find any more values, it means it's just over
- // and there's nothing to return
- if (!_data[k]) {
+ if (!Object.hasOwn(_data, k)) {
return undefined
}
-
- // otherwise sets the next value
_data = _data[k]
}
@@ -126,7 +122,7 @@ const getter = ({ data, key }) => {
// these are some legacy expectations from
// the old API consumed by lib/view.js
- if (Array.isArray(_data) && _data.length <= 1) {
+ if (unwrapSingleItemArrays && Array.isArray(_data) && _data.length <= 1) {
_data = _data[0]
}
@@ -142,7 +138,7 @@ const setter = ({ data, key, value, force }) => {
const keys = parseKeys(key)
const setKeys = (_data, _key) => {
// handles array indexes, converting valid integers to numbers,
- // note that occurences of Symbol(append) will throw,
+ // note that occurrences of Symbol(append) will throw,
// so we just ignore these for now
let maybeIndex = Number.NaN
try {
@@ -235,6 +231,8 @@ const setter = ({ data, key, value, force }) => {
}
class Queryable {
+ static ALL = ''
+
#data = null
constructor (obj) {
@@ -247,19 +245,19 @@ class Queryable {
this.#data = obj
}
- query (queries) {
+ query (queries, opts) {
// this ugly interface here is meant to be a compatibility layer
// with the legacy API lib/view.js is consuming, if at some point
// we refactor that command then we can revisit making this nicer
- if (queries === '') {
- return { '': this.#data }
+ if (queries === Queryable.ALL) {
+ return { [Queryable.ALL]: this.#data }
}
const q = query =>
getter({
data: this.#data,
key: query,
- })
+ }, opts)
if (Array.isArray(queries)) {
let res = {}
diff --git a/lib/utils/read-user-info.js b/lib/utils/read-user-info.js
index 26d5b36d55b58..a9a50f8263ff6 100644
--- a/lib/utils/read-user-info.js
+++ b/lib/utils/read-user-info.js
@@ -1,12 +1,6 @@
-const { promisify } = require('util')
-const readAsync = promisify(require('read'))
+const { read: _read } = require('read')
const userValidate = require('npm-user-validate')
-const log = require('./log-shim.js')
-
-exports.otp = readOTP
-exports.password = readPassword
-exports.username = readUsername
-exports.email = readEmail
+const { log, input } = require('proc-log')
const otpPrompt = `This command requires a one-time password (OTP) from your authenticator app.
Enter one below. You can also pass one on the command line by appending --otp=123456.
@@ -17,10 +11,7 @@ const passwordPrompt = 'npm password: '
const usernamePrompt = 'npm username: '
const emailPrompt = 'email (this IS public): '
-function read (opts) {
- log.clearProgress()
- return readAsync(opts).finally(() => log.showProgress())
-}
+const read = (...args) => input.read(() => _read(...args))
function readOTP (msg = otpPrompt, otp, isRetry) {
if (isRetry && otp && /^[\d ]+$|^[A-Fa-f0-9]{64,64}$/.test(otp)) {
@@ -67,3 +58,10 @@ function readEmail (msg = emailPrompt, email, isRetry) {
return read({ prompt: msg, default: email || '' })
.then((username) => readEmail(msg, username, true))
}
+
+module.exports = {
+ otp: readOTP,
+ password: readPassword,
+ username: readUsername,
+ email: readEmail,
+}
diff --git a/lib/utils/reify-finish.js b/lib/utils/reify-finish.js
index 9b43abcb7610a..410c19730cdf4 100644
--- a/lib/utils/reify-finish.js
+++ b/lib/utils/reify-finish.js
@@ -1,7 +1,7 @@
const reifyOutput = require('./reify-output.js')
const ini = require('ini')
-const { writeFile } = require('fs').promises
-const { resolve } = require('path')
+const { writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
const reifyFinish = async (npm, arb) => {
await saveBuiltinConfig(npm, arb)
diff --git a/lib/utils/reify-output.js b/lib/utils/reify-output.js
index 5ac7fa4b01896..025479f0c8e60 100644
--- a/lib/utils/reify-output.js
+++ b/lib/utils/reify-output.js
@@ -9,7 +9,7 @@
// found 37 vulnerabilities (5 low, 7 moderate, 25 high)
// run `npm audit fix` to fix them, or `npm audit` for details
-const log = require('./log-shim.js')
+const { log, output } = require('proc-log')
const { depth } = require('treeverse')
const ms = require('ms')
const npmAuditReport = require('npm-audit-report')
@@ -41,17 +41,31 @@ const reifyOutput = (npm, arb) => {
}
if (diff) {
+ const showDiff = npm.config.get('dry-run') || npm.config.get('long')
+ const chalk = npm.chalk
+
depth({
tree: diff,
visit: d => {
switch (d.action) {
case 'REMOVE':
+ if (showDiff) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${chalk.blue('remove')} ${d.actual.name} ${d.actual.package.version}`)
+ }
summary.removed++
break
case 'ADD':
+ if (showDiff) {
+ output.standard(`${chalk.green('add')} ${d.ideal.name} ${d.ideal.package.version}`)
+ }
actualTree.inventory.has(d.ideal) && summary.added++
break
case 'CHANGE':
+ if (showDiff) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${chalk.cyan('change')} ${d.actual.name} ${d.actual.package.version} => ${d.ideal.package.version}`)
+ }
summary.changed++
break
default:
@@ -76,7 +90,7 @@ const reifyOutput = (npm, arb) => {
summary.audit = npm.command === 'audit' ? auditReport
: auditReport.toJSON().metadata
}
- npm.output(JSON.stringify(summary, 0, 2))
+ output.buffer(summary)
} else {
packagesChangedMessage(npm, summary)
packagesFundingMessage(npm, summary)
@@ -95,7 +109,7 @@ const printAuditReport = (npm, report) => {
if (!res || !res.report) {
return
}
- npm.output(`\n${res.report}`)
+ output.standard(`\n${res.report}`)
}
const getAuditReport = (npm, report) => {
@@ -116,6 +130,7 @@ const getAuditReport = (npm, report) => {
reporter,
...npm.flatOptions,
auditLevel,
+ chalk: npm.chalk,
})
if (npm.command === 'audit') {
process.exitCode = process.exitCode || res.exitCode
@@ -166,7 +181,7 @@ const packagesChangedMessage = (npm, { added, removed, changed, audited }) => {
}
msg.push(` in ${ms(Date.now() - npm.started)}`)
- npm.output(msg.join(''))
+ output.standard(msg.join(''))
}
const packagesFundingMessage = (npm, { funding }) => {
@@ -174,11 +189,11 @@ const packagesFundingMessage = (npm, { funding }) => {
return
}
- npm.output('')
+ output.standard('')
const pkg = funding === 1 ? 'package' : 'packages'
const is = funding === 1 ? 'is' : 'are'
- npm.output(`${funding} ${pkg} ${is} looking for funding`)
- npm.output(' run `npm fund` for details')
+ output.standard(`${funding} ${pkg} ${is} looking for funding`)
+ output.standard(' run `npm fund` for details')
}
module.exports = reifyOutput
diff --git a/lib/utils/replace-info.js b/lib/utils/replace-info.js
deleted file mode 100644
index b9ce61935ffb7..0000000000000
--- a/lib/utils/replace-info.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const { cleanUrl } = require('npm-registry-fetch')
-const isString = (v) => typeof v === 'string'
-
-// split on \s|= similar to how nopt parses options
-const splitAndReplace = (str) => {
- // stateful regex, don't move out of this scope
- const splitChars = /[\s=]/g
-
- let match = null
- let result = ''
- let index = 0
- while (match = splitChars.exec(str)) {
- result += cleanUrl(str.slice(index, match.index)) + match[0]
- index = splitChars.lastIndex
- }
-
- return result + cleanUrl(str.slice(index))
-}
-
-// replaces auth info in an array of arguments or in a strings
-function replaceInfo (arg) {
- if (isString(arg)) {
- return splitAndReplace(arg)
- } else if (Array.isArray(arg)) {
- return arg.map((a) => isString(a) ? splitAndReplace(a) : a)
- }
-
- return arg
-}
-
-module.exports = replaceInfo
diff --git a/lib/utils/sbom-cyclonedx.js b/lib/utils/sbom-cyclonedx.js
new file mode 100644
index 0000000000000..989abea58dae8
--- /dev/null
+++ b/lib/utils/sbom-cyclonedx.js
@@ -0,0 +1,201 @@
+const crypto = require('node:crypto')
+const normalizeData = require('normalize-package-data')
+const parseLicense = require('spdx-expression-parse')
+const npa = require('npm-package-arg')
+const ssri = require('ssri')
+
+const CYCLONEDX_SCHEMA = 'http://cyclonedx.org/schema/bom-1.5.schema.json'
+const CYCLONEDX_FORMAT = 'CycloneDX'
+const CYCLONEDX_SCHEMA_VERSION = '1.5'
+
+const PROP_PATH = 'cdx:npm:package:path'
+const PROP_BUNDLED = 'cdx:npm:package:bundled'
+const PROP_DEVELOPMENT = 'cdx:npm:package:development'
+const PROP_EXTRANEOUS = 'cdx:npm:package:extraneous'
+const PROP_PRIVATE = 'cdx:npm:package:private'
+
+const REF_VCS = 'vcs'
+const REF_WEBSITE = 'website'
+const REF_ISSUE_TRACKER = 'issue-tracker'
+const REF_DISTRIBUTION = 'distribution'
+
+const ALGO_MAP = {
+ sha1: 'SHA-1',
+ sha256: 'SHA-256',
+ sha384: 'SHA-384',
+ sha512: 'SHA-512',
+}
+
+const cyclonedxOutput = ({ npm, nodes, packageType, packageLockOnly }) => {
+ const rootNode = nodes.find(node => node.isRoot)
+ const childNodes = nodes.filter(node => !node.isRoot && !node.isLink)
+ const uuid = crypto.randomUUID()
+
+ const deps = []
+ const seen = new Set()
+ for (let node of nodes) {
+ if (node.isLink) {
+ node = node.target
+ }
+
+ if (seen.has(node)) {
+ continue
+ }
+ seen.add(node)
+ deps.push(toCyclonedxDependency(node, nodes))
+ }
+
+ const bom = {
+ $schema: CYCLONEDX_SCHEMA,
+ bomFormat: CYCLONEDX_FORMAT,
+ specVersion: CYCLONEDX_SCHEMA_VERSION,
+ serialNumber: `urn:uuid:${uuid}`,
+ version: 1,
+ metadata: {
+ timestamp: new Date().toISOString(),
+ lifecycles: [
+ { phase: packageLockOnly ? 'pre-build' : 'build' },
+ ],
+ tools: [
+ {
+ vendor: 'npm',
+ name: 'cli',
+ version: npm.version,
+ },
+ ],
+ component: toCyclonedxItem(rootNode, { packageType }),
+ },
+ components: childNodes.map(toCyclonedxItem),
+ dependencies: deps,
+ }
+
+ return bom
+}
+
+const toCyclonedxItem = (node, { packageType }) => {
+ packageType = packageType || 'library'
+
+ // Calculate purl from package spec
+ let spec = npa(node.pkgid)
+ spec = (spec.type === 'alias') ? spec.subSpec : spec
+ const purl = npa.toPurl(spec) + (isGitNode(node) ? `?vcs_url=${node.resolved}` : '')
+
+ if (node.package) {
+ normalizeData(node.package)
+ }
+
+ let parsedLicense
+ try {
+ let license = node.package?.license
+ if (license) {
+ if (typeof license === 'object') {
+ license = license.type
+ }
+ }
+
+ parsedLicense = parseLicense(license)
+ } catch (err) {
+ parsedLicense = null
+ }
+
+ const component = {
+ 'bom-ref': toCyclonedxID(node),
+ type: packageType,
+ name: node.name,
+ version: node.version,
+ scope: (node.optional || node.devOptional) ? 'optional' : 'required',
+ author: (typeof node.package?.author === 'object')
+ ? node.package.author.name
+ : (node.package?.author || undefined),
+ description: node.package?.description || undefined,
+ purl: purl,
+ properties: [{
+ name: PROP_PATH,
+ value: node.location,
+ }],
+ externalReferences: [],
+ }
+
+ if (node.integrity) {
+ const integrity = ssri.parse(node.integrity, { single: true })
+ component.hashes = [{
+ alg: ALGO_MAP[integrity.algorithm] || /* istanbul ignore next */ 'SHA-512',
+ content: integrity.hexDigest(),
+ }]
+ }
+
+ if (node.dev === true) {
+ component.properties.push(prop(PROP_DEVELOPMENT))
+ }
+
+ if (node.package?.private === true) {
+ component.properties.push(prop(PROP_PRIVATE))
+ }
+
+ if (node.extraneous === true) {
+ component.properties.push(prop(PROP_EXTRANEOUS))
+ }
+
+ if (node.inBundle === true) {
+ component.properties.push(prop(PROP_BUNDLED))
+ }
+
+ if (!node.isLink && node.resolved) {
+ component.externalReferences.push(extRef(REF_DISTRIBUTION, node.resolved))
+ }
+
+ if (node.package?.repository?.url) {
+ component.externalReferences.push(extRef(REF_VCS, node.package.repository.url))
+ }
+
+ if (node.package?.homepage) {
+ component.externalReferences.push(extRef(REF_WEBSITE, node.package.homepage))
+ }
+
+ if (node.package?.bugs?.url) {
+ component.externalReferences.push(extRef(REF_ISSUE_TRACKER, node.package.bugs.url))
+ }
+
+ // If license is a single SPDX license, use the license field
+ if (parsedLicense?.license) {
+ component.licenses = [{ license: { id: parsedLicense.license } }]
+ // If license is a conjunction, use the expression field
+ } else if (parsedLicense?.conjunction) {
+ component.licenses = [{ expression: node.package.license }]
+ }
+
+ return component
+}
+
+const toCyclonedxDependency = (node, nodes) => {
+ return {
+ ref: toCyclonedxID(node),
+ dependsOn: [...node.edgesOut.values()]
+ // Filter out edges that are linking to nodes not in the list
+ .filter(edge => nodes.find(n => n === edge.to))
+ .map(edge => toCyclonedxID(edge.to))
+ .filter(id => id),
+ }
+}
+
+const toCyclonedxID = (node) => `${node.packageName}@${node.version}`
+
+const prop = (name) => ({ name, value: 'true' })
+
+const extRef = (type, url) => ({ type, url })
+
+const isGitNode = (node) => {
+ if (!node.resolved) {
+ return
+ }
+
+ try {
+ const { type } = npa(node.resolved)
+ return type === 'git' || type === 'hosted'
+ } catch (err) {
+ /* istanbul ignore next */
+ return false
+ }
+}
+
+module.exports = { cyclonedxOutput }
diff --git a/lib/utils/sbom-spdx.js b/lib/utils/sbom-spdx.js
new file mode 100644
index 0000000000000..e3af77e10c751
--- /dev/null
+++ b/lib/utils/sbom-spdx.js
@@ -0,0 +1,182 @@
+
+const crypto = require('node:crypto')
+const normalizeData = require('normalize-package-data')
+const npa = require('npm-package-arg')
+const ssri = require('ssri')
+
+const SPDX_SCHEMA_VERSION = 'SPDX-2.3'
+const SPDX_DATA_LICENSE = 'CC0-1.0'
+const SPDX_IDENTIFER = 'SPDXRef-DOCUMENT'
+
+const NO_ASSERTION = 'NOASSERTION'
+
+const REL_DESCRIBES = 'DESCRIBES'
+const REL_PREREQ = 'PREREQUISITE_FOR'
+const REL_OPTIONAL = 'OPTIONAL_DEPENDENCY_OF'
+const REL_DEV = 'DEV_DEPENDENCY_OF'
+const REL_DEP = 'DEPENDENCY_OF'
+
+const REF_CAT_PACKAGE_MANAGER = 'PACKAGE-MANAGER'
+const REF_TYPE_PURL = 'purl'
+
+const spdxOutput = ({ npm, nodes, packageType }) => {
+ const rootNode = nodes.find(node => node.isRoot)
+ const childNodes = nodes.filter(node => !node.isRoot && !node.isLink)
+ const rootID = rootNode.pkgid
+ const uuid = crypto.randomUUID()
+ const ns = `http://spdx.org/spdxdocs/${npa(rootID).escapedName}-${rootNode.version}-${uuid}`
+
+ const relationships = []
+ const seen = new Set()
+ for (let node of nodes) {
+ if (node.isLink) {
+ node = node.target
+ }
+
+ if (seen.has(node)) {
+ continue
+ }
+ seen.add(node)
+
+ const rels = [...node.edgesOut.values()]
+ // Filter out edges that are linking to nodes not in the list
+ .filter(edge => nodes.find(n => n === edge.to))
+ .map(edge => toSpdxRelationship(node, edge))
+ .filter(rel => rel)
+
+ relationships.push(...rels)
+ }
+
+ const extraRelationships = nodes.filter(node => node.extraneous)
+ .map(node => toSpdxRelationship(rootNode, { to: node, type: 'optional' }))
+
+ relationships.push(...extraRelationships)
+
+ const bom = {
+ spdxVersion: SPDX_SCHEMA_VERSION,
+ dataLicense: SPDX_DATA_LICENSE,
+ SPDXID: SPDX_IDENTIFER,
+ name: rootID,
+ documentNamespace: ns,
+ creationInfo: {
+ created: new Date().toISOString(),
+ creators: [
+ `Tool: npm/cli-${npm.version}`,
+ ],
+ },
+ documentDescribes: [toSpdxID(rootNode)],
+ packages: [toSpdxItem(rootNode, { packageType }), ...childNodes.map(toSpdxItem)],
+ relationships: [
+ {
+ spdxElementId: SPDX_IDENTIFER,
+ relatedSpdxElement: toSpdxID(rootNode),
+ relationshipType: REL_DESCRIBES,
+ },
+ ...relationships,
+ ],
+ }
+
+ return bom
+}
+
+const toSpdxItem = (node, { packageType }) => {
+ normalizeData(node.package)
+
+ // Calculate purl from package spec
+ let spec = npa(node.pkgid)
+ spec = (spec.type === 'alias') ? spec.subSpec : spec
+ const purl = npa.toPurl(spec) + (isGitNode(node) ? `?vcs_url=${node.resolved}` : '')
+
+ /* For workspace nodes, use the location from their linkNode */
+ let location = node.location
+ if (node.isWorkspace && node.linksIn.size > 0) {
+ location = node.linksIn.values().next().value.location
+ }
+
+ let license = node.package?.license
+ if (license) {
+ if (typeof license === 'object') {
+ license = license.type
+ }
+ }
+
+ const pkg = {
+ name: node.packageName,
+ SPDXID: toSpdxID(node),
+ versionInfo: node.version,
+ packageFileName: location,
+ description: node.package?.description || undefined,
+ primaryPackagePurpose: packageType ? packageType.toUpperCase() : undefined,
+ downloadLocation: (node.isLink ? undefined : node.resolved) || NO_ASSERTION,
+ filesAnalyzed: false,
+ homepage: node.package?.homepage || NO_ASSERTION,
+ licenseDeclared: license || NO_ASSERTION,
+ externalRefs: [
+ {
+ referenceCategory: REF_CAT_PACKAGE_MANAGER,
+ referenceType: REF_TYPE_PURL,
+ referenceLocator: purl,
+ },
+ ],
+ }
+
+ if (node.integrity) {
+ const integrity = ssri.parse(node.integrity, { single: true })
+ pkg.checksums = [{
+ algorithm: integrity.algorithm.toUpperCase(),
+ checksumValue: integrity.hexDigest(),
+ }]
+ }
+ return pkg
+}
+
+const toSpdxRelationship = (node, edge) => {
+ let type
+ switch (edge.type) {
+ case 'peer':
+ type = REL_PREREQ
+ break
+ case 'optional':
+ type = REL_OPTIONAL
+ break
+ case 'dev':
+ type = REL_DEV
+ break
+ default:
+ type = REL_DEP
+ }
+
+ return {
+ spdxElementId: toSpdxID(edge.to),
+ relatedSpdxElement: toSpdxID(node),
+ relationshipType: type,
+ }
+}
+
+const toSpdxID = (node) => {
+ let name = node.packageName
+
+ // Strip leading @ for scoped packages
+ name = name.replace(/^@/, '')
+
+ // Replace slashes with dots
+ name = name.replace(/\//g, '.')
+
+ return `SPDXRef-Package-${name}-${node.version}`
+}
+
+const isGitNode = (node) => {
+ if (!node.resolved) {
+ return
+ }
+
+ try {
+ const { type } = npa(node.resolved)
+ return type === 'git' || type === 'hosted'
+ } catch (err) {
+ /* istanbul ignore next */
+ return false
+ }
+}
+
+module.exports = { spdxOutput }
diff --git a/lib/utils/tar.js b/lib/utils/tar.js
index c25fe71614a60..63ef6067acb90 100644
--- a/lib/utils/tar.js
+++ b/lib/utils/tar.js
@@ -1,71 +1,49 @@
const tar = require('tar')
const ssri = require('ssri')
-const log = require('./log-shim')
+const { log, output } = require('proc-log')
const formatBytes = require('./format-bytes.js')
-const columnify = require('columnify')
const localeCompare = require('@isaacs/string-locale-compare')('en', {
sensitivity: 'case',
numeric: true,
})
-const logTar = (tarball, opts = {}) => {
- const { unicode = false } = opts
+const logTar = (tarball, { unicode = false, json, key } = {}) => {
+ if (json) {
+ output.buffer(key == null ? tarball : { [key]: tarball })
+ return
+ }
log.notice('')
log.notice('', `${unicode ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
- log.notice('=== Tarball Contents ===')
+ log.notice('Tarball Contents')
if (tarball.files.length) {
log.notice(
'',
- columnify(
- tarball.files
- .map(f => {
- const bytes = formatBytes(f.size, false)
- return /^node_modules\//.test(f.path) ? null : { path: f.path, size: `${bytes}` }
- })
- .filter(f => f),
- {
- include: ['size', 'path'],
- showHeaders: false,
- }
- )
+ tarball.files.map(f =>
+ /^node_modules\//.test(f.path) ? null : `${formatBytes(f.size, false)} ${f.path}`
+ ).filter(f => f).join('\n')
)
}
if (tarball.bundled.length) {
- log.notice('=== Bundled Dependencies ===')
+ log.notice('Bundled Dependencies')
tarball.bundled.forEach(name => log.notice('', name))
}
- log.notice('=== Tarball Details ===')
- log.notice(
- '',
- columnify(
- [
- { name: 'name:', value: tarball.name },
- { name: 'version:', value: tarball.version },
- tarball.filename && { name: 'filename:', value: tarball.filename },
- { name: 'package size:', value: formatBytes(tarball.size) },
- { name: 'unpacked size:', value: formatBytes(tarball.unpackedSize) },
- { name: 'shasum:', value: tarball.shasum },
- {
- name: 'integrity:',
- value:
- tarball.integrity.toString().slice(0, 20) +
- '[...]' +
- tarball.integrity.toString().slice(80),
- },
- tarball.bundled.length && { name: 'bundled deps:', value: tarball.bundled.length },
- tarball.bundled.length && {
- name: 'bundled files:',
- value: tarball.entryCount - tarball.files.length,
- },
- tarball.bundled.length && { name: 'own files:', value: tarball.files.length },
- { name: 'total files:', value: tarball.entryCount },
- ].filter(x => x),
- {
- include: ['name', 'value'],
- showHeaders: false,
- }
- )
- )
+ log.notice('Tarball Details')
+ log.notice('', `name: ${tarball.name}`)
+ log.notice('', `version: ${tarball.version}`)
+ if (tarball.filename) {
+ log.notice('', `filename: ${tarball.filename}`)
+ }
+ log.notice('', `package size: ${formatBytes(tarball.size)}`)
+ log.notice('', `unpacked size: ${formatBytes(tarball.unpackedSize)}`)
+ log.notice('', `shasum: ${tarball.shasum}`)
+ /* eslint-disable-next-line max-len */
+ log.notice('', `integrity: ${tarball.integrity.toString().slice(0, 20)}[...]${tarball.integrity.toString().slice(80)}`)
+ if (tarball.bundled.length) {
+ log.notice('', `bundled deps: ${tarball.bundled.length}`)
+ log.notice('', `bundled files: ${tarball.entryCount - tarball.files.length}`)
+ log.notice('', `own files: ${tarball.files.length}`)
+ }
+ log.notice('', `total files: ${tarball.entryCount}`)
log.notice('', '')
}
@@ -81,7 +59,7 @@ const getContents = async (manifest, tarball) => {
totalEntries++
totalEntrySize += entry.size
const p = entry.path
- if (p.startsWith('package/node_modules/')) {
+ if (p.startsWith('package/node_modules/') && p !== 'package/node_modules/') {
const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
bundled.add(name)
}
@@ -94,7 +72,7 @@ const getContents = async (manifest, tarball) => {
})
stream.end(tarball)
- const integrity = await ssri.fromData(tarball, {
+ const integrity = ssri.fromData(tarball, {
algorithms: ['sha1', 'sha512'],
})
diff --git a/lib/utils/timers.js b/lib/utils/timers.js
index c215fe926afb5..16a255961fee3 100644
--- a/lib/utils/timers.js
+++ b/lib/utils/timers.js
@@ -1,114 +1,87 @@
-const EE = require('events')
-const fs = require('fs')
-const log = require('./log-shim')
+const EE = require('node:events')
+const fs = require('node:fs')
+const { log, time } = require('proc-log')
+
+const INITIAL_TIMER = 'npm'
-// This is an event emiiter but on/off
-// only listen on a single internal event that gets
-// emitted whenever a timer ends
class Timers extends EE {
- file = null
+ #file
+ #timing
#unfinished = new Map()
#finished = {}
- #onTimeEnd = Symbol('onTimeEnd')
- #initialListener = null
- #initialTimer = null
- constructor ({ listener = null, start = 'npm' } = {}) {
+ constructor () {
super()
- this.#initialListener = listener
- this.#initialTimer = start
- this.#init()
- }
-
- get unfinished () {
- return this.#unfinished
- }
-
- get finished () {
- return this.#finished
- }
-
- #init () {
this.on()
- if (this.#initialListener) {
- this.on(this.#initialListener)
- }
- process.emit('time', this.#initialTimer)
- this.started = this.#unfinished.get(this.#initialTimer)
+ time.start(INITIAL_TIMER)
+ this.started = this.#unfinished.get(INITIAL_TIMER)
}
- on (listener) {
- if (listener) {
- super.on(this.#onTimeEnd, listener)
- } else {
- process.on('time', this.#timeListener)
- process.on('timeEnd', this.#timeEndListener)
- }
+ on () {
+ process.on('time', this.#timeHandler)
}
- off (listener) {
- if (listener) {
- super.off(this.#onTimeEnd, listener)
- } else {
- this.removeAllListeners(this.#onTimeEnd)
- process.off('time', this.#timeListener)
- process.off('timeEnd', this.#timeEndListener)
- }
+ off () {
+ process.off('time', this.#timeHandler)
}
- time (name, fn) {
- process.emit('time', name)
- const end = () => process.emit('timeEnd', name)
- if (typeof fn === 'function') {
- const res = fn()
- return res && res.finally ? res.finally(end) : (end(), res)
- }
- return end
+ load ({ path, timing } = {}) {
+ this.#timing = timing
+ this.#file = `${path}timing.json`
}
- load ({ path } = {}) {
- if (path) {
- this.file = `${path}timing.json`
+ finish (metadata) {
+ time.end(INITIAL_TIMER)
+
+ for (const [name, timer] of this.#unfinished) {
+ log.silly('unfinished npm timer', name, timer)
}
- }
- writeFile (metadata) {
- if (!this.file) {
+ if (!this.#timing) {
+ // Not in timing mode, nothing else to do here
return
}
try {
- const globalStart = this.started
- const globalEnd = this.#finished.npm || Date.now()
- const content = {
- metadata,
- timers: this.#finished,
- // add any unfinished timers with their relative start/end
- unfinishedTimers: [...this.#unfinished.entries()].reduce((acc, [name, start]) => {
- acc[name] = [start - globalStart, globalEnd - globalStart]
- return acc
- }, {}),
- }
- fs.writeFileSync(this.file, JSON.stringify(content) + '\n')
+ this.#writeFile(metadata)
+ log.info('timing', `Timing info written to: ${this.#file}`)
} catch (e) {
- this.file = null
log.warn('timing', `could not write timing file: ${e}`)
}
}
- #timeListener = (name) => {
- this.#unfinished.set(name, Date.now())
+ #writeFile (metadata) {
+ const globalStart = this.started
+ const globalEnd = this.#finished[INITIAL_TIMER]
+ const content = {
+ metadata,
+ timers: this.#finished,
+ // add any unfinished timers with their relative start/end
+ unfinishedTimers: [...this.#unfinished.entries()].reduce((acc, [name, start]) => {
+ acc[name] = [start - globalStart, globalEnd - globalStart]
+ return acc
+ }, {}),
+ }
+ fs.writeFileSync(this.#file, JSON.stringify(content) + '\n')
}
- #timeEndListener = (name) => {
- if (this.#unfinished.has(name)) {
- const ms = Date.now() - this.#unfinished.get(name)
- this.#finished[name] = ms
- this.#unfinished.delete(name)
- this.emit(this.#onTimeEnd, name, ms)
- } else {
- log.silly('timing', "Tried to end timer that doesn't exist:", name)
+ #timeHandler = (level, name) => {
+ const now = Date.now()
+ switch (level) {
+ case time.KEYS.start:
+ this.#unfinished.set(name, now)
+ break
+ case time.KEYS.end: {
+ if (this.#unfinished.has(name)) {
+ const ms = now - this.#unfinished.get(name)
+ this.#finished[name] = ms
+ this.#unfinished.delete(name)
+ log.timing(name, `Completed in ${ms}ms`)
+ } else {
+ log.silly('timing', `Tried to end timer that doesn't exist: ${name}`)
+ }
+ }
}
}
}
diff --git a/lib/utils/update-notifier.js b/lib/utils/update-notifier.js
deleted file mode 100644
index a7eaaca64747f..0000000000000
--- a/lib/utils/update-notifier.js
+++ /dev/null
@@ -1,134 +0,0 @@
-// print a banner telling the user to upgrade npm to latest
-// but not in CI, and not if we're doing that already.
-// Check daily for betas, and weekly otherwise.
-
-const pacote = require('pacote')
-const ciInfo = require('ci-info')
-const semver = require('semver')
-const chalk = require('chalk')
-const { promisify } = require('util')
-const stat = promisify(require('fs').stat)
-const writeFile = promisify(require('fs').writeFile)
-const { resolve } = require('path')
-
-const SKIP = Symbol('SKIP')
-
-const isGlobalNpmUpdate = npm => {
- return npm.flatOptions.global &&
- ['install', 'update'].includes(npm.command) &&
- npm.argv.some(arg => /^npm(@|$)/.test(arg))
-}
-
-// update check frequency
-const DAILY = 1000 * 60 * 60 * 24
-const WEEKLY = DAILY * 7
-
-// don't put it in the _cacache folder, just in npm's cache
-const lastCheckedFile = npm =>
- resolve(npm.flatOptions.cache, '../_update-notifier-last-checked')
-
-const checkTimeout = async (npm, duration) => {
- const t = new Date(Date.now() - duration)
- const f = lastCheckedFile(npm)
- // if we don't have a file, then definitely check it.
- const st = await stat(f).catch(() => ({ mtime: t - 1 }))
- return t > st.mtime
-}
-
-const updateNotifier = async (npm, spec = 'latest') => {
- // never check for updates in CI, when updating npm already, or opted out
- if (!npm.config.get('update-notifier') ||
- isGlobalNpmUpdate(npm) ||
- ciInfo.isCI) {
- return SKIP
- }
-
- // if we're on a prerelease train, then updates are coming fast
- // check for a new one daily. otherwise, weekly.
- const { version } = npm
- const current = semver.parse(version)
-
- // if we're on a beta train, always get the next beta
- if (current.prerelease.length) {
- spec = `^${version}`
- }
-
- // while on a beta train, get updates daily
- const duration = spec !== 'latest' ? DAILY : WEEKLY
-
- // if we've already checked within the specified duration, don't check again
- if (!(await checkTimeout(npm, duration))) {
- return null
- }
-
- // if they're currently using a prerelease, nudge to the next prerelease
- // otherwise, nudge to latest.
- const useColor = npm.logColor
-
- const mani = await pacote.manifest(`npm@${spec}`, {
- // always prefer latest, even if doing --tag=whatever on the cmd
- defaultTag: 'latest',
- ...npm.flatOptions,
- }).catch(() => null)
-
- // if pacote failed, give up
- if (!mani) {
- return null
- }
-
- const latest = mani.version
-
- // if the current version is *greater* than latest, we're on a 'next'
- // and should get the updates from that release train.
- // Note that this isn't another http request over the network, because
- // the packument will be cached by pacote from previous request.
- if (semver.gt(version, latest) && spec === 'latest') {
- return updateNotifier(npm, `^${version}`)
- }
-
- // if we already have something >= the desired spec, then we're done
- if (semver.gte(version, latest)) {
- return null
- }
-
- // ok! notify the user about this update they should get.
- // The message is saved for printing at process exit so it will not get
- // lost in any other messages being printed as part of the command.
- const update = semver.parse(mani.version)
- const type = update.major !== current.major ? 'major'
- : update.minor !== current.minor ? 'minor'
- : update.patch !== current.patch ? 'patch'
- : 'prerelease'
- const typec = !useColor ? type
- : type === 'major' ? chalk.red(type)
- : type === 'minor' ? chalk.yellow(type)
- : chalk.green(type)
- const oldc = !useColor ? current : chalk.red(current)
- const latestc = !useColor ? latest : chalk.green(latest)
- const changelog = `https://github.com/npm/cli/releases/tag/v${latest}`
- const changelogc = !useColor ? `<${changelog}>` : chalk.cyan(changelog)
- const cmd = `npm install -g npm@${latest}`
- const cmdc = !useColor ? `\`${cmd}\`` : chalk.green(cmd)
- const message = `\nNew ${typec} version of npm available! ` +
- `${oldc} -> ${latestc}\n` +
- `Changelog: ${changelogc}\n` +
- `Run ${cmdc} to update!\n`
-
- return message
-}
-
-// only update the notification timeout if we actually finished checking
-module.exports = async npm => {
- const notification = await updateNotifier(npm)
-
- // dont write the file if we skipped checking altogether
- if (notification === SKIP) {
- return null
- }
-
- // intentional. do not await this. it's a best-effort update. if this
- // fails, it's ok. might be using /dev/null as the cache or something weird
- // like that.
- writeFile(lastCheckedFile(npm), '').catch(() => {})
- return notification
-}
diff --git a/lib/workspaces/update-workspaces.js b/lib/utils/update-workspaces.js
similarity index 88%
rename from lib/workspaces/update-workspaces.js
rename to lib/utils/update-workspaces.js
index 4cba1245ac2e5..892f366e9980a 100644
--- a/lib/workspaces/update-workspaces.js
+++ b/lib/utils/update-workspaces.js
@@ -1,6 +1,5 @@
'use strict'
-const Arborist = require('@npmcli/arborist')
const reifyFinish = require('../utils/reify-finish.js')
async function updateWorkspaces ({
@@ -22,7 +21,7 @@ async function updateWorkspaces ({
? false
: config.get('save')
- // runs a minimalistic reify update, targetting only the workspaces
+ // runs a minimalistic reify update, targeting only the workspaces
// that had version updates and skipping fund/audit/save
const opts = {
...flatOptions,
@@ -31,6 +30,7 @@ async function updateWorkspaces ({
path: localPrefix,
save,
}
+ const Arborist = require('@npmcli/arborist')
const arb = new Arborist(opts)
await arb.reify({ ...opts, update: workspaces })
diff --git a/lib/utils/verify-signatures.js b/lib/utils/verify-signatures.js
new file mode 100644
index 0000000000000..09711581d11dd
--- /dev/null
+++ b/lib/utils/verify-signatures.js
@@ -0,0 +1,386 @@
+const fetch = require('npm-registry-fetch')
+const localeCompare = require('@isaacs/string-locale-compare')('en')
+const npa = require('npm-package-arg')
+const pacote = require('pacote')
+const pMap = require('p-map')
+const tufClient = require('@sigstore/tuf')
+const { log, output } = require('proc-log')
+
+const sortAlphabetically = (a, b) => localeCompare(a.name, b.name)
+
+class VerifySignatures {
+ constructor (tree, filterSet, npm, opts) {
+ this.tree = tree
+ this.filterSet = filterSet
+ this.npm = npm
+ this.opts = opts
+ this.keys = new Map()
+ this.invalid = []
+ this.missing = []
+ this.checkedPackages = new Set()
+ this.auditedWithKeysCount = 0
+ this.verifiedSignatureCount = 0
+ this.verifiedAttestationCount = 0
+ this.exitCode = 0
+ }
+
+ async run () {
+ const start = process.hrtime.bigint()
+
+ // Find all deps in tree
+ const { edges, registries } = this.getEdgesOut(this.tree.inventory.values(), this.filterSet)
+ if (edges.size === 0) {
+ throw new Error('found no installed dependencies to audit')
+ }
+
+ const tuf = await tufClient.initTUF({
+ cachePath: this.opts.tufCache,
+ retry: this.opts.retry,
+ timeout: this.opts.timeout,
+ })
+ await Promise.all([...registries].map(registry => this.setKeys({ registry, tuf })))
+
+ log.verbose('verifying registry signatures')
+ await pMap(edges, (e) => this.getVerifiedInfo(e), { concurrency: 20, stopOnError: true })
+
+ // Didn't find any dependencies that could be verified, e.g. only local
+ // deps, missing version, not on a registry etc.
+ if (!this.auditedWithKeysCount) {
+ throw new Error('found no dependencies to audit that were installed from ' +
+ 'a supported registry')
+ }
+
+ const invalid = this.invalid.sort(sortAlphabetically)
+ const missing = this.missing.sort(sortAlphabetically)
+
+ const hasNoInvalidOrMissing = invalid.length === 0 && missing.length === 0
+
+ if (!hasNoInvalidOrMissing) {
+ process.exitCode = 1
+ }
+
+ if (this.npm.config.get('json')) {
+ output.buffer({ invalid, missing })
+ return
+ }
+ const end = process.hrtime.bigint()
+ const elapsed = end - start
+
+ const auditedPlural = this.auditedWithKeysCount > 1 ? 's' : ''
+ const timing = `audited ${this.auditedWithKeysCount} package${auditedPlural} in ` +
+ `${Math.floor(Number(elapsed) / 1e9)}s`
+ output.standard(timing)
+ output.standard('')
+
+ const verifiedBold = this.npm.chalk.bold('verified')
+ if (this.verifiedSignatureCount) {
+ if (this.verifiedSignatureCount === 1) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${this.verifiedSignatureCount} package has a ${verifiedBold} registry signature`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${this.verifiedSignatureCount} packages have ${verifiedBold} registry signatures`)
+ }
+ output.standard('')
+ }
+
+ if (this.verifiedAttestationCount) {
+ if (this.verifiedAttestationCount === 1) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${this.verifiedAttestationCount} package has a ${verifiedBold} attestation`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${this.verifiedAttestationCount} packages have ${verifiedBold} attestations`)
+ }
+ output.standard('')
+ }
+
+ if (missing.length) {
+ const missingClr = this.npm.chalk.redBright('missing')
+ if (missing.length === 1) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`1 package has a ${missingClr} registry signature but the registry is providing signing keys:`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${missing.length} packages have ${missingClr} registry signatures but the registry is providing signing keys:`)
+ }
+ output.standard('')
+ missing.map(m =>
+ output.standard(`${this.npm.chalk.red(`${m.name}@${m.version}`)} (${m.registry})`)
+ )
+ }
+
+ if (invalid.length) {
+ if (missing.length) {
+ output.standard('')
+ }
+ const invalidClr = this.npm.chalk.redBright('invalid')
+ // We can have either invalid signatures or invalid provenance
+ const invalidSignatures = this.invalid.filter(i => i.code === 'EINTEGRITYSIGNATURE')
+ if (invalidSignatures.length) {
+ if (invalidSignatures.length === 1) {
+ output.standard(`1 package has an ${invalidClr} registry signature:`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${invalidSignatures.length} packages have ${invalidClr} registry signatures:`)
+ }
+ output.standard('')
+ invalidSignatures.map(i =>
+ output.standard(`${this.npm.chalk.red(`${i.name}@${i.version}`)} (${i.registry})`)
+ )
+ output.standard('')
+ }
+
+ const invalidAttestations = this.invalid.filter(i => i.code === 'EATTESTATIONVERIFY')
+ if (invalidAttestations.length) {
+ if (invalidAttestations.length === 1) {
+ output.standard(`1 package has an ${invalidClr} attestation:`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`${invalidAttestations.length} packages have ${invalidClr} attestations:`)
+ }
+ output.standard('')
+ invalidAttestations.map(i =>
+ output.standard(`${this.npm.chalk.red(`${i.name}@${i.version}`)} (${i.registry})`)
+ )
+ output.standard('')
+ }
+
+ if (invalid.length === 1) {
+ /* eslint-disable-next-line max-len */
+ output.standard(`Someone might have tampered with this package since it was published on the registry!`)
+ } else {
+ /* eslint-disable-next-line max-len */
+ output.standard(`Someone might have tampered with these packages since they were published on the registry!`)
+ }
+ output.standard('')
+ }
+ }
+
+ getEdgesOut (nodes, filterSet) {
+ const edges = new Set()
+ const registries = new Set()
+ for (const node of nodes) {
+ for (const edge of node.edgesOut.values()) {
+ const filteredOut =
+ edge.from
+ && filterSet
+ && filterSet.size > 0
+ && !filterSet.has(edge.from.target)
+
+ if (!filteredOut) {
+ const spec = this.getEdgeSpec(edge)
+ if (spec) {
+ // Prefetch and cache public keys from used registries
+ registries.add(this.getSpecRegistry(spec))
+ }
+ edges.add(edge)
+ }
+ }
+ }
+ return { edges, registries }
+ }
+
+ async setKeys ({ registry, tuf }) {
+ const { host, pathname } = new URL(registry)
+ // Strip any trailing slashes from pathname
+ const regKey = `${host}${pathname.replace(/\/$/, '')}/keys.json`
+ let keys = await tuf.getTarget(regKey)
+ .then((target) => JSON.parse(target))
+ .then(({ keys: ks }) => ks.map((key) => ({
+ ...key,
+ keyid: key.keyId,
+ pemkey: `-----BEGIN PUBLIC KEY-----\n${key.publicKey.rawBytes}\n-----END PUBLIC KEY-----`,
+ expires: key.publicKey.validFor.end || null,
+ }))).catch(err => {
+ if (err.code === 'TUF_FIND_TARGET_ERROR') {
+ return null
+ } else {
+ throw err
+ }
+ })
+
+ // If keys not found in Sigstore TUF repo, fallback to registry keys API
+ if (!keys) {
+ keys = await fetch.json('/-/npm/v1/keys', {
+ ...this.npm.flatOptions,
+ registry,
+ }).then(({ keys: ks }) => ks.map((key) => ({
+ ...key,
+ pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`,
+ }))).catch(err => {
+ if (err.code === 'E404' || err.code === 'E400') {
+ return null
+ } else {
+ throw err
+ }
+ })
+ }
+
+ if (keys) {
+ this.keys.set(registry, keys)
+ }
+ }
+
+ getEdgeType (edge) {
+ return edge.optional ? 'optionalDependencies'
+ : edge.peer ? 'peerDependencies'
+ : edge.dev ? 'devDependencies'
+ : 'dependencies'
+ }
+
+ getEdgeSpec (edge) {
+ let name = edge.name
+ try {
+ name = npa(edge.spec).subSpec.name
+ } catch {
+ // leave it as edge.name
+ }
+ try {
+ return npa(`${name}@${edge.spec}`)
+ } catch {
+ // Skip packages with invalid spec
+ }
+ }
+
+ buildRegistryConfig (registry) {
+ const keys = this.keys.get(registry) || []
+ const parsedRegistry = new URL(registry)
+ const regKey = `//${parsedRegistry.host}${parsedRegistry.pathname}`
+ return {
+ [`${regKey}:_keys`]: keys,
+ }
+ }
+
+ getSpecRegistry (spec) {
+ return fetch.pickRegistry(spec, this.npm.flatOptions)
+ }
+
+ getValidPackageInfo (edge) {
+ const type = this.getEdgeType(edge)
+ // Skip potentially optional packages that are not on disk, as these could
+ // be omitted during install
+ if (edge.error === 'MISSING' && type !== 'dependencies') {
+ return
+ }
+
+ const spec = this.getEdgeSpec(edge)
+ // Skip invalid version requirements
+ if (!spec) {
+ return
+ }
+ const node = edge.to || edge
+ const { version } = node.package || {}
+
+ if (node.isWorkspace || // Skip local workspaces packages
+      !version || // Skip packages that don't have an installed version, e.g. optional dependencies
+ !spec.registry) { // Skip if not from registry, e.g. git package
+ return
+ }
+
+ for (const omitType of this.npm.config.get('omit')) {
+ if (node[omitType]) {
+ return
+ }
+ }
+
+ return {
+ name: spec.name,
+ version,
+ type,
+ location: node.location,
+ registry: this.getSpecRegistry(spec),
+ }
+ }
+
+ async verifySignatures (name, version, registry) {
+ const {
+ _integrity: integrity,
+ _signatures,
+ _attestations,
+ _resolved: resolved,
+ } = await pacote.manifest(`${name}@${version}`, {
+ verifySignatures: true,
+ verifyAttestations: true,
+ ...this.buildRegistryConfig(registry),
+ ...this.npm.flatOptions,
+ })
+ const signatures = _signatures || []
+ const result = {
+ integrity,
+ signatures,
+ attestations: _attestations,
+ resolved,
+ }
+ return result
+ }
+
+ async getVerifiedInfo (edge) {
+ const info = this.getValidPackageInfo(edge)
+ if (!info) {
+ return
+ }
+ const { name, version, location, registry, type } = info
+ if (this.checkedPackages.has(location)) {
+ // we already did or are doing this one
+ return
+ }
+ this.checkedPackages.add(location)
+
+ // We only "audit" or verify the signature, or the presence of it, on
+ // packages whose registry returns signing keys
+ const keys = this.keys.get(registry) || []
+ if (keys.length) {
+ this.auditedWithKeysCount += 1
+ }
+
+ try {
+ const { integrity, signatures, attestations, resolved } = await this.verifySignatures(
+ name, version, registry
+ )
+
+ // Currently we only care about missing signatures on registries that provide a public key
+ // We could make this configurable in the future with a strict/paranoid mode
+ if (signatures.length) {
+ this.verifiedSignatureCount += 1
+ } else if (keys.length) {
+ this.missing.push({
+ integrity,
+ location,
+ name,
+ registry,
+ resolved,
+ version,
+ })
+ }
+
+      // Track verified attestations separately from registry signatures, as all
+ // packages on registries with signing keys are expected to have registry
+ // signatures, but not all packages have provenance and publish attestations.
+ if (attestations) {
+ this.verifiedAttestationCount += 1
+ }
+ } catch (e) {
+ if (e.code === 'EINTEGRITYSIGNATURE' || e.code === 'EATTESTATIONVERIFY') {
+ this.invalid.push({
+ code: e.code,
+ message: e.message,
+ integrity: e.integrity,
+ keyid: e.keyid,
+ location,
+ name,
+ registry,
+ resolved: e.resolved,
+ signature: e.signature,
+ predicateType: e.predicateType,
+ type,
+ version,
+ })
+ } else {
+ throw e
+ }
+ }
+ }
+}
+
+module.exports = VerifySignatures
diff --git a/lib/utils/web-auth.js b/lib/utils/web-auth.js
deleted file mode 100644
index ce551687098fc..0000000000000
--- a/lib/utils/web-auth.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const EventEmitter = require('events')
-const { webAuthCheckLogin } = require('npm-profile')
-
-async function webAuth (opener, initialUrl, doneUrl, opts) {
- const doneEmitter = new EventEmitter()
-
- const openPromise = opener(initialUrl, doneEmitter)
- const webAuthCheckPromise = webAuthCheckLogin(doneUrl, { ...opts, cache: false })
- .then(authResult => {
- // cancel open prompt if it's present
- doneEmitter.emit('abort')
-
- return authResult.token
- })
-
- await openPromise
- return await webAuthCheckPromise
-}
-
-module.exports = webAuth
diff --git a/lib/workspaces/get-workspaces.js b/lib/workspaces/get-workspaces.js
deleted file mode 100644
index 2ac043d5f3943..0000000000000
--- a/lib/workspaces/get-workspaces.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const { resolve, relative } = require('path')
-const mapWorkspaces = require('@npmcli/map-workspaces')
-const minimatch = require('minimatch')
-const rpj = require('read-package-json-fast')
-
-// minimatch wants forward slashes only for glob patterns
-const globify = pattern => pattern.split('\\').join('/')
-
-// Returns an Map of paths to workspaces indexed by workspace name
-// { foo => '/path/to/foo' }
-const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom }) => {
- // TODO we need a better error to be bubbled up here if this rpj call fails
- const pkg = await rpj(resolve(path, 'package.json'))
- const workspaces = await mapWorkspaces({ cwd: path, pkg })
- let res = new Map()
- if (includeWorkspaceRoot) {
- res.set(pkg.name, path)
- }
-
- if (!filters.length) {
- res = new Map([...res, ...workspaces])
- }
-
- for (const filterArg of filters) {
- for (const [workspaceName, workspacePath] of workspaces.entries()) {
- let relativePath = relative(relativeFrom, workspacePath)
- if (filterArg.startsWith('./')) {
- relativePath = `./${relativePath}`
- }
- const relativeFilter = relative(path, filterArg)
- if (filterArg === workspaceName
- || resolve(relativeFrom, filterArg) === workspacePath
- || minimatch(relativePath, `${globify(relativeFilter)}/*`)
- || minimatch(relativePath, `${globify(filterArg)}/*`)
- ) {
- res.set(workspaceName, workspacePath)
- }
- }
- }
-
- if (!res.size) {
- let msg = '!'
- if (filters.length) {
- msg = `:\n ${filters.reduce(
- (acc, filterArg) => `${acc} --workspace=${filterArg}`, '')}`
- }
-
- throw new Error(`No workspaces found${msg}`)
- }
-
- return res
-}
-
-module.exports = getWorkspaces
diff --git a/mock-globals/.eslintrc.js b/mock-globals/.eslintrc.js
new file mode 100644
index 0000000000000..f21d26eccec7d
--- /dev/null
+++ b/mock-globals/.eslintrc.js
@@ -0,0 +1,20 @@
+/* This file is automatically added by @npmcli/template-oss. Do not edit. */
+
+'use strict'
+
+const { readdirSync: readdir } = require('fs')
+
+const localConfigs = readdir(__dirname)
+ .filter((file) => file.startsWith('.eslintrc.local.'))
+ .map((file) => `./${file}`)
+
+module.exports = {
+ root: true,
+ ignorePatterns: [
+ 'tap-testdir*/',
+ ],
+ extends: [
+ '@npmcli',
+ ...localConfigs,
+ ],
+}
diff --git a/mock-globals/.gitignore b/mock-globals/.gitignore
new file mode 100644
index 0000000000000..a96d056a7064e
--- /dev/null
+++ b/mock-globals/.gitignore
@@ -0,0 +1,23 @@
+# This file is automatically added by @npmcli/template-oss. Do not edit.
+
+# ignore everything in the root
+/*
+# transient test directories
+tap-testdir*/
+
+# keep these
+!**/.gitignore
+!/.eslintrc.js
+!/.eslintrc.local.*
+!/.gitignore
+!/bin/
+!/CHANGELOG*
+!/docs/
+!/lib/
+!/LICENSE*
+!/map.js
+!/package.json
+!/README*
+!/scripts/
+!/tap-snapshots/
+!/test/
diff --git a/test/fixtures/mock-globals.js b/mock-globals/lib/index.js
similarity index 100%
rename from test/fixtures/mock-globals.js
rename to mock-globals/lib/index.js
diff --git a/mock-globals/package.json b/mock-globals/package.json
new file mode 100644
index 0000000000000..83e5388b86186
--- /dev/null
+++ b/mock-globals/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "@npmcli/mock-globals",
+ "version": "1.0.0",
+ "description": "",
+ "main": "lib/index.js",
+ "private": true,
+ "scripts": {
+ "test": "tap",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+ "postlint": "template-oss-check",
+ "template-oss-apply": "template-oss-apply --force",
+ "lintfix": "npm run lint -- --fix",
+ "snap": "tap",
+ "posttest": "npm run lint"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/cli.git",
+ "directory": "mock-globals"
+ },
+ "keywords": [],
+ "author": "GitHub Inc.",
+ "license": "ISC",
+ "bugs": {
+ "url": "https://github.com/npm/cli/issues"
+ },
+ "homepage": "https://github.com/npm/cli#readme",
+ "files": [
+ "bin/",
+ "lib/"
+ ],
+ "engines": {
+ "node": "^18.17.0 || >=20.5.0"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.22.0",
+ "content": "../scripts/template-oss/index.js"
+ },
+ "tap": {
+ "branches": 89,
+ "functions": 97,
+ "lines": 97,
+ "statements": 97,
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ]
+ },
+ "devDependencies": {
+ "@npmcli/eslint-config": "^4.0.1",
+ "@npmcli/template-oss": "4.22.0",
+ "tap": "^16.3.8"
+ }
+}
diff --git a/mock-globals/test/index.js b/mock-globals/test/index.js
new file mode 100644
index 0000000000000..5480801196301
--- /dev/null
+++ b/mock-globals/test/index.js
@@ -0,0 +1,331 @@
+const t = require('tap')
+const mockGlobals = require('..')
+
+/* eslint-disable no-console */
+const originals = {
+ platform: process.platform,
+ error: console.error,
+ stderrOn: process.stderr.on,
+ stderrWrite: process.stderr.write,
+ shell: process.env.SHELL,
+ home: process.env.HOME,
+ argv: process.argv,
+ env: process.env,
+ setInterval,
+}
+
+t.test('console', async t => {
+ await t.test('mocks', async (t) => {
+ const errors = []
+ mockGlobals(t, {
+ 'console.error': (...args) => errors.push(...args),
+ })
+
+ console.error(1)
+ console.error(2)
+ console.error(3)
+ t.strictSame(errors, [1, 2, 3], 'i got my errors')
+ })
+
+ t.equal(console.error, originals.error)
+})
+/* eslint-enable no-console */
+
+t.test('platform', async (t) => {
+ t.equal(process.platform, originals.platform)
+
+ await t.test('posix', async (t) => {
+ mockGlobals(t, { 'process.platform': 'posix' })
+ t.equal(process.platform, 'posix')
+
+ await t.test('win32 --> woo', async (t) => {
+ mockGlobals(t, { 'process.platform': 'win32' })
+ t.equal(process.platform, 'win32')
+
+ mockGlobals(t, { 'process.platform': 'woo' })
+ t.equal(process.platform, 'woo')
+ })
+
+ t.equal(process.platform, 'posix')
+ })
+
+ t.equal(process.platform, originals.platform)
+})
+
+t.test('manual reset', async t => {
+ let errorHandler, data
+
+ const { reset } = mockGlobals(t, {
+ 'process.stderr.on': (__, handler) => {
+ errorHandler = handler
+ reset['process.stderr.on']()
+ },
+ 'process.stderr.write': (chunk, callback) => {
+ data = chunk
+ process.nextTick(() => {
+ errorHandler({ errno: 'EPIPE' })
+ callback()
+ })
+ reset['process.stderr.write']()
+ },
+ })
+
+ await new Promise((res, rej) => {
+ process.stderr.on('error', er => er.errno === 'EPIPE' ? res() : rej(er))
+ process.stderr.write('hey', res)
+ })
+
+ t.equal(process.stderr.on, originals.stderrOn)
+ t.equal(process.stderr.write, originals.stderrWrite)
+ t.equal(data, 'hey', 'handles EPIPE errors')
+ t.ok(errorHandler)
+})
+
+t.test('reset called multiple times', async (t) => {
+ await t.test('single reset', async t => {
+ const { reset } = mockGlobals(t, { 'process.platform': 'z' })
+ t.equal(process.platform, 'z')
+
+ reset['process.platform']()
+ t.equal(process.platform, originals.platform)
+
+ reset['process.platform']()
+ reset['process.platform']()
+ reset['process.platform']()
+ t.equal(process.platform, originals.platform)
+ })
+
+ t.equal(process.platform, originals.platform)
+})
+
+t.test('object mode', async t => {
+ await t.test('mocks', async t => {
+ const home = t.testdir()
+
+ mockGlobals(t, {
+ process: {
+ stderr: {
+ on: '1',
+ },
+ env: {
+ HOME: home,
+ },
+ },
+ })
+
+ t.equal(process.stderr.on, '1')
+ t.equal(process.env.HOME, home)
+ })
+
+ t.equal(process.env.HOME, originals.home)
+ t.equal(process.stderr.write, originals.stderrWrite)
+})
+
+t.test('mixed object/string mode', async t => {
+ await t.test('mocks', async t => {
+ const home = t.testdir()
+
+ mockGlobals(t, {
+ 'process.env': {
+ HOME: home,
+ TEST: '1',
+ },
+ })
+
+ t.equal(process.env.HOME, home)
+ t.equal(process.env.TEST, '1')
+ })
+
+ t.equal(process.env.HOME, originals.home)
+ t.equal(process.env.TEST, undefined)
+})
+
+t.test('conflicting mixed object/string mode', async t => {
+ await t.test('same key', async t => {
+ t.throws(
+ () => mockGlobals(t, {
+ process: {
+ env: {
+ HOME: '1',
+ TEST: '1',
+ NODE_ENV: '1',
+ },
+ stderr: {
+ write: '1',
+ },
+ },
+ 'process.env.HOME': '1',
+ 'process.stderr.write': '1',
+ }),
+ /process.env.HOME,process.stderr.write/
+ )
+ })
+
+ await t.test('partial overwrite with replace', async t => {
+ t.throws(
+ () => mockGlobals(t, {
+ process: {
+ env: {
+ HOME: '1',
+ TEST: '1',
+ NODE_ENV: '1',
+ },
+ stderr: {
+ write: '1',
+ },
+ },
+ 'process.env.HOME': '1',
+ 'process.stderr.write': '1',
+ }, { replace: true }),
+ /process -> process.env.HOME,process.stderr.write/
+ )
+ })
+})
+
+t.test('falsy values', async t => {
+ await t.test('undefined deletes', async t => {
+ mockGlobals(t, { 'process.platform': undefined })
+ t.notOk(Object.prototype.hasOwnProperty.call(process, 'platform'))
+ t.equal(process.platform, undefined)
+ })
+
+ await t.test('null', async t => {
+ mockGlobals(t, { 'process.platform': null })
+ t.ok(Object.prototype.hasOwnProperty.call(process, 'platform'))
+ t.equal(process.platform, null)
+ })
+
+ t.equal(process.platform, originals.platform)
+})
+
+t.test('date', async t => {
+ await t.test('mocks', async t => {
+ mockGlobals(t, {
+ 'Date.now': () => 100,
+ 'Date.prototype.toISOString': () => 'DDD',
+ })
+ t.equal(Date.now(), 100)
+ t.equal(new Date().toISOString(), 'DDD')
+ })
+
+ t.ok(Date.now() > 100)
+ t.ok(new Date().toISOString().includes('T'))
+})
+
+t.test('argv', async t => {
+ await t.test('argv', async t => {
+ mockGlobals(t, { 'process.argv': ['node', 'woo'] })
+ t.strictSame(process.argv, ['node', 'woo'])
+ })
+
+ t.strictSame(process.argv, originals.argv)
+})
+
+t.test('replace', async (t) => {
+ await t.test('env', async t => {
+ mockGlobals(t, { 'process.env': { HOME: '1' } }, { replace: true })
+ t.strictSame(process.env, { HOME: '1' })
+ t.equal(Object.keys(process.env).length, 1)
+ })
+
+ await t.test('setInterval', async t => {
+ mockGlobals(t, { setInterval: 0 }, { replace: true })
+ t.strictSame(setInterval, 0)
+ })
+
+ t.strictSame(setInterval, originals.setInterval)
+ t.strictSame(process.env, originals.env)
+})
+
+t.test('dot key', async t => {
+ const dotKey = 'this.is.a.single.key'
+ mockGlobals(t, {
+ [`process.env."${dotKey}"`]: 'value',
+ })
+ t.strictSame(process.env[dotKey], 'value')
+})
+
+t.test('multiple mocks and resets', async (t) => {
+ const initial = 'a'
+ const platforms = ['b', 'c', 'd', 'e', 'f', 'g']
+
+ await t.test('first in, first out', async t => {
+ mockGlobals(t, { 'process.platform': initial })
+ t.equal(process.platform, initial)
+
+ await t.test('platforms', async (t) => {
+ const resets = platforms.map((platform) => {
+ const { reset } = mockGlobals(t, { 'process.platform': platform })
+ t.equal(process.platform, platform)
+ return reset['process.platform']
+ }).reverse()
+
+ ;[...platforms.reverse()].forEach((platform, index) => {
+ const reset = resets[index]
+ const nextPlatform = index === platforms.length - 1 ? initial : platforms[index + 1]
+ t.equal(process.platform, platform)
+ reset()
+ t.equal(process.platform, nextPlatform, 'first reset')
+ reset()
+ reset()
+ t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
+ })
+ })
+
+ t.equal(process.platform, initial)
+ })
+
+ await t.test('last in,first out', async t => {
+ mockGlobals(t, { 'process.platform': initial })
+ t.equal(process.platform, initial)
+
+ await t.test('platforms', async (t) => {
+ const resets = platforms.map((platform) => {
+ const { reset } = mockGlobals(t, { 'process.platform': platform })
+ t.equal(process.platform, platform)
+ return reset['process.platform']
+ })
+
+ resets.forEach((reset, index) => {
+ // Calling a reset out of order removes it from the stack
+ // but does not change the descriptor so it should still be the
+ // last in descriptor until there are none left
+ const lastPlatform = platforms[platforms.length - 1]
+ const nextPlatform = index === platforms.length - 1 ? initial : lastPlatform
+ t.equal(process.platform, lastPlatform)
+ reset()
+ t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
+ reset()
+ reset()
+ t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
+ })
+ })
+
+ t.equal(process.platform, initial)
+ })
+
+ t.test('reset all', async (t) => {
+ const { teardown } = mockGlobals(t, { 'process.platform': initial })
+
+ await t.test('platforms', async (t) => {
+ const resets = platforms.map((p) => {
+ const { teardown: nestedTeardown, reset } = mockGlobals(t, { 'process.platform': p })
+ t.equal(process.platform, p)
+ return [
+ reset['process.platform'],
+ nestedTeardown,
+ ]
+ })
+
+ resets.forEach(r => r[1]())
+ t.equal(process.platform, initial, 'teardown goes to initial value')
+
+ resets.forEach((r) => r[0]())
+ t.equal(process.platform, initial, 'calling resets after teardown does nothing')
+ })
+
+ t.equal(process.platform, initial)
+ teardown()
+ t.equal(process.platform, originals.platform)
+ })
+})
diff --git a/mock-registry/.eslintrc.js b/mock-registry/.eslintrc.js
index 5db9f815536f1..f21d26eccec7d 100644
--- a/mock-registry/.eslintrc.js
+++ b/mock-registry/.eslintrc.js
@@ -10,6 +10,9 @@ const localConfigs = readdir(__dirname)
module.exports = {
root: true,
+ ignorePatterns: [
+ 'tap-testdir*/',
+ ],
extends: [
'@npmcli',
...localConfigs,
diff --git a/mock-registry/.eslintrc.local.json b/mock-registry/.eslintrc.local.json
new file mode 100644
index 0000000000000..2f2f707c490b9
--- /dev/null
+++ b/mock-registry/.eslintrc.local.json
@@ -0,0 +1,5 @@
+{
+ "rules": {
+ "import/no-extraneous-dependencies": "off"
+ }
+}
diff --git a/mock-registry/.gitignore b/mock-registry/.gitignore
index 79af2bfcaa4d8..a96d056a7064e 100644
--- a/mock-registry/.gitignore
+++ b/mock-registry/.gitignore
@@ -2,6 +2,8 @@
# ignore everything in the root
/*
+# transient test directories
+tap-testdir*/
# keep these
!**/.gitignore
diff --git a/mock-registry/lib/index.js b/mock-registry/lib/index.js
index a89c8b72b7d58..39bc63504cbe1 100644
--- a/mock-registry/lib/index.js
+++ b/mock-registry/lib/index.js
@@ -2,6 +2,18 @@ const pacote = require('pacote')
const Arborist = require('@npmcli/arborist')
const npa = require('npm-package-arg')
const Nock = require('nock')
+const stringify = require('json-stringify-safe')
+
+const logReq = (req, ...keys) => {
+ const obj = JSON.parse(stringify(req))
+ const res = {}
+ for (const [k, v] of Object.entries(obj)) {
+ if (!keys.includes(k)) {
+ res[k] = v
+ }
+ }
+ return stringify(res, null, 2)
+}
class MockRegistry {
#tap
@@ -30,16 +42,18 @@ class MockRegistry {
static tnock (t, host, opts, { debug = false, strict = false } = {}) {
const noMatch = (req) => {
+ if (debug) {
+ /* eslint-disable-next-line no-console */
+ console.error('NO MATCH', t.name, req.options ? req.options : req.path)
+ }
if (strict) {
// There are network requests that get caught regardless of error code.
// Turning on strict mode requires that those requests get explicitly
// mocked with a 404, 500, etc.
// XXX: this is opt-in currently because it breaks some existing CLI
// tests. We should work towards making this the default for all tests.
- t.fail(`Unmatched request: ${JSON.stringify(req.options, null, 2)}`)
- }
- if (debug) {
- console.error('NO MATCH', t.name, req.options ? req.options : req.path)
+ t.comment(logReq(req, 'interceptors', 'socket', 'response', '_events'))
+ t.fail(`Unmatched request: ${req.method} ${req.path}`)
}
}
@@ -143,10 +157,10 @@ class MockRegistry {
).reply(200)
}
- getVisibility ({ spec, visibility }) {
+ getVisibility ({ spec, visibility, responseCode = 200 }) {
this.nock = this.nock.get(
this.fullPath(`/-/package/${npa(spec).escapedName}/visibility`))
- .reply(200, visibility)
+ .reply(responseCode, visibility)
}
setPermissions ({ spec, team, permissions }) {
@@ -224,13 +238,12 @@ class MockRegistry {
})
}
- webadduser ({ username, password, token = 'npm_default-test-token' }) {
+ webadduser ({ token = 'npm_default-test-token' }) {
const doneUrl = new URL('/npm-cli-test/done', this.origin).href
const loginUrl = new URL('/npm-cli-test/login', this.origin).href
this.nock = this.nock
.post(this.fullPath('/-/v1/login'), body => {
this.#tap.ok(body.create) // Sole difference from weblogin
- this.#tap.ok(body.hostname)
return true
})
.reply(200, { doneUrl, loginUrl })
@@ -242,8 +255,7 @@ class MockRegistry {
const doneUrl = new URL('/npm-cli-test/done', this.origin).href
const loginUrl = new URL('/npm-cli-test/login', this.origin).href
this.nock = this.nock
- .post(this.fullPath('/-/v1/login'), body => {
- this.#tap.ok(body.hostname)
+ .post(this.fullPath('/-/v1/login'), () => {
return true
})
.reply(200, { doneUrl, loginUrl })
@@ -251,6 +263,11 @@ class MockRegistry {
.reply(200, { token })
}
+ logout (token) {
+ this.nock = this.nock.delete(this.fullPath(`/-/user/token/${encodeURIComponent(token)}`))
+ .reply(200, { ok: true })
+ }
+
// team can be a team or a username
getPackages ({ user, team, packages = {}, times = 1, responseCode = 200 }) {
let uri
@@ -328,6 +345,31 @@ class MockRegistry {
this.nock = nock
}
+ getTokens (tokens) {
+ return this.nock.get('/-/npm/v1/tokens')
+ .reply(200, {
+ objects: tokens,
+ urls: {},
+ total: tokens.length,
+ userHasOldFormatToken: false,
+ })
+ }
+
+ createToken ({ password, readonly = false, cidr = [] }) {
+ return this.nock.post('/-/npm/v1/tokens', {
+ password,
+ readonly,
+ cidr_whitelist: cidr,
+ }).reply(200, {
+ key: 'n3wk3y',
+ token: 'n3wt0k3n',
+ created: new Date(),
+ updated: new Date(),
+ readonly,
+ cidr_whitelist: cidr,
+ })
+ }
+
async package ({ manifest, times = 1, query, tarballs }) {
let nock = this.nock
const spec = npa(manifest.name)
@@ -337,9 +379,9 @@ class MockRegistry {
}
nock = nock.reply(200, manifest)
if (tarballs) {
- for (const version in tarballs) {
+ for (const [version, tarball] of Object.entries(tarballs)) {
const m = manifest.versions[version]
- nock = await this.tarball({ manifest: m, tarball: tarballs[version] })
+ nock = await this.tarball({ manifest: m, tarball })
}
}
this.nock = nock
@@ -410,6 +452,50 @@ class MockRegistry {
...packument,
}
}
+
+ /**
+ * this is a simpler convenience method for creating a mockable registry with
+ * tarballs for specific versions
+ */
+ async setup (packages) {
+ const format = Object.keys(packages).map(v => {
+ const [name, version] = v.split('@')
+ return { name, version }
+ }).reduce((acc, inc) => {
+ const exists = acc.find(pkg => pkg.name === inc.name)
+ if (exists) {
+ exists.tarballs = {
+ ...exists.tarballs,
+ [inc.version]: packages[`${inc.name}@${inc.version}`],
+ }
+ } else {
+ acc.push({ name: inc.name,
+ tarballs: {
+ [inc.version]: packages[`${inc.name}@${inc.version}`],
+ },
+ })
+ }
+ return acc
+ }, [])
+ const registry = this
+ for (const pkg of format) {
+ const { name, tarballs } = pkg
+ const versions = Object.keys(tarballs)
+ const manifest = await registry.manifest({ name, versions })
+
+ for (const version of versions) {
+ const tarballPath = pkg.tarballs[version]
+ if (!tarballPath) {
+ throw new Error(`Tarball path not provided for version ${version}`)
+ }
+
+ await registry.tarball({
+ manifest: manifest.versions[version],
+ tarball: tarballPath,
+ })
+ }
+ }
+ }
}
module.exports = MockRegistry
diff --git a/mock-registry/package.json b/mock-registry/package.json
index 65bd08118717b..8582d113c04f0 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -6,16 +6,16 @@
"private": true,
"scripts": {
"test": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
- "lintfix": "node .. run lint -- --fix",
+ "lintfix": "npm run lint -- --fix",
"snap": "tap",
- "posttest": "node .. run lint"
+ "posttest": "npm run lint"
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/cli.git",
+ "url": "git+https://github.com/npm/cli.git",
"directory": "mock-registry"
},
"keywords": [],
@@ -30,11 +30,12 @@
"lib/"
],
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^18.17.0 || >=20.5.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.11.0"
+ "version": "4.22.0",
+ "content": "../scripts/template-oss/index.js"
},
"tap": {
"no-coverage": true,
@@ -44,12 +45,13 @@
]
},
"devDependencies": {
- "@npmcli/arborist": "^6.1.1",
+ "@npmcli/arborist": "^7.1.0",
"@npmcli/eslint-config": "^4.0.1",
- "@npmcli/template-oss": "4.11.0",
- "nock": "^13.2.9",
- "npm-package-arg": "^10.1.0",
- "pacote": "^15.0.7",
- "tap": "^16.3.2"
+ "@npmcli/template-oss": "4.22.0",
+ "json-stringify-safe": "^5.0.1",
+ "nock": "^13.3.3",
+ "npm-package-arg": "^11.0.2",
+ "pacote": "^18.0.6",
+ "tap": "^16.3.8"
}
}
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 9861a76e6a417..7940f660b3aa4 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -3,20 +3,19 @@
/*
!/.gitignore
# Allow all bundled deps
-!/@colors/
-/@colors/*
-!/@colors/colors
-!/@gar/
-/@gar/*
-!/@gar/promisify
!/@isaacs/
/@isaacs/*
+!/@isaacs/cliui
+!/@isaacs/cliui/node_modules/
+/@isaacs/cliui/node_modules/*
+!/@isaacs/cliui/node_modules/ansi-regex
+!/@isaacs/cliui/node_modules/emoji-regex
+!/@isaacs/cliui/node_modules/string-width
+!/@isaacs/cliui/node_modules/strip-ansi
!/@isaacs/string-locale-compare
!/@npmcli/
/@npmcli/*
-!/@npmcli/arborist
-!/@npmcli/config
-!/@npmcli/disparity-colors
+!/@npmcli/agent
!/@npmcli/fs
!/@npmcli/git
!/@npmcli/installed-package-contents
@@ -27,30 +26,34 @@
!/@npmcli/package-json
!/@npmcli/promise-spawn
!/@npmcli/query
+!/@npmcli/redact
!/@npmcli/run-script
-!/@tootallnate/
-/@tootallnate/*
-!/@tootallnate/once
+!/@pkgjs/
+/@pkgjs/*
+!/@pkgjs/parseargs
+!/@sigstore/
+/@sigstore/*
+!/@sigstore/bundle
+!/@sigstore/core
+!/@sigstore/protobuf-specs
+!/@sigstore/sign
+!/@sigstore/tuf
+!/@sigstore/verify
+!/@tufjs/
+/@tufjs/*
+!/@tufjs/canonical-json
+!/@tufjs/models
!/abbrev
-!/abort-controller
!/agent-base
-!/agentkeepalive
!/aggregate-error
!/ansi-regex
!/ansi-styles
!/aproba
!/archy
-!/are-we-there-yet
-!/are-we-there-yet/node_modules/
-/are-we-there-yet/node_modules/*
-!/are-we-there-yet/node_modules/buffer
-!/are-we-there-yet/node_modules/readable-stream
!/balanced-match
-!/base64-js
!/bin-links
!/binary-extensions
!/brace-expansion
-!/builtins
!/cacache
!/chalk
!/chownr
@@ -58,98 +61,63 @@
!/cidr-regex
!/clean-stack
!/cli-columns
-!/cli-table3
-!/clone
!/cmd-shim
!/color-convert
!/color-name
-!/color-support
-!/columnify
!/common-ancestor-path
-!/concat-map
-!/console-control-strings
+!/cross-spawn
+!/cross-spawn/node_modules/
+/cross-spawn/node_modules/*
+!/cross-spawn/node_modules/which
!/cssesc
!/debug
!/debug/node_modules/
/debug/node_modules/*
!/debug/node_modules/ms
-!/defaults
-!/delegates
-!/depd
!/diff
+!/eastasianwidth
!/emoji-regex
!/encoding
!/env-paths
!/err-code
-!/event-target-shim
-!/events
+!/exponential-backoff
!/fastest-levenshtein
+!/foreground-child
!/fs-minipass
-!/fs-minipass/node_modules/
-/fs-minipass/node_modules/*
-!/fs-minipass/node_modules/minipass
-!/fs.realpath
-!/function-bind
-!/gauge
!/glob
!/graceful-fs
-!/has-flag
-!/has-unicode
-!/has
!/hosted-git-info
!/http-cache-semantics
!/http-proxy-agent
!/https-proxy-agent
-!/humanize-ms
!/iconv-lite
-!/ieee754
!/ignore-walk
!/imurmurhash
!/indent-string
-!/infer-owner
-!/inflight
-!/inherits
!/ini
!/init-package-json
+!/ip-address
!/ip-regex
-!/ip
!/is-cidr
-!/is-core-module
!/is-fullwidth-code-point
!/is-lambda
!/isexe
+!/jackspeak
+!/jsbn
!/json-parse-even-better-errors
!/json-stringify-nice
!/jsonparse
!/just-diff-apply
!/just-diff
-!/libnpmaccess
-!/libnpmdiff
-!/libnpmexec
-!/libnpmfund
-!/libnpmhook
-!/libnpmorg
-!/libnpmpack
-!/libnpmpublish
-!/libnpmsearch
-!/libnpmteam
-!/libnpmversion
!/lru-cache
!/make-fetch-happen
!/minimatch
!/minipass-collect
-!/minipass-collect/node_modules/
-/minipass-collect/node_modules/*
-!/minipass-collect/node_modules/minipass
!/minipass-fetch
!/minipass-flush
!/minipass-flush/node_modules/
/minipass-flush/node_modules/*
!/minipass-flush/node_modules/minipass
-!/minipass-json-stream
-!/minipass-json-stream/node_modules/
-/minipass-json-stream/node_modules/*
-!/minipass-json-stream/node_modules/minipass
!/minipass-pipeline
!/minipass-pipeline/node_modules/
/minipass-pipeline/node_modules/*
@@ -170,31 +138,7 @@
!/node-gyp
!/node-gyp/node_modules/
/node-gyp/node_modules/*
-!/node-gyp/node_modules/@npmcli/
-/node-gyp/node_modules/@npmcli/*
-!/node-gyp/node_modules/@npmcli/fs
-!/node-gyp/node_modules/@npmcli/move-file
-!/node-gyp/node_modules/abbrev
-!/node-gyp/node_modules/are-we-there-yet
-!/node-gyp/node_modules/brace-expansion
-!/node-gyp/node_modules/cacache
-!/node-gyp/node_modules/cacache/node_modules/
-/node-gyp/node_modules/cacache/node_modules/*
-!/node-gyp/node_modules/cacache/node_modules/brace-expansion
-!/node-gyp/node_modules/cacache/node_modules/glob
-!/node-gyp/node_modules/cacache/node_modules/minimatch
-!/node-gyp/node_modules/gauge
-!/node-gyp/node_modules/glob
-!/node-gyp/node_modules/make-fetch-happen
-!/node-gyp/node_modules/minimatch
-!/node-gyp/node_modules/minipass-fetch
-!/node-gyp/node_modules/minipass
-!/node-gyp/node_modules/nopt
-!/node-gyp/node_modules/npmlog
-!/node-gyp/node_modules/ssri
-!/node-gyp/node_modules/unique-filename
-!/node-gyp/node_modules/unique-slug
-!/node-gyp/node_modules/which
+!/node-gyp/node_modules/proc-log
!/nopt
!/normalize-package-data
!/npm-audit-report
@@ -207,15 +151,15 @@
!/npm-profile
!/npm-registry-fetch
!/npm-user-validate
-!/npmlog
-!/once
!/p-map
+!/package-json-from-dist
!/pacote
!/parse-conflict-json
-!/path-is-absolute
+!/path-key
+!/path-scurry
!/postcss-selector-parser
!/proc-log
-!/process
+!/proggy
!/promise-all-reject-late
!/promise-call-limit
!/promise-inflight
@@ -224,50 +168,67 @@
!/qrcode-terminal
!/read-cmd-shim
!/read-package-json-fast
-!/read-package-json
!/read
-!/readable-stream
!/retry
-!/rimraf
-!/rimraf/node_modules/
-/rimraf/node_modules/*
-!/rimraf/node_modules/brace-expansion
-!/rimraf/node_modules/glob
-!/rimraf/node_modules/minimatch
-!/safe-buffer
!/safer-buffer
!/semver
-!/semver/node_modules/
-/semver/node_modules/*
-!/semver/node_modules/lru-cache
-!/set-blocking
+!/shebang-command
+!/shebang-regex
!/signal-exit
+!/sigstore
!/smart-buffer
!/socks-proxy-agent
!/socks
!/spdx-correct
+!/spdx-correct/node_modules/
+/spdx-correct/node_modules/*
+!/spdx-correct/node_modules/spdx-expression-parse
!/spdx-exceptions
!/spdx-expression-parse
!/spdx-license-ids
+!/sprintf-js
!/ssri
-!/string_decoder
+!/string-width-cjs
!/string-width
+!/strip-ansi-cjs
!/strip-ansi
!/supports-color
!/tar
+!/tar/node_modules/
+/tar/node_modules/*
+!/tar/node_modules/fs-minipass
+!/tar/node_modules/fs-minipass/node_modules/
+/tar/node_modules/fs-minipass/node_modules/*
+!/tar/node_modules/fs-minipass/node_modules/minipass
+!/tar/node_modules/minipass
!/text-table
!/tiny-relative-date
!/treeverse
+!/tuf-js
!/unique-filename
!/unique-slug
!/util-deprecate
!/validate-npm-package-license
+!/validate-npm-package-license/node_modules/
+/validate-npm-package-license/node_modules/*
+!/validate-npm-package-license/node_modules/spdx-expression-parse
!/validate-npm-package-name
!/walk-up-path
-!/wcwidth
!/which
-!/wide-align
-!/wrappy
+!/which/node_modules/
+/which/node_modules/*
+!/which/node_modules/isexe
+!/wrap-ansi-cjs
+!/wrap-ansi-cjs/node_modules/
+/wrap-ansi-cjs/node_modules/*
+!/wrap-ansi-cjs/node_modules/ansi-styles
+!/wrap-ansi
+!/wrap-ansi/node_modules/
+/wrap-ansi/node_modules/*
+!/wrap-ansi/node_modules/ansi-regex
+!/wrap-ansi/node_modules/emoji-regex
+!/wrap-ansi/node_modules/string-width
+!/wrap-ansi/node_modules/strip-ansi
!/write-file-atomic
!/yallist
# Always ignore some specific patterns within any allowed package
@@ -301,3 +262,7 @@ __pycache__
.babelrc*
.nyc_output
.gitkeep
+*.map
+*.ts
+*.png
+*.jpg
diff --git a/node_modules/@colors/colors/LICENSE b/node_modules/@colors/colors/LICENSE
deleted file mode 100644
index 6b86056199d2a..0000000000000
--- a/node_modules/@colors/colors/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-MIT License
-
-Original Library
- - Copyright (c) Marak Squires
-
-Additional Functionality
- - Copyright (c) Sindre Sorhus (sindresorhus.com)
- - Copyright (c) DABH (https://github.com/DABH)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/@colors/colors/examples/normal-usage.js b/node_modules/@colors/colors/examples/normal-usage.js
deleted file mode 100644
index a4bfe7b7be633..0000000000000
--- a/node_modules/@colors/colors/examples/normal-usage.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var colors = require('../lib/index');
-
-console.log('First some yellow text'.yellow);
-
-console.log('Underline that text'.yellow.underline);
-
-console.log('Make it bold and red'.red.bold);
-
-console.log(('Double Raindows All Day Long').rainbow);
-
-console.log('Drop the bass'.trap);
-
-console.log('DROP THE RAINBOW BASS'.trap.rainbow);
-
-// styles not widely supported
-console.log('Chains are also cool.'.bold.italic.underline.red);
-
-// styles not widely supported
-console.log('So '.green + 'are'.underline + ' ' + 'inverse'.inverse
- + ' styles! '.yellow.bold);
-console.log('Zebras are so fun!'.zebra);
-
-//
-// Remark: .strikethrough may not work with Mac OS Terminal App
-//
-console.log('This is ' + 'not'.strikethrough + ' fun.');
-
-console.log('Background color attack!'.black.bgWhite);
-console.log('Use random styles on everything!'.random);
-console.log('America, Heck Yeah!'.america);
-
-// eslint-disable-next-line max-len
-console.log('Blindingly '.brightCyan + 'bright? '.brightRed + 'Why '.brightYellow + 'not?!'.brightGreen);
-
-console.log('Setting themes is useful');
-
-//
-// Custom themes
-//
-console.log('Generic logging theme as JSON'.green.bold.underline);
-// Load theme with JSON literal
-colors.setTheme({
- silly: 'rainbow',
- input: 'grey',
- verbose: 'cyan',
- prompt: 'grey',
- info: 'green',
- data: 'grey',
- help: 'cyan',
- warn: 'yellow',
- debug: 'blue',
- error: 'red',
-});
-
-// outputs red text
-console.log('this is an error'.error);
-
-// outputs yellow text
-console.log('this is a warning'.warn);
-
-// outputs grey text
-console.log('this is an input'.input);
-
-console.log('Generic logging theme as file'.green.bold.underline);
-
-// Load a theme from file
-try {
- colors.setTheme(require(__dirname + '/../themes/generic-logging.js'));
-} catch (err) {
- console.log(err);
-}
-
-// outputs red text
-console.log('this is an error'.error);
-
-// outputs yellow text
-console.log('this is a warning'.warn);
-
-// outputs grey text
-console.log('this is an input'.input);
-
-// console.log("Don't summon".zalgo)
-
diff --git a/node_modules/@colors/colors/examples/safe-string.js b/node_modules/@colors/colors/examples/safe-string.js
deleted file mode 100644
index fc664745705f3..0000000000000
--- a/node_modules/@colors/colors/examples/safe-string.js
+++ /dev/null
@@ -1,80 +0,0 @@
-var colors = require('../safe');
-
-console.log(colors.yellow('First some yellow text'));
-
-console.log(colors.yellow.underline('Underline that text'));
-
-console.log(colors.red.bold('Make it bold and red'));
-
-console.log(colors.rainbow('Double Raindows All Day Long'));
-
-console.log(colors.trap('Drop the bass'));
-
-console.log(colors.rainbow(colors.trap('DROP THE RAINBOW BASS')));
-
-// styles not widely supported
-console.log(colors.bold.italic.underline.red('Chains are also cool.'));
-
-// styles not widely supported
-console.log(colors.green('So ') + colors.underline('are') + ' '
- + colors.inverse('inverse') + colors.yellow.bold(' styles! '));
-
-console.log(colors.zebra('Zebras are so fun!'));
-
-console.log('This is ' + colors.strikethrough('not') + ' fun.');
-
-
-console.log(colors.black.bgWhite('Background color attack!'));
-console.log(colors.random('Use random styles on everything!'));
-console.log(colors.america('America, Heck Yeah!'));
-
-// eslint-disable-next-line max-len
-console.log(colors.brightCyan('Blindingly ') + colors.brightRed('bright? ') + colors.brightYellow('Why ') + colors.brightGreen('not?!'));
-
-console.log('Setting themes is useful');
-
-//
-// Custom themes
-//
-// console.log('Generic logging theme as JSON'.green.bold.underline);
-// Load theme with JSON literal
-colors.setTheme({
- silly: 'rainbow',
- input: 'blue',
- verbose: 'cyan',
- prompt: 'grey',
- info: 'green',
- data: 'grey',
- help: 'cyan',
- warn: 'yellow',
- debug: 'blue',
- error: 'red',
-});
-
-// outputs red text
-console.log(colors.error('this is an error'));
-
-// outputs yellow text
-console.log(colors.warn('this is a warning'));
-
-// outputs blue text
-console.log(colors.input('this is an input'));
-
-
-// console.log('Generic logging theme as file'.green.bold.underline);
-
-// Load a theme from file
-colors.setTheme(require(__dirname + '/../themes/generic-logging.js'));
-
-// outputs red text
-console.log(colors.error('this is an error'));
-
-// outputs yellow text
-console.log(colors.warn('this is a warning'));
-
-// outputs grey text
-console.log(colors.input('this is an input'));
-
-// console.log(colors.zalgo("Don't summon him"))
-
-
diff --git a/node_modules/@colors/colors/index.d.ts b/node_modules/@colors/colors/index.d.ts
deleted file mode 100644
index df3f2e6afc945..0000000000000
--- a/node_modules/@colors/colors/index.d.ts
+++ /dev/null
@@ -1,136 +0,0 @@
-// Type definitions for @colors/colors 1.4+
-// Project: https://github.com/Marak/colors.js
-// Definitions by: Bart van der Schoor , Staffan Eketorp
-// Definitions: https://github.com/DABH/colors.js
-
-export interface Color {
- (text: string): string;
-
- strip: Color;
- stripColors: Color;
-
- black: Color;
- red: Color;
- green: Color;
- yellow: Color;
- blue: Color;
- magenta: Color;
- cyan: Color;
- white: Color;
- gray: Color;
- grey: Color;
-
- bgBlack: Color;
- bgRed: Color;
- bgGreen: Color;
- bgYellow: Color;
- bgBlue: Color;
- bgMagenta: Color;
- bgCyan: Color;
- bgWhite: Color;
-
- reset: Color;
- bold: Color;
- dim: Color;
- italic: Color;
- underline: Color;
- inverse: Color;
- hidden: Color;
- strikethrough: Color;
-
- rainbow: Color;
- zebra: Color;
- america: Color;
- trap: Color;
- random: Color;
- zalgo: Color;
-}
-
-export function enable(): void;
-export function disable(): void;
-export function setTheme(theme: any): void;
-
-export let enabled: boolean;
-
-export const strip: Color;
-export const stripColors: Color;
-
-export const black: Color;
-export const red: Color;
-export const green: Color;
-export const yellow: Color;
-export const blue: Color;
-export const magenta: Color;
-export const cyan: Color;
-export const white: Color;
-export const gray: Color;
-export const grey: Color;
-
-export const bgBlack: Color;
-export const bgRed: Color;
-export const bgGreen: Color;
-export const bgYellow: Color;
-export const bgBlue: Color;
-export const bgMagenta: Color;
-export const bgCyan: Color;
-export const bgWhite: Color;
-
-export const reset: Color;
-export const bold: Color;
-export const dim: Color;
-export const italic: Color;
-export const underline: Color;
-export const inverse: Color;
-export const hidden: Color;
-export const strikethrough: Color;
-
-export const rainbow: Color;
-export const zebra: Color;
-export const america: Color;
-export const trap: Color;
-export const random: Color;
-export const zalgo: Color;
-
-declare global {
- interface String {
- strip: string;
- stripColors: string;
-
- black: string;
- red: string;
- green: string;
- yellow: string;
- blue: string;
- magenta: string;
- cyan: string;
- white: string;
- gray: string;
- grey: string;
-
- bgBlack: string;
- bgRed: string;
- bgGreen: string;
- bgYellow: string;
- bgBlue: string;
- bgMagenta: string;
- bgCyan: string;
- bgWhite: string;
-
- reset: string;
- // @ts-ignore
- bold: string;
- dim: string;
- italic: string;
- underline: string;
- inverse: string;
- hidden: string;
- strikethrough: string;
-
- rainbow: string;
- zebra: string;
- america: string;
- trap: string;
- random: string;
- zalgo: string;
- }
-}
diff --git a/node_modules/@colors/colors/lib/colors.js b/node_modules/@colors/colors/lib/colors.js
deleted file mode 100644
index d9fb08762fde5..0000000000000
--- a/node_modules/@colors/colors/lib/colors.js
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
-
-The MIT License (MIT)
-
-Original Library
- - Copyright (c) Marak Squires
-
-Additional functionality
- - Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-*/
-
-var colors = {};
-module['exports'] = colors;
-
-colors.themes = {};
-
-var util = require('util');
-var ansiStyles = colors.styles = require('./styles');
-var defineProps = Object.defineProperties;
-var newLineRegex = new RegExp(/[\r\n]+/g);
-
-colors.supportsColor = require('./system/supports-colors').supportsColor;
-
-if (typeof colors.enabled === 'undefined') {
- colors.enabled = colors.supportsColor() !== false;
-}
-
-colors.enable = function() {
- colors.enabled = true;
-};
-
-colors.disable = function() {
- colors.enabled = false;
-};
-
-colors.stripColors = colors.strip = function(str) {
- return ('' + str).replace(/\x1B\[\d+m/g, '');
-};
-
-// eslint-disable-next-line no-unused-vars
-var stylize = colors.stylize = function stylize(str, style) {
- if (!colors.enabled) {
- return str+'';
- }
-
- var styleMap = ansiStyles[style];
-
- // Stylize should work for non-ANSI styles, too
- if (!styleMap && style in colors) {
- // Style maps like trap operate as functions on strings;
- // they don't have properties like open or close.
- return colors[style](str);
- }
-
- return styleMap.open + str + styleMap.close;
-};
-
-var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;
-var escapeStringRegexp = function(str) {
- if (typeof str !== 'string') {
- throw new TypeError('Expected a string');
- }
- return str.replace(matchOperatorsRe, '\\$&');
-};
-
-function build(_styles) {
- var builder = function builder() {
- return applyStyle.apply(builder, arguments);
- };
- builder._styles = _styles;
- // __proto__ is used because we must return a function, but there is
- // no way to create a function with a different prototype.
- builder.__proto__ = proto;
- return builder;
-}
-
-var styles = (function() {
- var ret = {};
- ansiStyles.grey = ansiStyles.gray;
- Object.keys(ansiStyles).forEach(function(key) {
- ansiStyles[key].closeRe =
- new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g');
- ret[key] = {
- get: function() {
- return build(this._styles.concat(key));
- },
- };
- });
- return ret;
-})();
-
-var proto = defineProps(function colors() {}, styles);
-
-function applyStyle() {
- var args = Array.prototype.slice.call(arguments);
-
- var str = args.map(function(arg) {
- // Use weak equality check so we can colorize null/undefined in safe mode
- if (arg != null && arg.constructor === String) {
- return arg;
- } else {
- return util.inspect(arg);
- }
- }).join(' ');
-
- if (!colors.enabled || !str) {
- return str;
- }
-
- var newLinesPresent = str.indexOf('\n') != -1;
-
- var nestedStyles = this._styles;
-
- var i = nestedStyles.length;
- while (i--) {
- var code = ansiStyles[nestedStyles[i]];
- str = code.open + str.replace(code.closeRe, code.open) + code.close;
- if (newLinesPresent) {
- str = str.replace(newLineRegex, function(match) {
- return code.close + match + code.open;
- });
- }
- }
-
- return str;
-}
-
-colors.setTheme = function(theme) {
- if (typeof theme === 'string') {
- console.log('colors.setTheme now only accepts an object, not a string. ' +
- 'If you are trying to set a theme from a file, it is now your (the ' +
- 'caller\'s) responsibility to require the file. The old syntax ' +
- 'looked like colors.setTheme(__dirname + ' +
- '\'/../themes/generic-logging.js\'); The new syntax looks like '+
- 'colors.setTheme(require(__dirname + ' +
- '\'/../themes/generic-logging.js\'));');
- return;
- }
- for (var style in theme) {
- (function(style) {
- colors[style] = function(str) {
- if (typeof theme[style] === 'object') {
- var out = str;
- for (var i in theme[style]) {
- out = colors[theme[style][i]](out);
- }
- return out;
- }
- return colors[theme[style]](str);
- };
- })(style);
- }
-};
-
-function init() {
- var ret = {};
- Object.keys(styles).forEach(function(name) {
- ret[name] = {
- get: function() {
- return build([name]);
- },
- };
- });
- return ret;
-}
-
-var sequencer = function sequencer(map, str) {
- var exploded = str.split('');
- exploded = exploded.map(map);
- return exploded.join('');
-};
-
-// custom formatter methods
-colors.trap = require('./custom/trap');
-colors.zalgo = require('./custom/zalgo');
-
-// maps
-colors.maps = {};
-colors.maps.america = require('./maps/america')(colors);
-colors.maps.zebra = require('./maps/zebra')(colors);
-colors.maps.rainbow = require('./maps/rainbow')(colors);
-colors.maps.random = require('./maps/random')(colors);
-
-for (var map in colors.maps) {
- (function(map) {
- colors[map] = function(str) {
- return sequencer(colors.maps[map], str);
- };
- })(map);
-}
-
-defineProps(colors, init());
diff --git a/node_modules/@colors/colors/lib/custom/trap.js b/node_modules/@colors/colors/lib/custom/trap.js
deleted file mode 100644
index fbccf88dede0b..0000000000000
--- a/node_modules/@colors/colors/lib/custom/trap.js
+++ /dev/null
@@ -1,46 +0,0 @@
-module['exports'] = function runTheTrap(text, options) {
- var result = '';
- text = text || 'Run the trap, drop the bass';
- text = text.split('');
- var trap = {
- a: ['\u0040', '\u0104', '\u023a', '\u0245', '\u0394', '\u039b', '\u0414'],
- b: ['\u00df', '\u0181', '\u0243', '\u026e', '\u03b2', '\u0e3f'],
- c: ['\u00a9', '\u023b', '\u03fe'],
- d: ['\u00d0', '\u018a', '\u0500', '\u0501', '\u0502', '\u0503'],
- e: ['\u00cb', '\u0115', '\u018e', '\u0258', '\u03a3', '\u03be', '\u04bc',
- '\u0a6c'],
- f: ['\u04fa'],
- g: ['\u0262'],
- h: ['\u0126', '\u0195', '\u04a2', '\u04ba', '\u04c7', '\u050a'],
- i: ['\u0f0f'],
- j: ['\u0134'],
- k: ['\u0138', '\u04a0', '\u04c3', '\u051e'],
- l: ['\u0139'],
- m: ['\u028d', '\u04cd', '\u04ce', '\u0520', '\u0521', '\u0d69'],
- n: ['\u00d1', '\u014b', '\u019d', '\u0376', '\u03a0', '\u048a'],
- o: ['\u00d8', '\u00f5', '\u00f8', '\u01fe', '\u0298', '\u047a', '\u05dd',
- '\u06dd', '\u0e4f'],
- p: ['\u01f7', '\u048e'],
- q: ['\u09cd'],
- r: ['\u00ae', '\u01a6', '\u0210', '\u024c', '\u0280', '\u042f'],
- s: ['\u00a7', '\u03de', '\u03df', '\u03e8'],
- t: ['\u0141', '\u0166', '\u0373'],
- u: ['\u01b1', '\u054d'],
- v: ['\u05d8'],
- w: ['\u0428', '\u0460', '\u047c', '\u0d70'],
- x: ['\u04b2', '\u04fe', '\u04fc', '\u04fd'],
- y: ['\u00a5', '\u04b0', '\u04cb'],
- z: ['\u01b5', '\u0240'],
- };
- text.forEach(function(c) {
- c = c.toLowerCase();
- var chars = trap[c] || [' '];
- var rand = Math.floor(Math.random() * chars.length);
- if (typeof trap[c] !== 'undefined') {
- result += trap[c][rand];
- } else {
- result += c;
- }
- });
- return result;
-};
diff --git a/node_modules/@colors/colors/lib/custom/zalgo.js b/node_modules/@colors/colors/lib/custom/zalgo.js
deleted file mode 100644
index 0ef2b01195635..0000000000000
--- a/node_modules/@colors/colors/lib/custom/zalgo.js
+++ /dev/null
@@ -1,110 +0,0 @@
-// please no
-module['exports'] = function zalgo(text, options) {
- text = text || ' he is here ';
- var soul = {
- 'up': [
- '̍', '̎', '̄', '̅',
- '̿', '̑', '̆', '̐',
- '͒', '͗', '͑', '̇',
- '̈', '̊', '͂', '̓',
- '̈', '͊', '͋', '͌',
- '̃', '̂', '̌', '͐',
- '̀', '́', '̋', '̏',
- '̒', '̓', '̔', '̽',
- '̉', 'ͣ', 'ͤ', 'ͥ',
- 'ͦ', 'ͧ', 'ͨ', 'ͩ',
- 'ͪ', 'ͫ', 'ͬ', 'ͭ',
- 'ͮ', 'ͯ', '̾', '͛',
- '͆', '̚',
- ],
- 'down': [
- '̖', '̗', '̘', '̙',
- '̜', '̝', '̞', '̟',
- '̠', '̤', '̥', '̦',
- '̩', '̪', '̫', '̬',
- '̭', '̮', '̯', '̰',
- '̱', '̲', '̳', '̹',
- '̺', '̻', '̼', 'ͅ',
- '͇', '͈', '͉', '͍',
- '͎', '͓', '͔', '͕',
- '͖', '͙', '͚', '̣',
- ],
- 'mid': [
- '̕', '̛', '̀', '́',
- '͘', '̡', '̢', '̧',
- '̨', '̴', '̵', '̶',
- '͜', '͝', '͞',
- '͟', '͠', '͢', '̸',
- '̷', '͡', ' ҉',
- ],
- };
- var all = [].concat(soul.up, soul.down, soul.mid);
-
- function randomNumber(range) {
- var r = Math.floor(Math.random() * range);
- return r;
- }
-
- function isChar(character) {
- var bool = false;
- all.filter(function(i) {
- bool = (i === character);
- });
- return bool;
- }
-
-
- function heComes(text, options) {
- var result = '';
- var counts;
- var l;
- options = options || {};
- options['up'] =
- typeof options['up'] !== 'undefined' ? options['up'] : true;
- options['mid'] =
- typeof options['mid'] !== 'undefined' ? options['mid'] : true;
- options['down'] =
- typeof options['down'] !== 'undefined' ? options['down'] : true;
- options['size'] =
- typeof options['size'] !== 'undefined' ? options['size'] : 'maxi';
- text = text.split('');
- for (l in text) {
- if (isChar(l)) {
- continue;
- }
- result = result + text[l];
- counts = {'up': 0, 'down': 0, 'mid': 0};
- switch (options.size) {
- case 'mini':
- counts.up = randomNumber(8);
- counts.mid = randomNumber(2);
- counts.down = randomNumber(8);
- break;
- case 'maxi':
- counts.up = randomNumber(16) + 3;
- counts.mid = randomNumber(4) + 1;
- counts.down = randomNumber(64) + 3;
- break;
- default:
- counts.up = randomNumber(8) + 1;
- counts.mid = randomNumber(6) / 2;
- counts.down = randomNumber(8) + 1;
- break;
- }
-
- var arr = ['up', 'mid', 'down'];
- for (var d in arr) {
- var index = arr[d];
- for (var i = 0; i <= counts[index]; i++) {
- if (options[index]) {
- result = result + soul[index][randomNumber(soul[index].length)];
- }
- }
- }
- }
- return result;
- }
- // don't summon him
- return heComes(text, options);
-};
-
diff --git a/node_modules/@colors/colors/lib/extendStringPrototype.js b/node_modules/@colors/colors/lib/extendStringPrototype.js
deleted file mode 100644
index 46fd386a915a6..0000000000000
--- a/node_modules/@colors/colors/lib/extendStringPrototype.js
+++ /dev/null
@@ -1,110 +0,0 @@
-var colors = require('./colors');
-
-module['exports'] = function() {
- //
- // Extends prototype of native string object to allow for "foo".red syntax
- //
- var addProperty = function(color, func) {
- String.prototype.__defineGetter__(color, func);
- };
-
- addProperty('strip', function() {
- return colors.strip(this);
- });
-
- addProperty('stripColors', function() {
- return colors.strip(this);
- });
-
- addProperty('trap', function() {
- return colors.trap(this);
- });
-
- addProperty('zalgo', function() {
- return colors.zalgo(this);
- });
-
- addProperty('zebra', function() {
- return colors.zebra(this);
- });
-
- addProperty('rainbow', function() {
- return colors.rainbow(this);
- });
-
- addProperty('random', function() {
- return colors.random(this);
- });
-
- addProperty('america', function() {
- return colors.america(this);
- });
-
- //
- // Iterate through all default styles and colors
- //
- var x = Object.keys(colors.styles);
- x.forEach(function(style) {
- addProperty(style, function() {
- return colors.stylize(this, style);
- });
- });
-
- function applyTheme(theme) {
- //
- // Remark: This is a list of methods that exist
- // on String that you should not overwrite.
- //
- var stringPrototypeBlacklist = [
- '__defineGetter__', '__defineSetter__', '__lookupGetter__',
- '__lookupSetter__', 'charAt', 'constructor', 'hasOwnProperty',
- 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString',
- 'valueOf', 'charCodeAt', 'indexOf', 'lastIndexOf', 'length',
- 'localeCompare', 'match', 'repeat', 'replace', 'search', 'slice',
- 'split', 'substring', 'toLocaleLowerCase', 'toLocaleUpperCase',
- 'toLowerCase', 'toUpperCase', 'trim', 'trimLeft', 'trimRight',
- ];
-
- Object.keys(theme).forEach(function(prop) {
- if (stringPrototypeBlacklist.indexOf(prop) !== -1) {
- console.log('warn: '.red + ('String.prototype' + prop).magenta +
- ' is probably something you don\'t want to override. ' +
- 'Ignoring style name');
- } else {
- if (typeof(theme[prop]) === 'string') {
- colors[prop] = colors[theme[prop]];
- addProperty(prop, function() {
- return colors[prop](this);
- });
- } else {
- var themePropApplicator = function(str) {
- var ret = str || this;
- for (var t = 0; t < theme[prop].length; t++) {
- ret = colors[theme[prop][t]](ret);
- }
- return ret;
- };
- addProperty(prop, themePropApplicator);
- colors[prop] = function(str) {
- return themePropApplicator(str);
- };
- }
- }
- });
- }
-
- colors.setTheme = function(theme) {
- if (typeof theme === 'string') {
- console.log('colors.setTheme now only accepts an object, not a string. ' +
- 'If you are trying to set a theme from a file, it is now your (the ' +
- 'caller\'s) responsibility to require the file. The old syntax ' +
- 'looked like colors.setTheme(__dirname + ' +
- '\'/../themes/generic-logging.js\'); The new syntax looks like '+
- 'colors.setTheme(require(__dirname + ' +
- '\'/../themes/generic-logging.js\'));');
- return;
- } else {
- applyTheme(theme);
- }
- };
-};
diff --git a/node_modules/@colors/colors/lib/index.js b/node_modules/@colors/colors/lib/index.js
deleted file mode 100644
index 9df5ab7df3077..0000000000000
--- a/node_modules/@colors/colors/lib/index.js
+++ /dev/null
@@ -1,13 +0,0 @@
-var colors = require('./colors');
-module['exports'] = colors;
-
-// Remark: By default, colors will add style properties to String.prototype.
-//
-// If you don't wish to extend String.prototype, you can do this instead and
-// native String will not be touched:
-//
-// var colors = require('colors/safe);
-// colors.red("foo")
-//
-//
-require('./extendStringPrototype')();
diff --git a/node_modules/@colors/colors/lib/maps/america.js b/node_modules/@colors/colors/lib/maps/america.js
deleted file mode 100644
index dc96903328989..0000000000000
--- a/node_modules/@colors/colors/lib/maps/america.js
+++ /dev/null
@@ -1,10 +0,0 @@
-module['exports'] = function(colors) {
- return function(letter, i, exploded) {
- if (letter === ' ') return letter;
- switch (i%3) {
- case 0: return colors.red(letter);
- case 1: return colors.white(letter);
- case 2: return colors.blue(letter);
- }
- };
-};
diff --git a/node_modules/@colors/colors/lib/maps/rainbow.js b/node_modules/@colors/colors/lib/maps/rainbow.js
deleted file mode 100644
index 2b00ac0ac998e..0000000000000
--- a/node_modules/@colors/colors/lib/maps/rainbow.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module['exports'] = function(colors) {
- // RoY G BiV
- var rainbowColors = ['red', 'yellow', 'green', 'blue', 'magenta'];
- return function(letter, i, exploded) {
- if (letter === ' ') {
- return letter;
- } else {
- return colors[rainbowColors[i++ % rainbowColors.length]](letter);
- }
- };
-};
-
diff --git a/node_modules/@colors/colors/lib/maps/random.js b/node_modules/@colors/colors/lib/maps/random.js
deleted file mode 100644
index 3d82a39ec0fab..0000000000000
--- a/node_modules/@colors/colors/lib/maps/random.js
+++ /dev/null
@@ -1,11 +0,0 @@
-module['exports'] = function(colors) {
- var available = ['underline', 'inverse', 'grey', 'yellow', 'red', 'green',
- 'blue', 'white', 'cyan', 'magenta', 'brightYellow', 'brightRed',
- 'brightGreen', 'brightBlue', 'brightWhite', 'brightCyan', 'brightMagenta'];
- return function(letter, i, exploded) {
- return letter === ' ' ? letter :
- colors[
- available[Math.round(Math.random() * (available.length - 2))]
- ](letter);
- };
-};
diff --git a/node_modules/@colors/colors/lib/maps/zebra.js b/node_modules/@colors/colors/lib/maps/zebra.js
deleted file mode 100644
index fa73623544a82..0000000000000
--- a/node_modules/@colors/colors/lib/maps/zebra.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module['exports'] = function(colors) {
- return function(letter, i, exploded) {
- return i % 2 === 0 ? letter : colors.inverse(letter);
- };
-};
diff --git a/node_modules/@colors/colors/lib/styles.js b/node_modules/@colors/colors/lib/styles.js
deleted file mode 100644
index 011dafd8c28f7..0000000000000
--- a/node_modules/@colors/colors/lib/styles.js
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-The MIT License (MIT)
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-*/
-
-var styles = {};
-module['exports'] = styles;
-
-var codes = {
- reset: [0, 0],
-
- bold: [1, 22],
- dim: [2, 22],
- italic: [3, 23],
- underline: [4, 24],
- inverse: [7, 27],
- hidden: [8, 28],
- strikethrough: [9, 29],
-
- black: [30, 39],
- red: [31, 39],
- green: [32, 39],
- yellow: [33, 39],
- blue: [34, 39],
- magenta: [35, 39],
- cyan: [36, 39],
- white: [37, 39],
- gray: [90, 39],
- grey: [90, 39],
-
- brightRed: [91, 39],
- brightGreen: [92, 39],
- brightYellow: [93, 39],
- brightBlue: [94, 39],
- brightMagenta: [95, 39],
- brightCyan: [96, 39],
- brightWhite: [97, 39],
-
- bgBlack: [40, 49],
- bgRed: [41, 49],
- bgGreen: [42, 49],
- bgYellow: [43, 49],
- bgBlue: [44, 49],
- bgMagenta: [45, 49],
- bgCyan: [46, 49],
- bgWhite: [47, 49],
- bgGray: [100, 49],
- bgGrey: [100, 49],
-
- bgBrightRed: [101, 49],
- bgBrightGreen: [102, 49],
- bgBrightYellow: [103, 49],
- bgBrightBlue: [104, 49],
- bgBrightMagenta: [105, 49],
- bgBrightCyan: [106, 49],
- bgBrightWhite: [107, 49],
-
- // legacy styles for colors pre v1.0.0
- blackBG: [40, 49],
- redBG: [41, 49],
- greenBG: [42, 49],
- yellowBG: [43, 49],
- blueBG: [44, 49],
- magentaBG: [45, 49],
- cyanBG: [46, 49],
- whiteBG: [47, 49],
-
-};
-
-Object.keys(codes).forEach(function(key) {
- var val = codes[key];
- var style = styles[key] = [];
- style.open = '\u001b[' + val[0] + 'm';
- style.close = '\u001b[' + val[1] + 'm';
-});
diff --git a/node_modules/@colors/colors/lib/system/has-flag.js b/node_modules/@colors/colors/lib/system/has-flag.js
deleted file mode 100644
index a347dd4d7a697..0000000000000
--- a/node_modules/@colors/colors/lib/system/has-flag.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-MIT License
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-*/
-
-'use strict';
-
-module.exports = function(flag, argv) {
- argv = argv || process.argv;
-
- var terminatorPos = argv.indexOf('--');
- var prefix = /^-{1,2}/.test(flag) ? '' : '--';
- var pos = argv.indexOf(prefix + flag);
-
- return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos);
-};
diff --git a/node_modules/@colors/colors/lib/system/supports-colors.js b/node_modules/@colors/colors/lib/system/supports-colors.js
deleted file mode 100644
index f1f9c8ff3da28..0000000000000
--- a/node_modules/@colors/colors/lib/system/supports-colors.js
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
-The MIT License (MIT)
-
-Copyright (c) Sindre Sorhus (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-*/
-
-'use strict';
-
-var os = require('os');
-var hasFlag = require('./has-flag.js');
-
-var env = process.env;
-
-var forceColor = void 0;
-if (hasFlag('no-color') || hasFlag('no-colors') || hasFlag('color=false')) {
- forceColor = false;
-} else if (hasFlag('color') || hasFlag('colors') || hasFlag('color=true')
- || hasFlag('color=always')) {
- forceColor = true;
-}
-if ('FORCE_COLOR' in env) {
- forceColor = env.FORCE_COLOR.length === 0
- || parseInt(env.FORCE_COLOR, 10) !== 0;
-}
-
-function translateLevel(level) {
- if (level === 0) {
- return false;
- }
-
- return {
- level: level,
- hasBasic: true,
- has256: level >= 2,
- has16m: level >= 3,
- };
-}
-
-function supportsColor(stream) {
- if (forceColor === false) {
- return 0;
- }
-
- if (hasFlag('color=16m') || hasFlag('color=full')
- || hasFlag('color=truecolor')) {
- return 3;
- }
-
- if (hasFlag('color=256')) {
- return 2;
- }
-
- if (stream && !stream.isTTY && forceColor !== true) {
- return 0;
- }
-
- var min = forceColor ? 1 : 0;
-
- if (process.platform === 'win32') {
- // Node.js 7.5.0 is the first version of Node.js to include a patch to
- // libuv that enables 256 color output on Windows. Anything earlier and it
- // won't work. However, here we target Node.js 8 at minimum as it is an LTS
- // release, and Node.js 7 is not. Windows 10 build 10586 is the first
- // Windows release that supports 256 colors. Windows 10 build 14931 is the
- // first release that supports 16m/TrueColor.
- var osRelease = os.release().split('.');
- if (Number(process.versions.node.split('.')[0]) >= 8
- && Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) {
- return Number(osRelease[2]) >= 14931 ? 3 : 2;
- }
-
- return 1;
- }
-
- if ('CI' in env) {
- if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(function(sign) {
- return sign in env;
- }) || env.CI_NAME === 'codeship') {
- return 1;
- }
-
- return min;
- }
-
- if ('TEAMCITY_VERSION' in env) {
- return (/^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0
- );
- }
-
- if ('TERM_PROGRAM' in env) {
- var version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
-
- switch (env.TERM_PROGRAM) {
- case 'iTerm.app':
- return version >= 3 ? 3 : 2;
- case 'Hyper':
- return 3;
- case 'Apple_Terminal':
- return 2;
- // No default
- }
- }
-
- if (/-256(color)?$/i.test(env.TERM)) {
- return 2;
- }
-
- if (/^screen|^xterm|^vt100|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
- return 1;
- }
-
- if ('COLORTERM' in env) {
- return 1;
- }
-
- if (env.TERM === 'dumb') {
- return min;
- }
-
- return min;
-}
-
-function getSupportLevel(stream) {
- var level = supportsColor(stream);
- return translateLevel(level);
-}
-
-module.exports = {
- supportsColor: getSupportLevel,
- stdout: getSupportLevel(process.stdout),
- stderr: getSupportLevel(process.stderr),
-};
diff --git a/node_modules/@colors/colors/package.json b/node_modules/@colors/colors/package.json
deleted file mode 100644
index cb87f20953886..0000000000000
--- a/node_modules/@colors/colors/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "name": "@colors/colors",
- "description": "get colors in your node.js console",
- "version": "1.5.0",
- "author": "DABH",
- "contributors": [
- {
- "name": "DABH",
- "url": "https://github.com/DABH"
- }
- ],
- "homepage": "https://github.com/DABH/colors.js",
- "bugs": "https://github.com/DABH/colors.js/issues",
- "keywords": [
- "ansi",
- "terminal",
- "colors"
- ],
- "repository": {
- "type": "git",
- "url": "http://github.com/DABH/colors.js.git"
- },
- "license": "MIT",
- "scripts": {
- "lint": "eslint . --fix",
- "test": "export FORCE_COLOR=1 && node tests/basic-test.js && node tests/safe-test.js"
- },
- "engines": {
- "node": ">=0.1.90"
- },
- "main": "lib/index.js",
- "files": [
- "examples",
- "lib",
- "LICENSE",
- "safe.js",
- "themes",
- "index.d.ts",
- "safe.d.ts"
- ],
- "devDependencies": {
- "eslint": "^5.2.0",
- "eslint-config-google": "^0.11.0"
- }
-}
diff --git a/node_modules/@colors/colors/safe.d.ts b/node_modules/@colors/colors/safe.d.ts
deleted file mode 100644
index 2bafc27984e0e..0000000000000
--- a/node_modules/@colors/colors/safe.d.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-// Type definitions for Colors.js 1.2
-// Project: https://github.com/Marak/colors.js
-// Definitions by: Bart van der Schoor , Staffan Eketorp
-// Definitions: https://github.com/Marak/colors.js
-
-export const enabled: boolean;
-export function enable(): void;
-export function disable(): void;
-export function setTheme(theme: any): void;
-
-export function strip(str: string): string;
-export function stripColors(str: string): string;
-
-export function black(str: string): string;
-export function red(str: string): string;
-export function green(str: string): string;
-export function yellow(str: string): string;
-export function blue(str: string): string;
-export function magenta(str: string): string;
-export function cyan(str: string): string;
-export function white(str: string): string;
-export function gray(str: string): string;
-export function grey(str: string): string;
-
-export function bgBlack(str: string): string;
-export function bgRed(str: string): string;
-export function bgGreen(str: string): string;
-export function bgYellow(str: string): string;
-export function bgBlue(str: string): string;
-export function bgMagenta(str: string): string;
-export function bgCyan(str: string): string;
-export function bgWhite(str: string): string;
-
-export function reset(str: string): string;
-export function bold(str: string): string;
-export function dim(str: string): string;
-export function italic(str: string): string;
-export function underline(str: string): string;
-export function inverse(str: string): string;
-export function hidden(str: string): string;
-export function strikethrough(str: string): string;
-
-export function rainbow(str: string): string;
-export function zebra(str: string): string;
-export function america(str: string): string;
-export function trap(str: string): string;
-export function random(str: string): string;
-export function zalgo(str: string): string;
diff --git a/node_modules/@colors/colors/safe.js b/node_modules/@colors/colors/safe.js
deleted file mode 100644
index a013d54246485..0000000000000
--- a/node_modules/@colors/colors/safe.js
+++ /dev/null
@@ -1,10 +0,0 @@
-//
-// Remark: Requiring this file will use the "safe" colors API,
-// which will not touch String.prototype.
-//
-// var colors = require('colors/safe');
-// colors.red("foo")
-//
-//
-var colors = require('./lib/colors');
-module['exports'] = colors;
diff --git a/node_modules/@colors/colors/themes/generic-logging.js b/node_modules/@colors/colors/themes/generic-logging.js
deleted file mode 100644
index 63adfe4ac31f9..0000000000000
--- a/node_modules/@colors/colors/themes/generic-logging.js
+++ /dev/null
@@ -1,12 +0,0 @@
-module['exports'] = {
- silly: 'rainbow',
- input: 'grey',
- verbose: 'cyan',
- prompt: 'grey',
- info: 'green',
- data: 'grey',
- help: 'cyan',
- warn: 'yellow',
- debug: 'blue',
- error: 'red',
-};
diff --git a/node_modules/@gar/promisify/LICENSE.md b/node_modules/@gar/promisify/LICENSE.md
deleted file mode 100644
index 64f773240bed1..0000000000000
--- a/node_modules/@gar/promisify/LICENSE.md
+++ /dev/null
@@ -1,10 +0,0 @@
-The MIT License (MIT)
-
-Copyright © 2020-2022 Michael Garvin
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
diff --git a/node_modules/@gar/promisify/index.js b/node_modules/@gar/promisify/index.js
deleted file mode 100644
index d0be95f6fec61..0000000000000
--- a/node_modules/@gar/promisify/index.js
+++ /dev/null
@@ -1,36 +0,0 @@
-'use strict'
-
-const { promisify } = require('util')
-
-const handler = {
- get: function (target, prop, receiver) {
- if (typeof target[prop] !== 'function') {
- return target[prop]
- }
- if (target[prop][promisify.custom]) {
- return function () {
- return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments)
- }
- }
- return function () {
- return new Promise((resolve, reject) => {
- Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) {
- if (err) {
- return reject(err)
- }
- resolve(result)
- }])
- })
- }
- }
-}
-
-module.exports = function (thingToPromisify) {
- if (typeof thingToPromisify === 'function') {
- return promisify(thingToPromisify)
- }
- if (typeof thingToPromisify === 'object') {
- return new Proxy(thingToPromisify, handler)
- }
- throw new TypeError('Can only promisify functions or objects')
-}
diff --git a/node_modules/@gar/promisify/package.json b/node_modules/@gar/promisify/package.json
deleted file mode 100644
index d0ce69b2f7c0e..0000000000000
--- a/node_modules/@gar/promisify/package.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "name": "@gar/promisify",
- "version": "1.1.3",
- "description": "Promisify an entire class or object",
- "main": "index.js",
- "repository": {
- "type": "git",
- "url": "https://github.com/wraithgar/gar-promisify.git"
- },
- "scripts": {
- "lint": "standard",
- "lint:fix": "standard --fix",
- "test": "lab -a @hapi/code -t 100",
- "posttest": "npm run lint"
- },
- "files": [
- "index.js"
- ],
- "keywords": [
- "promisify",
- "all",
- "class",
- "object"
- ],
- "author": "Gar ",
- "license": "MIT",
- "devDependencies": {
- "@hapi/code": "^8.0.1",
- "@hapi/lab": "^24.1.0",
- "standard": "^16.0.3"
- }
-}
diff --git a/node_modules/set-blocking/LICENSE.txt b/node_modules/@isaacs/cliui/LICENSE.txt
similarity index 95%
rename from node_modules/set-blocking/LICENSE.txt
rename to node_modules/@isaacs/cliui/LICENSE.txt
index 836440bef7cf1..c7e27478a3eff 100644
--- a/node_modules/set-blocking/LICENSE.txt
+++ b/node_modules/@isaacs/cliui/LICENSE.txt
@@ -1,4 +1,4 @@
-Copyright (c) 2016, Contributors
+Copyright (c) 2015, Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
diff --git a/node_modules/@isaacs/cliui/build/index.cjs b/node_modules/@isaacs/cliui/build/index.cjs
new file mode 100644
index 0000000000000..aca2b8507ac0f
--- /dev/null
+++ b/node_modules/@isaacs/cliui/build/index.cjs
@@ -0,0 +1,317 @@
+'use strict';
+
+const align = {
+ right: alignRight,
+ center: alignCenter
+};
+const top = 0;
+const right = 1;
+const bottom = 2;
+const left = 3;
+class UI {
+ constructor(opts) {
+ var _a;
+ this.width = opts.width;
+ /* c8 ignore start */
+ this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
+ /* c8 ignore stop */
+ this.rows = [];
+ }
+ span(...args) {
+ const cols = this.div(...args);
+ cols.span = true;
+ }
+ resetOutput() {
+ this.rows = [];
+ }
+ div(...args) {
+ if (args.length === 0) {
+ this.div('');
+ }
+ if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
+ return this.applyLayoutDSL(args[0]);
+ }
+ const cols = args.map(arg => {
+ if (typeof arg === 'string') {
+ return this.colFromString(arg);
+ }
+ return arg;
+ });
+ this.rows.push(cols);
+ return cols;
+ }
+ shouldApplyLayoutDSL(...args) {
+ return args.length === 1 && typeof args[0] === 'string' &&
+ /[\t\n]/.test(args[0]);
+ }
+ applyLayoutDSL(str) {
+ const rows = str.split('\n').map(row => row.split('\t'));
+ let leftColumnWidth = 0;
+ // simple heuristic for layout, make sure the
+ // second column lines up along the left-hand.
+ // don't allow the first column to take up more
+ // than 50% of the screen.
+ rows.forEach(columns => {
+ if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
+ leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
+ }
+ });
+ // generate a table:
+ // replacing ' ' with padding calculations.
+ // using the algorithmically generated width.
+ rows.forEach(columns => {
+ this.div(...columns.map((r, i) => {
+ return {
+ text: r.trim(),
+ padding: this.measurePadding(r),
+ width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
+ };
+ }));
+ });
+ return this.rows[this.rows.length - 1];
+ }
+ colFromString(text) {
+ return {
+ text,
+ padding: this.measurePadding(text)
+ };
+ }
+ measurePadding(str) {
+ // measure padding without ansi escape codes
+ const noAnsi = mixin.stripAnsi(str);
+ return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
+ }
+ toString() {
+ const lines = [];
+ this.rows.forEach(row => {
+ this.rowToString(row, lines);
+ });
+ // don't display any lines with the
+ // hidden flag set.
+ return lines
+ .filter(line => !line.hidden)
+ .map(line => line.text)
+ .join('\n');
+ }
+ rowToString(row, lines) {
+ this.rasterize(row).forEach((rrow, r) => {
+ let str = '';
+ rrow.forEach((col, c) => {
+ const { width } = row[c]; // the width with padding.
+ const wrapWidth = this.negatePadding(row[c]); // the width without padding.
+ let ts = col; // temporary string used during alignment/padding.
+ if (wrapWidth > mixin.stringWidth(col)) {
+ ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
+ }
+ // align the string within its column.
+ if (row[c].align && row[c].align !== 'left' && this.wrap) {
+ const fn = align[row[c].align];
+ ts = fn(ts, wrapWidth);
+ if (mixin.stringWidth(ts) < wrapWidth) {
+ /* c8 ignore start */
+ const w = width || 0;
+ /* c8 ignore stop */
+ ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
+ }
+ }
+ // apply border and padding to string.
+ const padding = row[c].padding || [0, 0, 0, 0];
+ if (padding[left]) {
+ str += ' '.repeat(padding[left]);
+ }
+ str += addBorder(row[c], ts, '| ');
+ str += ts;
+ str += addBorder(row[c], ts, ' |');
+ if (padding[right]) {
+ str += ' '.repeat(padding[right]);
+ }
+ // if prior row is span, try to render the
+ // current row on the prior line.
+ if (r === 0 && lines.length > 0) {
+ str = this.renderInline(str, lines[lines.length - 1]);
+ }
+ });
+ // remove trailing whitespace.
+ lines.push({
+ text: str.replace(/ +$/, ''),
+ span: row.span
+ });
+ });
+ return lines;
+ }
+ // if the full 'source' can render in
+ // the target line, do so.
+ renderInline(source, previousLine) {
+ const match = source.match(/^ */);
+ /* c8 ignore start */
+ const leadingWhitespace = match ? match[0].length : 0;
+ /* c8 ignore stop */
+ const target = previousLine.text;
+ const targetTextWidth = mixin.stringWidth(target.trimEnd());
+ if (!previousLine.span) {
+ return source;
+ }
+ // if we're not applying wrapping logic,
+ // just always append to the span.
+ if (!this.wrap) {
+ previousLine.hidden = true;
+ return target + source;
+ }
+ if (leadingWhitespace < targetTextWidth) {
+ return source;
+ }
+ previousLine.hidden = true;
+ return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
+ }
+ rasterize(row) {
+ const rrows = [];
+ const widths = this.columnWidths(row);
+ let wrapped;
+ // word wrap all columns, and create
+ // a data-structure that is easy to rasterize.
+ row.forEach((col, c) => {
+ // leave room for left and right padding.
+ col.width = widths[c];
+ if (this.wrap) {
+ wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
+ }
+ else {
+ wrapped = col.text.split('\n');
+ }
+ if (col.border) {
+ wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
+ wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
+ }
+ // add top and bottom padding.
+ if (col.padding) {
+ wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
+ wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
+ }
+ wrapped.forEach((str, r) => {
+ if (!rrows[r]) {
+ rrows.push([]);
+ }
+ const rrow = rrows[r];
+ for (let i = 0; i < c; i++) {
+ if (rrow[i] === undefined) {
+ rrow.push('');
+ }
+ }
+ rrow.push(str);
+ });
+ });
+ return rrows;
+ }
+ negatePadding(col) {
+ /* c8 ignore start */
+ let wrapWidth = col.width || 0;
+ /* c8 ignore stop */
+ if (col.padding) {
+ wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
+ }
+ if (col.border) {
+ wrapWidth -= 4;
+ }
+ return wrapWidth;
+ }
+ columnWidths(row) {
+ if (!this.wrap) {
+ return row.map(col => {
+ return col.width || mixin.stringWidth(col.text);
+ });
+ }
+ let unset = row.length;
+ let remainingWidth = this.width;
+ // column widths can be set in config.
+ const widths = row.map(col => {
+ if (col.width) {
+ unset--;
+ remainingWidth -= col.width;
+ return col.width;
+ }
+ return undefined;
+ });
+ // any unset widths should be calculated.
+ /* c8 ignore start */
+ const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
+ /* c8 ignore stop */
+ return widths.map((w, i) => {
+ if (w === undefined) {
+ return Math.max(unsetWidth, _minWidth(row[i]));
+ }
+ return w;
+ });
+ }
+}
+function addBorder(col, ts, style) {
+ if (col.border) {
+ if (/[.']-+[.']/.test(ts)) {
+ return '';
+ }
+ if (ts.trim().length !== 0) {
+ return style;
+ }
+ return ' ';
+ }
+ return '';
+}
+// calculates the minimum width of
+// a column, based on padding preferences.
+function _minWidth(col) {
+ const padding = col.padding || [];
+ const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
+ if (col.border) {
+ return minWidth + 4;
+ }
+ return minWidth;
+}
+function getWindowWidth() {
+ /* c8 ignore start */
+ if (typeof process === 'object' && process.stdout && process.stdout.columns) {
+ return process.stdout.columns;
+ }
+ return 80;
+}
+/* c8 ignore stop */
+function alignRight(str, width) {
+ str = str.trim();
+ const strWidth = mixin.stringWidth(str);
+ if (strWidth < width) {
+ return ' '.repeat(width - strWidth) + str;
+ }
+ return str;
+}
+function alignCenter(str, width) {
+ str = str.trim();
+ const strWidth = mixin.stringWidth(str);
+ /* c8 ignore start */
+ if (strWidth >= width) {
+ return str;
+ }
+ /* c8 ignore stop */
+ return ' '.repeat((width - strWidth) >> 1) + str;
+}
+let mixin;
+function cliui(opts, _mixin) {
+ mixin = _mixin;
+ return new UI({
+ /* c8 ignore start */
+ width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
+ wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
+ /* c8 ignore stop */
+ });
+}
+
+// Bootstrap cliui with CommonJS dependencies:
+const stringWidth = require('string-width-cjs');
+const stripAnsi = require('strip-ansi-cjs');
+const wrap = require('wrap-ansi-cjs');
+function ui(opts) {
+ return cliui(opts, {
+ stringWidth,
+ stripAnsi,
+ wrap
+ });
+}
+
+module.exports = ui;
diff --git a/node_modules/@isaacs/cliui/build/index.d.cts b/node_modules/@isaacs/cliui/build/index.d.cts
new file mode 100644
index 0000000000000..4567f945e81a7
--- /dev/null
+++ b/node_modules/@isaacs/cliui/build/index.d.cts
@@ -0,0 +1,43 @@
+interface UIOptions {
+ width: number;
+ wrap?: boolean;
+ rows?: string[];
+}
+interface Column {
+ text: string;
+ width?: number;
+ align?: "right" | "left" | "center";
+ padding: number[];
+ border?: boolean;
+}
+interface ColumnArray extends Array {
+ span: boolean;
+}
+interface Line {
+ hidden?: boolean;
+ text: string;
+ span?: boolean;
+}
+declare class UI {
+ width: number;
+ wrap: boolean;
+ rows: ColumnArray[];
+ constructor(opts: UIOptions);
+ span(...args: ColumnArray): void;
+ resetOutput(): void;
+ div(...args: (Column | string)[]): ColumnArray;
+ private shouldApplyLayoutDSL;
+ private applyLayoutDSL;
+ private colFromString;
+ private measurePadding;
+ toString(): string;
+ rowToString(row: ColumnArray, lines: Line[]): Line[];
+ // if the full 'source' can render in
+ // the target line, do so.
+ private renderInline;
+ private rasterize;
+ private negatePadding;
+ private columnWidths;
+}
+declare function ui(opts: UIOptions): UI;
+export { ui as default };
diff --git a/node_modules/@isaacs/cliui/build/lib/index.js b/node_modules/@isaacs/cliui/build/lib/index.js
new file mode 100644
index 0000000000000..587b5ecd3e773
--- /dev/null
+++ b/node_modules/@isaacs/cliui/build/lib/index.js
@@ -0,0 +1,302 @@
+'use strict';
+const align = {
+ right: alignRight,
+ center: alignCenter
+};
+const top = 0;
+const right = 1;
+const bottom = 2;
+const left = 3;
+export class UI {
+ constructor(opts) {
+ var _a;
+ this.width = opts.width;
+ /* c8 ignore start */
+ this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
+ /* c8 ignore stop */
+ this.rows = [];
+ }
+ span(...args) {
+ const cols = this.div(...args);
+ cols.span = true;
+ }
+ resetOutput() {
+ this.rows = [];
+ }
+ div(...args) {
+ if (args.length === 0) {
+ this.div('');
+ }
+ if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
+ return this.applyLayoutDSL(args[0]);
+ }
+ const cols = args.map(arg => {
+ if (typeof arg === 'string') {
+ return this.colFromString(arg);
+ }
+ return arg;
+ });
+ this.rows.push(cols);
+ return cols;
+ }
+ shouldApplyLayoutDSL(...args) {
+ return args.length === 1 && typeof args[0] === 'string' &&
+ /[\t\n]/.test(args[0]);
+ }
+ applyLayoutDSL(str) {
+ const rows = str.split('\n').map(row => row.split('\t'));
+ let leftColumnWidth = 0;
+ // simple heuristic for layout, make sure the
+ // second column lines up along the left-hand.
+ // don't allow the first column to take up more
+ // than 50% of the screen.
+ rows.forEach(columns => {
+ if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
+ leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
+ }
+ });
+ // generate a table:
+ // replacing ' ' with padding calculations.
+ // using the algorithmically generated width.
+ rows.forEach(columns => {
+ this.div(...columns.map((r, i) => {
+ return {
+ text: r.trim(),
+ padding: this.measurePadding(r),
+ width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
+ };
+ }));
+ });
+ return this.rows[this.rows.length - 1];
+ }
+ colFromString(text) {
+ return {
+ text,
+ padding: this.measurePadding(text)
+ };
+ }
+ measurePadding(str) {
+ // measure padding without ansi escape codes
+ const noAnsi = mixin.stripAnsi(str);
+ return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
+ }
+ toString() {
+ const lines = [];
+ this.rows.forEach(row => {
+ this.rowToString(row, lines);
+ });
+ // don't display any lines with the
+ // hidden flag set.
+ return lines
+ .filter(line => !line.hidden)
+ .map(line => line.text)
+ .join('\n');
+ }
+ rowToString(row, lines) {
+ this.rasterize(row).forEach((rrow, r) => {
+ let str = '';
+ rrow.forEach((col, c) => {
+ const { width } = row[c]; // the width with padding.
+ const wrapWidth = this.negatePadding(row[c]); // the width without padding.
+ let ts = col; // temporary string used during alignment/padding.
+ if (wrapWidth > mixin.stringWidth(col)) {
+ ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
+ }
+ // align the string within its column.
+ if (row[c].align && row[c].align !== 'left' && this.wrap) {
+ const fn = align[row[c].align];
+ ts = fn(ts, wrapWidth);
+ if (mixin.stringWidth(ts) < wrapWidth) {
+ /* c8 ignore start */
+ const w = width || 0;
+ /* c8 ignore stop */
+ ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
+ }
+ }
+ // apply border and padding to string.
+ const padding = row[c].padding || [0, 0, 0, 0];
+ if (padding[left]) {
+ str += ' '.repeat(padding[left]);
+ }
+ str += addBorder(row[c], ts, '| ');
+ str += ts;
+ str += addBorder(row[c], ts, ' |');
+ if (padding[right]) {
+ str += ' '.repeat(padding[right]);
+ }
+ // if prior row is span, try to render the
+ // current row on the prior line.
+ if (r === 0 && lines.length > 0) {
+ str = this.renderInline(str, lines[lines.length - 1]);
+ }
+ });
+ // remove trailing whitespace.
+ lines.push({
+ text: str.replace(/ +$/, ''),
+ span: row.span
+ });
+ });
+ return lines;
+ }
+ // if the full 'source' can render in
+ // the target line, do so.
+ renderInline(source, previousLine) {
+ const match = source.match(/^ */);
+ /* c8 ignore start */
+ const leadingWhitespace = match ? match[0].length : 0;
+ /* c8 ignore stop */
+ const target = previousLine.text;
+ const targetTextWidth = mixin.stringWidth(target.trimEnd());
+ if (!previousLine.span) {
+ return source;
+ }
+ // if we're not applying wrapping logic,
+ // just always append to the span.
+ if (!this.wrap) {
+ previousLine.hidden = true;
+ return target + source;
+ }
+ if (leadingWhitespace < targetTextWidth) {
+ return source;
+ }
+ previousLine.hidden = true;
+ return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
+ }
+ rasterize(row) {
+ const rrows = [];
+ const widths = this.columnWidths(row);
+ let wrapped;
+ // word wrap all columns, and create
+ // a data-structure that is easy to rasterize.
+ row.forEach((col, c) => {
+ // leave room for left and right padding.
+ col.width = widths[c];
+ if (this.wrap) {
+ wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
+ }
+ else {
+ wrapped = col.text.split('\n');
+ }
+ if (col.border) {
+ wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
+ wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
+ }
+ // add top and bottom padding.
+ if (col.padding) {
+ wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
+ wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
+ }
+ wrapped.forEach((str, r) => {
+ if (!rrows[r]) {
+ rrows.push([]);
+ }
+ const rrow = rrows[r];
+ for (let i = 0; i < c; i++) {
+ if (rrow[i] === undefined) {
+ rrow.push('');
+ }
+ }
+ rrow.push(str);
+ });
+ });
+ return rrows;
+ }
+ negatePadding(col) {
+ /* c8 ignore start */
+ let wrapWidth = col.width || 0;
+ /* c8 ignore stop */
+ if (col.padding) {
+ wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
+ }
+ if (col.border) {
+ wrapWidth -= 4;
+ }
+ return wrapWidth;
+ }
+ columnWidths(row) {
+ if (!this.wrap) {
+ return row.map(col => {
+ return col.width || mixin.stringWidth(col.text);
+ });
+ }
+ let unset = row.length;
+ let remainingWidth = this.width;
+ // column widths can be set in config.
+ const widths = row.map(col => {
+ if (col.width) {
+ unset--;
+ remainingWidth -= col.width;
+ return col.width;
+ }
+ return undefined;
+ });
+ // any unset widths should be calculated.
+ /* c8 ignore start */
+ const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
+ /* c8 ignore stop */
+ return widths.map((w, i) => {
+ if (w === undefined) {
+ return Math.max(unsetWidth, _minWidth(row[i]));
+ }
+ return w;
+ });
+ }
+}
+function addBorder(col, ts, style) {
+ if (col.border) {
+ if (/[.']-+[.']/.test(ts)) {
+ return '';
+ }
+ if (ts.trim().length !== 0) {
+ return style;
+ }
+ return ' ';
+ }
+ return '';
+}
+// calculates the minimum width of
+// a column, based on padding preferences.
+function _minWidth(col) {
+ const padding = col.padding || [];
+ const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
+ if (col.border) {
+ return minWidth + 4;
+ }
+ return minWidth;
+}
+function getWindowWidth() {
+ /* c8 ignore start */
+ if (typeof process === 'object' && process.stdout && process.stdout.columns) {
+ return process.stdout.columns;
+ }
+ return 80;
+}
+/* c8 ignore stop */
+function alignRight(str, width) {
+ str = str.trim();
+ const strWidth = mixin.stringWidth(str);
+ if (strWidth < width) {
+ return ' '.repeat(width - strWidth) + str;
+ }
+ return str;
+}
+function alignCenter(str, width) {
+ str = str.trim();
+ const strWidth = mixin.stringWidth(str);
+ /* c8 ignore start */
+ if (strWidth >= width) {
+ return str;
+ }
+ /* c8 ignore stop */
+ return ' '.repeat((width - strWidth) >> 1) + str;
+}
+let mixin;
+export function cliui(opts, _mixin) {
+ mixin = _mixin;
+ return new UI({
+ /* c8 ignore start */
+ width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
+ wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
+ /* c8 ignore stop */
+ });
+}
diff --git a/node_modules/@isaacs/cliui/index.mjs b/node_modules/@isaacs/cliui/index.mjs
new file mode 100644
index 0000000000000..5177519af3722
--- /dev/null
+++ b/node_modules/@isaacs/cliui/index.mjs
@@ -0,0 +1,14 @@
+// Bootstrap cliui with ESM dependencies:
+import { cliui } from './build/lib/index.js'
+
+import stringWidth from 'string-width'
+import stripAnsi from 'strip-ansi'
+import wrap from 'wrap-ansi'
+
+export default function ui (opts) {
+ return cliui(opts, {
+ stringWidth,
+ stripAnsi,
+ wrap
+ })
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
new file mode 100644
index 0000000000000..130a0929b8ce8
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
@@ -0,0 +1,8 @@
+export default function ansiRegex({onlyFirst = false} = {}) {
+ const pattern = [
+ '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
+ '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
+ ].join('|');
+
+ return new RegExp(pattern, onlyFirst ? undefined : 'g');
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/license b/node_modules/@isaacs/cliui/node_modules/ansi-regex/license
new file mode 100644
index 0000000000000..fa7ceba3eb4a9
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
new file mode 100644
index 0000000000000..7bbb563bf2a70
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
@@ -0,0 +1,58 @@
+{
+ "name": "ansi-regex",
+ "version": "6.0.1",
+ "description": "Regular expression for matching ANSI escape codes",
+ "license": "MIT",
+ "repository": "chalk/ansi-regex",
+ "funding": "https://github.com/chalk/ansi-regex?sponsor=1",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "https://sindresorhus.com"
+ },
+ "type": "module",
+ "exports": "./index.js",
+ "engines": {
+ "node": ">=12"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd",
+ "view-supported": "node fixtures/view-codes.js"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "ansi",
+ "styles",
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "tty",
+ "escape",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "command-line",
+ "text",
+ "regex",
+ "regexp",
+ "re",
+ "match",
+ "test",
+ "find",
+ "pattern"
+ ],
+ "devDependencies": {
+ "ava": "^3.15.0",
+ "tsd": "^0.14.0",
+ "xo": "^0.38.2"
+ }
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt b/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt
new file mode 100644
index 0000000000000..a41e0a7ef970e
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt
@@ -0,0 +1,20 @@
+Copyright Mathias Bynens
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js
new file mode 100644
index 0000000000000..3fbe92410063f
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = function () {
+ // https://mths.be/emoji
+ return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD
83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uD
FA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[
\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\u
DD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\u
D83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B
\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]/g;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js
new file mode 100644
index 0000000000000..ecf32f177908c
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = () => {
+ // https://mths.be/emoji
+ return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F5
2C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A
8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1
F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1
F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{
1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F9
1E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]/gu;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js
new file mode 100644
index 0000000000000..1a4fc8d0dcc32
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = () => {
+ // https://mths.be/emoji
+ return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F5
2C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A
8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1
F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1
F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{
1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F9
1E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE
\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\
u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js
new file mode 100644
index 0000000000000..8e9f985758314
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = () => {
+ // https://mths.be/emoji
+ return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F5
2C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A
8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1
F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1
F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{
1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F9
1E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u21
99\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F?/gu;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js
new file mode 100644
index 0000000000000..c0490d4c95ac3
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = function () {
+ // https://mths.be/emoji
+ return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD
83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uD
FA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[
\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\u
DD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\u
D83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B
\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u262
6\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDD77\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json b/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json
new file mode 100644
index 0000000000000..eac892a16a253
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json
@@ -0,0 +1,52 @@
+{
+ "name": "emoji-regex",
+ "version": "9.2.2",
+ "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.",
+ "homepage": "https://mths.be/emoji-regex",
+ "main": "index.js",
+ "types": "index.d.ts",
+ "keywords": [
+ "unicode",
+ "regex",
+ "regexp",
+ "regular expressions",
+ "code points",
+ "symbols",
+ "characters",
+ "emoji"
+ ],
+ "license": "MIT",
+ "author": {
+ "name": "Mathias Bynens",
+ "url": "https://mathiasbynens.be/"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/mathiasbynens/emoji-regex.git"
+ },
+ "bugs": "https://github.com/mathiasbynens/emoji-regex/issues",
+ "files": [
+ "LICENSE-MIT.txt",
+ "index.js",
+ "index.d.ts",
+ "RGI_Emoji.js",
+ "RGI_Emoji.d.ts",
+ "text.js",
+ "text.d.ts",
+ "es2015"
+ ],
+ "scripts": {
+ "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src es2015_types -D -d ./es2015; node script/inject-sequences.js",
+ "test": "mocha",
+ "test:watch": "npm run test -- --watch"
+ },
+ "devDependencies": {
+ "@babel/cli": "^7.4.4",
+ "@babel/core": "^7.4.4",
+ "@babel/plugin-proposal-unicode-property-regex": "^7.4.4",
+ "@babel/preset-env": "^7.4.4",
+ "@unicode/unicode-13.0.0": "^1.0.3",
+ "mocha": "^6.1.4",
+ "regexgen": "^1.3.0"
+ }
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js
new file mode 100644
index 0000000000000..9bc63ce74753f
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js
@@ -0,0 +1,6 @@
+"use strict";
+
+module.exports = function () {
+ // https://mths.be/emoji
+ return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD
83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uD
FA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[
\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\u
DD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\u
D83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B
\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4
\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F?/g;
+};
diff --git a/node_modules/@isaacs/cliui/node_modules/string-width/index.js b/node_modules/@isaacs/cliui/node_modules/string-width/index.js
new file mode 100644
index 0000000000000..9294488f88488
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/string-width/index.js
@@ -0,0 +1,54 @@
+import stripAnsi from 'strip-ansi';
+import eastAsianWidth from 'eastasianwidth';
+import emojiRegex from 'emoji-regex';
+
+export default function stringWidth(string, options = {}) {
+ if (typeof string !== 'string' || string.length === 0) {
+ return 0;
+ }
+
+ options = {
+ ambiguousIsNarrow: true,
+ ...options
+ };
+
+ string = stripAnsi(string);
+
+ if (string.length === 0) {
+ return 0;
+ }
+
+ string = string.replace(emojiRegex(), ' ');
+
+ const ambiguousCharacterWidth = options.ambiguousIsNarrow ? 1 : 2;
+ let width = 0;
+
+ for (const character of string) {
+ const codePoint = character.codePointAt(0);
+
+ // Ignore control characters
+ if (codePoint <= 0x1F || (codePoint >= 0x7F && codePoint <= 0x9F)) {
+ continue;
+ }
+
+ // Ignore combining characters
+ if (codePoint >= 0x300 && codePoint <= 0x36F) {
+ continue;
+ }
+
+ const code = eastAsianWidth.eastAsianWidth(character);
+ switch (code) {
+ case 'F':
+ case 'W':
+ width += 2;
+ break;
+ case 'A':
+ width += ambiguousCharacterWidth;
+ break;
+ default:
+ width += 1;
+ }
+ }
+
+ return width;
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/string-width/license b/node_modules/@isaacs/cliui/node_modules/string-width/license
new file mode 100644
index 0000000000000..fa7ceba3eb4a9
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/string-width/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@isaacs/cliui/node_modules/string-width/package.json b/node_modules/@isaacs/cliui/node_modules/string-width/package.json
new file mode 100644
index 0000000000000..f46d6770f9ebb
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/string-width/package.json
@@ -0,0 +1,59 @@
+{
+ "name": "string-width",
+ "version": "5.1.2",
+ "description": "Get the visual width of a string - the number of columns required to display it",
+ "license": "MIT",
+ "repository": "sindresorhus/string-width",
+ "funding": "https://github.com/sponsors/sindresorhus",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "https://sindresorhus.com"
+ },
+ "type": "module",
+ "exports": "./index.js",
+ "engines": {
+ "node": ">=12"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "string",
+ "character",
+ "unicode",
+ "width",
+ "visual",
+ "column",
+ "columns",
+ "fullwidth",
+ "full-width",
+ "full",
+ "ansi",
+ "escape",
+ "codes",
+ "cli",
+ "command-line",
+ "terminal",
+ "console",
+ "cjk",
+ "chinese",
+ "japanese",
+ "korean",
+ "fixed-width"
+ ],
+ "dependencies": {
+ "eastasianwidth": "^0.2.0",
+ "emoji-regex": "^9.2.2",
+ "strip-ansi": "^7.0.1"
+ },
+ "devDependencies": {
+ "ava": "^3.15.0",
+ "tsd": "^0.14.0",
+ "xo": "^0.38.2"
+ }
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js
new file mode 100644
index 0000000000000..ba19750e64e06
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js
@@ -0,0 +1,14 @@
+import ansiRegex from 'ansi-regex';
+
+const regex = ansiRegex();
+
+export default function stripAnsi(string) {
+ if (typeof string !== 'string') {
+ throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
+ }
+
+ // Even though the regex is global, we don't need to reset the `.lastIndex`
+ // because unlike `.exec()` and `.test()`, `.replace()` does it automatically
+ // and doing it manually has a performance penalty.
+ return string.replace(regex, '');
+}
diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/license b/node_modules/@isaacs/cliui/node_modules/strip-ansi/license
new file mode 100644
index 0000000000000..fa7ceba3eb4a9
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json
new file mode 100644
index 0000000000000..e1f455c325b00
--- /dev/null
+++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json
@@ -0,0 +1,57 @@
+{
+ "name": "strip-ansi",
+ "version": "7.1.0",
+ "description": "Strip ANSI escape codes from a string",
+ "license": "MIT",
+ "repository": "chalk/strip-ansi",
+ "funding": "https://github.com/chalk/strip-ansi?sponsor=1",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "https://sindresorhus.com"
+ },
+ "type": "module",
+ "exports": "./index.js",
+ "engines": {
+ "node": ">=12"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "strip",
+ "trim",
+ "remove",
+ "ansi",
+ "styles",
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "string",
+ "tty",
+ "escape",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "log",
+ "logging",
+ "command-line",
+ "text"
+ ],
+ "dependencies": {
+ "ansi-regex": "^6.0.1"
+ },
+ "devDependencies": {
+ "ava": "^3.15.0",
+ "tsd": "^0.17.0",
+ "xo": "^0.44.0"
+ }
+}
diff --git a/node_modules/@isaacs/cliui/package.json b/node_modules/@isaacs/cliui/package.json
new file mode 100644
index 0000000000000..7a952532def5d
--- /dev/null
+++ b/node_modules/@isaacs/cliui/package.json
@@ -0,0 +1,86 @@
+{
+ "name": "@isaacs/cliui",
+ "version": "8.0.2",
+ "description": "easily create complex multi-column command-line-interfaces",
+ "main": "build/index.cjs",
+ "exports": {
+ ".": [
+ {
+ "import": "./index.mjs",
+ "require": "./build/index.cjs"
+ },
+ "./build/index.cjs"
+ ]
+ },
+ "type": "module",
+ "module": "./index.mjs",
+ "scripts": {
+ "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'",
+ "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'",
+ "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs",
+ "test": "c8 mocha ./test/*.cjs",
+ "test:esm": "c8 mocha ./test/**/*.mjs",
+ "postest": "check",
+ "coverage": "c8 report --check-coverage",
+ "precompile": "rimraf build",
+ "compile": "tsc",
+ "postcompile": "npm run build:cjs",
+ "build:cjs": "rollup -c",
+ "prepare": "npm run compile"
+ },
+ "repository": "yargs/cliui",
+ "standard": {
+ "ignore": [
+ "**/example/**"
+ ],
+ "globals": [
+ "it"
+ ]
+ },
+ "keywords": [
+ "cli",
+ "command-line",
+ "layout",
+ "design",
+ "console",
+ "wrap",
+ "table"
+ ],
+ "author": "Ben Coe ",
+ "license": "ISC",
+ "dependencies": {
+ "string-width": "^5.1.2",
+ "string-width-cjs": "npm:string-width@^4.2.0",
+ "strip-ansi": "^7.0.1",
+ "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
+ "wrap-ansi": "^8.1.0",
+ "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
+ },
+ "devDependencies": {
+ "@types/node": "^14.0.27",
+ "@typescript-eslint/eslint-plugin": "^4.0.0",
+ "@typescript-eslint/parser": "^4.0.0",
+ "c8": "^7.3.0",
+ "chai": "^4.2.0",
+ "chalk": "^4.1.0",
+ "cross-env": "^7.0.2",
+ "eslint": "^7.6.0",
+ "eslint-plugin-import": "^2.22.0",
+ "eslint-plugin-node": "^11.1.0",
+ "gts": "^3.0.0",
+ "mocha": "^10.0.0",
+ "rimraf": "^3.0.2",
+ "rollup": "^2.23.1",
+ "rollup-plugin-ts": "^3.0.2",
+ "standardx": "^7.0.0",
+ "typescript": "^4.0.0"
+ },
+ "files": [
+ "build",
+ "index.mjs",
+ "!*.d.ts"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+}
diff --git a/node_modules/@npmcli/agent/lib/agents.js b/node_modules/@npmcli/agent/lib/agents.js
new file mode 100644
index 0000000000000..c541b93001517
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/agents.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const net = require('net')
+const tls = require('tls')
+const { once } = require('events')
+const timers = require('timers/promises')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
+const Errors = require('./errors.js')
+const { Agent: AgentBase } = require('agent-base')
+
+module.exports = class Agent extends AgentBase {
+ #options
+ #timeouts
+ #proxy
+ #noProxy
+ #ProxyAgent
+
+ constructor (options = {}) {
+ const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
+
+ super(normalizedOptions)
+
+ this.#options = normalizedOptions
+ this.#timeouts = timeouts
+
+ if (proxy) {
+ this.#proxy = new URL(proxy)
+ this.#noProxy = noProxy
+ this.#ProxyAgent = getProxyAgent(proxy)
+ }
+ }
+
+ get proxy () {
+ return this.#proxy ? { url: this.#proxy } : {}
+ }
+
+ #getProxy (options) {
+ if (!this.#proxy) {
+ return
+ }
+
+ const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
+ proxy: this.#proxy,
+ noProxy: this.#noProxy,
+ })
+
+ if (!proxy) {
+ return
+ }
+
+ const cacheKey = cacheOptions({
+ ...options,
+ ...this.#options,
+ timeouts: this.#timeouts,
+ proxy,
+ })
+
+ if (proxyCache.has(cacheKey)) {
+ return proxyCache.get(cacheKey)
+ }
+
+ let ProxyAgent = this.#ProxyAgent
+ if (Array.isArray(ProxyAgent)) {
+ ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
+ }
+
+ const proxyAgent = new ProxyAgent(proxy, {
+ ...this.#options,
+ socketOptions: { family: this.#options.family },
+ })
+ proxyCache.set(cacheKey, proxyAgent)
+
+ return proxyAgent
+ }
+
+ // takes an array of promises and races them against the connection timeout
+ // which will throw the necessary error if it is hit. This will return the
+ // result of the promise race.
+ async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
+ if (timeout) {
+ const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
+ .then(() => {
+ throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
+ }).catch((err) => {
+ if (err.name === 'AbortError') {
+ return
+ }
+ throw err
+ })
+ promises.push(connectionTimeout)
+ }
+
+ let result
+ try {
+ result = await Promise.race(promises)
+ ac.abort()
+ } catch (err) {
+ ac.abort()
+ throw err
+ }
+ return result
+ }
+
+ async connect (request, options) {
+ // if the connection does not have its own lookup function
+ // set, then use the one from our options
+ options.lookup ??= this.#options.lookup
+
+ let socket
+ let timeout = this.#timeouts.connection
+ const isSecureEndpoint = this.isSecureEndpoint(options)
+
+ const proxy = this.#getProxy(options)
+ if (proxy) {
+ // some of the proxies will wait for the socket to fully connect before
+ // returning so we have to await this while also racing it against the
+ // connection timeout.
+ const start = Date.now()
+ socket = await this.#timeoutConnection({
+ options,
+ timeout,
+ promises: [proxy.connect(request, options)],
+ })
+ // see how much time proxy.connect took and subtract it from
+ // the timeout
+ if (timeout) {
+ timeout = timeout - (Date.now() - start)
+ }
+ } else {
+ socket = (isSecureEndpoint ? tls : net).connect(options)
+ }
+
+ socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
+ socket.setNoDelay(this.keepAlive)
+
+ const abortController = new AbortController()
+ const { signal } = abortController
+
+ const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
+ ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
+ : Promise.resolve()
+
+ await this.#timeoutConnection({
+ options,
+ timeout,
+ promises: [
+ connectPromise,
+ once(socket, 'error', { signal }).then((err) => {
+ throw err[0]
+ }),
+ ],
+ }, abortController)
+
+ if (this.#timeouts.idle) {
+ socket.setTimeout(this.#timeouts.idle, () => {
+ socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
+ })
+ }
+
+ return socket
+ }
+
+ addRequest (request, options) {
+ const proxy = this.#getProxy(options)
+ // it would be better to call proxy.addRequest here but this causes the
+ // http-proxy-agent to call its super.addRequest which causes the request
+ // to be added to the agent twice. since we only support 3 agents
+ // currently (see the required agents in proxy.js) we have manually
+ // checked that the only public methods we need to call are called in the
+ // next block. this could change in the future and presumably we would get
+ // failing tests until we have properly called the necessary methods on
+ // each of our proxy agents
+ if (proxy?.setRequestProps) {
+ proxy.setRequestProps(request, options)
+ }
+
+ request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
+
+ if (this.#timeouts.response) {
+ let responseTimeout
+ request.once('finish', () => {
+ setTimeout(() => {
+ request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
+ }, this.#timeouts.response)
+ })
+ request.once('response', () => {
+ clearTimeout(responseTimeout)
+ })
+ }
+
+ if (this.#timeouts.transfer) {
+ let transferTimeout
+ request.once('response', (res) => {
+ setTimeout(() => {
+ res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
+ }, this.#timeouts.transfer)
+ res.once('close', () => {
+ clearTimeout(transferTimeout)
+ })
+ })
+ }
+
+ return super.addRequest(request, options)
+ }
+}
diff --git a/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 0000000000000..3c6946c566d73
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+ family = 0,
+ hints = dns.ADDRCONFIG,
+ all = false,
+ verbatim = undefined,
+ ttl = 5 * 60 * 1000,
+ lookup = dns.lookup,
+}) => ({
+ // hints and lookup are returned since both are top level properties to (net|tls).connect
+ hints,
+ lookup: (hostname, ...args) => {
+ const callback = args.pop() // callback is always last arg
+ const lookupOptions = args[0] ?? {}
+
+ const options = {
+ family,
+ hints,
+ all,
+ verbatim,
+ ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+ }
+
+ const key = JSON.stringify({ hostname, ...options })
+
+ if (cache.has(key)) {
+ const cached = cache.get(key)
+ return process.nextTick(callback, null, ...cached)
+ }
+
+ lookup(hostname, options, (err, ...result) => {
+ if (err) {
+ return callback(err)
+ }
+
+ cache.set(key, result, { ttl })
+ return callback(null, ...result)
+ })
+ },
+})
+
+module.exports = {
+ cache,
+ getOptions,
+}
diff --git a/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 0000000000000..70475aec8eb35
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,61 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+ constructor (url) {
+ super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+ this.code = 'EINVALIDPROXY'
+ this.proxy = url
+ }
+}
+
+class ConnectionTimeoutError extends Error {
+ constructor (host) {
+ super(`Timeout connecting to host \`${host}\``)
+ this.code = 'ECONNECTIONTIMEOUT'
+ this.host = host
+ }
+}
+
+class IdleTimeoutError extends Error {
+ constructor (host) {
+ super(`Idle timeout reached for host \`${host}\``)
+ this.code = 'EIDLETIMEOUT'
+ this.host = host
+ }
+}
+
+class ResponseTimeoutError extends Error {
+ constructor (request, proxy) {
+ let msg = 'Response timeout '
+ if (proxy) {
+ msg += `from proxy \`${proxy.host}\` `
+ }
+ msg += `connecting to host \`${request.host}\``
+ super(msg)
+ this.code = 'ERESPONSETIMEOUT'
+ this.proxy = proxy
+ this.request = request
+ }
+}
+
+class TransferTimeoutError extends Error {
+ constructor (request, proxy) {
+ let msg = 'Transfer timeout '
+ if (proxy) {
+ msg += `from proxy \`${proxy.host}\` `
+ }
+ msg += `for \`${request.host}\``
+ super(msg)
+ this.code = 'ETRANSFERTIMEOUT'
+ this.proxy = proxy
+ this.request = request
+ }
+}
+
+module.exports = {
+ InvalidProxyProtocolError,
+ ConnectionTimeoutError,
+ IdleTimeoutError,
+ ResponseTimeoutError,
+ TransferTimeoutError,
+}
diff --git a/node_modules/@npmcli/agent/lib/index.js b/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 0000000000000..b33d6eaef07a2
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const Agent = require('./agents.js')
+
+const agentCache = new LRUCache({ max: 20 })
+
+const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
+ // false has meaning so this can't be a simple truthiness check
+ if (agent != null) {
+ return agent
+ }
+
+ url = new URL(url)
+
+ const proxyForUrl = getProxy(url, { proxy, noProxy })
+ const normalizedOptions = {
+ ...normalizeOptions(options),
+ proxy: proxyForUrl,
+ }
+
+ const cacheKey = cacheOptions({
+ ...normalizedOptions,
+ secureEndpoint: url.protocol === 'https:',
+ })
+
+ if (agentCache.has(cacheKey)) {
+ return agentCache.get(cacheKey)
+ }
+
+ const newAgent = new Agent(normalizedOptions)
+ agentCache.set(cacheKey, newAgent)
+
+ return newAgent
+}
+
+module.exports = {
+ getAgent,
+ Agent,
+  // these are exported for backwards compatibility
+ HttpAgent: Agent,
+ HttpsAgent: Agent,
+ cache: {
+ proxy: proxyCache,
+ agent: agentCache,
+ dns: dns.cache,
+ clear: () => {
+ proxyCache.clear()
+ agentCache.clear()
+ dns.cache.clear()
+ },
+ },
+}
diff --git a/node_modules/@npmcli/agent/lib/options.js b/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 0000000000000..0bf53f725f084
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const dns = require('./dns')
+
+const normalizeOptions = (opts) => {
+ const family = parseInt(opts.family ?? '0', 10)
+ const keepAlive = opts.keepAlive ?? true
+
+ const normalized = {
+ // nodejs http agent options. these are all the defaults
+ // but kept here to increase the likelihood of cache hits
+ // https://nodejs.org/api/http.html#new-agentoptions
+ keepAliveMsecs: keepAlive ? 1000 : undefined,
+ maxSockets: opts.maxSockets ?? 15,
+ maxTotalSockets: Infinity,
+ maxFreeSockets: keepAlive ? 256 : undefined,
+ scheduling: 'fifo',
+ // then spread the rest of the options
+ ...opts,
+ // we already set these to their defaults that we want
+ family,
+ keepAlive,
+ // our custom timeout options
+ timeouts: {
+ // the standard timeout option is mapped to our idle timeout
+ // and then deleted below
+ idle: opts.timeout ?? 0,
+ connection: 0,
+ response: 0,
+ transfer: 0,
+ ...opts.timeouts,
+ },
+ // get the dns options that go at the top level of socket connection
+ ...dns.getOptions({ family, ...opts.dns }),
+ }
+
+ // remove timeout since we already used it to set our own idle timeout
+ delete normalized.timeout
+
+ return normalized
+}
+
+const createKey = (obj) => {
+ let key = ''
+  const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0], 'en'))
+ for (let [k, v] of sorted) {
+ if (v == null) {
+ v = 'null'
+ } else if (v instanceof URL) {
+ v = v.toString()
+ } else if (typeof v === 'object') {
+ v = createKey(v)
+ }
+ key += `${k}:${v}:`
+ }
+ return key
+}
+
+const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
+ secureEndpoint: !!secureEndpoint,
+ // socket connect options
+ family: options.family,
+ hints: options.hints,
+ localAddress: options.localAddress,
+ // tls specific connect options
+ strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
+ ca: secureEndpoint ? options.ca : null,
+ cert: secureEndpoint ? options.cert : null,
+ key: secureEndpoint ? options.key : null,
+ // http agent options
+ keepAlive: options.keepAlive,
+ keepAliveMsecs: options.keepAliveMsecs,
+ maxSockets: options.maxSockets,
+ maxTotalSockets: options.maxTotalSockets,
+ maxFreeSockets: options.maxFreeSockets,
+ scheduling: options.scheduling,
+ // timeout options
+ timeouts: options.timeouts,
+ // proxy
+ proxy: options.proxy,
+})
+
+module.exports = {
+ normalizeOptions,
+ cacheOptions,
+}
diff --git a/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 0000000000000..6272e929e57bc
--- /dev/null
+++ b/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+
+const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+
+const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
+ key = key.toLowerCase()
+ if (PROXY_ENV_KEYS.has(key)) {
+ acc[key] = value
+ }
+ return acc
+}, {})
+
+const getProxyAgent = (url) => {
+ url = new URL(url)
+
+ const protocol = url.protocol.slice(0, -1)
+ if (SOCKS_PROTOCOLS.has(protocol)) {
+ return SocksProxyAgent
+ }
+ if (protocol === 'https' || protocol === 'http') {
+ return [HttpProxyAgent, HttpsProxyAgent]
+ }
+
+ throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+ if (typeof noProxy === 'string') {
+ noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+ }
+
+ if (!noProxy || !noProxy.length) {
+ return false
+ }
+
+ const hostSegments = url.hostname.split('.').reverse()
+
+ return noProxy.some((no) => {
+ const noSegments = no.split('.').filter(Boolean).reverse()
+ if (!noSegments.length) {
+ return false
+ }
+
+ for (let i = 0; i < noSegments.length; i++) {
+ if (hostSegments[i] !== noSegments[i]) {
+ return false
+ }
+ }
+
+ return true
+ })
+}
+
+const getProxy = (url, { proxy, noProxy }) => {
+ url = new URL(url)
+
+ if (!proxy) {
+ proxy = url.protocol === 'https:'
+ ? PROXY_ENV.https_proxy
+ : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
+ }
+
+ if (!noProxy) {
+ noProxy = PROXY_ENV.no_proxy
+ }
+
+ if (!proxy || isNoProxy(url, noProxy)) {
+ return null
+ }
+
+ return new URL(proxy)
+}
+
+module.exports = {
+ getProxyAgent,
+ getProxy,
+ proxyCache: PROXY_CACHE,
+}
diff --git a/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/agent/package.json
new file mode 100644
index 0000000000000..ef5b4e3228cc4
--- /dev/null
+++ b/node_modules/@npmcli/agent/package.json
@@ -0,0 +1,60 @@
+{
+ "name": "@npmcli/agent",
+ "version": "2.2.2",
+ "description": "the http/https agent used by the npm cli",
+ "main": "lib/index.js",
+ "scripts": {
+ "gencerts": "bash scripts/create-cert.sh",
+ "test": "tap",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+ "postlint": "template-oss-check",
+ "template-oss-apply": "template-oss-apply --force",
+ "lintfix": "npm run lint -- --fix",
+ "snap": "tap",
+ "posttest": "npm run lint"
+ },
+ "author": "GitHub Inc.",
+ "license": "ISC",
+ "bugs": {
+ "url": "https://github.com/npm/agent/issues"
+ },
+ "homepage": "https://github.com/npm/agent#readme",
+ "files": [
+ "bin/",
+ "lib/"
+ ],
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.21.3",
+ "publish": "true"
+ },
+ "dependencies": {
+ "agent-base": "^7.1.0",
+ "http-proxy-agent": "^7.0.0",
+ "https-proxy-agent": "^7.0.1",
+ "lru-cache": "^10.0.1",
+ "socks-proxy-agent": "^8.0.3"
+ },
+ "devDependencies": {
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.21.3",
+ "minipass-fetch": "^3.0.3",
+ "nock": "^13.2.7",
+ "semver": "^7.5.4",
+ "simple-socks": "^3.1.0",
+ "tap": "^16.3.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/npm/agent.git"
+ },
+ "tap": {
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ]
+ }
+}
diff --git a/node_modules/@npmcli/arborist b/node_modules/@npmcli/arborist
deleted file mode 120000
index a2d3e8bbf4641..0000000000000
--- a/node_modules/@npmcli/arborist
+++ /dev/null
@@ -1 +0,0 @@
-../../workspaces/arborist
\ No newline at end of file
diff --git a/node_modules/@npmcli/config b/node_modules/@npmcli/config
deleted file mode 120000
index bf09f370d87e3..0000000000000
--- a/node_modules/@npmcli/config
+++ /dev/null
@@ -1 +0,0 @@
-../../workspaces/config
\ No newline at end of file
diff --git a/node_modules/@npmcli/disparity-colors/LICENSE b/node_modules/@npmcli/disparity-colors/LICENSE
deleted file mode 100644
index dedcd7d2f9dae..0000000000000
--- a/node_modules/@npmcli/disparity-colors/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/disparity-colors/lib/index.js b/node_modules/@npmcli/disparity-colors/lib/index.js
deleted file mode 100644
index 3d2aa56be9253..0000000000000
--- a/node_modules/@npmcli/disparity-colors/lib/index.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const ansi = require('ansi-styles')
-
-const colors = {
- removed: ansi.red,
- added: ansi.green,
- header: ansi.yellow,
- section: ansi.magenta,
-}
-
-function colorize (str, opts) {
- let headerLength = (opts || {}).headerLength
- if (typeof headerLength !== 'number' || Number.isNaN(headerLength)) {
- headerLength = 2
- }
-
- const color = (colorStr, colorId) => {
- const { open, close } = colors[colorId]
- // avoid highlighting the "\n" (would highlight till the end of the line)
- return colorStr.replace(/[^\n\r]+/g, open + '$&' + close)
- }
-
- // this RegExp will include all the `\n` chars into the lines, easier to join
- const lines = ((typeof str === 'string' && str) || '').split(/^/m)
-
- const start = color(lines.slice(0, headerLength).join(''), 'header')
- const end = lines.slice(headerLength).join('')
- .replace(/^-.*/gm, color('$&', 'removed'))
- .replace(/^\+.*/gm, color('$&', 'added'))
- .replace(/^@@.+@@/gm, color('$&', 'section'))
-
- return start + end
-}
-
-module.exports = colorize
diff --git a/node_modules/@npmcli/disparity-colors/package.json b/node_modules/@npmcli/disparity-colors/package.json
deleted file mode 100644
index 17eb4846c353c..0000000000000
--- a/node_modules/@npmcli/disparity-colors/package.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
- "name": "@npmcli/disparity-colors",
- "version": "3.0.0",
- "main": "lib/index.js",
- "files": [
- "bin/",
- "lib/"
- ],
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- },
- "description": "Colorizes unified diff output",
- "repository": {
- "type": "git",
- "url": "https://github.com/npm/disparity-colors.git"
- },
- "keywords": [
- "disparity",
- "npm",
- "npmcli",
- "diff",
- "char",
- "unified",
- "multiline",
- "string",
- "color",
- "ansi",
- "terminal",
- "cli",
- "tty"
- ],
- "author": "GitHub Inc.",
- "contributors": [
- {
- "name": "Ruy Adorno",
- "url": "https://ruyadorno.com",
- "twitter": "ruyadorno"
- }
- ],
- "license": "ISC",
- "scripts": {
- "lint": "eslint \"**/*.js\"",
- "pretest": "npm run lint",
- "test": "tap",
- "snap": "tap",
- "postlint": "template-oss-check",
- "template-oss-apply": "template-oss-apply --force",
- "lintfix": "npm run lint -- --fix",
- "posttest": "npm run lint"
- },
- "tap": {
- "check-coverage": true,
- "nyc-arg": [
- "--exclude",
- "tap-snapshots/**"
- ]
- },
- "devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
- "tap": "^16.0.1"
- },
- "dependencies": {
- "ansi-styles": "^4.3.0"
- },
- "templateOSS": {
- "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
- }
-}
diff --git a/node_modules/@npmcli/fs/package.json b/node_modules/@npmcli/fs/package.json
index 28eb613388418..5261a11b78000 100644
--- a/node_modules/@npmcli/fs/package.json
+++ b/node_modules/@npmcli/fs/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/fs",
- "version": "3.1.0",
+ "version": "3.1.1",
"description": "filesystem utilities for the npm cli",
"main": "lib/index.js",
"files": [
@@ -11,7 +11,7 @@
"snap": "tap",
"test": "tap",
"npmclilint": "npmcli-lint",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
@@ -20,7 +20,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/fs.git"
+ "url": "git+https://github.com/npm/fs.git"
},
"keywords": [
"npm",
@@ -30,7 +30,7 @@
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.8.0",
+ "@npmcli/template-oss": "4.22.0",
"tap": "^16.0.1"
},
"dependencies": {
@@ -41,7 +41,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.8.0"
+ "version": "4.22.0"
},
"tap": {
"nyc-arg": [
diff --git a/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/git/lib/clone.js
index 3f165dd70e380..e25a4d1426821 100644
--- a/node_modules/@npmcli/git/lib/clone.js
+++ b/node_modules/@npmcli/git/lib/clone.js
@@ -27,8 +27,7 @@ const spawn = require('./spawn.js')
const { isWindows } = require('./utils.js')
const pickManifest = require('npm-pick-manifest')
-const fs = require('fs')
-const mkdirp = require('mkdirp')
+const fs = require('fs/promises')
module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
getRevs(repo, opts).then(revs => clone(
@@ -93,7 +92,7 @@ const other = (repo, revDoc, target, opts) => {
.concat(shallow ? ['--depth=1'] : [])
const git = (args) => spawn(args, { ...opts, cwd: target })
- return mkdirp(target)
+ return fs.mkdir(target, { recursive: true })
.then(() => git(['init']))
.then(() => isWindows(opts)
? git(['config', '--local', '--add', 'core.longpaths', 'true'])
@@ -141,19 +140,21 @@ const plain = (repo, revDoc, target, opts) => {
return spawn(args, opts).then(() => revDoc.sha)
}
-const updateSubmodules = (target, opts) => new Promise(resolve =>
- fs.stat(target + '/.gitmodules', er => {
- if (er) {
- return resolve(null)
- }
- return resolve(spawn([
- 'submodule',
- 'update',
- '-q',
- '--init',
- '--recursive',
- ], { ...opts, cwd: target }))
- }))
+const updateSubmodules = async (target, opts) => {
+ const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
+ .then(() => true)
+ .catch(() => false)
+ if (!hasSubmodules) {
+ return null
+ }
+ return spawn([
+ 'submodule',
+ 'update',
+ '-q',
+ '--init',
+ '--recursive',
+ ], { ...opts, cwd: target })
+}
const unresolved = (repo, ref, target, opts) => {
// can't do this one shallowly, because the ref isn't advertised
@@ -161,7 +162,7 @@ const unresolved = (repo, ref, target, opts) => {
const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
const git = (args) => spawn(args, { ...opts, cwd: target })
- return mkdirp(target)
+ return fs.mkdir(target, { recursive: true })
.then(() => git(cloneArgs.concat(lp)))
.then(() => git(['init']))
.then(() => git(['checkout', ref]))
diff --git a/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/git/lib/errors.js
index 7aeac4762866f..3ceaa45811669 100644
--- a/node_modules/@npmcli/git/lib/errors.js
+++ b/node_modules/@npmcli/git/lib/errors.js
@@ -8,7 +8,7 @@ class GitError extends Error {
}
class GitConnectionError extends GitError {
- constructor (message) {
+ constructor () {
super('A git connection error occurred')
}
@@ -18,13 +18,13 @@ class GitConnectionError extends GitError {
}
class GitPathspecError extends GitError {
- constructor (message) {
+ constructor () {
super('The git reference could not be found')
}
}
class GitUnknownError extends GitError {
- constructor (message) {
+ constructor () {
super('An unknown git error occurred')
}
}
diff --git a/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/git/lib/find.js
index d58f01dbcc16f..34bd310b88e5d 100644
--- a/node_modules/@npmcli/git/lib/find.js
+++ b/node_modules/@npmcli/git/lib/find.js
@@ -1,15 +1,15 @@
const is = require('./is.js')
const { dirname } = require('path')
-module.exports = async ({ cwd = process.cwd() } = {}) => {
- if (await is({ cwd })) {
- return cwd
- }
- while (cwd !== dirname(cwd)) {
- cwd = dirname(cwd)
+module.exports = async ({ cwd = process.cwd(), root } = {}) => {
+ while (true) {
if (await is({ cwd })) {
return cwd
}
+ const next = dirname(cwd)
+ if (cwd === root || cwd === next) {
+ return null
+ }
+ cwd = next
}
- return null
}
diff --git a/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/git/lib/is.js
index e2542f2157727..f5a0e8754f10d 100644
--- a/node_modules/@npmcli/git/lib/is.js
+++ b/node_modules/@npmcli/git/lib/is.js
@@ -1,6 +1,4 @@
// not an airtight indicator, but a good gut-check to even bother trying
-const { promisify } = require('util')
-const fs = require('fs')
-const stat = promisify(fs.stat)
+const { stat } = require('fs/promises')
module.exports = ({ cwd = process.cwd() } = {}) =>
stat(cwd + '/.git').then(() => true, () => false)
diff --git a/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/git/lib/opts.js
index 3119af16e0cf1..1e80e9efe4989 100644
--- a/node_modules/@npmcli/git/lib/opts.js
+++ b/node_modules/@npmcli/git/lib/opts.js
@@ -1,12 +1,57 @@
+const fs = require('node:fs')
+const os = require('node:os')
+const path = require('node:path')
+const ini = require('ini')
+
+const gitConfigPath = path.join(os.homedir(), '.gitconfig')
+
+let cachedConfig = null
+
+// Function to load and cache the git config
+const loadGitConfig = () => {
+ if (cachedConfig === null) {
+ try {
+ cachedConfig = {}
+ if (fs.existsSync(gitConfigPath)) {
+ const configContent = fs.readFileSync(gitConfigPath, 'utf-8')
+ cachedConfig = ini.parse(configContent)
+ }
+ } catch (error) {
+ cachedConfig = {}
+ }
+ }
+ return cachedConfig
+}
+
+const checkGitConfigs = () => {
+ const config = loadGitConfig()
+ return {
+ sshCommandSetInConfig: config?.core?.sshCommand !== undefined,
+ askPassSetInConfig: config?.core?.askpass !== undefined,
+ }
+}
+
+const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined
+const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined
+const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs()
+
// Values we want to set if they're not already defined by the end user
// This defaults to accepting new ssh host key fingerprints
-const gitEnv = {
- GIT_ASKPASS: 'echo',
- GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
+const finalGitEnv = {
+ ...(askPassSetInEnv || askPassSetInConfig ? {} : {
+ GIT_ASKPASS: 'echo',
+ }),
+ ...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : {
+ GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
+ }),
}
+
module.exports = (opts = {}) => ({
stdioString: true,
...opts,
shell: false,
- env: opts.env || { ...gitEnv, ...process.env },
+ env: opts.env || { ...finalGitEnv, ...process.env },
})
+
+// Export the loadGitConfig function for testing
+module.exports.loadGitConfig = loadGitConfig
diff --git a/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/git/lib/revs.js
index ee72370d5b7ec..ca14837de1b87 100644
--- a/node_modules/@npmcli/git/lib/revs.js
+++ b/node_modules/@npmcli/git/lib/revs.js
@@ -1,8 +1,8 @@
const pinflight = require('promise-inflight')
const spawn = require('./spawn.js')
-const LRU = require('lru-cache')
+const { LRUCache } = require('lru-cache')
-const revsCache = new LRU({
+const revsCache = new LRUCache({
max: 100,
ttl: 5 * 60 * 1000,
})
diff --git a/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/git/lib/spawn.js
index 7098d7b872942..03c1cbde21547 100644
--- a/node_modules/@npmcli/git/lib/spawn.js
+++ b/node_modules/@npmcli/git/lib/spawn.js
@@ -1,11 +1,11 @@
const spawn = require('@npmcli/promise-spawn')
const promiseRetry = require('promise-retry')
-const log = require('proc-log')
+const { log } = require('proc-log')
const makeError = require('./make-error.js')
-const whichGit = require('./which.js')
const makeOpts = require('./opts.js')
module.exports = (gitArgs, opts = {}) => {
+ const whichGit = require('./which.js')
const gitPath = whichGit(opts)
if (gitPath instanceof Error) {
diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index f3ce2fcfc9232..b6aa4a282cc0f 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/git",
- "version": "4.0.3",
+ "version": "5.0.8",
"main": "lib/index.js",
"files": [
"bin/",
@@ -9,12 +9,12 @@
"description": "a util for spawning git from npm CLI contexts",
"repository": {
"type": "git",
- "url": "https://github.com/npm/git.git"
+ "url": "git+https://github.com/npm/git.git"
},
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"snap": "tap",
"test": "tap",
"posttest": "npm run lint",
@@ -23,8 +23,7 @@
"template-oss-apply": "template-oss-apply --force"
},
"tap": {
- "check-coverage": true,
- "coverage-map": "map.js",
+ "timeout": 600,
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
@@ -32,29 +31,28 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.8.0",
- "npm-package-arg": "^10.0.0",
- "rimraf": "^3.0.2",
+ "@npmcli/template-oss": "4.22.0",
+ "npm-package-arg": "^11.0.0",
"slash": "^3.0.0",
"tap": "^16.0.1"
},
"dependencies": {
- "@npmcli/promise-spawn": "^6.0.0",
- "lru-cache": "^7.4.4",
- "mkdirp": "^1.0.4",
- "npm-pick-manifest": "^8.0.0",
- "proc-log": "^3.0.0",
+ "@npmcli/promise-spawn": "^7.0.0",
+ "ini": "^4.1.3",
+ "lru-cache": "^10.0.1",
+ "npm-pick-manifest": "^9.0.0",
+ "proc-log": "^4.0.0",
"promise-inflight": "^1.0.1",
"promise-retry": "^2.0.1",
"semver": "^7.3.5",
- "which": "^3.0.0"
+ "which": "^4.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "windowsCI": false,
- "version": "4.8.0"
+ "version": "4.22.0",
+ "publish": true
}
}
diff --git a/node_modules/@npmcli/installed-package-contents/bin/index.js b/node_modules/@npmcli/installed-package-contents/bin/index.js
new file mode 100755
index 0000000000000..7b83b23bf168c
--- /dev/null
+++ b/node_modules/@npmcli/installed-package-contents/bin/index.js
@@ -0,0 +1,44 @@
+#! /usr/bin/env node
+
+const { relative } = require('path')
+const pkgContents = require('../')
+
+const usage = `Usage:
+ installed-package-contents [-d --depth=]
+
+Lists the files installed for a package specified by .
+
+Options:
+ -d --depth= Provide a numeric value ("Infinity" is allowed)
+ to specify how deep in the file tree to traverse.
+ Default=1
+ -h --help Show this usage information`
+
+const options = {}
+
+process.argv.slice(2).forEach(arg => {
+ let match
+ if ((match = arg.match(/^(?:--depth=|-d)([0-9]+|Infinity)/))) {
+ options.depth = +match[1]
+ } else if (arg === '-h' || arg === '--help') {
+ console.log(usage)
+ process.exit(0)
+ } else {
+ options.path = arg
+ }
+})
+
+if (!options.path) {
+ console.error('ERROR: no path provided')
+ console.error(usage)
+ process.exit(1)
+}
+
+const cwd = process.cwd()
+
+pkgContents(options)
+ .then(list => list.sort().forEach(p => console.log(relative(cwd, p))))
+ .catch(/* istanbul ignore next - pretty unusual */ er => {
+ console.error(er)
+ process.exit(1)
+ })
diff --git a/node_modules/@npmcli/installed-package-contents/lib/index.js b/node_modules/@npmcli/installed-package-contents/lib/index.js
old mode 100755
new mode 100644
index e2c545b5ab949..ab1486cd01d00
--- a/node_modules/@npmcli/installed-package-contents/lib/index.js
+++ b/node_modules/@npmcli/installed-package-contents/lib/index.js
@@ -17,53 +17,46 @@
// - add GET CONTENTS of bundled deps, PACKAGE=true, depth + 1
const bundled = require('npm-bundled')
-const { promisify } = require('util')
-const fs = require('fs')
-const readFile = promisify(fs.readFile)
-const readdir = promisify(fs.readdir)
-const stat = promisify(fs.stat)
-const lstat = promisify(fs.lstat)
-const { relative, resolve, basename, dirname } = require('path')
+const { readFile, readdir, stat } = require('fs/promises')
+const { resolve, basename, dirname } = require('path')
const normalizePackageBin = require('npm-normalize-package-bin')
-const readPackage = ({ path, packageJsonCache }) =>
- packageJsonCache.has(path) ? Promise.resolve(packageJsonCache.get(path))
+const readPackage = ({ path, packageJsonCache }) => packageJsonCache.has(path)
+ ? Promise.resolve(packageJsonCache.get(path))
: readFile(path).then(json => {
const pkg = normalizePackageBin(JSON.parse(json))
packageJsonCache.set(path, pkg)
return pkg
- })
- .catch(er => null)
+ }).catch(() => null)
// just normalize bundle deps and bin, that's all we care about here.
const normalized = Symbol('package data has been normalized')
-const rpj = ({ path, packageJsonCache }) =>
- readPackage({ path, packageJsonCache })
- .then(pkg => {
- if (!pkg || pkg[normalized]) {
- return pkg
- }
- if (pkg.bundledDependencies && !pkg.bundleDependencies) {
- pkg.bundleDependencies = pkg.bundledDependencies
- delete pkg.bundledDependencies
- }
- const bd = pkg.bundleDependencies
- if (bd === true) {
- pkg.bundleDependencies = [
- ...Object.keys(pkg.dependencies || {}),
- ...Object.keys(pkg.optionalDependencies || {}),
- ]
- }
- if (typeof bd === 'object' && !Array.isArray(bd)) {
- pkg.bundleDependencies = Object.keys(bd)
- }
- pkg[normalized] = true
+const rpj = ({ path, packageJsonCache }) => readPackage({ path, packageJsonCache })
+ .then(pkg => {
+ if (!pkg || pkg[normalized]) {
return pkg
- })
+ }
+ if (pkg.bundledDependencies && !pkg.bundleDependencies) {
+ pkg.bundleDependencies = pkg.bundledDependencies
+ delete pkg.bundledDependencies
+ }
+ const bd = pkg.bundleDependencies
+ if (bd === true) {
+ pkg.bundleDependencies = [
+ ...Object.keys(pkg.dependencies || {}),
+ ...Object.keys(pkg.optionalDependencies || {}),
+ ]
+ }
+ if (typeof bd === 'object' && !Array.isArray(bd)) {
+ pkg.bundleDependencies = Object.keys(bd)
+ }
+ pkg[normalized] = true
+ return pkg
+ })
const pkgContents = async ({
path,
- depth,
+ depth = 1,
currentDepth = 0,
pkg = null,
result = null,
@@ -103,7 +96,7 @@ const pkgContents = async ({
})
const bins = await Promise.all(
- binFiles.map(b => stat(b).then(() => b).catch((er) => null))
+ binFiles.map(b => stat(b).then(() => b).catch(() => null))
)
bins.filter(b => b).forEach(b => result.add(b))
}
@@ -134,18 +127,6 @@ const pkgContents = async ({
const recursePromises = []
- // if we didn't get withFileTypes support, tack that on
- if (typeof dirEntries[0] === 'string') {
- // use a map so we can return a promise, but we mutate dirEntries in place
- // this is much slower than getting the entries from the readdir call,
- // but polyfills support for node versions before 10.10
- await Promise.all(dirEntries.map(async (name, index) => {
- const p = resolve(path, name)
- const st = await lstat(p)
- dirEntries[index] = Object.assign(st, { name })
- }))
- }
-
for (const entry of dirEntries) {
const p = resolve(path, entry.name)
if (entry.isDirectory() === false) {
@@ -193,48 +174,8 @@ const pkgContents = async ({
return result
}
-module.exports = ({ path, depth = 1, packageJsonCache }) => pkgContents({
+module.exports = ({ path, ...opts }) => pkgContents({
path: resolve(path),
- depth,
+ ...opts,
pkg: true,
- packageJsonCache,
}).then(results => [...results])
-
-if (require.main === module) {
- const options = { path: null, depth: 1 }
- const usage = `Usage:
- installed-package-contents [-d --depth=]
-
-Lists the files installed for a package specified by .
-
-Options:
- -d --depth= Provide a numeric value ("Infinity" is allowed)
- to specify how deep in the file tree to traverse.
- Default=1
- -h --help Show this usage information`
-
- process.argv.slice(2).forEach(arg => {
- let match
- if ((match = arg.match(/^--depth=([0-9]+|Infinity)/)) ||
- (match = arg.match(/^-d([0-9]+|Infinity)/))) {
- options.depth = +match[1]
- } else if (arg === '-h' || arg === '--help') {
- console.log(usage)
- process.exit(0)
- } else {
- options.path = arg
- }
- })
- if (!options.path) {
- console.error('ERROR: no path provided')
- console.error(usage)
- process.exit(1)
- }
- const cwd = process.cwd()
- module.exports(options)
- .then(list => list.sort().forEach(p => console.log(relative(cwd, p))))
- .catch(/* istanbul ignore next - pretty unusual */ er => {
- console.error(er)
- process.exit(1)
- })
-}
diff --git a/node_modules/@npmcli/installed-package-contents/package.json b/node_modules/@npmcli/installed-package-contents/package.json
index aac2de1304256..132256430a6c1 100644
--- a/node_modules/@npmcli/installed-package-contents/package.json
+++ b/node_modules/@npmcli/installed-package-contents/package.json
@@ -1,17 +1,17 @@
{
"name": "@npmcli/installed-package-contents",
- "version": "2.0.1",
+ "version": "2.1.0",
"description": "Get the list of files installed in a package in node_modules, including bundled dependencies",
"author": "GitHub Inc.",
"main": "lib/index.js",
"bin": {
- "installed-package-contents": "lib/index.js"
+ "installed-package-contents": "bin/index.js"
},
"license": "ISC",
"scripts": {
"test": "tap",
"snap": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run lint -- --fix",
@@ -19,9 +19,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.6.2",
- "mkdirp": "^1.0.4",
- "require-inject": "^1.4.4",
+ "@npmcli/template-oss": "4.21.4",
"tap": "^16.3.0"
},
"dependencies": {
@@ -41,7 +39,8 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.6.2"
+ "version": "4.21.4",
+ "publish": true
},
"tap": {
"nyc-arg": [
diff --git a/node_modules/@npmcli/map-workspaces/lib/index.js b/node_modules/@npmcli/map-workspaces/lib/index.js
index f93bc2911e89f..b20bf5de5d631 100644
--- a/node_modules/@npmcli/map-workspaces/lib/index.js
+++ b/node_modules/@npmcli/map-workspaces/lib/index.js
@@ -1,29 +1,53 @@
-const { promisify } = require('util')
const path = require('path')
const getName = require('@npmcli/name-from-folder')
-const minimatch = require('minimatch')
+const { minimatch } = require('minimatch')
const rpj = require('read-package-json-fast')
-const glob = require('glob')
-const pGlob = promisify(glob)
+const { glob } = require('glob')
-function appendNegatedPatterns (patterns) {
- const results = []
- for (let pattern of patterns) {
+function appendNegatedPatterns (allPatterns) {
+ const patterns = []
+ const negatedPatterns = []
+ for (let pattern of allPatterns) {
const excl = pattern.match(/^!+/)
if (excl) {
pattern = pattern.slice(excl[0].length)
}
- // strip off any / from the start of the pattern. /foo => foo
- pattern = pattern.replace(/^\/+/, '')
+ // strip off any / or ./ from the start of the pattern. /foo => foo
+ pattern = pattern.replace(/^\.?\/+/, '')
// an odd number of ! means a negated pattern. !!foo ==> foo
const negate = excl && excl[0].length % 2 === 1
- results.push({ pattern, negate })
+ if (negate) {
+ negatedPatterns.push(pattern)
+ } else {
+ // remove negated patterns that appeared before this pattern to avoid
+ // ignoring paths that were matched afterwards
+ // e.g: ['packages/**', '!packages/b/**', 'packages/b/a']
+ // in the above list, the last pattern overrides the negated pattern
+ // right before it. In effect, the above list would become:
+ // ['packages/**', 'packages/b/a']
+ // The order matters here which is why we must do it inside the loop
+ // as opposed to doing it all together at the end.
+ for (let i = 0; i < negatedPatterns.length; ++i) {
+ const negatedPattern = negatedPatterns[i]
+ if (minimatch(pattern, negatedPattern)) {
+ negatedPatterns.splice(i, 1)
+ }
+ }
+ patterns.push(pattern)
+ }
}
- return results
+ // use the negated patterns to eagerly remove all the patterns that
+ // can be removed to avoid unnecessary crawling
+ for (const negated of negatedPatterns) {
+ for (const pattern of minimatch.match(patterns, negated)) {
+ patterns.splice(patterns.indexOf(pattern), 1)
+ }
+ }
+ return { patterns, negatedPatterns }
}
function getPatterns (workspaces) {
@@ -79,11 +103,11 @@ async function mapWorkspaces (opts = {}) {
}
const { workspaces = [] } = opts.pkg
- const patterns = getPatterns(workspaces)
+ const { patterns, negatedPatterns } = getPatterns(workspaces)
const results = new Map()
const seen = new Map()
- if (!patterns.length) {
+ if (!patterns.length && !negatedPatterns.length) {
return results
}
@@ -91,50 +115,54 @@ async function mapWorkspaces (opts = {}) {
...opts,
ignore: [
...opts.ignore || [],
- ...['**/node_modules/**'],
+ '**/node_modules/**',
+ // just ignore the negated patterns to avoid unnecessary crawling
+ ...negatedPatterns,
],
})
const getPackagePathname = pkgPathmame(opts)
- for (const item of patterns) {
- const matches = await pGlob(getGlobPattern(item.pattern), getGlobOpts())
-
- for (const match of matches) {
- let pkg
- const packageJsonPathname = getPackagePathname(match, 'package.json')
- const packagePathname = path.dirname(packageJsonPathname)
-
- try {
- pkg = await rpj(packageJsonPathname)
- } catch (err) {
- if (err.code === 'ENOENT') {
- continue
- } else {
- throw err
- }
- }
+ let matches = await glob(patterns.map((p) => getGlobPattern(p)), getGlobOpts())
+ // preserves glob@8 behavior
+ matches = matches.sort((a, b) => a.localeCompare(b, 'en'))
+
+ // we must preserve the order of results according to the given list of
+ // workspace patterns
+ const orderedMatches = []
+ for (const pattern of patterns) {
+ orderedMatches.push(...matches.filter((m) => {
+ return minimatch(m, pattern, { partial: true, windowsPathsNoEscape: true })
+ }))
+ }
- const name = getPackageName(pkg, packagePathname)
+ for (const match of orderedMatches) {
+ let pkg
+ const packageJsonPathname = getPackagePathname(match, 'package.json')
- let seenPackagePathnames = seen.get(name)
- if (!seenPackagePathnames) {
- seenPackagePathnames = new Set()
- seen.set(name, seenPackagePathnames)
- }
- if (item.negate) {
- seenPackagePathnames.delete(packagePathname)
+ try {
+ pkg = await rpj(packageJsonPathname)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ continue
} else {
- seenPackagePathnames.add(packagePathname)
+ throw err
}
}
+
+ const packagePathname = path.dirname(packageJsonPathname)
+ const name = getPackageName(pkg, packagePathname)
+
+ let seenPackagePathnames = seen.get(name)
+ if (!seenPackagePathnames) {
+ seenPackagePathnames = new Set()
+ seen.set(name, seenPackagePathnames)
+ }
+ seenPackagePathnames.add(packagePathname)
}
const errorMessageArray = ['must not have multiple workspaces with the same name']
for (const [packageName, seenPackagePathnames] of seen) {
- if (seenPackagePathnames.size === 0) {
- continue
- }
if (seenPackagePathnames.size > 1) {
addDuplicateErrorMessages(errorMessageArray, packageName, seenPackagePathnames)
} else {
@@ -177,30 +205,25 @@ mapWorkspaces.virtual = function (opts = {}) {
const { workspaces = [] } = packages[''] || {}
// uses a pathname-keyed map in order to negate the exact items
const results = new Map()
- const patterns = getPatterns(workspaces)
- if (!patterns.length) {
+ const { patterns, negatedPatterns } = getPatterns(workspaces)
+ if (!patterns.length && !negatedPatterns.length) {
return results
}
- patterns.push({ pattern: '**/node_modules/**', negate: true })
-
- const getPackagePathname = pkgPathmame(opts)
+ negatedPatterns.push('**/node_modules/**')
- for (const packageKey of Object.keys(packages)) {
- if (packageKey === '') {
- continue
+ const packageKeys = Object.keys(packages)
+ for (const pattern of negatedPatterns) {
+ for (const packageKey of minimatch.match(packageKeys, pattern)) {
+ packageKeys.splice(packageKeys.indexOf(packageKey), 1)
}
+ }
- for (const item of patterns) {
- if (minimatch(packageKey, item.pattern)) {
- const packagePathname = getPackagePathname(packageKey)
- const name = getPackageName(packages[packageKey], packagePathname)
-
- if (item.negate) {
- results.delete(packagePathname)
- } else {
- results.set(packagePathname, name)
- }
- }
+ const getPackagePathname = pkgPathmame(opts)
+ for (const pattern of patterns) {
+ for (const packageKey of minimatch.match(packageKeys, pattern)) {
+ const packagePathname = getPackagePathname(packageKey)
+ const name = getPackageName(packages[packageKey], packagePathname)
+ results.set(packagePathname, name)
}
}
diff --git a/node_modules/@npmcli/map-workspaces/package.json b/node_modules/@npmcli/map-workspaces/package.json
index c8113cb25eb32..e6292b06bd2b4 100644
--- a/node_modules/@npmcli/map-workspaces/package.json
+++ b/node_modules/@npmcli/map-workspaces/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/map-workspaces",
- "version": "3.0.0",
+ "version": "3.0.6",
"main": "lib/index.js",
"files": [
"bin/",
@@ -25,7 +25,7 @@
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"pretest": "npm run lint",
"test": "tap",
"snap": "tap",
@@ -42,18 +42,19 @@
]
},
"devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.21.3",
"tap": "^16.0.1"
},
"dependencies": {
- "@npmcli/name-from-folder": "^1.0.1",
- "glob": "^8.0.1",
- "minimatch": "^5.0.1",
+ "@npmcli/name-from-folder": "^2.0.0",
+ "glob": "^10.2.2",
+ "minimatch": "^9.0.0",
"read-package-json-fast": "^3.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
+ "version": "4.21.3",
+ "publish": "true"
}
}
diff --git a/node_modules/@npmcli/metavuln-calculator/lib/advisory.js b/node_modules/@npmcli/metavuln-calculator/lib/advisory.js
index 1f479a90dd999..01f6a66fc2ac4 100644
--- a/node_modules/@npmcli/metavuln-calculator/lib/advisory.js
+++ b/node_modules/@npmcli/metavuln-calculator/lib/advisory.js
@@ -106,7 +106,7 @@ class Advisory {
this[_packument] = packument
- const pakuVersions = Object.keys(packument.versions)
+ const pakuVersions = Object.keys(packument.versions || {})
const allVersions = new Set([...pakuVersions, ...this.versions])
const versionsAdded = []
const versionsRemoved = []
@@ -242,7 +242,7 @@ class Advisory {
// check the dependency of this version on the vulnerable dep
// if we got a version that's not in the packument, fall back on
// the spec provided, if possible.
- const mani = this[_packument].versions[version] || {
+ const mani = this[_packument]?.versions?.[version] || {
dependencies: {
[this.dependency]: spec,
},
diff --git a/node_modules/@npmcli/metavuln-calculator/lib/index.js b/node_modules/@npmcli/metavuln-calculator/lib/index.js
index 668f55942c506..971409b5bad44 100644
--- a/node_modules/@npmcli/metavuln-calculator/lib/index.js
+++ b/node_modules/@npmcli/metavuln-calculator/lib/index.js
@@ -3,6 +3,7 @@
// class handles all the IO with the registry and cache.
const pacote = require('pacote')
const cacache = require('cacache')
+const { time } = require('proc-log')
const Advisory = require('./advisory.js')
const { homedir } = require('os')
const jsonParse = require('json-parse-even-better-errors')
@@ -48,34 +49,33 @@ class Calculator {
async [_calculate] (name, source) {
const k = `security-advisory:${name}:${source.id}`
- const t = `metavuln:calculate:${k}`
- process.emit('time', t)
+ const timeEnd = time.start(`metavuln:calculate:${k}`)
const advisory = new Advisory(name, source, this[_options])
// load packument and cached advisory
const [cached, packument] = await Promise.all([
this[_cacheGet](advisory),
this[_packument](name),
])
- process.emit('time', `metavuln:load:${k}`)
+ const timeEndLoad = time.start(`metavuln:load:${k}`)
advisory.load(cached, packument)
- process.emit('timeEnd', `metavuln:load:${k}`)
+ timeEndLoad()
if (advisory.updated) {
await this[_cachePut](advisory)
}
this[_advisories].set(k, advisory)
- process.emit('timeEnd', t)
+ timeEnd()
return advisory
}
async [_cachePut] (advisory) {
const { name, id } = advisory
const key = `security-advisory:${name}:${id}`
- process.emit('time', `metavuln:cache:put:${key}`)
+ const timeEnd = time.start(`metavuln:cache:put:${key}`)
const data = JSON.stringify(advisory)
const options = { ...this[_options] }
this[_cacheData].set(key, jsonParse(data))
await cacache.put(this[_cache], key, data, options).catch(() => {})
- process.emit('timeEnd', `metavuln:cache:put:${key}`)
+ timeEnd()
}
async [_cacheGet] (advisory) {
@@ -87,12 +87,12 @@ class Calculator {
return this[_cacheData].get(key)
}
- process.emit('time', `metavuln:cache:get:${key}`)
+ const timeEnd = time.start(`metavuln:cache:get:${key}`)
const p = cacache.get(this[_cache], key, { ...this[_options] })
.catch(() => ({ data: '{}' }))
.then(({ data }) => {
data = jsonParse(data)
- process.emit('timeEnd', `metavuln:cache:get:${key}`)
+ timeEnd()
this[_cacheData].set(key, data)
return data
})
@@ -105,9 +105,9 @@ class Calculator {
return this[_packuments].get(name)
}
- process.emit('time', `metavuln:packument:${name}`)
+ const timeEnd = time.start(`metavuln:packument:${name}`)
const p = pacote.packument(name, { ...this[_options] })
- .catch((er) => {
+ .catch(() => {
// presumably not something from the registry.
// an empty packument will have an effective range of *
return {
@@ -116,7 +116,7 @@ class Calculator {
}
})
.then(paku => {
- process.emit('timeEnd', `metavuln:packument:${name}`)
+ timeEnd()
this[_packuments].set(name, paku)
return paku
})
diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json
index baf59c47fcb40..a7ec02d2ee72b 100644
--- a/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/metavuln-calculator",
- "version": "5.0.0",
+ "version": "7.1.1",
"main": "lib/index.js",
"files": [
"bin/",
@@ -9,7 +9,7 @@
"description": "Calculate meta-vulnerabilities from package security advisories",
"repository": {
"type": "git",
- "url": "https://github.com/npm/metavuln-calculator.git"
+ "url": "git+https://github.com/npm/metavuln-calculator.git"
},
"author": "GitHub Inc.",
"license": "ISC",
@@ -19,7 +19,7 @@
"snap": "tap",
"postsnap": "npm run lint",
"eslint": "eslint",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force"
@@ -33,22 +33,30 @@
]
},
"devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.22.0",
"require-inject": "^1.4.4",
"tap": "^16.0.1"
},
"dependencies": {
- "cacache": "^17.0.0",
+ "cacache": "^18.0.0",
"json-parse-even-better-errors": "^3.0.0",
- "pacote": "^15.0.0",
+ "pacote": "^18.0.0",
+ "proc-log": "^4.1.0",
"semver": "^7.3.5"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
+ "version": "4.22.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/node_modules/@npmcli/name-from-folder/index.js b/node_modules/@npmcli/name-from-folder/lib/index.js
similarity index 100%
rename from node_modules/@npmcli/name-from-folder/index.js
rename to node_modules/@npmcli/name-from-folder/lib/index.js
diff --git a/node_modules/@npmcli/name-from-folder/package.json b/node_modules/@npmcli/name-from-folder/package.json
index 9569b4e66e90c..f0aa5b16dba1a 100644
--- a/node_modules/@npmcli/name-from-folder/package.json
+++ b/node_modules/@npmcli/name-from-folder/package.json
@@ -1,27 +1,43 @@
{
"name": "@npmcli/name-from-folder",
- "version": "1.0.1",
+ "version": "2.0.0",
"files": [
- "index.js"
+ "bin/",
+ "lib/"
],
+ "main": "lib/index.js",
"description": "Get the package name from a folder path",
"repository": {
"type": "git",
- "url": "git+https://github.com/npm/name-from-folder"
+ "url": "https://github.com/npm/name-from-folder.git"
},
- "author": "Isaac Z. Schlueter (https://izs.me)",
+ "author": "GitHub Inc.",
"license": "ISC",
"scripts": {
"test": "tap",
"snap": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags"
- },
- "tap": {
- "check-coverage": true
+ "lint": "eslint \"**/*.js\"",
+ "postlint": "template-oss-check",
+ "template-oss-apply": "template-oss-apply --force",
+ "lintfix": "npm run lint -- --fix",
+ "posttest": "npm run lint"
},
"devDependencies": {
- "tap": "^14.10.7"
+ "@npmcli/eslint-config": "^4.0.1",
+ "@npmcli/template-oss": "4.11.0",
+ "tap": "^16.3.2"
+ },
+ "engines": {
+ "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.11.0"
+ },
+ "tap": {
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ]
}
}
diff --git a/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/package-json/lib/index.js
index e98308f3d3b84..f165ee23b75ab 100644
--- a/node_modules/@npmcli/package-json/lib/index.js
+++ b/node_modules/@npmcli/package-json/lib/index.js
@@ -1,17 +1,12 @@
-const fs = require('fs')
-const promisify = require('util').promisify
-const readFile = promisify(fs.readFile)
-const writeFile = promisify(fs.writeFile)
-const { resolve } = require('path')
+const { readFile, writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
+const parseJSON = require('json-parse-even-better-errors')
+
const updateDeps = require('./update-dependencies.js')
const updateScripts = require('./update-scripts.js')
const updateWorkspaces = require('./update-workspaces.js')
-
-const parseJSON = require('json-parse-even-better-errors')
-
-const _filename = Symbol('filename')
-const _manifest = Symbol('manifest')
-const _readFileContent = Symbol('readFileContent')
+const normalize = require('./normalize.js')
+const { read, parse } = require('./read-package.js')
// a list of handy specialized helper functions that take
// care of special cases that are handled by the npm cli
@@ -29,58 +24,206 @@ const knownKeys = new Set([
])
class PackageJson {
- static async load (path) {
- return await new PackageJson(path).load()
- }
+ static normalizeSteps = Object.freeze([
+ '_id',
+ '_attributes',
+ 'bundledDependencies',
+ 'bundleDependencies',
+ 'optionalDedupe',
+ 'scripts',
+ 'funding',
+ 'bin',
+ ])
+
+ // npm pkg fix
+ static fixSteps = Object.freeze([
+ 'binRefs',
+ 'bundleDependencies',
+ 'bundleDependenciesFalse',
+ 'fixNameField',
+ 'fixVersionField',
+ 'fixRepositoryField',
+ 'fixDependencies',
+ 'devDependencies',
+ 'scriptpath',
+ ])
+
+ static prepareSteps = Object.freeze([
+ '_id',
+ '_attributes',
+ 'bundledDependencies',
+ 'bundleDependencies',
+ 'bundleDependenciesDeleteFalse',
+ 'gypfile',
+ 'serverjs',
+ 'scriptpath',
+ 'authors',
+ 'readme',
+ 'mans',
+ 'binDir',
+ 'gitHead',
+ 'fillTypes',
+ 'normalizeData',
+ 'binRefs',
+ ])
- constructor (path) {
- this[_filename] = resolve(path, 'package.json')
- this[_manifest] = {}
- this[_readFileContent] = ''
+ // create a new empty package.json, so we can save at the given path even
+ // though we didn't start from a parsed file
+ static async create (path, opts = {}) {
+ const p = new PackageJson()
+ await p.create(path)
+ if (opts.data) {
+ return p.update(opts.data)
+ }
+ return p
}
- async load () {
+ // Loads a package.json at given path and JSON parses
+ static async load (path, opts = {}) {
+ const p = new PackageJson()
+ // Avoid try/catch if we aren't going to create
+ if (!opts.create) {
+ return p.load(path)
+ }
+
try {
- this[_readFileContent] =
- await readFile(this[_filename], 'utf8')
+ return await p.load(path)
} catch (err) {
- throw new Error('package.json not found')
+ if (!err.message.startsWith('Could not read package.json')) {
+ throw err
+ }
+ return await p.create(path)
}
+ }
+
+ // npm pkg fix
+ static async fix (path, opts) {
+ const p = new PackageJson()
+ await p.load(path, true)
+ return p.fix(opts)
+ }
+ // read-package-json compatible behavior
+ static async prepare (path, opts) {
+ const p = new PackageJson()
+ await p.load(path, true)
+ return p.prepare(opts)
+ }
+
+ // read-package-json-fast compatible behavior
+ static async normalize (path, opts) {
+ const p = new PackageJson()
+ await p.load(path)
+ return p.normalize(opts)
+ }
+
+ #path
+ #manifest
+ #readFileContent = ''
+ #canSave = true
+
+ // Load content from given path
+ async load (path, parseIndex) {
+ this.#path = path
+ let parseErr
try {
- this[_manifest] =
- parseJSON(this[_readFileContent])
+ this.#readFileContent = await read(this.filename)
} catch (err) {
- throw new Error(`Invalid package.json: ${err}`)
+ if (!parseIndex) {
+ throw err
+ }
+ parseErr = err
+ }
+
+ if (parseErr) {
+ const indexFile = resolve(this.path, 'index.js')
+ let indexFileContent
+ try {
+ indexFileContent = await readFile(indexFile, 'utf8')
+ } catch (err) {
+ throw parseErr
+ }
+ try {
+ this.fromComment(indexFileContent)
+ } catch (err) {
+ throw parseErr
+ }
+ // This wasn't a package.json so prevent saving
+ this.#canSave = false
+ return this
+ }
+
+ return this.fromJSON(this.#readFileContent)
+ }
+
+ // Load data from a JSON string/buffer
+ fromJSON (data) {
+ this.#manifest = parse(data)
+ return this
+ }
+
+ fromContent (data) {
+ this.#manifest = data
+ this.#canSave = false
+ return this
+ }
+
+ // Load data from a comment
+ // /**package { "name": "foo", "version": "1.2.3", ... } **/
+ fromComment (data) {
+ data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+ if (data.length < 2) {
+ throw new Error('File has no package in comments')
}
+ data = data[1]
+ data = data.split(/\*\*\/$/m)
+ if (data.length < 2) {
+ throw new Error('File has no package in comments')
+ }
+ data = data[0]
+ data = data.replace(/^\s*\*/mg, '')
+
+ this.#manifest = parseJSON(data)
return this
}
get content () {
- return this[_manifest]
+ return this.#manifest
+ }
+
+ get path () {
+ return this.#path
}
+ get filename () {
+ if (this.path) {
+ return resolve(this.path, 'package.json')
+ }
+ return undefined
+ }
+
+ create (path) {
+ this.#path = path
+ this.#manifest = {}
+ return this
+ }
+
+ // This should be the ONLY way to set content in the manifest
update (content) {
- // validates both current manifest and content param
- const invalidContent =
- typeof this[_manifest] !== 'object'
- || typeof content !== 'object'
- if (invalidContent) {
- throw Object.assign(
- new Error(`Can't update invalid package.json data`),
- { code: 'EPACKAGEJSONUPDATE' }
- )
+ if (!this.content) {
+ throw new Error('Can not update without content. Please `load` or `create`')
}
for (const step of knownSteps) {
- this[_manifest] = step({ content, originalContent: this[_manifest] })
+ this.#manifest = step({ content, originalContent: this.content })
}
// unknown properties will just be overwitten
for (const [key, value] of Object.entries(content)) {
if (!knownKeys.has(key)) {
- this[_manifest][key] = value
+ this.content[key] = value
}
}
@@ -88,21 +231,47 @@ class PackageJson {
}
async save () {
+ if (!this.#canSave) {
+ throw new Error('No package.json to save to')
+ }
const {
[Symbol.for('indent')]: indent,
[Symbol.for('newline')]: newline,
- } = this[_manifest]
+ } = this.content
const format = indent === undefined ? ' ' : indent
const eol = newline === undefined ? '\n' : newline
const fileContent = `${
- JSON.stringify(this[_manifest], null, format)
+ JSON.stringify(this.content, null, format)
}\n`
.replace(/\n/g, eol)
- if (fileContent.trim() !== this[_readFileContent].trim()) {
- return await writeFile(this[_filename], fileContent)
+ if (fileContent.trim() !== this.#readFileContent.trim()) {
+ return await writeFile(this.filename, fileContent)
+ }
+ }
+
+ async normalize (opts = {}) {
+ if (!opts.steps) {
+ opts.steps = this.constructor.normalizeSteps
+ }
+ await normalize(this, opts)
+ return this
+ }
+
+ async prepare (opts = {}) {
+ if (!opts.steps) {
+ opts.steps = this.constructor.prepareSteps
}
+ await normalize(this, opts)
+ return this
+ }
+
+ async fix (opts = {}) {
+ // This one is not overridable
+ opts.steps = this.constructor.fixSteps
+ await normalize(this, opts)
+ return this
}
}
diff --git a/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/package-json/lib/normalize.js
new file mode 100644
index 0000000000000..682d234825de9
--- /dev/null
+++ b/node_modules/@npmcli/package-json/lib/normalize.js
@@ -0,0 +1,619 @@
+const valid = require('semver/functions/valid')
+const clean = require('semver/functions/clean')
+const fs = require('node:fs/promises')
+const path = require('node:path')
+const { log } = require('proc-log')
+
+/**
+ * @type {import('hosted-git-info')}
+ */
+let _hostedGitInfo
+function lazyHostedGitInfo () {
+ if (!_hostedGitInfo) {
+ _hostedGitInfo = require('hosted-git-info')
+ }
+ return _hostedGitInfo
+}
+
+/**
+ * @type {import('glob').glob}
+ */
+let _glob
+function lazyLoadGlob () {
+ if (!_glob) {
+ _glob = require('glob').glob
+ }
+ return _glob
+}
+
+// used to be npm-normalize-package-bin
+function normalizePackageBin (pkg, changes) {
+ if (pkg.bin) {
+ if (typeof pkg.bin === 'string' && pkg.name) {
+ changes?.push('"bin" was converted to an object')
+ pkg.bin = { [pkg.name]: pkg.bin }
+ } else if (Array.isArray(pkg.bin)) {
+ changes?.push('"bin" was converted to an object')
+ pkg.bin = pkg.bin.reduce((acc, k) => {
+ acc[path.basename(k)] = k
+ return acc
+ }, {})
+ }
+ if (typeof pkg.bin === 'object') {
+ for (const binKey in pkg.bin) {
+ if (typeof pkg.bin[binKey] !== 'string') {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+ const base = path.basename(secureAndUnixifyPath(binKey))
+ if (!base) {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+
+ const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
+
+ if (!binTarget) {
+ delete pkg.bin[binKey]
+ changes?.push(`removed invalid "bin[${binKey}]"`)
+ continue
+ }
+
+ if (base !== binKey) {
+ delete pkg.bin[binKey]
+ changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
+ }
+ if (binTarget !== pkg.bin[binKey]) {
+ changes?.push(`"bin[${base}]" script name was cleaned`)
+ }
+ pkg.bin[base] = binTarget
+ }
+
+ if (Object.keys(pkg.bin).length === 0) {
+ changes?.push('empty "bin" was removed')
+ delete pkg.bin
+ }
+
+ return pkg
+ }
+ }
+ delete pkg.bin
+}
+
+function normalizePackageMan (pkg, changes) {
+ if (pkg.man) {
+ const mans = []
+ for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
+ if (typeof man !== 'string') {
+ changes?.push(`removed invalid "man [${man}]"`)
+ } else {
+ mans.push(secureAndUnixifyPath(man))
+ }
+ }
+
+ if (!mans.length) {
+ changes?.push('empty "man" was removed')
+ } else {
+ pkg.man = mans
+ return pkg
+ }
+ }
+ delete pkg.man
+}
+
+function isCorrectlyEncodedName (spec) {
+ return !spec.match(/[/@\s+%:]/) &&
+ spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+ if (spec.charAt(0) !== '@') {
+ return false
+ }
+
+ const rest = spec.slice(1).split('/')
+ if (rest.length !== 2) {
+ return false
+ }
+
+ return rest[0] && rest[1] &&
+ rest[0] === encodeURIComponent(rest[0]) &&
+ rest[1] === encodeURIComponent(rest[1])
+}
+
+function unixifyPath (ref) {
+ return ref.replace(/\\|:/g, '/')
+}
+
+function securePath (ref) {
+ const secured = path.join('.', path.join('/', unixifyPath(ref)))
+ return secured.startsWith('.') ? '' : secured
+}
+
+function secureAndUnixifyPath (ref) {
+ return unixifyPath(securePath(ref))
+}
+
+// We don't want the `changes` array in here by default because this is a hot
+// path for parsing packuments during install. So the calling method passes it
+// in if it wants to track changes.
+const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
+ if (!pkg.content) {
+ throw new Error('Can not normalize without content')
+ }
+ const data = pkg.content
+ const scripts = data.scripts || {}
+ const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
+
+ // name and version are load bearing so we have to clean them up first
+ if (steps.includes('fixNameField') || steps.includes('normalizeData')) {
+ if (!data.name && !strict) {
+ changes?.push('Missing "name" field was set to an empty string')
+ data.name = ''
+ } else {
+ if (typeof data.name !== 'string') {
+ throw new Error('name field must be a string.')
+ }
+ if (!strict) {
+ const name = data.name.trim()
+ if (data.name !== name) {
+ changes?.push(`Whitespace was trimmed from "name"`)
+ data.name = name
+ }
+ }
+
+ if (data.name.startsWith('.') ||
+ !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+ (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+ data.name.toLowerCase() === 'node_modules' ||
+ data.name.toLowerCase() === 'favicon.ico') {
+ throw new Error('Invalid name: ' + JSON.stringify(data.name))
+ }
+ }
+ }
+
+ if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
+ // allow "loose" semver 1.0 versions in non-strict mode
+ // enforce strict semver 2.0 compliance in strict mode
+ const loose = !strict
+ if (!data.version) {
+ data.version = ''
+ } else {
+ if (!valid(data.version, loose)) {
+ throw new Error(`Invalid version: "${data.version}"`)
+ }
+ const version = clean(data.version, loose)
+ if (version !== data.version) {
+ changes?.push(`"version" was cleaned and set to "${version}"`)
+ data.version = version
+ }
+ }
+ }
+ // remove attributes that start with "_"
+ if (steps.includes('_attributes')) {
+ for (const key in data) {
+ if (key.startsWith('_')) {
+ changes?.push(`"${key}" was removed`)
+ delete pkg.content[key]
+ }
+ }
+ }
+
+ // build the "_id" attribute
+ if (steps.includes('_id')) {
+ if (data.name && data.version) {
+ changes?.push(`"_id" was set to ${pkgId}`)
+ data._id = pkgId
+ }
+ }
+
+ // fix bundledDependencies typo
+ // normalize bundleDependencies
+ if (steps.includes('bundledDependencies')) {
+ if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
+ data.bundleDependencies = data.bundledDependencies
+ }
+ changes?.push(`Deleted incorrect "bundledDependencies"`)
+ delete data.bundledDependencies
+ }
+ // expand "bundleDependencies: true or translate from object"
+ if (steps.includes('bundleDependencies')) {
+ const bd = data.bundleDependencies
+ if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
+ changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
+ data.bundleDependencies = []
+ } else if (bd === true) {
+ changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
+ data.bundleDependencies = Object.keys(data.dependencies || {})
+ } else if (bd && typeof bd === 'object') {
+ if (!Array.isArray(bd)) {
+ changes?.push(`"bundleDependencies" was changed from an object to an array`)
+ data.bundleDependencies = Object.keys(bd)
+ }
+ } else if ('bundleDependencies' in data) {
+ changes?.push(`"bundleDependencies" was removed`)
+ delete data.bundleDependencies
+ }
+ }
+
+ // it was once common practice to list deps both in optionalDependencies and
+ // in dependencies, to support npm versions that did not know about
+ // optionalDependencies. This is no longer a relevant need, so duplicating
+ // the deps in two places is unnecessary and excessive.
+ if (steps.includes('optionalDedupe')) {
+ if (data.dependencies &&
+ data.optionalDependencies && typeof data.optionalDependencies === 'object') {
+ for (const name in data.optionalDependencies) {
+ changes?.push(`optionalDependencies."${name}" was removed`)
+ delete data.dependencies[name]
+ }
+ if (!Object.keys(data.dependencies).length) {
+ changes?.push(`Empty "optionalDependencies" was removed`)
+ delete data.dependencies
+ }
+ }
+ }
+
+ // add "install" attribute if any "*.gyp" files exist
+ if (steps.includes('gypfile')) {
+ if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
+ const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
+ if (files.length) {
+ scripts.install = 'node-gyp rebuild'
+ data.scripts = scripts
+ data.gypfile = true
+ changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
+ changes?.push(`"gypfile" was set to "true"`)
+ }
+ }
+ }
+
+ // add "start" attribute if "server.js" exists
+ if (steps.includes('serverjs') && !scripts.start) {
+ try {
+ await fs.access(path.join(pkg.path, 'server.js'))
+ scripts.start = 'node server.js'
+ data.scripts = scripts
+ changes?.push('"scripts.start" was set to "node server.js"')
+ } catch {
+ // do nothing
+ }
+ }
+
+ // strip "node_modules/.bin" from scripts entries
+ // remove invalid scripts entries (non-strings)
+ if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
+ const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+ if (typeof data.scripts === 'object') {
+ for (const name in data.scripts) {
+ if (typeof data.scripts[name] !== 'string') {
+ delete data.scripts[name]
+ changes?.push(`Invalid scripts."${name}" was removed`)
+ } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
+ data.scripts[name] = data.scripts[name].replace(spre, '')
+ changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
+ }
+ }
+ } else {
+ changes?.push(`Removed invalid "scripts"`)
+ delete data.scripts
+ }
+ }
+
+ if (steps.includes('funding')) {
+ if (data.funding && typeof data.funding === 'string') {
+ data.funding = { url: data.funding }
+ changes?.push(`"funding" was changed to an object with a url attribute`)
+ }
+ }
+
+ // populate "authors" attribute
+ if (steps.includes('authors') && !data.contributors) {
+ try {
+ const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
+ const authors = authorData.split(/\r?\n/g)
+ .map(line => line.replace(/^\s*#.*$/, '').trim())
+ .filter(line => line)
+ data.contributors = authors
+ changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
+ } catch {
+ // do nothing
+ }
+ }
+
+ // populate "readme" attribute
+ if (steps.includes('readme') && !data.readme) {
+ const mdre = /\.m?a?r?k?d?o?w?n?$/i
+ const files = await lazyLoadGlob()('{README,README.*}', {
+ cwd: pkg.path,
+ nocase: true,
+ mark: true,
+ })
+ let readmeFile
+ for (const file of files) {
+ // don't accept directories.
+ if (!file.endsWith(path.sep)) {
+ if (file.match(mdre)) {
+ readmeFile = file
+ break
+ }
+ if (file.endsWith('README')) {
+ readmeFile = file
+ }
+ }
+ }
+ if (readmeFile) {
+ const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
+ data.readme = readmeData
+ data.readmeFilename = readmeFile
+ changes?.push(`"readme" was set to the contents of ${readmeFile}`)
+ changes?.push(`"readmeFilename" was set to ${readmeFile}`)
+ }
+ if (!data.readme) {
+ // this.warn('missingReadme')
+ data.readme = 'ERROR: No README data found!'
+ }
+ }
+
+ // expand directories.man
+ if (steps.includes('mans')) {
+ if (data.directories?.man && !data.man) {
+ const manDir = secureAndUnixifyPath(data.directories.man)
+ const cwd = path.resolve(pkg.path, manDir)
+ const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
+ data.man = files.map(man =>
+ path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
+ )
+ }
+ normalizePackageMan(data, changes)
+ }
+
+ if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
+ normalizePackageBin(data, changes)
+ }
+
+ // expand "directories.bin"
+ if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
+ const binsDir = path.resolve(pkg.path, securePath(data.directories.bin))
+ const bins = await lazyLoadGlob()('**', { cwd: binsDir })
+ data.bin = bins.reduce((acc, binFile) => {
+ if (binFile && !binFile.startsWith('.')) {
+ const binName = path.basename(binFile)
+ acc[binName] = path.join(data.directories.bin, binFile)
+ }
+ return acc
+ }, {})
+ // *sigh*
+ normalizePackageBin(data, changes)
+ }
+
+ // populate "gitHead" attribute
+ if (steps.includes('gitHead') && !data.gitHead) {
+ const git = require('@npmcli/git')
+ const gitRoot = await git.find({ cwd: pkg.path, root })
+ let head
+ if (gitRoot) {
+ try {
+ head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+ } catch (err) {
+ // do nothing
+ }
+ }
+ let headData
+ if (head) {
+ if (head.startsWith('ref: ')) {
+ const headRef = head.replace(/^ref: /, '').trim()
+ const headFile = path.resolve(gitRoot, '.git', headRef)
+ try {
+ headData = await fs.readFile(headFile, 'utf8')
+ headData = headData.replace(/^ref: /, '').trim()
+ } catch (err) {
+ // do nothing
+ }
+ if (!headData) {
+ const packFile = path.resolve(gitRoot, '.git/packed-refs')
+ try {
+ let refs = await fs.readFile(packFile, 'utf8')
+ if (refs) {
+ refs = refs.split('\n')
+ for (let i = 0; i < refs.length; i++) {
+ const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+ if (match && match[2].trim() === headRef) {
+ headData = match[1]
+ break
+ }
+ }
+ }
+ } catch {
+ // do nothing
+ }
+ }
+ } else {
+ headData = head.trim()
+ }
+ }
+ if (headData) {
+ data.gitHead = headData
+ }
+ }
+
+ // populate "types" attribute
+ if (steps.includes('fillTypes')) {
+ const index = data.main || 'index.js'
+
+ if (typeof index !== 'string') {
+ throw new TypeError('The "main" attribute must be of type string.')
+ }
+
+ // TODO exports is much more complicated than this in verbose format
+ // We need to support for instance
+
+ // "exports": {
+ // ".": [
+ // {
+ // "default": "./lib/npm.js"
+ // },
+ // "./lib/npm.js"
+ // ],
+ // "./package.json": "./package.json"
+ // },
+ // as well as conditional exports
+
+ // if (data.exports && typeof data.exports === 'string') {
+ // index = data.exports
+ // }
+
+ // if (data.exports && data.exports['.']) {
+ // index = data.exports['.']
+ // if (typeof index !== 'string') {
+ // }
+ // }
+ const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
+ const dts = `./${extless}.d.ts`
+ const hasDTSFields = 'types' in data || 'typings' in data
+ if (!hasDTSFields) {
+ try {
+ await fs.access(path.join(pkg.path, dts))
+ data.types = dts.split(path.sep).join('/')
+ } catch {
+ // do nothing
+ }
+ }
+ }
+
+ // "normalizeData" from "read-package-json", which was just a call through to
+ // "normalize-package-data". We only call the "fixer" functions because
+ // outside of that it was also clobbering _id (which we already conditionally
+ // do) and also adding the gypfile script (which we also already
+ // conditionally do)
+
+ // Some steps are isolated so we can do a limited subset of these in `fix`
+ if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
+ if (data.repositories) {
+ /* eslint-disable-next-line max-len */
+ changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
+ data.repository = data.repositories[0]
+ }
+ if (data.repository) {
+ if (typeof data.repository === 'string') {
+ changes?.push('"repository" was changed from a string to an object')
+ data.repository = {
+ type: 'git',
+ url: data.repository,
+ }
+ }
+ if (data.repository.url) {
+ const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
+ let r
+ if (hosted) {
+ if (hosted.getDefaultRepresentation() === 'shortcut') {
+ r = hosted.https()
+ } else {
+ r = hosted.toString()
+ }
+ if (r !== data.repository.url) {
+ changes?.push(`"repository.url" was normalized to "${r}"`)
+ data.repository.url = r
+ }
+ }
+ }
+ }
+ }
+
+ if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
+ // peerDependencies?
+ // devDependencies is meaningless here, it's ignored on an installed package
+ for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
+ if (data[type]) {
+ let secondWarning = true
+ if (typeof data[type] === 'string') {
+ changes?.push(`"${type}" was converted from a string into an object`)
+ data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
+ secondWarning = false
+ }
+ if (Array.isArray(data[type])) {
+ if (secondWarning) {
+ changes?.push(`"${type}" was converted from an array into an object`)
+ }
+ const o = {}
+ for (const d of data[type]) {
+ if (typeof d === 'string') {
+ const dep = d.trim().split(/(:?[@\s><=])/)
+ const dn = dep.shift()
+ const dv = dep.join('').replace(/^@/, '').trim()
+ o[dn] = dv
+ }
+ }
+ data[type] = o
+ }
+ }
+ }
+ // normalize-package-data used to put optional dependencies BACK into
+ // dependencies here, we no longer do this
+
+ for (const deps of ['dependencies', 'devDependencies']) {
+ if (deps in data) {
+ if (!data[deps] || typeof data[deps] !== 'object') {
+ changes?.push(`Removed invalid "${deps}"`)
+ delete data[deps]
+ } else {
+ for (const d in data[deps]) {
+ const r = data[deps][d]
+ if (typeof r !== 'string') {
+ changes?.push(`Removed invalid "${deps}.${d}"`)
+ delete data[deps][d]
+ }
+ const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
+ if (hosted && hosted !== data[deps][d]) {
+ changes?.push(`Normalized git reference to "${deps}.${d}"`)
+ data[deps][d] = hosted.toString()
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if (steps.includes('normalizeData')) {
+ const legacyFixer = require('normalize-package-data/lib/fixer.js')
+ const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js')
+ legacyFixer.warn = function () {
+ changes?.push(legacyMakeWarning.apply(null, arguments))
+ }
+
+ const legacySteps = [
+ 'fixDescriptionField',
+ 'fixModulesField',
+ 'fixFilesField',
+ 'fixManField',
+ 'fixBugsField',
+ 'fixKeywordsField',
+ 'fixBundleDependenciesField',
+ 'fixHomepageField',
+ 'fixReadmeField',
+ 'fixLicenseField',
+ 'fixPeople',
+ 'fixTypos',
+ ]
+ for (const legacyStep of legacySteps) {
+ legacyFixer[legacyStep](data)
+ }
+ }
+
+ // Warn if the bin references don't point to anything. This might be better
+ // in normalize-package-data if it had access to the file path.
+ if (steps.includes('binRefs') && data.bin instanceof Object) {
+ for (const key in data.bin) {
+ try {
+ await fs.access(path.resolve(pkg.path, data.bin[key]))
+ } catch {
+ log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+ // XXX: should a future breaking change delete bin entries that cannot be accessed?
+ }
+ }
+ }
+}
+
+module.exports = normalize
diff --git a/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/package-json/lib/read-package.js
new file mode 100644
index 0000000000000..d6c86ce388e6c
--- /dev/null
+++ b/node_modules/@npmcli/package-json/lib/read-package.js
@@ -0,0 +1,39 @@
+// This is JUST the code needed to open a package.json file and parse it.
+// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
+
+const { readFile } = require('fs/promises')
+const parseJSON = require('json-parse-even-better-errors')
+
+async function read (filename) {
+ try {
+ const data = await readFile(filename, 'utf8')
+ return data
+ } catch (err) {
+ err.message = `Could not read package.json: ${err}`
+ throw err
+ }
+}
+
+function parse (data) {
+ try {
+ const content = parseJSON(data)
+ return content
+ } catch (err) {
+ err.message = `Invalid package.json: ${err}`
+ throw err
+ }
+}
+
+// This is what most external libs will use.
+// PackageJson will call read and parse separately
+async function readPackage (filename) {
+ const data = await read(filename)
+ const content = parse(data)
+ return content
+}
+
+module.exports = {
+ read,
+ parse,
+ readPackage,
+}
diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json
index faae7891a1e72..a5ea22bdbb340 100644
--- a/node_modules/@npmcli/package-json/package.json
+++ b/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
- "version": "3.0.0",
+ "version": "5.2.0",
"description": "Programmatic API to update package.json",
"main": "lib/index.js",
"files": [
@@ -10,7 +10,7 @@
"scripts": {
"snap": "tap",
"test": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
@@ -24,23 +24,32 @@
"author": "GitHub Inc.",
"license": "ISC",
"devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.22.0",
+ "read-package-json": "^7.0.0",
+ "read-package-json-fast": "^3.0.2",
"tap": "^16.0.1"
},
"dependencies": {
- "json-parse-even-better-errors": "^3.0.0"
+ "@npmcli/git": "^5.0.0",
+ "glob": "^10.2.2",
+ "hosted-git-info": "^7.0.0",
+ "json-parse-even-better-errors": "^3.0.0",
+ "normalize-package-data": "^6.0.0",
+ "proc-log": "^4.0.0",
+ "semver": "^7.5.3"
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/package-json.git"
+ "url": "git+https://github.com/npm/package-json.git"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
+ "version": "4.22.0",
+ "publish": "true"
},
"tap": {
"nyc-arg": [
diff --git a/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/promise-spawn/lib/index.js
index 1d422045d558c..e147cb8f9c746 100644
--- a/node_modules/@npmcli/promise-spawn/lib/index.js
+++ b/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -12,54 +12,55 @@ const promiseSpawn = (cmd, args, opts = {}, extra = {}) => {
return spawnWithShell(cmd, args, opts, extra)
}
- let proc
+ let resolve, reject
+ const promise = new Promise((_resolve, _reject) => {
+ resolve = _resolve
+ reject = _reject
+ })
- const p = new Promise((res, rej) => {
- proc = spawn(cmd, args, opts)
+ // Create error here so we have a more useful stack trace when rejecting
+ const closeError = new Error('command failed')
- const stdout = []
- const stderr = []
+ const stdout = []
+ const stderr = []
- const reject = er => rej(Object.assign(er, {
- cmd,
- args,
- ...stdioResult(stdout, stderr, opts),
- ...extra,
- }))
+ const getResult = (result) => ({
+ cmd,
+ args,
+ ...result,
+ ...stdioResult(stdout, stderr, opts),
+ ...extra,
+ })
+ const rejectWithOpts = (er, erOpts) => {
+ const resultError = getResult(erOpts)
+ reject(Object.assign(er, resultError))
+ }
- proc.on('error', reject)
+ const proc = spawn(cmd, args, opts)
+ promise.stdin = proc.stdin
+ promise.process = proc
- if (proc.stdout) {
- proc.stdout.on('data', c => stdout.push(c)).on('error', reject)
- proc.stdout.on('error', er => reject(er))
- }
+ proc.on('error', rejectWithOpts)
- if (proc.stderr) {
- proc.stderr.on('data', c => stderr.push(c)).on('error', reject)
- proc.stderr.on('error', er => reject(er))
- }
+ if (proc.stdout) {
+ proc.stdout.on('data', c => stdout.push(c))
+ proc.stdout.on('error', rejectWithOpts)
+ }
- proc.on('close', (code, signal) => {
- const result = {
- cmd,
- args,
- code,
- signal,
- ...stdioResult(stdout, stderr, opts),
- ...extra,
- }
+ if (proc.stderr) {
+ proc.stderr.on('data', c => stderr.push(c))
+ proc.stderr.on('error', rejectWithOpts)
+ }
- if (code || signal) {
- rej(Object.assign(new Error('command failed'), result))
- } else {
- res(result)
- }
- })
+ proc.on('close', (code, signal) => {
+ if (code || signal) {
+ rejectWithOpts(closeError, { code, signal })
+ } else {
+ resolve(getResult({ code, signal }))
+ }
})
- p.stdin = proc.stdin
- p.process = proc
- return p
+ return promise
}
const spawnWithShell = (cmd, args, opts, extra) => {
@@ -100,8 +101,8 @@ const spawnWithShell = (cmd, args, opts, extra) => {
let pathToInitial
try {
pathToInitial = which.sync(initialCmd, {
- path: (options.env && options.env.PATH) || process.env.PATH,
- pathext: (options.env && options.env.PATHEXT) || process.env.PATHEXT,
+ path: (options.env && findInObject(options.env, 'PATH')) || process.env.PATH,
+ pathext: (options.env && findInObject(options.env, 'PATHEXT')) || process.env.PATHEXT,
}).toLowerCase()
} catch (err) {
pathToInitial = initialCmd.toLowerCase()
@@ -131,7 +132,7 @@ const open = (_args, opts = {}, extra = {}) => {
let platform = process.platform
// process.platform === 'linux' may actually indicate WSL, if that's the case
// we want to treat things as win32 anyway so the host can open the argument
- if (platform === 'linux' && os.release().includes('Microsoft')) {
+ if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) {
platform = 'win32'
}
@@ -192,4 +193,14 @@ const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => {
return result
}
+// case insensitive lookup in an object
+const findInObject = (obj, key) => {
+ key = key.toLowerCase()
+ for (const objKey of Object.keys(obj).sort()) {
+ if (objKey.toLowerCase() === key) {
+ return obj[objKey]
+ }
+ }
+}
+
module.exports = promiseSpawn
diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json
index c21e84fe83599..1b633f84596d2 100644
--- a/node_modules/@npmcli/promise-spawn/package.json
+++ b/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/promise-spawn",
- "version": "6.0.1",
+ "version": "7.0.2",
"files": [
"bin/",
"lib/"
@@ -9,14 +9,14 @@
"description": "spawn processes the way the npm cli likes to do",
"repository": {
"type": "git",
- "url": "https://github.com/npm/promise-spawn.git"
+ "url": "git+https://github.com/npm/promise-spawn.git"
},
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
"test": "tap",
"snap": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
@@ -32,19 +32,19 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.8.0",
- "minipass": "^3.1.1",
+ "@npmcli/template-oss": "4.22.0",
"spawk": "^1.7.1",
"tap": "^16.0.1"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.8.0"
+ "version": "4.22.0",
+ "publish": true
},
"dependencies": {
- "which": "^3.0.0"
+ "which": "^4.0.0"
}
}
diff --git a/node_modules/@npmcli/query/lib/index.js b/node_modules/@npmcli/query/lib/index.js
index 9373a4f7adbf0..db7dc345a8c75 100644
--- a/node_modules/@npmcli/query/lib/index.js
+++ b/node_modules/@npmcli/query/lib/index.js
@@ -85,31 +85,57 @@ const fixupNestedPseudo = astNode => {
transformAst(newRootNode)
}
-// :semver(, [selector], [function])
+// :semver(, [version|range|selector], [function])
+// note: the first or second parameter must be a static version or range
const fixupSemverSpecs = astNode => {
- // the first child node contains the version or range, most likely as a tag and a series of
- // classes. we combine them into a single string here. this is the only required input.
- const children = astNode.nodes.shift().nodes
- const value = children.reduce((res, i) => `${res}${String(i)}`, '')
-
- // next, if we have 2 nodes left then the user called us with a total of 3. that means the
- // last one tells us what specific semver function the user is requesting, so we pull that out
- let semverFunc
- if (astNode.nodes.length === 2) {
+ // if we have three nodes, the last is the semver function to use, pull that out first
+ if (astNode.nodes.length === 3) {
const funcNode = astNode.nodes.pop().nodes[0]
if (funcNode.type === 'tag') {
- semverFunc = funcNode.value
+ astNode.semverFunc = funcNode.value
+ } else if (funcNode.type === 'string') {
+ // a string is always in some type of quotes, we don't want those so slice them off
+ astNode.semverFunc = funcNode.value.slice(1, -1)
+ } else {
+ // anything that isn't a tag or a string isn't a function name
+ throw Object.assign(
+ new Error('`:semver` pseudo-class expects a function name as last value'),
+ { code: 'ESEMVERFUNC' }
+ )
+ }
+ }
+
+ // now if we have 1 node, it's a static value
+ // istanbul ignore else
+ if (astNode.nodes.length === 1) {
+ const semverNode = astNode.nodes.pop()
+ astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '')
+ } else if (astNode.nodes.length === 2) {
+ // and if we have two nodes, one of them is a static value and we need to determine which it is
+ for (let i = 0; i < astNode.nodes.length; ++i) {
+ const type = astNode.nodes[i].nodes[0].type
+ // the type of the first child may be combinator for ranges, such as >14
+ if (type === 'tag' || type === 'combinator') {
+ const semverNode = astNode.nodes.splice(i, 1)[0]
+ astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '')
+ astNode.semverPosition = i
+ break
+ }
+ }
+
+ if (typeof astNode.semverValue === 'undefined') {
+ throw Object.assign(
+ new Error('`:semver` pseudo-class expects a static value in the first or second position'),
+ { code: 'ESEMVERVALUE' }
+ )
}
}
- // now if there's a node left, that node is our selector. since that is the last remaining
- // child node, we call fixupAttr on ourselves so that the attribute selectors get parsed
+ // if we got here, the last remaining child should be attribute selector
if (astNode.nodes.length === 1) {
fixupAttr(astNode)
} else {
- // we weren't provided a selector, so we default to `[version]`. note, there's no default
- // operator here. that's because we don't know yet if the user has provided us a version
- // or range to assert against
+ // if we don't have a selector, we default to `[version]`
astNode.attributeMatcher = {
insensitive: false,
attribute: 'version',
@@ -118,8 +144,6 @@ const fixupSemverSpecs = astNode => {
astNode.lookupProperties = []
}
- astNode.semverFunc = semverFunc
- astNode.semverValue = value
astNode.nodes.length = 0
}
@@ -142,6 +166,46 @@ const fixupOutdated = astNode => {
}
}
+const fixupVuln = astNode => {
+ const vulns = []
+ if (astNode.nodes.length) {
+ for (const selector of astNode.nodes) {
+ const vuln = {}
+ for (const node of selector.nodes) {
+ if (node.type !== 'attribute') {
+ throw Object.assign(
+ new Error(':vuln pseudo-class only accepts attribute matchers or "cwe" tag'),
+ { code: 'EQUERYATTR' }
+ )
+ }
+ if (!['severity', 'cwe'].includes(node._attribute)) {
+ throw Object.assign(
+ new Error(':vuln pseudo-class only matches "severity" and "cwe" attributes'),
+ { code: 'EQUERYATTR' }
+ )
+ }
+ if (!node.operator) {
+ node.operator = '='
+ node.value = '*'
+ }
+ if (node.operator !== '=') {
+ throw Object.assign(
+ new Error(':vuln pseudo-class attribute selector only accepts "=" operator', node),
+ { code: 'EQUERYATTR' }
+ )
+ }
+ if (!vuln[node._attribute]) {
+ vuln[node._attribute] = []
+ }
+ vuln[node._attribute].push(node._value)
+ }
+ vulns.push(vuln)
+ }
+ astNode.vulns = vulns
+ astNode.nodes.length = 0
+ }
+}
+
// a few of the supported ast nodes need to be tweaked in order to properly be
// interpreted as proper arborist query selectors, namely semver ranges from
// both ids and :semver pseudo-class selectors need to be translated from what
@@ -168,6 +232,8 @@ const transformAst = selector => {
return fixupTypes(nextAstNode)
case ':outdated':
return fixupOutdated(nextAstNode)
+ case ':vuln':
+ return fixupVuln(nextAstNode)
}
})
}
diff --git a/node_modules/@npmcli/query/package.json b/node_modules/@npmcli/query/package.json
index 1e4abd37656f6..ad45c18c44cd6 100644
--- a/node_modules/@npmcli/query/package.json
+++ b/node_modules/@npmcli/query/package.json
@@ -1,11 +1,11 @@
{
"name": "@npmcli/query",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "npm query parser and tools",
"main": "lib/index.js",
"scripts": {
"test": "tap",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run lint -- --fix",
@@ -39,11 +39,12 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
+ "version": "4.21.3",
+ "publish": true
},
"devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.21.3",
"tap": "^16.2.0"
},
"dependencies": {
diff --git a/node_modules/@npmcli/redact/LICENSE b/node_modules/@npmcli/redact/LICENSE
new file mode 100644
index 0000000000000..c21644115c85d
--- /dev/null
+++ b/node_modules/@npmcli/redact/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 npm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@npmcli/redact/lib/deep-map.js b/node_modules/@npmcli/redact/lib/deep-map.js
new file mode 100644
index 0000000000000..b555cf9fc4c8b
--- /dev/null
+++ b/node_modules/@npmcli/redact/lib/deep-map.js
@@ -0,0 +1,78 @@
+function filterError (input) {
+ return {
+ errorType: input.name,
+ message: input.message,
+ stack: input.stack,
+ ...(input.code ? { code: input.code } : {}),
+ ...(input.statusCode ? { statusCode: input.statusCode } : {}),
+ }
+}
+
+const deepMap = (input, handler = v => v, path = ['$'], seen = new Set([input])) => {
+ // this is in an effort to maintain bole's error logging behavior
+ if (path.join('.') === '$' && input instanceof Error) {
+ return deepMap({ err: filterError(input) }, handler, path, seen)
+ }
+ if (input instanceof Error) {
+ return deepMap(filterError(input), handler, path, seen)
+ }
+ if (input instanceof Buffer) {
+ return `[unable to log instanceof buffer]`
+ }
+ if (input instanceof Uint8Array) {
+ return `[unable to log instanceof Uint8Array]`
+ }
+
+ if (Array.isArray(input)) {
+ const result = []
+ for (let i = 0; i < input.length; i++) {
+ const element = input[i]
+ const elementPath = [...path, i]
+ if (element instanceof Object) {
+ if (!seen.has(element)) { // avoid getting stuck in circular reference
+ seen.add(element)
+ result.push(deepMap(handler(element, elementPath), handler, elementPath, seen))
+ }
+ } else {
+ result.push(handler(element, elementPath))
+ }
+ }
+ return result
+ }
+
+ if (input === null) {
+ return null
+ } else if (typeof input === 'object' || typeof input === 'function') {
+ const result = {}
+
+ for (const propertyName of Object.getOwnPropertyNames(input)) {
+ // skip logging internal properties
+ if (propertyName.startsWith('_')) {
+ continue
+ }
+
+ try {
+ const property = input[propertyName]
+ const propertyPath = [...path, propertyName]
+ if (property instanceof Object) {
+ if (!seen.has(property)) { // avoid getting stuck in circular reference
+ seen.add(property)
+ result[propertyName] = deepMap(
+ handler(property, propertyPath), handler, propertyPath, seen
+ )
+ }
+ } else {
+ result[propertyName] = handler(property, propertyPath)
+ }
+ } catch (err) {
+ // a getter may throw an error
+ result[propertyName] = `[error getting value: ${err.message}]`
+ }
+ }
+ return result
+ }
+
+ return handler(input, path)
+}
+
+module.exports = { deepMap }
diff --git a/node_modules/@npmcli/redact/lib/index.js b/node_modules/@npmcli/redact/lib/index.js
new file mode 100644
index 0000000000000..9b10c7f6a0081
--- /dev/null
+++ b/node_modules/@npmcli/redact/lib/index.js
@@ -0,0 +1,44 @@
+const matchers = require('./matchers')
+const { redactUrlPassword } = require('./utils')
+
+const REPLACE = '***'
+
+const redact = (value) => {
+ if (typeof value !== 'string' || !value) {
+ return value
+ }
+ return redactUrlPassword(value, REPLACE)
+ .replace(matchers.NPM_SECRET.pattern, `npm_${REPLACE}`)
+ .replace(matchers.UUID.pattern, REPLACE)
+}
+
+// split on \s|= similar to how nopt parses options
+const splitAndRedact = (str) => {
+ // stateful regex, don't move out of this scope
+ const splitChars = /[\s=]/g
+
+ let match = null
+ let result = ''
+ let index = 0
+ while (match = splitChars.exec(str)) {
+ result += redact(str.slice(index, match.index)) + match[0]
+ index = splitChars.lastIndex
+ }
+
+ return result + redact(str.slice(index))
+}
+
+// replaces auth info in an array of arguments or in a string
+const redactLog = (arg) => {
+ if (typeof arg === 'string') {
+ return splitAndRedact(arg)
+ } else if (Array.isArray(arg)) {
+ return arg.map((a) => typeof a === 'string' ? splitAndRedact(a) : a)
+ }
+ return arg
+}
+
+module.exports = {
+ redact,
+ redactLog,
+}
diff --git a/node_modules/@npmcli/redact/lib/matchers.js b/node_modules/@npmcli/redact/lib/matchers.js
new file mode 100644
index 0000000000000..fe9b9071de8a1
--- /dev/null
+++ b/node_modules/@npmcli/redact/lib/matchers.js
@@ -0,0 +1,81 @@
+const TYPE_REGEX = 'regex'
+const TYPE_URL = 'url'
+const TYPE_PATH = 'path'
+
+const NPM_SECRET = {
+ type: TYPE_REGEX,
+ pattern: /\b(npms?_)[a-zA-Z0-9]{36,48}\b/gi,
+ replacement: `[REDACTED_NPM_SECRET]`,
+}
+
+const AUTH_HEADER = {
+ type: TYPE_REGEX,
+ pattern: /\b(Basic\s+|Bearer\s+)[\w+=\-.]+\b/gi,
+ replacement: `[REDACTED_AUTH_HEADER]`,
+}
+
+const JSON_WEB_TOKEN = {
+ type: TYPE_REGEX,
+ pattern: /\b[A-Za-z0-9-_]{10,}(?!\.\d+\.)\.[A-Za-z0-9-_]{3,}\.[A-Za-z0-9-_]{20,}\b/gi,
+ replacement: `[REDACTED_JSON_WEB_TOKEN]`,
+}
+
+const UUID = {
+ type: TYPE_REGEX,
+ pattern: /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi,
+ replacement: `[REDACTED_UUID]`,
+}
+
+const URL_MATCHER = {
+ type: TYPE_REGEX,
+ pattern: /(?:https?|ftp):\/\/[^\s/"$.?#].[^\s"]*/gi,
+ replacement: '[REDACTED_URL]',
+}
+
+const DEEP_HEADER_AUTHORIZATION = {
+ type: TYPE_PATH,
+ predicate: ({ path }) => path.endsWith('.headers.authorization'),
+ replacement: '[REDACTED_HEADER_AUTHORIZATION]',
+}
+
+const DEEP_HEADER_SET_COOKIE = {
+ type: TYPE_PATH,
+ predicate: ({ path }) => path.endsWith('.headers.set-cookie'),
+ replacement: '[REDACTED_HEADER_SET_COOKIE]',
+}
+
+const REWRITE_REQUEST = {
+ type: TYPE_PATH,
+ predicate: ({ path }) => path.endsWith('.request'),
+ replacement: (input) => ({
+ method: input?.method,
+ path: input?.path,
+ headers: input?.headers,
+ url: input?.url,
+ }),
+}
+
+const REWRITE_RESPONSE = {
+ type: TYPE_PATH,
+ predicate: ({ path }) => path.endsWith('.response'),
+ replacement: (input) => ({
+ data: input?.data,
+ status: input?.status,
+ headers: input?.headers,
+ }),
+}
+
+module.exports = {
+ TYPE_REGEX,
+ TYPE_URL,
+ TYPE_PATH,
+ NPM_SECRET,
+ AUTH_HEADER,
+ JSON_WEB_TOKEN,
+ UUID,
+ URL_MATCHER,
+ DEEP_HEADER_AUTHORIZATION,
+ DEEP_HEADER_SET_COOKIE,
+ REWRITE_REQUEST,
+ REWRITE_RESPONSE,
+}
diff --git a/node_modules/@npmcli/redact/lib/server.js b/node_modules/@npmcli/redact/lib/server.js
new file mode 100644
index 0000000000000..669e834da6131
--- /dev/null
+++ b/node_modules/@npmcli/redact/lib/server.js
@@ -0,0 +1,34 @@
+const {
+ AUTH_HEADER,
+ JSON_WEB_TOKEN,
+ NPM_SECRET,
+ DEEP_HEADER_AUTHORIZATION,
+ DEEP_HEADER_SET_COOKIE,
+ REWRITE_REQUEST,
+ REWRITE_RESPONSE,
+} = require('./matchers')
+
+const {
+ redactUrlMatcher,
+ redactUrlPasswordMatcher,
+ redactMatchers,
+} = require('./utils')
+
+const { deepMap } = require('./deep-map')
+
+const _redact = redactMatchers(
+ NPM_SECRET,
+ AUTH_HEADER,
+ JSON_WEB_TOKEN,
+ DEEP_HEADER_AUTHORIZATION,
+ DEEP_HEADER_SET_COOKIE,
+ REWRITE_REQUEST,
+ REWRITE_RESPONSE,
+ redactUrlMatcher(
+ redactUrlPasswordMatcher()
+ )
+)
+
+const redact = (input) => deepMap(input, (value, path) => _redact(value, { path }))
+
+module.exports = { redact }
diff --git a/node_modules/@npmcli/redact/lib/utils.js b/node_modules/@npmcli/redact/lib/utils.js
new file mode 100644
index 0000000000000..8395ab25fc373
--- /dev/null
+++ b/node_modules/@npmcli/redact/lib/utils.js
@@ -0,0 +1,202 @@
+const {
+ URL_MATCHER,
+ TYPE_URL,
+ TYPE_REGEX,
+ TYPE_PATH,
+} = require('./matchers')
+
+/**
+ * creates a string of asterisks,
+ * this forces a minimum asterisk for security purposes
+ */
+const asterisk = (length = 0) => {
+ length = typeof length === 'string' ? length.length : length
+ if (length < 8) {
+ return '*'.repeat(8)
+ }
+ return '*'.repeat(length)
+}
+
+/**
+ * escapes all special regex chars
+ * @see https://stackoverflow.com/a/9310752
+ * @see https://github.com/tc39/proposal-regex-escaping
+ */
+const escapeRegExp = (text) => {
+ return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, `\\$&`)
+}
+
+/**
+ * provides a regex "or" of the url versions of a string
+ */
+const urlEncodeRegexGroup = (value) => {
+ const decoded = decodeURIComponent(value)
+ const encoded = encodeURIComponent(value)
+ const union = [...new Set([encoded, decoded, value])].map(escapeRegExp).join('|')
+ return union
+}
+
+/**
+ * a tagged template literal that returns a regex and ensures all variables are escaped
+ */
+const urlEncodeRegexTag = (strings, ...values) => {
+ let pattern = ''
+ for (let i = 0; i < values.length; i++) {
+ pattern += strings[i] + `(${urlEncodeRegexGroup(values[i])})`
+ }
+ pattern += strings[strings.length - 1]
+ return new RegExp(pattern)
+}
+
+/**
+ * creates a matcher for redacting url hostname
+ */
+const redactUrlHostnameMatcher = ({ hostname, replacement } = {}) => ({
+ type: TYPE_URL,
+ predicate: ({ url }) => url.hostname === hostname,
+ pattern: ({ url }) => {
+ return urlEncodeRegexTag`(^${url.protocol}//${url.username}:.+@)?${url.hostname}`
+ },
+ replacement: `$1${replacement || asterisk()}`,
+})
+
+/**
+ * creates a matcher for redacting url search / query parameter values
+ */
+const redactUrlSearchParamsMatcher = ({ param, replacement } = {}) => ({
+ type: TYPE_URL,
+ predicate: ({ url }) => url.searchParams.has(param),
+ pattern: ({ url }) => urlEncodeRegexTag`(${param}=)${url.searchParams.get(param)}`,
+ replacement: `$1${replacement || asterisk()}`,
+})
+
+/** creates a matcher for redacting the url password */
+const redactUrlPasswordMatcher = ({ replacement } = {}) => ({
+ type: TYPE_URL,
+ predicate: ({ url }) => url.password,
+ pattern: ({ url }) => urlEncodeRegexTag`(^${url.protocol}//${url.username}:)${url.password}`,
+ replacement: `$1${replacement || asterisk()}`,
+})
+
+const redactUrlReplacement = (...matchers) => (subValue) => {
+ try {
+ const url = new URL(subValue)
+ return redactMatchers(...matchers)(subValue, { url })
+ } catch (err) {
+ return subValue
+ }
+}
+
+/**
+ * creates a matcher / submatcher for urls, this function allows you to first
+ * collect all urls within a larger string and then pass those urls to a
+ * submatcher
+ *
+ * @example
+ * console.log("this will first match all urls, then pass those urls to the password patcher")
+ * redactMatchers(redactUrlMatcher(redactUrlPasswordMatcher()))
+ *
+ * @example
+ * console.log(
+ * "this will assume you are passing in a string that is a url, and will redact the password"
+ * )
+ * redactMatchers(redactUrlPasswordMatcher())
+ *
+ */
+const redactUrlMatcher = (...matchers) => {
+ return {
+ ...URL_MATCHER,
+ replacement: redactUrlReplacement(...matchers),
+ }
+}
+
+const matcherFunctions = {
+ [TYPE_REGEX]: (matcher) => (value) => {
+ if (typeof value === 'string') {
+ value = value.replace(matcher.pattern, matcher.replacement)
+ }
+ return value
+ },
+ [TYPE_URL]: (matcher) => (value, ctx) => {
+ if (typeof value === 'string') {
+ try {
+ const url = ctx?.url || new URL(value)
+ const { predicate, pattern } = matcher
+ const predicateValue = predicate({ url })
+ if (predicateValue) {
+ value = value.replace(pattern({ url }), matcher.replacement)
+ }
+ } catch (_e) {
+ return value
+ }
+ }
+ return value
+ },
+ [TYPE_PATH]: (matcher) => (value, ctx) => {
+ const rawPath = ctx?.path
+ const path = rawPath.join('.').toLowerCase()
+ const { predicate, replacement } = matcher
+ const replace = typeof replacement === 'function' ? replacement : () => replacement
+ const shouldRun = predicate({ rawPath, path })
+ if (shouldRun) {
+ value = replace(value, { rawPath, path })
+ }
+ return value
+ },
+}
+
+/** converts a matcher to a function */
+const redactMatcher = (matcher) => {
+ return matcherFunctions[matcher.type](matcher)
+}
+
+/** converts a series of matchers to a function */
+const redactMatchers = (...matchers) => (value, ctx) => {
+ const flatMatchers = matchers.flat()
+ return flatMatchers.reduce((result, matcher) => {
+ const fn = (typeof matcher === 'function') ? matcher : redactMatcher(matcher)
+ return fn(result, ctx)
+ }, value)
+}
+
+/**
+ * replacement handler, keeping $1 (if it exists) and replacing the
+ * rest of the string with asterisks, maintaining string length
+ */
+const redactDynamicReplacement = () => (value, start) => {
+ if (typeof start === 'number') {
+ return asterisk(value)
+ }
+ return start + asterisk(value.substring(start.length).length)
+}
+
+/**
+ * replacement handler, keeping $1 (if it exists) and replacing the
+ * rest of the string with a fixed number of asterisks
+ */
+const redactFixedReplacement = (length) => (_value, start) => {
+ if (typeof start === 'number') {
+ return asterisk(length)
+ }
+ return start + asterisk(length)
+}
+
+const redactUrlPassword = (value, replacement) => {
+ return redactMatchers(redactUrlPasswordMatcher({ replacement }))(value)
+}
+
+module.exports = {
+ asterisk,
+ escapeRegExp,
+ urlEncodeRegexGroup,
+ urlEncodeRegexTag,
+ redactUrlHostnameMatcher,
+ redactUrlSearchParamsMatcher,
+ redactUrlPasswordMatcher,
+ redactUrlMatcher,
+ redactUrlReplacement,
+ redactDynamicReplacement,
+ redactFixedReplacement,
+ redactMatchers,
+ redactUrlPassword,
+}
diff --git a/node_modules/@npmcli/redact/package.json b/node_modules/@npmcli/redact/package.json
new file mode 100644
index 0000000000000..831387ca54106
--- /dev/null
+++ b/node_modules/@npmcli/redact/package.json
@@ -0,0 +1,51 @@
+{
+ "name": "@npmcli/redact",
+ "version": "2.0.1",
+ "description": "Redact sensitive npm information from output",
+ "main": "lib/index.js",
+ "exports": {
+ ".": "./lib/index.js",
+ "./server": "./lib/server.js",
+ "./package.json": "./package.json"
+ },
+ "scripts": {
+ "test": "tap",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+ "postlint": "template-oss-check",
+ "template-oss-apply": "template-oss-apply --force",
+ "lintfix": "npm run lint -- --fix",
+ "snap": "tap",
+ "posttest": "npm run lint"
+ },
+ "keywords": [],
+ "author": "GitHub Inc.",
+ "license": "ISC",
+ "files": [
+ "bin/",
+ "lib/"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/npm/redact.git"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.21.3",
+ "publish": true
+ },
+ "tap": {
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ],
+ "timeout": 120
+ },
+ "devDependencies": {
+ "@npmcli/eslint-config": "^4.0.2",
+ "@npmcli/template-oss": "4.21.3",
+ "tap": "^16.3.10"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/@npmcli/run-script/lib/is-server-package.js
index d168623247527..c36c40d4898d5 100644
--- a/node_modules/@npmcli/run-script/lib/is-server-package.js
+++ b/node_modules/@npmcli/run-script/lib/is-server-package.js
@@ -1,7 +1,6 @@
-const util = require('util')
-const fs = require('fs')
-const { stat } = fs.promises || { stat: util.promisify(fs.stat) }
-const { resolve } = require('path')
+const { stat } = require('node:fs/promises')
+const { resolve } = require('node:path')
+
module.exports = async path => {
try {
const st = await stat(resolve(path, 'server.js'))
diff --git a/node_modules/@npmcli/run-script/lib/is-windows.js b/node_modules/@npmcli/run-script/lib/is-windows.js
deleted file mode 100644
index 651917e6ad27a..0000000000000
--- a/node_modules/@npmcli/run-script/lib/is-windows.js
+++ /dev/null
@@ -1,2 +0,0 @@
-const platform = process.env.__FAKE_TESTING_PLATFORM__ || process.platform
-module.exports = platform === 'win32'
diff --git a/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/@npmcli/run-script/lib/make-spawn-args.js
index 2b2f96a91c8d5..8a32d7198cb2e 100644
--- a/node_modules/@npmcli/run-script/lib/make-spawn-args.js
+++ b/node_modules/@npmcli/run-script/lib/make-spawn-args.js
@@ -9,10 +9,10 @@ const makeSpawnArgs = options => {
path,
scriptShell = true,
binPaths,
- env = {},
+ env,
stdio,
cmd,
- args = [],
+ args,
stdioString,
} = options
diff --git a/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/@npmcli/run-script/lib/package-envs.js
index 6b538e50247fd..612f850fb076c 100644
--- a/node_modules/@npmcli/run-script/lib/package-envs.js
+++ b/node_modules/@npmcli/run-script/lib/package-envs.js
@@ -1,26 +1,29 @@
-// https://github.com/npm/rfcs/pull/183
-
-const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n')
- : val === null || val === false ? ''
- : String(val)
-
-const packageEnvs = (env, vals, prefix) => {
+const packageEnvs = (vals, prefix, env = {}) => {
for (const [key, val] of Object.entries(vals)) {
if (val === undefined) {
continue
- } else if (val && !Array.isArray(val) && typeof val === 'object') {
- packageEnvs(env, val, `${prefix}${key}_`)
+ } else if (val === null || val === false) {
+ env[`${prefix}${key}`] = ''
+ } else if (Array.isArray(val)) {
+ val.forEach((item, index) => {
+ packageEnvs({ [`${key}_${index}`]: item }, `${prefix}`, env)
+ })
+ } else if (typeof val === 'object') {
+ packageEnvs(val, `${prefix}${key}_`, env)
} else {
- env[`${prefix}${key}`] = envVal(val)
+ env[`${prefix}${key}`] = String(val)
}
}
return env
}
-module.exports = (env, pkg) => packageEnvs({ ...env }, {
- name: pkg.name,
- version: pkg.version,
- config: pkg.config,
- engines: pkg.engines,
- bin: pkg.bin,
-}, 'npm_package_')
+// https://github.com/npm/rfcs/pull/183 defines which fields we put into the environment
+module.exports = pkg => {
+ return packageEnvs({
+ name: pkg.name,
+ version: pkg.version,
+ config: pkg.config,
+ engines: pkg.engines,
+ bin: pkg.bin,
+ }, 'npm_package_')
+}
diff --git a/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/@npmcli/run-script/lib/run-script-pkg.js
index cbb0a0b3a5e73..9900c96315f85 100644
--- a/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ b/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -5,19 +5,6 @@ const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp'
const signalManager = require('./signal-manager.js')
const isServerPackage = require('./is-server-package.js')
-// you wouldn't like me when I'm angry...
-const bruce = (id, event, cmd, args) => {
- let banner = id
- ? `\n> ${id} ${event}\n`
- : `\n> ${event}\n`
- banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
- if (args.length) {
- banner += ` ${args.join(' ')}`
- }
- banner += '\n'
- return banner
-}
-
const runScriptPkg = async options => {
const {
event,
@@ -29,8 +16,6 @@ const runScriptPkg = async options => {
pkg,
args = [],
stdioString,
- // note: only used when stdio:inherit
- banner = true,
// how long to wait for a process.kill signal
// only exposed here so that we can make the test go a bit faster.
signalTimeout = 500,
@@ -59,9 +44,22 @@ const runScriptPkg = async options => {
return { code: 0, signal: null }
}
- if (stdio === 'inherit' && banner !== false) {
- // we're dumping to the parent's stdout, so print the banner
- console.log(bruce(pkg._id, event, cmd, args))
+ let inputEnd = () => {}
+ if (stdio === 'inherit') {
+ let banner
+ if (pkg._id) {
+ banner = `\n> ${pkg._id} ${event}\n`
+ } else {
+ banner = `\n> ${event}\n`
+ }
+ banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
+ if (args.length) {
+ banner += ` ${args.join(' ')}`
+ }
+ banner += '\n'
+ const { output, input } = require('proc-log')
+ output.standard(banner)
+ inputEnd = input.start()
}
const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
@@ -69,7 +67,7 @@ const runScriptPkg = async options => {
path,
scriptShell,
binPaths,
- env: packageEnvs(env, pkg),
+ env: { ...env, ...packageEnvs(pkg) },
stdio,
cmd,
args,
@@ -93,8 +91,14 @@ const runScriptPkg = async options => {
return p.catch(er => {
const { signal } = er
+ // coverage disabled because win32 never emits signals
+ /* istanbul ignore next */
if (stdio === 'inherit' && signal) {
+ // by the time we reach here, the child has already exited. we send the
+ // signal back to ourselves again so that npm will exit with the same
+ // status as the child
process.kill(process.pid, signal)
+
// just in case we don't die, reject after 500ms
// this also keeps the node process open long enough to actually
// get the signal, rather than terminating gracefully.
@@ -102,7 +106,7 @@ const runScriptPkg = async options => {
} else {
throw er
}
- })
+ }).finally(inputEnd)
}
module.exports = runScriptPkg
diff --git a/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/@npmcli/run-script/lib/run-script.js
index e9d18261a2c1f..b00304c8d6e7f 100644
--- a/node_modules/@npmcli/run-script/lib/run-script.js
+++ b/node_modules/@npmcli/run-script/lib/run-script.js
@@ -1,14 +1,15 @@
-const rpj = require('read-package-json-fast')
+const PackageJson = require('@npmcli/package-json')
const runScriptPkg = require('./run-script-pkg.js')
const validateOptions = require('./validate-options.js')
const isServerPackage = require('./is-server-package.js')
-const runScript = options => {
+const runScript = async options => {
validateOptions(options)
- const { pkg, path } = options
- return pkg ? runScriptPkg(options)
- : rpj(path + '/package.json')
- .then(readPackage => runScriptPkg({ ...options, pkg: readPackage }))
+ if (options.pkg) {
+ return runScriptPkg(options)
+ }
+ const { content: pkg } = await PackageJson.normalize(options.path)
+ return runScriptPkg({ ...options, pkg })
}
module.exports = Object.assign(runScript, { isServerPackage })
diff --git a/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/@npmcli/run-script/lib/signal-manager.js
index 7e10f859e0a68..a099a4af2b9be 100644
--- a/node_modules/@npmcli/run-script/lib/signal-manager.js
+++ b/node_modules/@npmcli/run-script/lib/signal-manager.js
@@ -6,6 +6,9 @@ const forwardedSignals = [
'SIGTERM',
]
+// no-op, this is so receiving the signal doesn't cause us to exit immediately
+// instead, we exit after all children have exited when we re-send the signal
+// to ourselves. see the catch handler at the bottom of run-script-pkg.js
const handleSignal = signal => {
for (const proc of runningProcs) {
proc.kill(signal)
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index dbae5733a368b..8a83e726fbeb2 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,13 +1,13 @@
{
"name": "@npmcli/run-script",
- "version": "6.0.0",
+ "version": "8.1.0",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "GitHub Inc.",
"license": "ISC",
"scripts": {
"test": "tap",
"eslint": "eslint",
- "lint": "eslint \"**/*.js\"",
+ "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"postlint": "template-oss-check",
"snap": "tap",
@@ -16,17 +16,17 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.8.0",
- "minipass": "^3.1.6",
- "require-inject": "^1.4.4",
+ "@npmcli/template-oss": "4.21.4",
+ "spawk": "^1.8.1",
"tap": "^16.0.1"
},
"dependencies": {
"@npmcli/node-gyp": "^3.0.0",
- "@npmcli/promise-spawn": "^6.0.0",
- "node-gyp": "^9.0.0",
- "read-package-json-fast": "^3.0.0",
- "which": "^3.0.0"
+ "@npmcli/package-json": "^5.0.0",
+ "@npmcli/promise-spawn": "^7.0.0",
+ "node-gyp": "^10.0.0",
+ "proc-log": "^4.0.0",
+ "which": "^4.0.0"
},
"files": [
"bin/",
@@ -38,11 +38,12 @@
"url": "https://github.com/npm/run-script.git"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.8.0"
+ "version": "4.21.4",
+ "publish": "true"
},
"tap": {
"nyc-arg": [
diff --git a/node_modules/@pkgjs/parseargs/LICENSE b/node_modules/@pkgjs/parseargs/LICENSE
new file mode 100644
index 0000000000000..261eeb9e9f8b2
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@pkgjs/parseargs/examples/is-default-value.js b/node_modules/@pkgjs/parseargs/examples/is-default-value.js
new file mode 100644
index 0000000000000..0a67972b71d13
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/is-default-value.js
@@ -0,0 +1,25 @@
+'use strict';
+
+// This example shows how to understand if a default value is used or not.
+
+// 1. const { parseArgs } = require('node:util'); // from node
+// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
+const { parseArgs } = require('..'); // in repo
+
+const options = {
+ file: { short: 'f', type: 'string', default: 'FOO' },
+};
+
+const { values, tokens } = parseArgs({ options, tokens: true });
+
+const isFileDefault = !tokens.some((token) => token.kind === 'option' &&
+ token.name === 'file'
+);
+
+console.log(values);
+console.log(`Is the file option [${values.file}] the default value? ${isFileDefault}`);
+
+// Try the following:
+// node is-default-value.js
+// node is-default-value.js -f FILE
+// node is-default-value.js --file FILE
diff --git a/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js b/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js
new file mode 100644
index 0000000000000..943e643ee9553
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js
@@ -0,0 +1,35 @@
+'use strict';
+
+// This is an example of using tokens to add a custom behaviour.
+//
+// Require the use of `=` for long options and values by blocking
+// the use of space separated values.
+// So allow `--foo=bar`, and not allow `--foo bar`.
+//
+// Note: this is not a common behaviour, most CLIs allow both forms.
+
+// 1. const { parseArgs } = require('node:util'); // from node
+// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
+const { parseArgs } = require('..'); // in repo
+
+const options = {
+ file: { short: 'f', type: 'string' },
+ log: { type: 'string' },
+};
+
+const { values, tokens } = parseArgs({ options, tokens: true });
+
+const badToken = tokens.find((token) => token.kind === 'option' &&
+ token.value != null &&
+ token.rawName.startsWith('--') &&
+ !token.inlineValue
+);
+if (badToken) {
+ throw new Error(`Option value for '${badToken.rawName}' must be inline, like '${badToken.rawName}=VALUE'`);
+}
+
+console.log(values);
+
+// Try the following:
+// node limit-long-syntax.js -f FILE --log=LOG
+// node limit-long-syntax.js --file FILE
diff --git a/node_modules/@pkgjs/parseargs/examples/negate.js b/node_modules/@pkgjs/parseargs/examples/negate.js
new file mode 100644
index 0000000000000..b6634690a4a0c
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/negate.js
@@ -0,0 +1,43 @@
+'use strict';
+
+// This example is used in the documentation.
+
+// How might I add my own support for --no-foo?
+
+// 1. const { parseArgs } = require('node:util'); // from node
+// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
+const { parseArgs } = require('..'); // in repo
+
+const options = {
+ 'color': { type: 'boolean' },
+ 'no-color': { type: 'boolean' },
+ 'logfile': { type: 'string' },
+ 'no-logfile': { type: 'boolean' },
+};
+const { values, tokens } = parseArgs({ options, tokens: true });
+
+// Reprocess the option tokens and overwrite the returned values.
+tokens
+ .filter((token) => token.kind === 'option')
+ .forEach((token) => {
+ if (token.name.startsWith('no-')) {
+ // Store foo:false for --no-foo
+ const positiveName = token.name.slice(3);
+ values[positiveName] = false;
+ delete values[token.name];
+ } else {
+ // Resave value so last one wins if both --foo and --no-foo.
+ values[token.name] = token.value ?? true;
+ }
+ });
+
+const color = values.color;
+const logfile = values.logfile ?? 'default.log';
+
+console.log({ logfile, color });
+
+// Try the following:
+// node negate.js
+// node negate.js --no-logfile --no-color
+// node negate.js --logfile=test.log --color
+// node negate.js --no-logfile --logfile=test.log --color --no-color
diff --git a/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js b/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js
new file mode 100644
index 0000000000000..0c324688af030
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js
@@ -0,0 +1,31 @@
+'use strict';
+
+// This is an example of using tokens to add a custom behaviour.
+//
+// Throw an error if an option is used more than once.
+
+// 1. const { parseArgs } = require('node:util'); // from node
+// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
+const { parseArgs } = require('..'); // in repo
+
+const options = {
+ ding: { type: 'boolean', short: 'd' },
+ beep: { type: 'boolean', short: 'b' }
+};
+const { values, tokens } = parseArgs({ options, tokens: true });
+
+const seenBefore = new Set();
+tokens.forEach((token) => {
+ if (token.kind !== 'option') return;
+ if (seenBefore.has(token.name)) {
+ throw new Error(`option '${token.name}' used multiple times`);
+ }
+ seenBefore.add(token.name);
+});
+
+console.log(values);
+
+// Try the following:
+// node no-repeated-options --ding --beep
+// node no-repeated-options --beep -b
+// node no-repeated-options -ddd
diff --git a/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs b/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs
new file mode 100644
index 0000000000000..8ab7367b8bbb1
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs
@@ -0,0 +1,41 @@
+// This is an example of using tokens to add a custom behaviour.
+//
+// This adds an option order check so that --some-unstable-option
+// may only be used after --enable-experimental-options
+//
+// Note: this is not a common behaviour, the order of different options
+// does not usually matter.
+
+import { parseArgs } from '../index.js';
+
+function findTokenIndex(tokens, target) {
+ return tokens.findIndex((token) => token.kind === 'option' &&
+ token.name === target
+ );
+}
+
+const experimentalName = 'enable-experimental-options';
+const unstableName = 'some-unstable-option';
+
+const options = {
+ [experimentalName]: { type: 'boolean' },
+ [unstableName]: { type: 'boolean' },
+};
+
+const { values, tokens } = parseArgs({ options, tokens: true });
+
+const experimentalIndex = findTokenIndex(tokens, experimentalName);
+const unstableIndex = findTokenIndex(tokens, unstableName);
+if (unstableIndex !== -1 &&
+ ((experimentalIndex === -1) || (unstableIndex < experimentalIndex))) {
+ throw new Error(`'--${experimentalName}' must be specified before '--${unstableName}'`);
+}
+
+console.log(values);
+
+/* eslint-disable max-len */
+// Try the following:
+// node ordered-options.mjs
+// node ordered-options.mjs --some-unstable-option
+// node ordered-options.mjs --some-unstable-option --enable-experimental-options
+// node ordered-options.mjs --enable-experimental-options --some-unstable-option
diff --git a/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js b/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js
new file mode 100644
index 0000000000000..eff04c2a60fa2
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js
@@ -0,0 +1,26 @@
+'use strict';
+
+// This example is used in the documentation.
+
+// 1. const { parseArgs } = require('node:util'); // from node
+// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
+const { parseArgs } = require('..'); // in repo
+
+const args = ['-f', '--bar', 'b'];
+const options = {
+ foo: {
+ type: 'boolean',
+ short: 'f'
+ },
+ bar: {
+ type: 'string'
+ }
+};
+const {
+ values,
+ positionals
+} = parseArgs({ args, options });
+console.log(values, positionals);
+
+// Try the following:
+// node simple-hard-coded.js
diff --git a/node_modules/@pkgjs/parseargs/index.js b/node_modules/@pkgjs/parseargs/index.js
new file mode 100644
index 0000000000000..b1004c7b72f27
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/index.js
@@ -0,0 +1,396 @@
+'use strict';
+
+const {
+ ArrayPrototypeForEach,
+ ArrayPrototypeIncludes,
+ ArrayPrototypeMap,
+ ArrayPrototypePush,
+ ArrayPrototypePushApply,
+ ArrayPrototypeShift,
+ ArrayPrototypeSlice,
+ ArrayPrototypeUnshiftApply,
+ ObjectEntries,
+ ObjectPrototypeHasOwnProperty: ObjectHasOwn,
+ StringPrototypeCharAt,
+ StringPrototypeIndexOf,
+ StringPrototypeSlice,
+ StringPrototypeStartsWith,
+} = require('./internal/primordials');
+
+const {
+ validateArray,
+ validateBoolean,
+ validateBooleanArray,
+ validateObject,
+ validateString,
+ validateStringArray,
+ validateUnion,
+} = require('./internal/validators');
+
+const {
+ kEmptyObject,
+} = require('./internal/util');
+
+const {
+ findLongOptionForShort,
+ isLoneLongOption,
+ isLoneShortOption,
+ isLongOptionAndValue,
+ isOptionValue,
+ isOptionLikeValue,
+ isShortOptionAndValue,
+ isShortOptionGroup,
+ useDefaultValueOption,
+ objectGetOwn,
+ optionsGetOwn,
+} = require('./utils');
+
+const {
+ codes: {
+ ERR_INVALID_ARG_VALUE,
+ ERR_PARSE_ARGS_INVALID_OPTION_VALUE,
+ ERR_PARSE_ARGS_UNKNOWN_OPTION,
+ ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL,
+ },
+} = require('./internal/errors');
+
+function getMainArgs() {
+ // Work out where to slice process.argv for user supplied arguments.
+
+ // Check node options for scenarios where user CLI args follow executable.
+ const execArgv = process.execArgv;
+ if (ArrayPrototypeIncludes(execArgv, '-e') ||
+ ArrayPrototypeIncludes(execArgv, '--eval') ||
+ ArrayPrototypeIncludes(execArgv, '-p') ||
+ ArrayPrototypeIncludes(execArgv, '--print')) {
+ return ArrayPrototypeSlice(process.argv, 1);
+ }
+
+ // Normally first two arguments are executable and script, then CLI arguments
+ return ArrayPrototypeSlice(process.argv, 2);
+}
+
+/**
+ * In strict mode, throw for possible usage errors like --foo --bar
+ *
+ * @param {object} token - from tokens as available from parseArgs
+ */
+function checkOptionLikeValue(token) {
+ if (!token.inlineValue && isOptionLikeValue(token.value)) {
+ // Only show short example if user used short option.
+ const example = StringPrototypeStartsWith(token.rawName, '--') ?
+ `'${token.rawName}=-XYZ'` :
+ `'--${token.name}=-XYZ' or '${token.rawName}-XYZ'`;
+ const errorMessage = `Option '${token.rawName}' argument is ambiguous.
+Did you forget to specify the option argument for '${token.rawName}'?
+To specify an option argument starting with a dash use ${example}.`;
+ throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(errorMessage);
+ }
+}
+
+/**
+ * In strict mode, throw for usage errors.
+ *
+ * @param {object} config - from config passed to parseArgs
+ * @param {object} token - from tokens as available from parseArgs
+ */
+function checkOptionUsage(config, token) {
+ if (!ObjectHasOwn(config.options, token.name)) {
+ throw new ERR_PARSE_ARGS_UNKNOWN_OPTION(
+ token.rawName, config.allowPositionals);
+ }
+
+ const short = optionsGetOwn(config.options, token.name, 'short');
+ const shortAndLong = `${short ? `-${short}, ` : ''}--${token.name}`;
+ const type = optionsGetOwn(config.options, token.name, 'type');
+ if (type === 'string' && typeof token.value !== 'string') {
+    throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong} <value>' argument missing`);
+ }
+ // (Idiomatic test for undefined||null, expecting undefined.)
+ if (type === 'boolean' && token.value != null) {
+ throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong}' does not take an argument`);
+ }
+}
+
+
+/**
+ * Store the option value in `values`.
+ *
+ * @param {string} longOption - long option name e.g. 'foo'
+ * @param {string|undefined} optionValue - value from user args
+ * @param {object} options - option configs, from parseArgs({ options })
+ * @param {object} values - option values returned in `values` by parseArgs
+ */
+function storeOption(longOption, optionValue, options, values) {
+ if (longOption === '__proto__') {
+ return; // No. Just no.
+ }
+
+ // We store based on the option value rather than option type,
+  // preserving the user's intent for the author to deal with.
+ const newValue = optionValue ?? true;
+ if (optionsGetOwn(options, longOption, 'multiple')) {
+ // Always store value in array, including for boolean.
+ // values[longOption] starts out not present,
+ // first value is added as new array [newValue],
+ // subsequent values are pushed to existing array.
+ // (note: values has null prototype, so simpler usage)
+ if (values[longOption]) {
+ ArrayPrototypePush(values[longOption], newValue);
+ } else {
+ values[longOption] = [newValue];
+ }
+ } else {
+ values[longOption] = newValue;
+ }
+}
+
+/**
+ * Store the default option value in `values`.
+ *
+ * @param {string} longOption - long option name e.g. 'foo'
+ * @param {string
+ * | boolean
+ * | string[]
+ * | boolean[]} optionValue - default value from option config
+ * @param {object} values - option values returned in `values` by parseArgs
+ */
+function storeDefaultOption(longOption, optionValue, values) {
+ if (longOption === '__proto__') {
+ return; // No. Just no.
+ }
+
+ values[longOption] = optionValue;
+}
+
+/**
+ * Process args and turn into identified tokens:
+ * - option (along with value, if any)
+ * - positional
+ * - option-terminator
+ *
+ * @param {string[]} args - from parseArgs({ args }) or mainArgs
+ * @param {object} options - option configs, from parseArgs({ options })
+ */
+function argsToTokens(args, options) {
+ const tokens = [];
+ let index = -1;
+ let groupCount = 0;
+
+ const remainingArgs = ArrayPrototypeSlice(args);
+ while (remainingArgs.length > 0) {
+ const arg = ArrayPrototypeShift(remainingArgs);
+ const nextArg = remainingArgs[0];
+ if (groupCount > 0)
+ groupCount--;
+ else
+ index++;
+
+ // Check if `arg` is an options terminator.
+ // Guideline 10 in https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html
+ if (arg === '--') {
+ // Everything after a bare '--' is considered a positional argument.
+ ArrayPrototypePush(tokens, { kind: 'option-terminator', index });
+ ArrayPrototypePushApply(
+ tokens, ArrayPrototypeMap(remainingArgs, (arg) => {
+ return { kind: 'positional', index: ++index, value: arg };
+ })
+ );
+ break; // Finished processing args, leave while loop.
+ }
+
+ if (isLoneShortOption(arg)) {
+ // e.g. '-f'
+ const shortOption = StringPrototypeCharAt(arg, 1);
+ const longOption = findLongOptionForShort(shortOption, options);
+ let value;
+ let inlineValue;
+ if (optionsGetOwn(options, longOption, 'type') === 'string' &&
+ isOptionValue(nextArg)) {
+ // e.g. '-f', 'bar'
+ value = ArrayPrototypeShift(remainingArgs);
+ inlineValue = false;
+ }
+ ArrayPrototypePush(
+ tokens,
+ { kind: 'option', name: longOption, rawName: arg,
+ index, value, inlineValue });
+ if (value != null) ++index;
+ continue;
+ }
+
+ if (isShortOptionGroup(arg, options)) {
+ // Expand -fXzy to -f -X -z -y
+ const expanded = [];
+ for (let index = 1; index < arg.length; index++) {
+ const shortOption = StringPrototypeCharAt(arg, index);
+ const longOption = findLongOptionForShort(shortOption, options);
+ if (optionsGetOwn(options, longOption, 'type') !== 'string' ||
+ index === arg.length - 1) {
+ // Boolean option, or last short in group. Well formed.
+ ArrayPrototypePush(expanded, `-${shortOption}`);
+ } else {
+ // String option in middle. Yuck.
+ // Expand -abfFILE to -a -b -fFILE
+ ArrayPrototypePush(expanded, `-${StringPrototypeSlice(arg, index)}`);
+ break; // finished short group
+ }
+ }
+ ArrayPrototypeUnshiftApply(remainingArgs, expanded);
+ groupCount = expanded.length;
+ continue;
+ }
+
+ if (isShortOptionAndValue(arg, options)) {
+ // e.g. -fFILE
+ const shortOption = StringPrototypeCharAt(arg, 1);
+ const longOption = findLongOptionForShort(shortOption, options);
+ const value = StringPrototypeSlice(arg, 2);
+ ArrayPrototypePush(
+ tokens,
+ { kind: 'option', name: longOption, rawName: `-${shortOption}`,
+ index, value, inlineValue: true });
+ continue;
+ }
+
+ if (isLoneLongOption(arg)) {
+ // e.g. '--foo'
+ const longOption = StringPrototypeSlice(arg, 2);
+ let value;
+ let inlineValue;
+ if (optionsGetOwn(options, longOption, 'type') === 'string' &&
+ isOptionValue(nextArg)) {
+ // e.g. '--foo', 'bar'
+ value = ArrayPrototypeShift(remainingArgs);
+ inlineValue = false;
+ }
+ ArrayPrototypePush(
+ tokens,
+ { kind: 'option', name: longOption, rawName: arg,
+ index, value, inlineValue });
+ if (value != null) ++index;
+ continue;
+ }
+
+ if (isLongOptionAndValue(arg)) {
+ // e.g. --foo=bar
+ const equalIndex = StringPrototypeIndexOf(arg, '=');
+ const longOption = StringPrototypeSlice(arg, 2, equalIndex);
+ const value = StringPrototypeSlice(arg, equalIndex + 1);
+ ArrayPrototypePush(
+ tokens,
+ { kind: 'option', name: longOption, rawName: `--${longOption}`,
+ index, value, inlineValue: true });
+ continue;
+ }
+
+ ArrayPrototypePush(tokens, { kind: 'positional', index, value: arg });
+ }
+
+ return tokens;
+}
+
+const parseArgs = (config = kEmptyObject) => {
+ const args = objectGetOwn(config, 'args') ?? getMainArgs();
+ const strict = objectGetOwn(config, 'strict') ?? true;
+ const allowPositionals = objectGetOwn(config, 'allowPositionals') ?? !strict;
+ const returnTokens = objectGetOwn(config, 'tokens') ?? false;
+ const options = objectGetOwn(config, 'options') ?? { __proto__: null };
+ // Bundle these up for passing to strict-mode checks.
+ const parseConfig = { args, strict, options, allowPositionals };
+
+ // Validate input configuration.
+ validateArray(args, 'args');
+ validateBoolean(strict, 'strict');
+ validateBoolean(allowPositionals, 'allowPositionals');
+ validateBoolean(returnTokens, 'tokens');
+ validateObject(options, 'options');
+ ArrayPrototypeForEach(
+ ObjectEntries(options),
+ ({ 0: longOption, 1: optionConfig }) => {
+ validateObject(optionConfig, `options.${longOption}`);
+
+ // type is required
+ const optionType = objectGetOwn(optionConfig, 'type');
+ validateUnion(optionType, `options.${longOption}.type`, ['string', 'boolean']);
+
+ if (ObjectHasOwn(optionConfig, 'short')) {
+ const shortOption = optionConfig.short;
+ validateString(shortOption, `options.${longOption}.short`);
+ if (shortOption.length !== 1) {
+ throw new ERR_INVALID_ARG_VALUE(
+ `options.${longOption}.short`,
+ shortOption,
+ 'must be a single character'
+ );
+ }
+ }
+
+ const multipleOption = objectGetOwn(optionConfig, 'multiple');
+ if (ObjectHasOwn(optionConfig, 'multiple')) {
+ validateBoolean(multipleOption, `options.${longOption}.multiple`);
+ }
+
+ const defaultValue = objectGetOwn(optionConfig, 'default');
+ if (defaultValue !== undefined) {
+ let validator;
+ switch (optionType) {
+ case 'string':
+ validator = multipleOption ? validateStringArray : validateString;
+ break;
+
+ case 'boolean':
+ validator = multipleOption ? validateBooleanArray : validateBoolean;
+ break;
+ }
+ validator(defaultValue, `options.${longOption}.default`);
+ }
+ }
+ );
+
+ // Phase 1: identify tokens
+ const tokens = argsToTokens(args, options);
+
+ // Phase 2: process tokens into parsed option values and positionals
+ const result = {
+ values: { __proto__: null },
+ positionals: [],
+ };
+ if (returnTokens) {
+ result.tokens = tokens;
+ }
+ ArrayPrototypeForEach(tokens, (token) => {
+ if (token.kind === 'option') {
+ if (strict) {
+ checkOptionUsage(parseConfig, token);
+ checkOptionLikeValue(token);
+ }
+ storeOption(token.name, token.value, options, result.values);
+ } else if (token.kind === 'positional') {
+ if (!allowPositionals) {
+ throw new ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL(token.value);
+ }
+ ArrayPrototypePush(result.positionals, token.value);
+ }
+ });
+
+ // Phase 3: fill in default values for missing args
+ ArrayPrototypeForEach(ObjectEntries(options), ({ 0: longOption,
+ 1: optionConfig }) => {
+ const mustSetDefault = useDefaultValueOption(longOption,
+ optionConfig,
+ result.values);
+ if (mustSetDefault) {
+ storeDefaultOption(longOption,
+ objectGetOwn(optionConfig, 'default'),
+ result.values);
+ }
+ });
+
+
+ return result;
+};
+
+module.exports = {
+ parseArgs,
+};
diff --git a/node_modules/@pkgjs/parseargs/internal/errors.js b/node_modules/@pkgjs/parseargs/internal/errors.js
new file mode 100644
index 0000000000000..e1b237b5b1639
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/internal/errors.js
@@ -0,0 +1,47 @@
+'use strict';
+
+class ERR_INVALID_ARG_TYPE extends TypeError {
+ constructor(name, expected, actual) {
+ super(`${name} must be ${expected} got ${actual}`);
+ this.code = 'ERR_INVALID_ARG_TYPE';
+ }
+}
+
+class ERR_INVALID_ARG_VALUE extends TypeError {
+ constructor(arg1, arg2, expected) {
+ super(`The property ${arg1} ${expected}. Received '${arg2}'`);
+ this.code = 'ERR_INVALID_ARG_VALUE';
+ }
+}
+
+class ERR_PARSE_ARGS_INVALID_OPTION_VALUE extends Error {
+ constructor(message) {
+ super(message);
+ this.code = 'ERR_PARSE_ARGS_INVALID_OPTION_VALUE';
+ }
+}
+
+class ERR_PARSE_ARGS_UNKNOWN_OPTION extends Error {
+ constructor(option, allowPositionals) {
+ const suggestDashDash = allowPositionals ? `. To specify a positional argument starting with a '-', place it at the end of the command after '--', as in '-- ${JSON.stringify(option)}` : '';
+ super(`Unknown option '${option}'${suggestDashDash}`);
+ this.code = 'ERR_PARSE_ARGS_UNKNOWN_OPTION';
+ }
+}
+
+class ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL extends Error {
+ constructor(positional) {
+ super(`Unexpected argument '${positional}'. This command does not take positional arguments`);
+ this.code = 'ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL';
+ }
+}
+
+module.exports = {
+ codes: {
+ ERR_INVALID_ARG_TYPE,
+ ERR_INVALID_ARG_VALUE,
+ ERR_PARSE_ARGS_INVALID_OPTION_VALUE,
+ ERR_PARSE_ARGS_UNKNOWN_OPTION,
+ ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL,
+ }
+};
diff --git a/node_modules/@pkgjs/parseargs/internal/primordials.js b/node_modules/@pkgjs/parseargs/internal/primordials.js
new file mode 100644
index 0000000000000..63e23ab117a9c
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/internal/primordials.js
@@ -0,0 +1,393 @@
+/*
+This file is copied from https://github.com/nodejs/node/blob/v14.19.3/lib/internal/per_context/primordials.js
+under the following license:
+
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+*/
+
+'use strict';
+
+/* eslint-disable node-core/prefer-primordials */
+
+// This file subclasses and stores the JS builtins that come from the VM
+// so that Node.js's builtin modules do not need to later look these up from
+// the global proxy, which can be mutated by users.
+
+// Use of primordials have sometimes a dramatic impact on performance, please
+// benchmark all changes made in performance-sensitive areas of the codebase.
+// See: https://github.com/nodejs/node/pull/38248
+
+const primordials = {};
+
+const {
+ defineProperty: ReflectDefineProperty,
+ getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor,
+ ownKeys: ReflectOwnKeys,
+} = Reflect;
+
+// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`.
+// It is using `bind.bind(call)` to avoid using `Function.prototype.bind`
+// and `Function.prototype.call` after it may have been mutated by users.
+const { apply, bind, call } = Function.prototype;
+const uncurryThis = bind.bind(call);
+primordials.uncurryThis = uncurryThis;
+
+// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`.
+// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind`
+// and `Function.prototype.apply` after it may have been mutated by users.
+const applyBind = bind.bind(apply);
+primordials.applyBind = applyBind;
+
+// Methods that accept a variable number of arguments, and thus it's useful to
+// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`,
+// instead of `Function.prototype.call`, and thus doesn't require iterator
+// destructuring.
+const varargsMethods = [
+ // 'ArrayPrototypeConcat' is omitted, because it performs the spread
+ // on its own for arrays and array-likes with a truthy
+ // @@isConcatSpreadable symbol property.
+ 'ArrayOf',
+ 'ArrayPrototypePush',
+ 'ArrayPrototypeUnshift',
+ // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply'
+ // and 'FunctionPrototypeApply'.
+ 'MathHypot',
+ 'MathMax',
+ 'MathMin',
+ 'StringPrototypeConcat',
+ 'TypedArrayOf',
+];
+
+function getNewKey(key) {
+ return typeof key === 'symbol' ?
+ `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` :
+ `${key[0].toUpperCase()}${key.slice(1)}`;
+}
+
+function copyAccessor(dest, prefix, key, { enumerable, get, set }) {
+ ReflectDefineProperty(dest, `${prefix}Get${key}`, {
+ value: uncurryThis(get),
+ enumerable
+ });
+ if (set !== undefined) {
+ ReflectDefineProperty(dest, `${prefix}Set${key}`, {
+ value: uncurryThis(set),
+ enumerable
+ });
+ }
+}
+
+function copyPropsRenamed(src, dest, prefix) {
+ for (const key of ReflectOwnKeys(src)) {
+ const newKey = getNewKey(key);
+ const desc = ReflectGetOwnPropertyDescriptor(src, key);
+ if ('get' in desc) {
+ copyAccessor(dest, prefix, newKey, desc);
+ } else {
+ const name = `${prefix}${newKey}`;
+ ReflectDefineProperty(dest, name, desc);
+ if (varargsMethods.includes(name)) {
+ ReflectDefineProperty(dest, `${name}Apply`, {
+ // `src` is bound as the `this` so that the static `this` points
+ // to the object it was defined on,
+ // e.g.: `ArrayOfApply` gets a `this` of `Array`:
+ value: applyBind(desc.value, src),
+ });
+ }
+ }
+ }
+}
+
+function copyPropsRenamedBound(src, dest, prefix) {
+ for (const key of ReflectOwnKeys(src)) {
+ const newKey = getNewKey(key);
+ const desc = ReflectGetOwnPropertyDescriptor(src, key);
+ if ('get' in desc) {
+ copyAccessor(dest, prefix, newKey, desc);
+ } else {
+ const { value } = desc;
+ if (typeof value === 'function') {
+ desc.value = value.bind(src);
+ }
+
+ const name = `${prefix}${newKey}`;
+ ReflectDefineProperty(dest, name, desc);
+ if (varargsMethods.includes(name)) {
+ ReflectDefineProperty(dest, `${name}Apply`, {
+ value: applyBind(value, src),
+ });
+ }
+ }
+ }
+}
+
+function copyPrototype(src, dest, prefix) {
+ for (const key of ReflectOwnKeys(src)) {
+ const newKey = getNewKey(key);
+ const desc = ReflectGetOwnPropertyDescriptor(src, key);
+ if ('get' in desc) {
+ copyAccessor(dest, prefix, newKey, desc);
+ } else {
+ const { value } = desc;
+ if (typeof value === 'function') {
+ desc.value = uncurryThis(value);
+ }
+
+ const name = `${prefix}${newKey}`;
+ ReflectDefineProperty(dest, name, desc);
+ if (varargsMethods.includes(name)) {
+ ReflectDefineProperty(dest, `${name}Apply`, {
+ value: applyBind(value),
+ });
+ }
+ }
+ }
+}
+
+// Create copies of configurable value properties of the global object
+[
+ 'Proxy',
+ 'globalThis',
+].forEach((name) => {
+ // eslint-disable-next-line no-restricted-globals
+ primordials[name] = globalThis[name];
+});
+
+// Create copies of URI handling functions
+[
+ decodeURI,
+ decodeURIComponent,
+ encodeURI,
+ encodeURIComponent,
+].forEach((fn) => {
+ primordials[fn.name] = fn;
+});
+
+// Create copies of the namespace objects
+[
+ 'JSON',
+ 'Math',
+ 'Proxy',
+ 'Reflect',
+].forEach((name) => {
+ // eslint-disable-next-line no-restricted-globals
+ copyPropsRenamed(global[name], primordials, name);
+});
+
+// Create copies of intrinsic objects
+[
+ 'Array',
+ 'ArrayBuffer',
+ 'BigInt',
+ 'BigInt64Array',
+ 'BigUint64Array',
+ 'Boolean',
+ 'DataView',
+ 'Date',
+ 'Error',
+ 'EvalError',
+ 'Float32Array',
+ 'Float64Array',
+ 'Function',
+ 'Int16Array',
+ 'Int32Array',
+ 'Int8Array',
+ 'Map',
+ 'Number',
+ 'Object',
+ 'RangeError',
+ 'ReferenceError',
+ 'RegExp',
+ 'Set',
+ 'String',
+ 'Symbol',
+ 'SyntaxError',
+ 'TypeError',
+ 'URIError',
+ 'Uint16Array',
+ 'Uint32Array',
+ 'Uint8Array',
+ 'Uint8ClampedArray',
+ 'WeakMap',
+ 'WeakSet',
+].forEach((name) => {
+ // eslint-disable-next-line no-restricted-globals
+ const original = global[name];
+ primordials[name] = original;
+ copyPropsRenamed(original, primordials, name);
+ copyPrototype(original.prototype, primordials, `${name}Prototype`);
+});
+
+// Create copies of intrinsic objects that require a valid `this` to call
+// static methods.
+// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all
+[
+ 'Promise',
+].forEach((name) => {
+ // eslint-disable-next-line no-restricted-globals
+ const original = global[name];
+ primordials[name] = original;
+ copyPropsRenamedBound(original, primordials, name);
+ copyPrototype(original.prototype, primordials, `${name}Prototype`);
+});
+
+// Create copies of abstract intrinsic objects that are not directly exposed
+// on the global object.
+// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object
+[
+ { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) },
+ { name: 'ArrayIterator', original: {
+ prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()),
+ } },
+ { name: 'StringIterator', original: {
+ prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()),
+ } },
+].forEach(({ name, original }) => {
+ primordials[name] = original;
+ // The static %TypedArray% methods require a valid `this`, but can't be bound,
+ // as they need a subclass constructor as the receiver:
+ copyPrototype(original, primordials, name);
+ copyPrototype(original.prototype, primordials, `${name}Prototype`);
+});
+
+/* eslint-enable node-core/prefer-primordials */
+
+const {
+ ArrayPrototypeForEach,
+ FunctionPrototypeCall,
+ Map,
+ ObjectFreeze,
+ ObjectSetPrototypeOf,
+ Set,
+ SymbolIterator,
+ WeakMap,
+ WeakSet,
+} = primordials;
+
+// Because these functions are used by `makeSafe`, which is exposed
+// on the `primordials` object, it's important to use const references
+// to the primordials that they use:
+const createSafeIterator = (factory, next) => {
+ class SafeIterator {
+ constructor(iterable) {
+ this._iterator = factory(iterable);
+ }
+ next() {
+ return next(this._iterator);
+ }
+ [SymbolIterator]() {
+ return this;
+ }
+ }
+ ObjectSetPrototypeOf(SafeIterator.prototype, null);
+ ObjectFreeze(SafeIterator.prototype);
+ ObjectFreeze(SafeIterator);
+ return SafeIterator;
+};
+
+primordials.SafeArrayIterator = createSafeIterator(
+ primordials.ArrayPrototypeSymbolIterator,
+ primordials.ArrayIteratorPrototypeNext
+);
+primordials.SafeStringIterator = createSafeIterator(
+ primordials.StringPrototypeSymbolIterator,
+ primordials.StringIteratorPrototypeNext
+);
+
+const copyProps = (src, dest) => {
+ ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => {
+ if (!ReflectGetOwnPropertyDescriptor(dest, key)) {
+ ReflectDefineProperty(
+ dest,
+ key,
+ ReflectGetOwnPropertyDescriptor(src, key));
+ }
+ });
+};
+
+const makeSafe = (unsafe, safe) => {
+ if (SymbolIterator in unsafe.prototype) {
+ const dummy = new unsafe();
+ let next; // We can reuse the same `next` method.
+
+ ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => {
+ if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) {
+ const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key);
+ if (
+ typeof desc.value === 'function' &&
+ desc.value.length === 0 &&
+ SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? {})
+ ) {
+ const createIterator = uncurryThis(desc.value);
+ next = next ?? uncurryThis(createIterator(dummy).next);
+ const SafeIterator = createSafeIterator(createIterator, next);
+ desc.value = function() {
+ return new SafeIterator(this);
+ };
+ }
+ ReflectDefineProperty(safe.prototype, key, desc);
+ }
+ });
+ } else {
+ copyProps(unsafe.prototype, safe.prototype);
+ }
+ copyProps(unsafe, safe);
+
+ ObjectSetPrototypeOf(safe.prototype, null);
+ ObjectFreeze(safe.prototype);
+ ObjectFreeze(safe);
+ return safe;
+};
+primordials.makeSafe = makeSafe;
+
+// Subclass the constructors because we need to use their prototype
+// methods later.
+// Defining the `constructor` is necessary here to avoid the default
+// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`.
+primordials.SafeMap = makeSafe(
+ Map,
+ class SafeMap extends Map {
+ constructor(i) { super(i); } // eslint-disable-line no-useless-constructor
+ }
+);
+primordials.SafeWeakMap = makeSafe(
+ WeakMap,
+ class SafeWeakMap extends WeakMap {
+ constructor(i) { super(i); } // eslint-disable-line no-useless-constructor
+ }
+);
+primordials.SafeSet = makeSafe(
+ Set,
+ class SafeSet extends Set {
+ constructor(i) { super(i); } // eslint-disable-line no-useless-constructor
+ }
+);
+primordials.SafeWeakSet = makeSafe(
+ WeakSet,
+ class SafeWeakSet extends WeakSet {
+ constructor(i) { super(i); } // eslint-disable-line no-useless-constructor
+ }
+);
+
+ObjectSetPrototypeOf(primordials, null);
+ObjectFreeze(primordials);
+
+module.exports = primordials;
diff --git a/node_modules/@pkgjs/parseargs/internal/util.js b/node_modules/@pkgjs/parseargs/internal/util.js
new file mode 100644
index 0000000000000..b9b8fe5b8d7c0
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/internal/util.js
@@ -0,0 +1,14 @@
+'use strict';
+
+// This is a placeholder for util.js in node.js land.
+
+const {
+ ObjectCreate,
+ ObjectFreeze,
+} = require('./primordials');
+
+const kEmptyObject = ObjectFreeze(ObjectCreate(null));
+
+module.exports = {
+ kEmptyObject,
+};
diff --git a/node_modules/@pkgjs/parseargs/internal/validators.js b/node_modules/@pkgjs/parseargs/internal/validators.js
new file mode 100644
index 0000000000000..b5ac4fb501eff
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/internal/validators.js
@@ -0,0 +1,89 @@
+'use strict';
+
+// This file is a proxy of the original file located at:
+// https://github.com/nodejs/node/blob/main/lib/internal/validators.js
+// Every addition or modification to this file must be evaluated
+// during the PR review.
+
+const {
+ ArrayIsArray,
+ ArrayPrototypeIncludes,
+ ArrayPrototypeJoin,
+} = require('./primordials');
+
+const {
+ codes: {
+ ERR_INVALID_ARG_TYPE
+ }
+} = require('./errors');
+
+function validateString(value, name) {
+ if (typeof value !== 'string') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'String', value);
+ }
+}
+
+function validateUnion(value, name, union) {
+ if (!ArrayPrototypeIncludes(union, value)) {
+ throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value);
+ }
+}
+
+function validateBoolean(value, name) {
+ if (typeof value !== 'boolean') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Boolean', value);
+ }
+}
+
+function validateArray(value, name) {
+ if (!ArrayIsArray(value)) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Array', value);
+ }
+}
+
+function validateStringArray(value, name) {
+ validateArray(value, name);
+ for (let i = 0; i < value.length; i++) {
+ validateString(value[i], `${name}[${i}]`);
+ }
+}
+
+function validateBooleanArray(value, name) {
+ validateArray(value, name);
+ for (let i = 0; i < value.length; i++) {
+ validateBoolean(value[i], `${name}[${i}]`);
+ }
+}
+
+/**
+ * @param {unknown} value
+ * @param {string} name
+ * @param {{
+ * allowArray?: boolean,
+ * allowFunction?: boolean,
+ * nullable?: boolean
+ * }} [options]
+ */
+function validateObject(value, name, options) {
+ const useDefaultOptions = options == null;
+ const allowArray = useDefaultOptions ? false : options.allowArray;
+ const allowFunction = useDefaultOptions ? false : options.allowFunction;
+ const nullable = useDefaultOptions ? false : options.nullable;
+ if ((!nullable && value === null) ||
+ (!allowArray && ArrayIsArray(value)) ||
+ (typeof value !== 'object' && (
+ !allowFunction || typeof value !== 'function'
+ ))) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Object', value);
+ }
+}
+
+module.exports = {
+ validateArray,
+ validateObject,
+ validateString,
+ validateStringArray,
+ validateUnion,
+ validateBoolean,
+ validateBooleanArray,
+};
diff --git a/node_modules/@pkgjs/parseargs/package.json b/node_modules/@pkgjs/parseargs/package.json
new file mode 100644
index 0000000000000..0bcc05c0d4a3e
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "@pkgjs/parseargs",
+ "version": "0.11.0",
+ "description": "Polyfill of future proposal for `util.parseArgs()`",
+ "engines": {
+ "node": ">=14"
+ },
+ "main": "index.js",
+ "exports": {
+ ".": "./index.js",
+ "./package.json": "./package.json"
+ },
+ "scripts": {
+ "coverage": "c8 --check-coverage tape 'test/*.js'",
+ "test": "c8 tape 'test/*.js'",
+ "posttest": "eslint .",
+ "fix": "npm run posttest -- --fix"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git@github.com:pkgjs/parseargs.git"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/pkgjs/parseargs/issues"
+ },
+ "homepage": "https://github.com/pkgjs/parseargs#readme",
+ "devDependencies": {
+ "c8": "^7.10.0",
+ "eslint": "^8.2.0",
+ "eslint-plugin-node-core": "iansu/eslint-plugin-node-core",
+ "tape": "^5.2.2"
+ }
+}
diff --git a/node_modules/@pkgjs/parseargs/utils.js b/node_modules/@pkgjs/parseargs/utils.js
new file mode 100644
index 0000000000000..d7f420a233924
--- /dev/null
+++ b/node_modules/@pkgjs/parseargs/utils.js
@@ -0,0 +1,198 @@
+'use strict';
+
+const {
+ ArrayPrototypeFind,
+ ObjectEntries,
+ ObjectPrototypeHasOwnProperty: ObjectHasOwn,
+ StringPrototypeCharAt,
+ StringPrototypeIncludes,
+ StringPrototypeStartsWith,
+} = require('./internal/primordials');
+
+const {
+ validateObject,
+} = require('./internal/validators');
+
+// These are internal utilities to make the parsing logic easier to read, and
+// add lots of detail for the curious. They are in a separate file to allow
+// unit testing, although that is not essential (this could be rolled into
+// main file and just tested implicitly via API).
+//
+// These routines are for internal use, not for export to client.
+
+/**
+ * Return the named property, but only if it is an own property.
+ */
+function objectGetOwn(obj, prop) {
+ if (ObjectHasOwn(obj, prop))
+ return obj[prop];
+}
+
+/**
+ * Return the named options property, but only if it is an own property.
+ */
+function optionsGetOwn(options, longOption, prop) {
+ if (ObjectHasOwn(options, longOption))
+ return objectGetOwn(options[longOption], prop);
+}
+
+/**
+ * Determines if the argument may be used as an option value.
+ * @example
+ * isOptionValue('V') // returns true
+ * isOptionValue('-v') // returns true (greedy)
+ * isOptionValue('--foo') // returns true (greedy)
+ * isOptionValue(undefined) // returns false
+ */
+function isOptionValue(value) {
+ if (value == null) return false;
+
+ // Open Group Utility Conventions are that an option-argument
+ // is the argument after the option, and may start with a dash.
+ return true; // greedy!
+}
+
+/**
+ * Detect whether there is possible confusion and user may have omitted
+ * the option argument, like `--port --verbose` when `port` of type:string.
+ * In strict mode we throw errors if value is option-like.
+ */
+function isOptionLikeValue(value) {
+ if (value == null) return false;
+
+ return value.length > 1 && StringPrototypeCharAt(value, 0) === '-';
+}
+
+/**
+ * Determines if `arg` is just a short option.
+ * @example '-f'
+ */
+function isLoneShortOption(arg) {
+ return arg.length === 2 &&
+ StringPrototypeCharAt(arg, 0) === '-' &&
+ StringPrototypeCharAt(arg, 1) !== '-';
+}
+
+/**
+ * Determines if `arg` is a lone long option.
+ * @example
+ * isLoneLongOption('a') // returns false
+ * isLoneLongOption('-a') // returns false
+ * isLoneLongOption('--foo') // returns true
+ * isLoneLongOption('--foo=bar') // returns false
+ */
+function isLoneLongOption(arg) {
+ return arg.length > 2 &&
+ StringPrototypeStartsWith(arg, '--') &&
+ !StringPrototypeIncludes(arg, '=', 3);
+}
+
+/**
+ * Determines if `arg` is a long option and value in the same argument.
+ * @example
+ * isLongOptionAndValue('--foo') // returns false
+ * isLongOptionAndValue('--foo=bar') // returns true
+ */
+function isLongOptionAndValue(arg) {
+ return arg.length > 2 &&
+ StringPrototypeStartsWith(arg, '--') &&
+ StringPrototypeIncludes(arg, '=', 3);
+}
+
+/**
+ * Determines if `arg` is a short option group.
+ *
+ * See Guideline 5 of the [Open Group Utility Conventions](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html).
+ * One or more options without option-arguments, followed by at most one
+ * option that takes an option-argument, should be accepted when grouped
+ * behind one '-' delimiter.
+ * @example
+ * isShortOptionGroup('-a', {}) // returns false
+ * isShortOptionGroup('-ab', {}) // returns true
+ * // -fb is an option and a value, not a short option group
+ * isShortOptionGroup('-fb', {
+ * options: { f: { type: 'string' } }
+ * }) // returns false
+ * isShortOptionGroup('-bf', {
+ * options: { f: { type: 'string' } }
+ * }) // returns true
+ * // -bfb is an edge case, return true and caller sorts it out
+ * isShortOptionGroup('-bfb', {
+ * options: { f: { type: 'string' } }
+ * }) // returns true
+ */
+function isShortOptionGroup(arg, options) {
+ if (arg.length <= 2) return false;
+ if (StringPrototypeCharAt(arg, 0) !== '-') return false;
+ if (StringPrototypeCharAt(arg, 1) === '-') return false;
+
+ const firstShort = StringPrototypeCharAt(arg, 1);
+ const longOption = findLongOptionForShort(firstShort, options);
+ return optionsGetOwn(options, longOption, 'type') !== 'string';
+}
+
+/**
+ * Determine if arg is a short string option followed by its value.
+ * @example
+ * isShortOptionAndValue('-a', {}); // returns false
+ * isShortOptionAndValue('-ab', {}); // returns false
+ * isShortOptionAndValue('-fFILE', {
+ * options: { foo: { short: 'f', type: 'string' }}
+ * }) // returns true
+ */
+function isShortOptionAndValue(arg, options) {
+ validateObject(options, 'options');
+
+ if (arg.length <= 2) return false;
+ if (StringPrototypeCharAt(arg, 0) !== '-') return false;
+ if (StringPrototypeCharAt(arg, 1) === '-') return false;
+
+ const shortOption = StringPrototypeCharAt(arg, 1);
+ const longOption = findLongOptionForShort(shortOption, options);
+ return optionsGetOwn(options, longOption, 'type') === 'string';
+}
+
+/**
+ * Find the long option associated with a short option. Looks for a configured
+ * `short` and returns the short option itself if a long option is not found.
+ * @example
+ * findLongOptionForShort('a', {}) // returns 'a'
+ * findLongOptionForShort('b', {
+ * options: { bar: { short: 'b' } }
+ * }) // returns 'bar'
+ */
+function findLongOptionForShort(shortOption, options) {
+ validateObject(options, 'options');
+ const longOptionEntry = ArrayPrototypeFind(
+ ObjectEntries(options),
+ ({ 1: optionConfig }) => objectGetOwn(optionConfig, 'short') === shortOption
+ );
+ return longOptionEntry?.[0] ?? shortOption;
+}
+
+/**
+ * Check if the given option includes a default value
+ * and that option has not been set by the input args.
+ *
+ * @param {string} longOption - long option name e.g. 'foo'
+ * @param {object} optionConfig - the option configuration properties
+ * @param {object} values - option values returned in `values` by parseArgs
+ */
+function useDefaultValueOption(longOption, optionConfig, values) {
+ return objectGetOwn(optionConfig, 'default') !== undefined &&
+ values[longOption] === undefined;
+}
+
+module.exports = {
+ findLongOptionForShort,
+ isLoneLongOption,
+ isLoneShortOption,
+ isLongOptionAndValue,
+ isOptionValue,
+ isOptionLikeValue,
+ isShortOptionAndValue,
+ isShortOptionGroup,
+ useDefaultValueOption,
+ objectGetOwn,
+ optionsGetOwn,
+};
diff --git a/node_modules/@sigstore/bundle/LICENSE b/node_modules/@sigstore/bundle/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/bundle/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@sigstore/bundle/dist/build.js b/node_modules/@sigstore/bundle/dist/build.js
new file mode 100644
index 0000000000000..65c71b100ad58
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/build.js
@@ -0,0 +1,101 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(options) {
+ return {
+ mediaType: options.singleCertificate
+ ? bundle_1.BUNDLE_V03_MEDIA_TYPE
+ : bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ content: {
+ $case: 'messageSignature',
+ messageSignature: {
+ messageDigest: {
+ algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
+ digest: options.digest,
+ },
+ signature: options.signature,
+ },
+ },
+ verificationMaterial: toVerificationMaterial(options),
+ };
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(options) {
+ return {
+ mediaType: options.singleCertificate
+ ? bundle_1.BUNDLE_V03_MEDIA_TYPE
+ : bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ content: {
+ $case: 'dsseEnvelope',
+ dsseEnvelope: toEnvelope(options),
+ },
+ verificationMaterial: toVerificationMaterial(options),
+ };
+}
+exports.toDSSEBundle = toDSSEBundle;
+function toEnvelope(options) {
+ return {
+ payloadType: options.artifactType,
+ payload: options.artifact,
+ signatures: [toSignature(options)],
+ };
+}
+function toSignature(options) {
+ return {
+ keyid: options.keyHint || '',
+ sig: options.signature,
+ };
+}
+// Verification material
+function toVerificationMaterial(options) {
+ return {
+ content: toKeyContent(options),
+ tlogEntries: [],
+ timestampVerificationData: { rfc3161Timestamps: [] },
+ };
+}
+function toKeyContent(options) {
+ if (options.certificate) {
+ if (options.singleCertificate) {
+ return {
+ $case: 'certificate',
+ certificate: { rawBytes: options.certificate },
+ };
+ }
+ else {
+ return {
+ $case: 'x509CertificateChain',
+ x509CertificateChain: {
+ certificates: [{ rawBytes: options.certificate }],
+ },
+ };
+ }
+ }
+ else {
+ return {
+ $case: 'publicKey',
+ publicKey: {
+ hint: options.keyHint || '',
+ },
+ };
+ }
+}
diff --git a/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/@sigstore/bundle/dist/bundle.js
new file mode 100644
index 0000000000000..dbd35df2ca2bb
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/bundle.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
+exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3';
+exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json';
+// Type guards for bundle variants.
+function isBundleWithCertificateChain(b) {
+ return b.verificationMaterial.content.$case === 'x509CertificateChain';
+}
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+function isBundleWithPublicKey(b) {
+ return b.verificationMaterial.content.$case === 'publicKey';
+}
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+function isBundleWithMessageSignature(b) {
+ return b.content.$case === 'messageSignature';
+}
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+function isBundleWithDsseEnvelope(b) {
+ return b.content.$case === 'dsseEnvelope';
+}
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
diff --git a/node_modules/@sigstore/bundle/dist/error.js b/node_modules/@sigstore/bundle/dist/error.js
new file mode 100644
index 0000000000000..f84295323b812
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/error.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValidationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ValidationError extends Error {
+ constructor(message, fields) {
+ super(message);
+ this.fields = fields;
+ }
+}
+exports.ValidationError = ValidationError;
diff --git a/node_modules/@sigstore/bundle/dist/index.js b/node_modules/@sigstore/bundle/dist/index.js
new file mode 100644
index 0000000000000..1b012acad4d85
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/index.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var build_1 = require("./build");
+Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
+Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } });
+Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
+Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
+Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
+Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
+var serialized_1 = require("./serialized");
+Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
+Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
+Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
+Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
+var validate_1 = require("./validate");
+Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
+Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
+Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
+Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } });
+Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/@sigstore/bundle/dist/serialized.js
new file mode 100644
index 0000000000000..be0d2a2d54d09
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/serialized.js
@@ -0,0 +1,49 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+const validate_1 = require("./validate");
+const bundleFromJSON = (obj) => {
+ const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
+ switch (bundle.mediaType) {
+ case bundle_1.BUNDLE_V01_MEDIA_TYPE:
+ (0, validate_1.assertBundleV01)(bundle);
+ break;
+ case bundle_1.BUNDLE_V02_MEDIA_TYPE:
+ (0, validate_1.assertBundleV02)(bundle);
+ break;
+ default:
+ (0, validate_1.assertBundleLatest)(bundle);
+ break;
+ }
+ return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+const bundleToJSON = (bundle) => {
+ return protobuf_specs_1.Bundle.toJSON(bundle);
+};
+exports.bundleToJSON = bundleToJSON;
+const envelopeFromJSON = (obj) => {
+ return protobuf_specs_1.Envelope.fromJSON(obj);
+};
+exports.envelopeFromJSON = envelopeFromJSON;
+const envelopeToJSON = (envelope) => {
+ return protobuf_specs_1.Envelope.toJSON(envelope);
+};
+exports.envelopeToJSON = envelopeToJSON;
diff --git a/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/@sigstore/bundle/dist/utility.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/utility.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/@sigstore/bundle/dist/validate.js
new file mode 100644
index 0000000000000..67079cd1f680a
--- /dev/null
+++ b/node_modules/@sigstore/bundle/dist/validate.js
@@ -0,0 +1,199 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertBundleLatest = exports.assertBundleV02 = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("./error");
+// Performs basic validation of a Sigstore bundle to ensure that all required
+// fields are populated. This is not a complete validation of the bundle, but
+// rather a check that the bundle is in a valid state to be processed by the
+// rest of the code.
+function assertBundle(b) {
+ const invalidValues = validateBundleBase(b);
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid bundle', invalidValues);
+ }
+}
+exports.assertBundle = assertBundle;
+// Asserts that the given bundle conforms to the v0.1 bundle format.
+function assertBundleV01(b) {
+ const invalidValues = [];
+ invalidValues.push(...validateBundleBase(b));
+ invalidValues.push(...validateInclusionPromise(b));
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
+ }
+}
+exports.assertBundleV01 = assertBundleV01;
+// Type guard to determine if Bundle is a v0.1 bundle.
+function isBundleV01(b) {
+ try {
+ assertBundleV01(b);
+ return true;
+ }
+ catch (e) {
+ return false;
+ }
+}
+exports.isBundleV01 = isBundleV01;
+// Asserts that the given bundle conforms to the v0.2 bundle format.
+function assertBundleV02(b) {
+ const invalidValues = [];
+ invalidValues.push(...validateBundleBase(b));
+ invalidValues.push(...validateInclusionProof(b));
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
+ }
+}
+exports.assertBundleV02 = assertBundleV02;
+// Asserts that the given bundle conforms to the newest (0.3) bundle format.
+function assertBundleLatest(b) {
+ const invalidValues = [];
+ invalidValues.push(...validateBundleBase(b));
+ invalidValues.push(...validateInclusionProof(b));
+ invalidValues.push(...validateNoCertificateChain(b));
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid bundle', invalidValues);
+ }
+}
+exports.assertBundleLatest = assertBundleLatest;
+function validateBundleBase(b) {
+ const invalidValues = [];
+ // Media type validation
+ if (b.mediaType === undefined ||
+ (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) &&
+ !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) {
+ invalidValues.push('mediaType');
+ }
+ // Content-related validation
+ if (b.content === undefined) {
+ invalidValues.push('content');
+ }
+ else {
+ switch (b.content.$case) {
+ case 'messageSignature':
+ if (b.content.messageSignature.messageDigest === undefined) {
+ invalidValues.push('content.messageSignature.messageDigest');
+ }
+ else {
+ if (b.content.messageSignature.messageDigest.digest.length === 0) {
+ invalidValues.push('content.messageSignature.messageDigest.digest');
+ }
+ }
+ if (b.content.messageSignature.signature.length === 0) {
+ invalidValues.push('content.messageSignature.signature');
+ }
+ break;
+ case 'dsseEnvelope':
+ if (b.content.dsseEnvelope.payload.length === 0) {
+ invalidValues.push('content.dsseEnvelope.payload');
+ }
+ if (b.content.dsseEnvelope.signatures.length !== 1) {
+ invalidValues.push('content.dsseEnvelope.signatures');
+ }
+ else {
+ if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
+ invalidValues.push('content.dsseEnvelope.signatures[0].sig');
+ }
+ }
+ break;
+ }
+ }
+ // Verification material-related validation
+ if (b.verificationMaterial === undefined) {
+ invalidValues.push('verificationMaterial');
+ }
+ else {
+ if (b.verificationMaterial.content === undefined) {
+ invalidValues.push('verificationMaterial.content');
+ }
+ else {
+ switch (b.verificationMaterial.content.$case) {
+ case 'x509CertificateChain':
+ if (b.verificationMaterial.content.x509CertificateChain.certificates
+ .length === 0) {
+ invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
+ }
+ b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
+ if (cert.rawBytes.length === 0) {
+ invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
+ }
+ });
+ break;
+ case 'certificate':
+ if (b.verificationMaterial.content.certificate.rawBytes.length === 0) {
+ invalidValues.push('verificationMaterial.content.certificate.rawBytes');
+ }
+ break;
+ }
+ }
+ if (b.verificationMaterial.tlogEntries === undefined) {
+ invalidValues.push('verificationMaterial.tlogEntries');
+ }
+ else {
+ if (b.verificationMaterial.tlogEntries.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.logId === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
+ }
+ if (entry.kindVersion === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
+ }
+ });
+ }
+ }
+ }
+ return invalidValues;
+}
+// Necessary for V01 bundles
+function validateInclusionPromise(b) {
+ const invalidValues = [];
+ if (b.verificationMaterial &&
+ b.verificationMaterial.tlogEntries?.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.inclusionPromise === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
+ }
+ });
+ }
+ return invalidValues;
+}
+// Necessary for V02 and later bundles
+function validateInclusionProof(b) {
+ const invalidValues = [];
+ if (b.verificationMaterial &&
+ b.verificationMaterial.tlogEntries?.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.inclusionProof === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
+ }
+ else {
+ if (entry.inclusionProof.checkpoint === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
+ }
+ }
+ });
+ }
+ return invalidValues;
+}
+// Necessary for V03 and later bundles
+function validateNoCertificateChain(b) {
+ const invalidValues = [];
+ if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') {
+ invalidValues.push('verificationMaterial.content.$case');
+ }
+ return invalidValues;
+}
diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json
new file mode 100644
index 0000000000000..dd853897226d2
--- /dev/null
+++ b/node_modules/@sigstore/bundle/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@sigstore/bundle",
+ "version": "2.3.2",
+ "description": "Sigstore bundle type",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist",
+ "store"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "dependencies": {
+ "@sigstore/protobuf-specs": "^0.3.2"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@sigstore/core/LICENSE b/node_modules/@sigstore/core/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/core/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/@sigstore/core/dist/asn1/error.js
new file mode 100644
index 0000000000000..17d93b0f7e706
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/error.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1TypeError = exports.ASN1ParseError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ASN1ParseError extends Error {
+}
+exports.ASN1ParseError = ASN1ParseError;
+class ASN1TypeError extends Error {
+}
+exports.ASN1TypeError = ASN1TypeError;
diff --git a/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/@sigstore/core/dist/asn1/index.js
new file mode 100644
index 0000000000000..348b2ea4022e5
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var obj_1 = require("./obj");
+Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } });
diff --git a/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/@sigstore/core/dist/asn1/length.js
new file mode 100644
index 0000000000000..36fdaf5b9777f
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/length.js
@@ -0,0 +1,63 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeLength = exports.decodeLength = void 0;
+const error_1 = require("./error");
+// Decodes the length of a DER-encoded ANS.1 element from the supplied stream.
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
+function decodeLength(stream) {
+ const buf = stream.getUint8();
+ // If the most significant bit is UNSET the length is just the value of the
+ // byte.
+ if ((buf & 0x80) === 0x00) {
+ return buf;
+ }
+ // Otherwise, the lower 7 bits of the first byte indicate the number of bytes
+ // that follow to encode the length.
+ const byteCount = buf & 0x7f;
+ // Ensure the encoded length can safely fit in a JS number.
+ if (byteCount > 6) {
+ throw new error_1.ASN1ParseError('length exceeds 6 byte limit');
+ }
+ // Iterate over the bytes that encode the length.
+ let len = 0;
+ for (let i = 0; i < byteCount; i++) {
+ len = len * 256 + stream.getUint8();
+ }
+ // This is a valid ASN.1 length encoding, but we don't support it.
+ if (len === 0) {
+ throw new error_1.ASN1ParseError('indefinite length encoding not supported');
+ }
+ return len;
+}
+exports.decodeLength = decodeLength;
+// Translates the supplied value to a DER-encoded length.
+function encodeLength(len) {
+ if (len < 128) {
+ return Buffer.from([len]);
+ }
+ // Bitwise operations on large numbers are not supported in JS, so we need to
+ // use BigInts.
+ let val = BigInt(len);
+ const bytes = [];
+ while (val > 0n) {
+ bytes.unshift(Number(val & 255n));
+ val = val >> 8n;
+ }
+ return Buffer.from([0x80 | bytes.length, ...bytes]);
+}
+exports.encodeLength = encodeLength;
diff --git a/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/@sigstore/core/dist/asn1/obj.js
new file mode 100644
index 0000000000000..5f9ac9cdbc493
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/obj.js
@@ -0,0 +1,152 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const stream_1 = require("../stream");
+const error_1 = require("./error");
+const length_1 = require("./length");
+const parse_1 = require("./parse");
+const tag_1 = require("./tag");
+class ASN1Obj {
+ constructor(tag, value, subs) {
+ this.tag = tag;
+ this.value = value;
+ this.subs = subs;
+ }
+ // Constructs an ASN.1 object from a Buffer of DER-encoded bytes.
+ static parseBuffer(buf) {
+ return parseStream(new stream_1.ByteStream(buf));
+ }
+ toDER() {
+ const valueStream = new stream_1.ByteStream();
+ if (this.subs.length > 0) {
+ for (const sub of this.subs) {
+ valueStream.appendView(sub.toDER());
+ }
+ }
+ else {
+ valueStream.appendView(this.value);
+ }
+ const value = valueStream.buffer;
+ // Concat tag/length/value
+ const obj = new stream_1.ByteStream();
+ obj.appendChar(this.tag.toDER());
+ obj.appendView((0, length_1.encodeLength)(value.length));
+ obj.appendView(value);
+ return obj.buffer;
+ }
+ /////////////////////////////////////////////////////////////////////////////
+ // Convenience methods for parsing ASN.1 primitives into JS types
+ // Returns the ASN.1 object's value as a boolean. Throws an error if the
+ // object is not a boolean.
+ toBoolean() {
+ if (!this.tag.isBoolean()) {
+ throw new error_1.ASN1TypeError('not a boolean');
+ }
+ return (0, parse_1.parseBoolean)(this.value);
+ }
+ // Returns the ASN.1 object's value as a BigInt. Throws an error if the
+ // object is not an integer.
+ toInteger() {
+ if (!this.tag.isInteger()) {
+ throw new error_1.ASN1TypeError('not an integer');
+ }
+ return (0, parse_1.parseInteger)(this.value);
+ }
+ // Returns the ASN.1 object's value as an OID string. Throws an error if the
+ // object is not an OID.
+ toOID() {
+ if (!this.tag.isOID()) {
+ throw new error_1.ASN1TypeError('not an OID');
+ }
+ return (0, parse_1.parseOID)(this.value);
+ }
+ // Returns the ASN.1 object's value as a Date. Throws an error if the object
+ // is not either a UTCTime or a GeneralizedTime.
+ toDate() {
+ switch (true) {
+ case this.tag.isUTCTime():
+ return (0, parse_1.parseTime)(this.value, true);
+ case this.tag.isGeneralizedTime():
+ return (0, parse_1.parseTime)(this.value, false);
+ default:
+ throw new error_1.ASN1TypeError('not a date');
+ }
+ }
+ // Returns the ASN.1 object's value as a number[] where each number is the
+ // value of a bit in the bit string. Throws an error if the object is not a
+ // bit string.
+ toBitString() {
+ if (!this.tag.isBitString()) {
+ throw new error_1.ASN1TypeError('not a bit string');
+ }
+ return (0, parse_1.parseBitString)(this.value);
+ }
+}
+exports.ASN1Obj = ASN1Obj;
+/////////////////////////////////////////////////////////////////////////////
+// Internal stream parsing functions
+function parseStream(stream) {
+ // Parse tag, length, and value from stream
+ const tag = new tag_1.ASN1Tag(stream.getUint8());
+ const len = (0, length_1.decodeLength)(stream);
+ const value = stream.slice(stream.position, len);
+ const start = stream.position;
+ let subs = [];
+ // If the object is constructed, parse its children. Sometimes, children
+ // are embedded in OCTESTRING objects, so we need to check those
+ // for children as well.
+ if (tag.constructed) {
+ subs = collectSubs(stream, len);
+ }
+ else if (tag.isOctetString()) {
+ // Attempt to parse children of OCTETSTRING objects. If anything fails,
+ // assume the object is not constructed and treat as primitive.
+ try {
+ subs = collectSubs(stream, len);
+ }
+ catch (e) {
+ // Fail silently and treat as primitive
+ }
+ }
+ // If there are no children, move stream cursor to the end of the object
+ if (subs.length === 0) {
+ stream.seek(start + len);
+ }
+ return new ASN1Obj(tag, value, subs);
+}
+function collectSubs(stream, len) {
+ // Calculate end of object content
+ const end = stream.position + len;
+ // Make sure there are enough bytes left in the stream. This should never
+ // happen, cause it'll get caught when the stream is sliced in parseStream.
+ // Leaving as an extra check just in case.
+ /* istanbul ignore if */
+ if (end > stream.length) {
+ throw new error_1.ASN1ParseError('invalid length');
+ }
+ // Parse all children
+ const subs = [];
+ while (stream.position < end) {
+ subs.push(parseStream(stream));
+ }
+ // When we're done parsing children, we should be at the end of the object
+ if (stream.position !== end) {
+ throw new error_1.ASN1ParseError('invalid length');
+ }
+ return subs;
+}
diff --git a/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/@sigstore/core/dist/asn1/parse.js
new file mode 100644
index 0000000000000..482c7239e8316
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/parse.js
@@ -0,0 +1,125 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseBitString = exports.parseBoolean = exports.parseOID = exports.parseTime = exports.parseStringASCII = exports.parseInteger = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
+const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
+// Parse a BigInt from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
+function parseInteger(buf) {
+ let pos = 0;
+ const end = buf.length;
+ let val = buf[pos];
+ const neg = val > 0x7f;
+ // Consume any padding bytes
+ const pad = neg ? 0xff : 0x00;
+ while (val == pad && ++pos < end) {
+ val = buf[pos];
+ }
+ // Calculate remaining bytes to read
+ const len = end - pos;
+ if (len === 0)
+ return BigInt(neg ? -1 : 0);
+ // Handle two's complement for negative numbers
+ val = neg ? val - 256 : val;
+ // Parse remaining bytes
+ let n = BigInt(val);
+ for (let i = pos + 1; i < end; ++i) {
+ n = n * BigInt(256) + BigInt(buf[i]);
+ }
+ return n;
+}
+exports.parseInteger = parseInteger;
+// Parse an ASCII string from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
+function parseStringASCII(buf) {
+ return buf.toString('ascii');
+}
+exports.parseStringASCII = parseStringASCII;
+// Parse a Date from the DER-encoded buffer
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
+function parseTime(buf, shortYear) {
+ const timeStr = parseStringASCII(buf);
+ // Parse the time string into matches - captured groups start at index 1
+ const m = shortYear
+ ? RE_TIME_SHORT_YEAR.exec(timeStr)
+ : RE_TIME_LONG_YEAR.exec(timeStr);
+ if (!m) {
+ throw new Error('invalid time');
+ }
+ // Translate dates with a 2-digit year to 4 digits per the spec
+ if (shortYear) {
+ let year = Number(m[1]);
+ year += year >= 50 ? 1900 : 2000;
+ m[1] = year.toString();
+ }
+ // Translate to ISO8601 format and parse
+ return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`);
+}
+exports.parseTime = parseTime;
+// Parse an OID from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
+function parseOID(buf) {
+ let pos = 0;
+ const end = buf.length;
+ // Consume first byte which encodes the first two OID components
+ let n = buf[pos++];
+ const first = Math.floor(n / 40);
+ const second = n % 40;
+ let oid = `${first}.${second}`;
+ // Consume remaining bytes
+ let val = 0;
+ for (; pos < end; ++pos) {
+ n = buf[pos];
+ val = (val << 7) + (n & 0x7f);
+ // If the left-most bit is NOT set, then this is the last byte in the
+ // sequence and we can add the value to the OID and reset the accumulator
+ if ((n & 0x80) === 0) {
+ oid += `.${val}`;
+ val = 0;
+ }
+ }
+ return oid;
+}
+exports.parseOID = parseOID;
+// Parse a boolean from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
+function parseBoolean(buf) {
+ return buf[0] !== 0;
+}
+exports.parseBoolean = parseBoolean;
+// Parse a bit string from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
+function parseBitString(buf) {
+ // First byte tell us how many unused bits are in the last byte
+ const unused = buf[0];
+ const start = 1;
+ const end = buf.length;
+ const bits = [];
+ for (let i = start; i < end; ++i) {
+ const byte = buf[i];
+ // The skip value is only used for the last byte
+ const skip = i === end - 1 ? unused : 0;
+ // Iterate over each bit in the byte (most significant first)
+ for (let j = 7; j >= skip; --j) {
+ // Read the bit and add it to the bit string
+ bits.push((byte >> j) & 0x01);
+ }
+ }
+ return bits;
+}
+exports.parseBitString = parseBitString;
diff --git a/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/@sigstore/core/dist/asn1/tag.js
new file mode 100644
index 0000000000000..84dd938d049aa
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/asn1/tag.js
@@ -0,0 +1,86 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Tag = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("./error");
+const UNIVERSAL_TAG = {
+ BOOLEAN: 0x01,
+ INTEGER: 0x02,
+ BIT_STRING: 0x03,
+ OCTET_STRING: 0x04,
+ OBJECT_IDENTIFIER: 0x06,
+ SEQUENCE: 0x10,
+ SET: 0x11,
+ PRINTABLE_STRING: 0x13,
+ UTC_TIME: 0x17,
+ GENERALIZED_TIME: 0x18,
+};
+const TAG_CLASS = {
+ UNIVERSAL: 0x00,
+ APPLICATION: 0x01,
+ CONTEXT_SPECIFIC: 0x02,
+ PRIVATE: 0x03,
+};
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes
+class ASN1Tag {
+ constructor(enc) {
+ // Bits 0 through 4 are the tag number
+ this.number = enc & 0x1f;
+ // Bit 5 is the constructed bit
+ this.constructed = (enc & 0x20) === 0x20;
+ // Bit 6 & 7 are the class
+ this.class = enc >> 6;
+ if (this.number === 0x1f) {
+ throw new error_1.ASN1ParseError('long form tags not supported');
+ }
+ if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) {
+ throw new error_1.ASN1ParseError('unsupported tag 0x00');
+ }
+ }
+ isUniversal() {
+ return this.class === TAG_CLASS.UNIVERSAL;
+ }
+ isContextSpecific(num) {
+ const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC;
+ return num !== undefined ? res && this.number === num : res;
+ }
+ isBoolean() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN;
+ }
+ isInteger() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER;
+ }
+ isBitString() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING;
+ }
+ isOctetString() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING;
+ }
+ isOID() {
+ return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER);
+ }
+ isUTCTime() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME;
+ }
+ isGeneralizedTime() {
+ return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME;
+ }
+ toDER() {
+ return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6);
+ }
+}
+exports.ASN1Tag = ASN1Tag;
diff --git a/node_modules/@sigstore/core/dist/crypto.js b/node_modules/@sigstore/core/dist/crypto.js
new file mode 100644
index 0000000000000..dbe65b165d357
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/crypto.js
@@ -0,0 +1,71 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.bufferEqual = exports.verify = exports.hash = exports.digest = exports.createPublicKey = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const SHA256_ALGORITHM = 'sha256';
+function createPublicKey(key, type = 'spki') {
+ if (typeof key === 'string') {
+ return crypto_1.default.createPublicKey(key);
+ }
+ else {
+ return crypto_1.default.createPublicKey({ key, format: 'der', type: type });
+ }
+}
+exports.createPublicKey = createPublicKey;
+function digest(algorithm, ...data) {
+ const hash = crypto_1.default.createHash(algorithm);
+ for (const d of data) {
+ hash.update(d);
+ }
+ return hash.digest();
+}
+exports.digest = digest;
+// TODO: deprecate this in favor of digest()
+function hash(...data) {
+ const hash = crypto_1.default.createHash(SHA256_ALGORITHM);
+ for (const d of data) {
+ hash.update(d);
+ }
+ return hash.digest();
+}
+exports.hash = hash;
+function verify(data, key, signature, algorithm) {
+ // The try/catch is to work around an issue in Node 14.x where verify throws
+ // an error in some scenarios if the signature is invalid.
+ try {
+ return crypto_1.default.verify(algorithm, data, key, signature);
+ }
+ catch (e) {
+ /* istanbul ignore next */
+ return false;
+ }
+}
+exports.verify = verify;
+function bufferEqual(a, b) {
+ try {
+ return crypto_1.default.timingSafeEqual(a, b);
+ }
+ catch {
+ /* istanbul ignore next */
+ return false;
+ }
+}
+exports.bufferEqual = bufferEqual;
diff --git a/node_modules/@sigstore/core/dist/dsse.js b/node_modules/@sigstore/core/dist/dsse.js
new file mode 100644
index 0000000000000..a78783c919a25
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/dsse.js
@@ -0,0 +1,31 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.preAuthEncoding = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PAE_PREFIX = 'DSSEv1';
+// DSSE Pre-Authentication Encoding
+function preAuthEncoding(payloadType, payload) {
+ const prefix = [
+ PAE_PREFIX,
+ payloadType.length,
+ payloadType,
+ payload.length,
+ '',
+ ].join(' ');
+ return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
+}
+exports.preAuthEncoding = preAuthEncoding;
diff --git a/node_modules/@sigstore/core/dist/encoding.js b/node_modules/@sigstore/core/dist/encoding.js
new file mode 100644
index 0000000000000..b020ac4d6ecd4
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/encoding.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.base64Decode = exports.base64Encode = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const BASE64_ENCODING = 'base64';
+const UTF8_ENCODING = 'utf-8';
+function base64Encode(str) {
+ return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
+}
+exports.base64Encode = base64Encode;
+function base64Decode(str) {
+ return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
+}
+exports.base64Decode = base64Decode;
diff --git a/node_modules/@sigstore/core/dist/index.js b/node_modules/@sigstore/core/dist/index.js
new file mode 100644
index 0000000000000..ac35e86a8df7d
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/index.js
@@ -0,0 +1,56 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var asn1_1 = require("./asn1");
+Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } });
+exports.crypto = __importStar(require("./crypto"));
+exports.dsse = __importStar(require("./dsse"));
+exports.encoding = __importStar(require("./encoding"));
+exports.json = __importStar(require("./json"));
+exports.pem = __importStar(require("./pem"));
+var rfc3161_1 = require("./rfc3161");
+Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } });
+var stream_1 = require("./stream");
+Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } });
+var x509_1 = require("./x509");
+Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } });
+Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } });
+Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } });
diff --git a/node_modules/@sigstore/core/dist/json.js b/node_modules/@sigstore/core/dist/json.js
new file mode 100644
index 0000000000000..a50df7233c7c5
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/json.js
@@ -0,0 +1,61 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = void 0;
+// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+ let buffer = '';
+ if (object === null || typeof object !== 'object' || object.toJSON != null) {
+ // Primitives or toJSONable objects
+ buffer += JSON.stringify(object);
+ }
+ else if (Array.isArray(object)) {
+ // Array - maintain element order
+ buffer += '[';
+ let first = true;
+ object.forEach((element) => {
+ if (!first) {
+ buffer += ',';
+ }
+ first = false;
+ // recursive call
+ buffer += canonicalize(element);
+ });
+ buffer += ']';
+ }
+ else {
+ // Object - Sort properties before serializing
+ buffer += '{';
+ let first = true;
+ Object.keys(object)
+ .sort()
+ .forEach((property) => {
+ if (!first) {
+ buffer += ',';
+ }
+ first = false;
+ buffer += JSON.stringify(property);
+ buffer += ':';
+ // recursive call
+ buffer += canonicalize(object[property]);
+ });
+ buffer += '}';
+ }
+ return buffer;
+}
+exports.canonicalize = canonicalize;
diff --git a/node_modules/@sigstore/core/dist/oid.js b/node_modules/@sigstore/core/dist/oid.js
new file mode 100644
index 0000000000000..ac7a643067ad0
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/oid.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0;
+exports.ECDSA_SIGNATURE_ALGOS = {
+ '1.2.840.10045.4.3.1': 'sha224',
+ '1.2.840.10045.4.3.2': 'sha256',
+ '1.2.840.10045.4.3.3': 'sha384',
+ '1.2.840.10045.4.3.4': 'sha512',
+};
+exports.SHA2_HASH_ALGOS = {
+ '2.16.840.1.101.3.4.2.1': 'sha256',
+ '2.16.840.1.101.3.4.2.2': 'sha384',
+ '2.16.840.1.101.3.4.2.3': 'sha512',
+};
diff --git a/node_modules/@sigstore/core/dist/pem.js b/node_modules/@sigstore/core/dist/pem.js
new file mode 100644
index 0000000000000..f35bc3835bbd1
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/pem.js
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromDER = exports.toDER = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PEM_HEADER = /-----BEGIN (.*)-----/;
+const PEM_FOOTER = /-----END (.*)-----/;
+function toDER(certificate) {
+ let der = '';
+ certificate.split('\n').forEach((line) => {
+ if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) {
+ return;
+ }
+ der += line;
+ });
+ return Buffer.from(der, 'base64');
+}
+exports.toDER = toDER;
+// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
+// encoding dictates that each certificate should have a trailing newline after
+// the footer.
+function fromDER(certificate, type = 'CERTIFICATE') {
+ // Base64-encode the certificate.
+ const der = certificate.toString('base64');
+ // Split the certificate into lines of 64 characters.
+ const lines = der.match(/.{1,64}/g) || '';
+ return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`]
+ .join('\n')
+ .concat('\n');
+}
+exports.fromDER = fromDER;
diff --git a/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/@sigstore/core/dist/rfc3161/error.js
new file mode 100644
index 0000000000000..b9b549b0bb323
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/rfc3161/error.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161TimestampVerificationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class RFC3161TimestampVerificationError extends Error {
+}
+exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError;
diff --git a/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/@sigstore/core/dist/rfc3161/index.js
new file mode 100644
index 0000000000000..b77ecf1c7d50c
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/rfc3161/index.js
@@ -0,0 +1,20 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161Timestamp = void 0;
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } });
diff --git a/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
new file mode 100644
index 0000000000000..3e61fc1a4e169
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
@@ -0,0 +1,201 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161Timestamp = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const asn1_1 = require("../asn1");
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const error_1 = require("./error");
+const tstinfo_1 = require("./tstinfo");
+const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2';
+const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4';
+const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4';
+class RFC3161Timestamp {
+ constructor(asn1) {
+ this.root = asn1;
+ }
+ static parse(der) {
+ const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
+ return new RFC3161Timestamp(asn1);
+ }
+ get status() {
+ return this.pkiStatusInfoObj.subs[0].toInteger();
+ }
+ get contentType() {
+ return this.contentTypeObj.toOID();
+ }
+ get eContentType() {
+ return this.eContentTypeObj.toOID();
+ }
+ get signingTime() {
+ return this.tstInfo.genTime;
+ }
+ get signerIssuer() {
+ return this.signerSidObj.subs[0].value;
+ }
+ get signerSerialNumber() {
+ return this.signerSidObj.subs[1].value;
+ }
+ get signerDigestAlgorithm() {
+ const oid = this.signerDigestAlgorithmObj.subs[0].toOID();
+ return oid_1.SHA2_HASH_ALGOS[oid];
+ }
+ get signatureAlgorithm() {
+ const oid = this.signatureAlgorithmObj.subs[0].toOID();
+ return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
+ }
+ get signatureValue() {
+ return this.signatureValueObj.value;
+ }
+ get tstInfo() {
+ // Need to unpack tstInfo from an OCTET STRING
+ return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]);
+ }
+ verify(data, publicKey) {
+ if (!this.timeStampTokenObj) {
+ throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing');
+ }
+ // Check for expected ContentInfo content type
+ if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) {
+ throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`);
+ }
+ // Check for expected encapsulated content type
+ if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) {
+ throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`);
+ }
+ // Check that the tstInfo references the correct artifact
+ this.tstInfo.verify(data);
+ // Check that the signed message digest matches the tstInfo
+ this.verifyMessageDigest();
+ // Check that the signature is valid for the signed attributes
+ this.verifySignature(publicKey);
+ }
+ verifyMessageDigest() {
+ // Check that the tstInfo matches the signed data
+ const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw);
+ const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value;
+ if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) {
+ throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo');
+ }
+ }
+ verifySignature(key) {
+ // Encode the signed attributes for verification
+ const signedAttrs = this.signedAttrsObj.toDER();
+ signedAttrs[0] = 0x31; // Change context-specific tag to SET
+ // Check that the signature is valid for the signed attributes
+ const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm);
+ if (!verified) {
+ throw new error_1.RFC3161TimestampVerificationError('signature verification failed');
+ }
+ }
+ // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+ get pkiStatusInfoObj() {
+ // pkiStatusInfo is the first element of the timestamp response sequence
+ return this.root.subs[0];
+ }
+ // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+ get timeStampTokenObj() {
+ // timeStampToken is the first element of the timestamp response sequence
+ return this.root.subs[1];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-3
+ get contentTypeObj() {
+ return this.timeStampTokenObj.subs[0];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5652#section-3
+ get signedDataObj() {
+ const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
+ return obj.subs[0];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
+ get encapContentInfoObj() {
+ return this.signedDataObj.subs[2];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
+ get signerInfosObj() {
+ // SignerInfos is the last element of the signed data sequence
+ const sd = this.signedDataObj;
+ return sd.subs[sd.subs.length - 1];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5652#section-5.1
+ get signerInfoObj() {
+ // Only supporting one signer
+ return this.signerInfosObj.subs[0];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
+ get eContentTypeObj() {
+ return this.encapContentInfoObj.subs[0];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
+ get eContentObj() {
+ return this.encapContentInfoObj.subs[1];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get signedAttrsObj() {
+ const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
+ return signedAttrs;
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get messageDigestAttributeObj() {
+ const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() &&
+ sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY);
+ return messageDigest;
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get signerSidObj() {
+ return this.signerInfoObj.subs[1];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get signerDigestAlgorithmObj() {
+ // Signature is the 2nd element of the signerInfoObj object
+ return this.signerInfoObj.subs[2];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get signatureAlgorithmObj() {
+ // Signature is the 4th element of the signerInfoObj object
+ return this.signerInfoObj.subs[4];
+ }
+ // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+ get signatureValueObj() {
+ // Signature is the 6th element of the signerInfoObj object
+ return this.signerInfoObj.subs[5];
+ }
+}
+exports.RFC3161Timestamp = RFC3161Timestamp;
diff --git a/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
new file mode 100644
index 0000000000000..dc8e4fb339383
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
@@ -0,0 +1,61 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSTInfo = void 0;
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const error_1 = require("./error");
+class TSTInfo {
+ constructor(asn1) {
+ this.root = asn1;
+ }
+ get version() {
+ return this.root.subs[0].toInteger();
+ }
+ get genTime() {
+ return this.root.subs[4].toDate();
+ }
+ get messageImprintHashAlgorithm() {
+ const oid = this.messageImprintObj.subs[0].subs[0].toOID();
+ return oid_1.SHA2_HASH_ALGOS[oid];
+ }
+ get messageImprintHashedMessage() {
+ return this.messageImprintObj.subs[1].value;
+ }
+ get raw() {
+ return this.root.toDER();
+ }
+ verify(data) {
+ const digest = crypto.digest(this.messageImprintHashAlgorithm, data);
+ if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) {
+ throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact');
+ }
+ }
+ // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+ get messageImprintObj() {
+ return this.root.subs[2];
+ }
+}
+exports.TSTInfo = TSTInfo;
diff --git a/node_modules/@sigstore/core/dist/stream.js b/node_modules/@sigstore/core/dist/stream.js
new file mode 100644
index 0000000000000..0a24f8582eb23
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/stream.js
@@ -0,0 +1,115 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ByteStream = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class StreamError extends Error {
+}
+class ByteStream {
+ constructor(buffer) {
+ this.start = 0;
+ if (buffer) {
+ this.buf = buffer;
+ this.view = Buffer.from(buffer);
+ }
+ else {
+ this.buf = new ArrayBuffer(0);
+ this.view = Buffer.from(this.buf);
+ }
+ }
+ get buffer() {
+ return this.view.subarray(0, this.start);
+ }
+ get length() {
+ return this.view.byteLength;
+ }
+ get position() {
+ return this.start;
+ }
+ seek(position) {
+ this.start = position;
+ }
+ // Returns a Buffer containing the specified number of bytes starting at the
+ // given start position.
+ slice(start, len) {
+ const end = start + len;
+ if (end > this.length) {
+ throw new StreamError('request past end of buffer');
+ }
+ return this.view.subarray(start, end);
+ }
+ appendChar(char) {
+ this.ensureCapacity(1);
+ this.view[this.start] = char;
+ this.start += 1;
+ }
+ appendUint16(num) {
+ this.ensureCapacity(2);
+ const value = new Uint16Array([num]);
+ const view = new Uint8Array(value.buffer);
+ this.view[this.start] = view[1];
+ this.view[this.start + 1] = view[0];
+ this.start += 2;
+ }
+ appendUint24(num) {
+ this.ensureCapacity(3);
+ const value = new Uint32Array([num]);
+ const view = new Uint8Array(value.buffer);
+ this.view[this.start] = view[2];
+ this.view[this.start + 1] = view[1];
+ this.view[this.start + 2] = view[0];
+ this.start += 3;
+ }
+ appendView(view) {
+ this.ensureCapacity(view.length);
+ this.view.set(view, this.start);
+ this.start += view.length;
+ }
+ getBlock(size) {
+ if (size <= 0) {
+ return Buffer.alloc(0);
+ }
+ if (this.start + size > this.view.length) {
+ throw new Error('request past end of buffer');
+ }
+ const result = this.view.subarray(this.start, this.start + size);
+ this.start += size;
+ return result;
+ }
+ getUint8() {
+ return this.getBlock(1)[0];
+ }
+ getUint16() {
+ const block = this.getBlock(2);
+ return (block[0] << 8) | block[1];
+ }
+ ensureCapacity(size) {
+ if (this.start + size > this.view.byteLength) {
+ const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0);
+ this.realloc(this.view.byteLength + blockSize);
+ }
+ }
+ realloc(size) {
+ const newArray = new ArrayBuffer(size);
+ const newView = Buffer.from(newArray);
+ // Copy the old buffer into the new one
+ newView.set(this.view);
+ this.buf = newArray;
+ this.view = newView;
+ }
+}
+exports.ByteStream = ByteStream;
+ByteStream.BLOCK_SIZE = 1024;
diff --git a/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/@sigstore/core/dist/x509/cert.js
new file mode 100644
index 0000000000000..16c0c40d858d8
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/x509/cert.js
@@ -0,0 +1,226 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const asn1_1 = require("../asn1");
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const pem = __importStar(require("../pem"));
+const ext_1 = require("./ext");
+const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14';
+const EXTENSION_OID_KEY_USAGE = '2.5.29.15';
+const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17';
+const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19';
+const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35';
+exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2';
+class X509Certificate {
+ constructor(asn1) {
+ this.root = asn1;
+ }
+ static parse(cert) {
+ const der = typeof cert === 'string' ? pem.toDER(cert) : cert;
+ const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
+ return new X509Certificate(asn1);
+ }
+ get tbsCertificate() {
+ return this.tbsCertificateObj;
+ }
+ get version() {
+ // version number is the first element of the version context specific tag
+ const ver = this.versionObj.subs[0].toInteger();
+ return `v${(ver + BigInt(1)).toString()}`;
+ }
+ get serialNumber() {
+ return this.serialNumberObj.value;
+ }
+ get notBefore() {
+ // notBefore is the first element of the validity sequence
+ return this.validityObj.subs[0].toDate();
+ }
+ get notAfter() {
+ // notAfter is the second element of the validity sequence
+ return this.validityObj.subs[1].toDate();
+ }
+ get issuer() {
+ return this.issuerObj.value;
+ }
+ get subject() {
+ return this.subjectObj.value;
+ }
+ get publicKey() {
+ return this.subjectPublicKeyInfoObj.toDER();
+ }
+ get signatureAlgorithm() {
+ const oid = this.signatureAlgorithmObj.subs[0].toOID();
+ return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
+ }
+ get signatureValue() {
+ // Signature value is a bit string, so we need to skip the first byte
+ return this.signatureValueObj.value.subarray(1);
+ }
+ get subjectAltName() {
+ const ext = this.extSubjectAltName;
+ return ext?.uri || ext?.rfc822Name;
+ }
+ get extensions() {
+ // The extension list is the first (and only) element of the extensions
+ // context specific tag
+ const extSeq = this.extensionsObj?.subs[0];
+ return extSeq?.subs || /* istanbul ignore next */ [];
+ }
+ get extKeyUsage() {
+ const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
+ return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined;
+ }
+ get extBasicConstraints() {
+ const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS);
+ return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined;
+ }
+ get extSubjectAltName() {
+ const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME);
+ return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined;
+ }
+ get extAuthorityKeyID() {
+ const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID);
+ return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined;
+ }
+ get extSubjectKeyID() {
+ const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID);
+ return ext
+ ? new ext_1.X509SubjectKeyIDExtension(ext)
+ : /* istanbul ignore next */ undefined;
+ }
+ get extSCT() {
+ const ext = this.findExtension(exports.EXTENSION_OID_SCT);
+ return ext ? new ext_1.X509SCTExtension(ext) : undefined;
+ }
+ get isCA() {
+ const ca = this.extBasicConstraints?.isCA || false;
+ // If the KeyUsage extension is present, keyCertSign must be set
+ if (this.extKeyUsage) {
+ ca && this.extKeyUsage.keyCertSign;
+ }
+ return ca;
+ }
+ extension(oid) {
+ const ext = this.findExtension(oid);
+ return ext ? new ext_1.X509Extension(ext) : undefined;
+ }
+ verify(issuerCertificate) {
+ // Use the issuer's public key if provided, otherwise use the subject's
+ const publicKey = issuerCertificate?.publicKey || this.publicKey;
+ const key = crypto.createPublicKey(publicKey);
+ return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm);
+ }
+ validForDate(date) {
+ return this.notBefore <= date && date <= this.notAfter;
+ }
+ equals(other) {
+ return this.root.toDER().equals(other.root.toDER());
+ }
+ // Creates a copy of the certificate with a new buffer
+ clone() {
+ const der = this.root.toDER();
+ const clone = Buffer.alloc(der.length);
+ der.copy(clone);
+ return X509Certificate.parse(clone);
+ }
+ findExtension(oid) {
+ // Find the extension with the given OID. The OID will always be the first
+ // element of the extension sequence
+ return this.extensions.find((ext) => ext.subs[0].toOID() === oid);
+ }
+ /////////////////////////////////////////////////////////////////////////////
+ // The following properties use the documented x509 structure to locate the
+ // desired ASN.1 object
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1
+ get tbsCertificateObj() {
+ // tbsCertificate is the first element of the certificate sequence
+ return this.root.subs[0];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2
+ get signatureAlgorithmObj() {
+ // signatureAlgorithm is the second element of the certificate sequence
+ return this.root.subs[1];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3
+ get signatureValueObj() {
+ // signatureValue is the third element of the certificate sequence
+ return this.root.subs[2];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1
+ get versionObj() {
+ // version is the first element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[0];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2
+ get serialNumberObj() {
+ // serialNumber is the second element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[1];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4
+ get issuerObj() {
+ // issuer is the fourth element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[3];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5
+ get validityObj() {
+ // version is the fifth element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[4];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6
+ get subjectObj() {
+ // subject is the sixth element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[5];
+ }
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7
+ get subjectPublicKeyInfoObj() {
+ // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence
+ return this.tbsCertificateObj.subs[6];
+ }
+ // Extensions can't be located by index because their position varies. Instead,
+ // we need to find the extensions context specific tag
+ // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9
+ get extensionsObj() {
+ return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03));
+ }
+}
+exports.X509Certificate = X509Certificate;
diff --git a/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/@sigstore/core/dist/x509/ext.js
new file mode 100644
index 0000000000000..1d481261b0aa6
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/x509/ext.js
@@ -0,0 +1,145 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0;
+const stream_1 = require("../stream");
+const sct_1 = require("./sct");
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+class X509Extension {
+ constructor(asn1) {
+ this.root = asn1;
+ }
+ get oid() {
+ return this.root.subs[0].toOID();
+ }
+ get critical() {
+ // The critical field is optional and will be the second element of the
+ // extension sequence if present. Default to false if not present.
+ return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false;
+ }
+ get value() {
+ return this.extnValueObj.value;
+ }
+ get valueObj() {
+ return this.extnValueObj;
+ }
+ get extnValueObj() {
+ // The extnValue field will be the last element of the extension sequence
+ return this.root.subs[this.root.subs.length - 1];
+ }
+}
+exports.X509Extension = X509Extension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9
+class X509BasicConstraintsExtension extends X509Extension {
+ get isCA() {
+ return this.sequence.subs[0]?.toBoolean() ?? false;
+ }
+ get pathLenConstraint() {
+ return this.sequence.subs.length > 1
+ ? this.sequence.subs[1].toInteger()
+ : undefined;
+ }
+ // The extnValue field contains a single sequence wrapping the isCA and
+ // pathLenConstraint.
+ get sequence() {
+ return this.extnValueObj.subs[0];
+ }
+}
+exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3
+class X509KeyUsageExtension extends X509Extension {
+ get digitalSignature() {
+ return this.bitString[0] === 1;
+ }
+ get keyCertSign() {
+ return this.bitString[5] === 1;
+ }
+ get crlSign() {
+ return this.bitString[6] === 1;
+ }
+ // The extnValue field contains a single bit string which is a bit mask
+ // indicating which key usages are enabled.
+ get bitString() {
+ return this.extnValueObj.subs[0].toBitString();
+ }
+}
+exports.X509KeyUsageExtension = X509KeyUsageExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6
+class X509SubjectAlternativeNameExtension extends X509Extension {
+ get rfc822Name() {
+ return this.findGeneralName(0x01)?.value.toString('ascii');
+ }
+ get uri() {
+ return this.findGeneralName(0x06)?.value.toString('ascii');
+ }
+ // Retrieve the value of an otherName with the given OID.
+ otherName(oid) {
+ const otherName = this.findGeneralName(0x00);
+ if (otherName === undefined) {
+ return undefined;
+ }
+ // The otherName is a sequence containing an OID and a value.
+ // Need to check that the OID matches the one we're looking for.
+ const otherNameOID = otherName.subs[0].toOID();
+ if (otherNameOID !== oid) {
+ return undefined;
+ }
+ // The otherNameValue is a sequence containing the actual value.
+ const otherNameValue = otherName.subs[1];
+ return otherNameValue.subs[0].value.toString('ascii');
+ }
+ findGeneralName(tag) {
+ return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag));
+ }
+ // The extnValue field contains a sequence of GeneralNames.
+ get generalNames() {
+ return this.extnValueObj.subs[0].subs;
+ }
+}
+exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1
+class X509AuthorityKeyIDExtension extends X509Extension {
+ get keyIdentifier() {
+ return this.findSequenceMember(0x00)?.value;
+ }
+ findSequenceMember(tag) {
+ return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag));
+ }
+ // The extnValue field contains a single sequence wrapping the keyIdentifier
+ get sequence() {
+ return this.extnValueObj.subs[0];
+ }
+}
+exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2
+class X509SubjectKeyIDExtension extends X509Extension {
+ get keyIdentifier() {
+ return this.extnValueObj.subs[0].value;
+ }
+}
+exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc6962#section-3.3
+class X509SCTExtension extends X509Extension {
+ constructor(asn1) {
+ super(asn1);
+ }
+ get signedCertificateTimestamps() {
+ const buf = this.extnValueObj.subs[0].value;
+ const stream = new stream_1.ByteStream(buf);
+ // The overall list length is encoded in the first two bytes -- note this
+ // is the length of the list in bytes, NOT the number of SCTs in the list
+ const end = stream.getUint16() + 2;
+ const sctList = [];
+ while (stream.position < end) {
+ // Read the length of the next SCT
+ const sctLength = stream.getUint16();
+ // Slice out the bytes for the next SCT and parse it
+ const sct = stream.getBlock(sctLength);
+ sctList.push(sct_1.SignedCertificateTimestamp.parse(sct));
+ }
+ if (stream.position !== end) {
+ throw new Error('SCT list length does not match actual length');
+ }
+ return sctList;
+ }
+}
+exports.X509SCTExtension = X509SCTExtension;
diff --git a/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/@sigstore/core/dist/x509/index.js
new file mode 100644
index 0000000000000..cdd77e58f37d5
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/x509/index.js
@@ -0,0 +1,23 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
+var cert_1 = require("./cert");
+Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } });
+Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } });
+var ext_1 = require("./ext");
+Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } });
diff --git a/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/@sigstore/core/dist/x509/sct.js
new file mode 100644
index 0000000000000..1603059c0d1ac
--- /dev/null
+++ b/node_modules/@sigstore/core/dist/x509/sct.js
@@ -0,0 +1,141 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SignedCertificateTimestamp = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto = __importStar(require("../crypto"));
+const stream_1 = require("../stream");
+class SignedCertificateTimestamp {
+ constructor(options) {
+ this.version = options.version;
+ this.logID = options.logID;
+ this.timestamp = options.timestamp;
+ this.extensions = options.extensions;
+ this.hashAlgorithm = options.hashAlgorithm;
+ this.signatureAlgorithm = options.signatureAlgorithm;
+ this.signature = options.signature;
+ }
+ get datetime() {
+ return new Date(Number(this.timestamp.readBigInt64BE()));
+ }
+ // Returns the hash algorithm used to generate the SCT's signature.
+ // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+ get algorithm() {
+ switch (this.hashAlgorithm) {
+ /* istanbul ignore next */
+ case 0:
+ return 'none';
+ /* istanbul ignore next */
+ case 1:
+ return 'md5';
+ /* istanbul ignore next */
+ case 2:
+ return 'sha1';
+ /* istanbul ignore next */
+ case 3:
+ return 'sha224';
+ case 4:
+ return 'sha256';
+ /* istanbul ignore next */
+ case 5:
+ return 'sha384';
+ /* istanbul ignore next */
+ case 6:
+ return 'sha512';
+ /* istanbul ignore next */
+ default:
+ return 'unknown';
+ }
+ }
+ verify(preCert, key) {
+ // Assemble the digitally-signed struct (the data over which the signature
+ // was generated).
+ // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+ const stream = new stream_1.ByteStream();
+ stream.appendChar(this.version);
+ stream.appendChar(0x00); // SignatureType = certificate_timestamp(0)
+ stream.appendView(this.timestamp);
+ stream.appendUint16(0x01); // LogEntryType = precert_entry(1)
+ stream.appendView(preCert);
+ stream.appendUint16(this.extensions.byteLength);
+ /* istanbul ignore next - extensions are very uncommon */
+ if (this.extensions.byteLength > 0) {
+ stream.appendView(this.extensions);
+ }
+ return crypto.verify(stream.buffer, key, this.signature, this.algorithm);
+ }
+ // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using
+ // TLS encoding which means the fields and lengths of most fields are
+ // specified as part of the SCT and TLS specs.
+ // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+ // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+ static parse(buf) {
+ const stream = new stream_1.ByteStream(buf);
+ // Version - enum { v1(0), (255) }
+ const version = stream.getUint8();
+ // Log ID - struct { opaque key_id[32]; }
+ const logID = stream.getBlock(32);
+ // Timestamp - uint64
+ const timestamp = stream.getBlock(8);
+ // Extensions - opaque extensions<0..2^16-1>;
+ const extenstionLength = stream.getUint16();
+ const extensions = stream.getBlock(extenstionLength);
+ // Hash algo - enum { sha256(4), . . . (255) }
+ const hashAlgorithm = stream.getUint8();
+ // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) }
+ const signatureAlgorithm = stream.getUint8();
+ // Signature - opaque signature<0..2^16-1>;
+ const sigLength = stream.getUint16();
+ const signature = stream.getBlock(sigLength);
+ // Check that we read the entire buffer
+ if (stream.position !== buf.length) {
+ throw new Error('SCT buffer length mismatch');
+ }
+ return new SignedCertificateTimestamp({
+ version,
+ logID,
+ timestamp,
+ extensions,
+ hashAlgorithm,
+ signatureAlgorithm,
+ signature,
+ });
+ }
+}
+exports.SignedCertificateTimestamp = SignedCertificateTimestamp;
diff --git a/node_modules/@sigstore/core/package.json b/node_modules/@sigstore/core/package.json
new file mode 100644
index 0000000000000..621ff1715bcd1
--- /dev/null
+++ b/node_modules/@sigstore/core/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "@sigstore/core",
+ "version": "1.1.0",
+ "description": "Base library for Sigstore",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..0c367a8384454
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,89 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+function createBaseEnvelope() {
+ return { payload: Buffer.alloc(0), payloadType: "", signatures: [] };
+}
+exports.Envelope = {
+ fromJSON(object) {
+ return {
+ payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+ payloadType: isSet(object.payloadType) ? String(object.payloadType) : "",
+ signatures: Array.isArray(object?.signatures) ? object.signatures.map((e) => exports.Signature.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.payload !== undefined &&
+ (obj.payload = base64FromBytes(message.payload !== undefined ? message.payload : Buffer.alloc(0)));
+ message.payloadType !== undefined && (obj.payloadType = message.payloadType);
+ if (message.signatures) {
+ obj.signatures = message.signatures.map((e) => e ? exports.Signature.toJSON(e) : undefined);
+ }
+ else {
+ obj.signatures = [];
+ }
+ return obj;
+ },
+};
+function createBaseSignature() {
+ return { sig: Buffer.alloc(0), keyid: "" };
+}
+exports.Signature = {
+ fromJSON(object) {
+ return {
+ sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+ keyid: isSet(object.keyid) ? String(object.keyid) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.sig !== undefined && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : Buffer.alloc(0)));
+ message.keyid !== undefined && (obj.keyid = message.keyid);
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..073093b8371a8
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+ return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+ fromJSON(object) {
+ return {
+ id: isSet(object.id) ? String(object.id) : "",
+ source: isSet(object.source) ? String(object.source) : "",
+ specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+ type: isSet(object.type) ? String(object.type) : "",
+ attributes: isObject(object.attributes)
+ ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+ acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+ return acc;
+ }, {})
+ : {},
+ data: isSet(object.binaryData)
+ ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+ : isSet(object.textData)
+ ? { $case: "textData", textData: String(object.textData) }
+ : isSet(object.protoData)
+ ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.id !== undefined && (obj.id = message.id);
+ message.source !== undefined && (obj.source = message.source);
+ message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+ message.type !== undefined && (obj.type = message.type);
+ obj.attributes = {};
+ if (message.attributes) {
+ Object.entries(message.attributes).forEach(([k, v]) => {
+ obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+ });
+ }
+ message.data?.$case === "binaryData" &&
+ (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+ message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+ message.data?.$case === "protoData" &&
+ (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+ return obj;
+ },
+};
+function createBaseCloudEvent_AttributesEntry() {
+ return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+ fromJSON(object) {
+ return {
+ key: isSet(object.key) ? String(object.key) : "",
+ value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.key !== undefined && (obj.key = message.key);
+ message.value !== undefined &&
+ (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+ return obj;
+ },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+ return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+ fromJSON(object) {
+ return {
+ attr: isSet(object.ceBoolean)
+ ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+ : isSet(object.ceInteger)
+ ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+ : isSet(object.ceString)
+ ? { $case: "ceString", ceString: String(object.ceString) }
+ : isSet(object.ceBytes)
+ ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+ : isSet(object.ceUri)
+ ? { $case: "ceUri", ceUri: String(object.ceUri) }
+ : isSet(object.ceUriRef)
+ ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+ : isSet(object.ceTimestamp)
+ ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+ message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+ message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+ message.attr?.$case === "ceBytes" &&
+ (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+ message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+ message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+ message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+ return obj;
+ },
+};
+function createBaseCloudEventBatch() {
+ return { events: [] };
+}
+exports.CloudEventBatch = {
+ fromJSON(object) {
+ return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.events) {
+ obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+ }
+ else {
+ obj.events = [];
+ }
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function fromTimestamp(t) {
+ let millis = Number(t.seconds) * 1000;
+ millis += t.nanos / 1000000;
+ return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+ if (o instanceof Date) {
+ return o;
+ }
+ else if (typeof o === "string") {
+ return new Date(o);
+ }
+ else {
+ return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+ }
+}
+function isObject(value) {
+ return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..da627499ad765
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,119 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fieldBehaviorToJSON = exports.fieldBehaviorFromJSON = exports.FieldBehavior = void 0;
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+ /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+ FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+ /**
+ * OPTIONAL - Specifically denotes a field as optional.
+ * While all fields in protocol buffers are optional, this may be specified
+ * for emphasis if appropriate.
+ */
+ FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+ /**
+ * REQUIRED - Denotes a field as required.
+ * This indicates that the field **must** be provided as part of the request,
+ * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+ */
+ FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+ /**
+ * OUTPUT_ONLY - Denotes a field as output only.
+ * This indicates that the field is provided in responses, but including the
+ * field in a request does nothing (the server *must* ignore it and
+ * *must not* throw an error as a result of the field's presence).
+ */
+ FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+ /**
+ * INPUT_ONLY - Denotes a field as input only.
+ * This indicates that the field is provided in requests, and the
+ * corresponding field is not included in output.
+ */
+ FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+ /**
+ * IMMUTABLE - Denotes a field as immutable.
+ * This indicates that the field may be set once in a request to create a
+ * resource, but may not be changed thereafter.
+ */
+ FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+ /**
+ * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+ * This indicates that the service may provide the elements of the list
+ * in any arbitrary order, rather than the order the user originally
+ * provided. Additionally, the list's order may or may not be stable.
+ */
+ FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+})(FieldBehavior = exports.FieldBehavior || (exports.FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "FIELD_BEHAVIOR_UNSPECIFIED":
+ return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+ case 1:
+ case "OPTIONAL":
+ return FieldBehavior.OPTIONAL;
+ case 2:
+ case "REQUIRED":
+ return FieldBehavior.REQUIRED;
+ case 3:
+ case "OUTPUT_ONLY":
+ return FieldBehavior.OUTPUT_ONLY;
+ case 4:
+ case "INPUT_ONLY":
+ return FieldBehavior.INPUT_ONLY;
+ case 5:
+ case "IMMUTABLE":
+ return FieldBehavior.IMMUTABLE;
+ case 6:
+ case "UNORDERED_LIST":
+ return FieldBehavior.UNORDERED_LIST;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ }
+}
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+function fieldBehaviorToJSON(object) {
+ switch (object) {
+ case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+ return "FIELD_BEHAVIOR_UNSPECIFIED";
+ case FieldBehavior.OPTIONAL:
+ return "OPTIONAL";
+ case FieldBehavior.REQUIRED:
+ return "REQUIRED";
+ case FieldBehavior.OUTPUT_ONLY:
+ return "OUTPUT_ONLY";
+ case FieldBehavior.INPUT_ONLY:
+ return "INPUT_ONLY";
+ case FieldBehavior.IMMUTABLE:
+ return "IMMUTABLE";
+ case FieldBehavior.UNORDERED_LIST:
+ return "UNORDERED_LIST";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ }
+}
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..6b3f3c97a6647
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+ return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+ fromJSON(object) {
+ return {
+ typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+ value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+ message.value !== undefined &&
+ (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d429aac846043
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,1308 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo_Annotation = exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.methodOptions_IdempotencyLevelToJSON = exports.methodOptions_IdempotencyLevelFromJSON = exports.MethodOptions_IdempotencyLevel = exports.fieldOptions_JSTypeToJSON = exports.fieldOptions_JSTypeFromJSON = exports.FieldOptions_JSType = exports.fieldOptions_CTypeToJSON = exports.fieldOptions_CTypeFromJSON = exports.FieldOptions_CType = exports.fileOptions_OptimizeModeToJSON = exports.fileOptions_OptimizeModeFromJSON = exports.FileOptions_OptimizeMode = exports.fieldDescriptorProto_LabelToJSON = exports.fieldDescriptorProto_LabelFromJSON = exports.FieldDescriptorProto_Label = exports.fieldDescriptorProto_TypeToJSON = exports.fieldDescriptorProto_TypeFromJSON = exports.FieldDescriptorProto_Type = void 0;
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+ /**
+ * TYPE_DOUBLE - 0 is reserved for errors.
+ * Order is weird for historical reasons.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+ /**
+ * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
+ * negative values are likely.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+ /**
+ * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
+ * negative values are likely.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+ /**
+ * TYPE_GROUP - Tag-delimited aggregate.
+ * Group type is deprecated and not supported in proto3. However, Proto3
+ * implementations should still be able to parse the group wire format and
+ * treat group fields as unknown fields.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+ /** TYPE_MESSAGE - Length-delimited aggregate. */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+ /** TYPE_BYTES - New in version 2. */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+ /** TYPE_SINT32 - Uses ZigZag encoding. */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+ /** TYPE_SINT64 - Uses ZigZag encoding. */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type = exports.FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+ switch (object) {
+ case 1:
+ case "TYPE_DOUBLE":
+ return FieldDescriptorProto_Type.TYPE_DOUBLE;
+ case 2:
+ case "TYPE_FLOAT":
+ return FieldDescriptorProto_Type.TYPE_FLOAT;
+ case 3:
+ case "TYPE_INT64":
+ return FieldDescriptorProto_Type.TYPE_INT64;
+ case 4:
+ case "TYPE_UINT64":
+ return FieldDescriptorProto_Type.TYPE_UINT64;
+ case 5:
+ case "TYPE_INT32":
+ return FieldDescriptorProto_Type.TYPE_INT32;
+ case 6:
+ case "TYPE_FIXED64":
+ return FieldDescriptorProto_Type.TYPE_FIXED64;
+ case 7:
+ case "TYPE_FIXED32":
+ return FieldDescriptorProto_Type.TYPE_FIXED32;
+ case 8:
+ case "TYPE_BOOL":
+ return FieldDescriptorProto_Type.TYPE_BOOL;
+ case 9:
+ case "TYPE_STRING":
+ return FieldDescriptorProto_Type.TYPE_STRING;
+ case 10:
+ case "TYPE_GROUP":
+ return FieldDescriptorProto_Type.TYPE_GROUP;
+ case 11:
+ case "TYPE_MESSAGE":
+ return FieldDescriptorProto_Type.TYPE_MESSAGE;
+ case 12:
+ case "TYPE_BYTES":
+ return FieldDescriptorProto_Type.TYPE_BYTES;
+ case 13:
+ case "TYPE_UINT32":
+ return FieldDescriptorProto_Type.TYPE_UINT32;
+ case 14:
+ case "TYPE_ENUM":
+ return FieldDescriptorProto_Type.TYPE_ENUM;
+ case 15:
+ case "TYPE_SFIXED32":
+ return FieldDescriptorProto_Type.TYPE_SFIXED32;
+ case 16:
+ case "TYPE_SFIXED64":
+ return FieldDescriptorProto_Type.TYPE_SFIXED64;
+ case 17:
+ case "TYPE_SINT32":
+ return FieldDescriptorProto_Type.TYPE_SINT32;
+ case 18:
+ case "TYPE_SINT64":
+ return FieldDescriptorProto_Type.TYPE_SINT64;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ }
+}
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+function fieldDescriptorProto_TypeToJSON(object) {
+ switch (object) {
+ case FieldDescriptorProto_Type.TYPE_DOUBLE:
+ return "TYPE_DOUBLE";
+ case FieldDescriptorProto_Type.TYPE_FLOAT:
+ return "TYPE_FLOAT";
+ case FieldDescriptorProto_Type.TYPE_INT64:
+ return "TYPE_INT64";
+ case FieldDescriptorProto_Type.TYPE_UINT64:
+ return "TYPE_UINT64";
+ case FieldDescriptorProto_Type.TYPE_INT32:
+ return "TYPE_INT32";
+ case FieldDescriptorProto_Type.TYPE_FIXED64:
+ return "TYPE_FIXED64";
+ case FieldDescriptorProto_Type.TYPE_FIXED32:
+ return "TYPE_FIXED32";
+ case FieldDescriptorProto_Type.TYPE_BOOL:
+ return "TYPE_BOOL";
+ case FieldDescriptorProto_Type.TYPE_STRING:
+ return "TYPE_STRING";
+ case FieldDescriptorProto_Type.TYPE_GROUP:
+ return "TYPE_GROUP";
+ case FieldDescriptorProto_Type.TYPE_MESSAGE:
+ return "TYPE_MESSAGE";
+ case FieldDescriptorProto_Type.TYPE_BYTES:
+ return "TYPE_BYTES";
+ case FieldDescriptorProto_Type.TYPE_UINT32:
+ return "TYPE_UINT32";
+ case FieldDescriptorProto_Type.TYPE_ENUM:
+ return "TYPE_ENUM";
+ case FieldDescriptorProto_Type.TYPE_SFIXED32:
+ return "TYPE_SFIXED32";
+ case FieldDescriptorProto_Type.TYPE_SFIXED64:
+ return "TYPE_SFIXED64";
+ case FieldDescriptorProto_Type.TYPE_SINT32:
+ return "TYPE_SINT32";
+ case FieldDescriptorProto_Type.TYPE_SINT64:
+ return "TYPE_SINT64";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ }
+}
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+ /** LABEL_OPTIONAL - 0 is reserved for errors */
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+})(FieldDescriptorProto_Label = exports.FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+ switch (object) {
+ case 1:
+ case "LABEL_OPTIONAL":
+ return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+ case 2:
+ case "LABEL_REQUIRED":
+ return FieldDescriptorProto_Label.LABEL_REQUIRED;
+ case 3:
+ case "LABEL_REPEATED":
+ return FieldDescriptorProto_Label.LABEL_REPEATED;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ }
+}
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+function fieldDescriptorProto_LabelToJSON(object) {
+ switch (object) {
+ case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+ return "LABEL_OPTIONAL";
+ case FieldDescriptorProto_Label.LABEL_REQUIRED:
+ return "LABEL_REQUIRED";
+ case FieldDescriptorProto_Label.LABEL_REPEATED:
+ return "LABEL_REPEATED";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ }
+}
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+ /** SPEED - Generate complete code for parsing, serialization, */
+ FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+ /** CODE_SIZE - etc. */
+ FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+ /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+ FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode = exports.FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+ switch (object) {
+ case 1:
+ case "SPEED":
+ return FileOptions_OptimizeMode.SPEED;
+ case 2:
+ case "CODE_SIZE":
+ return FileOptions_OptimizeMode.CODE_SIZE;
+ case 3:
+ case "LITE_RUNTIME":
+ return FileOptions_OptimizeMode.LITE_RUNTIME;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ }
+}
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+function fileOptions_OptimizeModeToJSON(object) {
+ switch (object) {
+ case FileOptions_OptimizeMode.SPEED:
+ return "SPEED";
+ case FileOptions_OptimizeMode.CODE_SIZE:
+ return "CODE_SIZE";
+ case FileOptions_OptimizeMode.LITE_RUNTIME:
+ return "LITE_RUNTIME";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ }
+}
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+ /** STRING - Default mode. */
+ FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+ FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+ FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType = exports.FieldOptions_CType || (exports.FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "STRING":
+ return FieldOptions_CType.STRING;
+ case 1:
+ case "CORD":
+ return FieldOptions_CType.CORD;
+ case 2:
+ case "STRING_PIECE":
+ return FieldOptions_CType.STRING_PIECE;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ }
+}
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+function fieldOptions_CTypeToJSON(object) {
+ switch (object) {
+ case FieldOptions_CType.STRING:
+ return "STRING";
+ case FieldOptions_CType.CORD:
+ return "CORD";
+ case FieldOptions_CType.STRING_PIECE:
+ return "STRING_PIECE";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ }
+}
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+ /** JS_NORMAL - Use the default type. */
+ FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+ /** JS_STRING - Use JavaScript strings. */
+ FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+ /** JS_NUMBER - Use JavaScript numbers. */
+ FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType = exports.FieldOptions_JSType || (exports.FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "JS_NORMAL":
+ return FieldOptions_JSType.JS_NORMAL;
+ case 1:
+ case "JS_STRING":
+ return FieldOptions_JSType.JS_STRING;
+ case 2:
+ case "JS_NUMBER":
+ return FieldOptions_JSType.JS_NUMBER;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ }
+}
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+function fieldOptions_JSTypeToJSON(object) {
+ switch (object) {
+ case FieldOptions_JSType.JS_NORMAL:
+ return "JS_NORMAL";
+ case FieldOptions_JSType.JS_STRING:
+ return "JS_STRING";
+ case FieldOptions_JSType.JS_NUMBER:
+ return "JS_NUMBER";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ }
+}
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+ MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+ /** NO_SIDE_EFFECTS - implies idempotent */
+ MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+ /** IDEMPOTENT - idempotent, but may have side effects */
+ MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel = exports.MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "IDEMPOTENCY_UNKNOWN":
+ return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+ case 1:
+ case "NO_SIDE_EFFECTS":
+ return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+ case 2:
+ case "IDEMPOTENT":
+ return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ }
+}
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+function methodOptions_IdempotencyLevelToJSON(object) {
+ switch (object) {
+ case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+ return "IDEMPOTENCY_UNKNOWN";
+ case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+ return "NO_SIDE_EFFECTS";
+ case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+ return "IDEMPOTENT";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ }
+}
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+function createBaseFileDescriptorSet() {
+ return { file: [] };
+}
+exports.FileDescriptorSet = {
+ fromJSON(object) {
+ return { file: Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.file) {
+ obj.file = message.file.map((e) => e ? exports.FileDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.file = [];
+ }
+ return obj;
+ },
+};
+function createBaseFileDescriptorProto() {
+ return {
+ name: "",
+ package: "",
+ dependency: [],
+ publicDependency: [],
+ weakDependency: [],
+ messageType: [],
+ enumType: [],
+ service: [],
+ extension: [],
+ options: undefined,
+ sourceCodeInfo: undefined,
+ syntax: "",
+ };
+}
+exports.FileDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ package: isSet(object.package) ? String(object.package) : "",
+ dependency: Array.isArray(object?.dependency) ? object.dependency.map((e) => String(e)) : [],
+ publicDependency: Array.isArray(object?.publicDependency)
+ ? object.publicDependency.map((e) => Number(e))
+ : [],
+ weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e) => Number(e)) : [],
+ messageType: Array.isArray(object?.messageType)
+ ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+ : [],
+ enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+ service: Array.isArray(object?.service) ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) : [],
+ extension: Array.isArray(object?.extension)
+ ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+ : [],
+ options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+ sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+ syntax: isSet(object.syntax) ? String(object.syntax) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.package !== undefined && (obj.package = message.package);
+ if (message.dependency) {
+ obj.dependency = message.dependency.map((e) => e);
+ }
+ else {
+ obj.dependency = [];
+ }
+ if (message.publicDependency) {
+ obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+ }
+ else {
+ obj.publicDependency = [];
+ }
+ if (message.weakDependency) {
+ obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+ }
+ else {
+ obj.weakDependency = [];
+ }
+ if (message.messageType) {
+ obj.messageType = message.messageType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.messageType = [];
+ }
+ if (message.enumType) {
+ obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.enumType = [];
+ }
+ if (message.service) {
+ obj.service = message.service.map((e) => e ? exports.ServiceDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.service = [];
+ }
+ if (message.extension) {
+ obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.extension = [];
+ }
+ message.options !== undefined && (obj.options = message.options ? exports.FileOptions.toJSON(message.options) : undefined);
+ message.sourceCodeInfo !== undefined &&
+ (obj.sourceCodeInfo = message.sourceCodeInfo ? exports.SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined);
+ message.syntax !== undefined && (obj.syntax = message.syntax);
+ return obj;
+ },
+};
+function createBaseDescriptorProto() {
+ return {
+ name: "",
+ field: [],
+ extension: [],
+ nestedType: [],
+ enumType: [],
+ extensionRange: [],
+ oneofDecl: [],
+ options: undefined,
+ reservedRange: [],
+ reservedName: [],
+ };
+}
+exports.DescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ field: Array.isArray(object?.field) ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) : [],
+ extension: Array.isArray(object?.extension)
+ ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+ : [],
+ nestedType: Array.isArray(object?.nestedType)
+ ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+ : [],
+ enumType: Array.isArray(object?.enumType) ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) : [],
+ extensionRange: Array.isArray(object?.extensionRange)
+ ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+ : [],
+ oneofDecl: Array.isArray(object?.oneofDecl)
+ ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+ : [],
+ options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+ reservedRange: Array.isArray(object?.reservedRange)
+ ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+ : [],
+ reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e) => String(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.field) {
+ obj.field = message.field.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.field = [];
+ }
+ if (message.extension) {
+ obj.extension = message.extension.map((e) => e ? exports.FieldDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.extension = [];
+ }
+ if (message.nestedType) {
+ obj.nestedType = message.nestedType.map((e) => e ? exports.DescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.nestedType = [];
+ }
+ if (message.enumType) {
+ obj.enumType = message.enumType.map((e) => e ? exports.EnumDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.enumType = [];
+ }
+ if (message.extensionRange) {
+ obj.extensionRange = message.extensionRange.map((e) => e ? exports.DescriptorProto_ExtensionRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.extensionRange = [];
+ }
+ if (message.oneofDecl) {
+ obj.oneofDecl = message.oneofDecl.map((e) => e ? exports.OneofDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.oneofDecl = [];
+ }
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.MessageOptions.toJSON(message.options) : undefined);
+ if (message.reservedRange) {
+ obj.reservedRange = message.reservedRange.map((e) => e ? exports.DescriptorProto_ReservedRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.reservedRange = [];
+ }
+ if (message.reservedName) {
+ obj.reservedName = message.reservedName.map((e) => e);
+ }
+ else {
+ obj.reservedName = [];
+ }
+ return obj;
+ },
+};
+function createBaseDescriptorProto_ExtensionRange() {
+ return { start: 0, end: 0, options: undefined };
+}
+exports.DescriptorProto_ExtensionRange = {
+ fromJSON(object) {
+ return {
+ start: isSet(object.start) ? Number(object.start) : 0,
+ end: isSet(object.end) ? Number(object.end) : 0,
+ options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.ExtensionRangeOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseDescriptorProto_ReservedRange() {
+ return { start: 0, end: 0 };
+}
+exports.DescriptorProto_ReservedRange = {
+ fromJSON(object) {
+ return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+function createBaseExtensionRangeOptions() {
+ return { uninterpretedOption: [] };
+}
+exports.ExtensionRangeOptions = {
+ fromJSON(object) {
+ return {
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseFieldDescriptorProto() {
+ return {
+ name: "",
+ number: 0,
+ label: 1,
+ type: 1,
+ typeName: "",
+ extendee: "",
+ defaultValue: "",
+ oneofIndex: 0,
+ jsonName: "",
+ options: undefined,
+ proto3Optional: false,
+ };
+}
+exports.FieldDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ number: isSet(object.number) ? Number(object.number) : 0,
+ label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+ type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+ typeName: isSet(object.typeName) ? String(object.typeName) : "",
+ extendee: isSet(object.extendee) ? String(object.extendee) : "",
+ defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "",
+ oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0,
+ jsonName: isSet(object.jsonName) ? String(object.jsonName) : "",
+ options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+ proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.number !== undefined && (obj.number = Math.round(message.number));
+ message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label));
+ message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type));
+ message.typeName !== undefined && (obj.typeName = message.typeName);
+ message.extendee !== undefined && (obj.extendee = message.extendee);
+ message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue);
+ message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex));
+ message.jsonName !== undefined && (obj.jsonName = message.jsonName);
+ message.options !== undefined && (obj.options = message.options ? exports.FieldOptions.toJSON(message.options) : undefined);
+ message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional);
+ return obj;
+ },
+};
+function createBaseOneofDescriptorProto() {
+ return { name: "", options: undefined };
+}
+exports.OneofDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.options !== undefined && (obj.options = message.options ? exports.OneofOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseEnumDescriptorProto() {
+ return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] };
+}
+exports.EnumDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ value: Array.isArray(object?.value) ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) : [],
+ options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+ reservedRange: Array.isArray(object?.reservedRange)
+ ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+ : [],
+ reservedName: Array.isArray(object?.reservedName)
+ ? object.reservedName.map((e) => String(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.value) {
+ obj.value = message.value.map((e) => e ? exports.EnumValueDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.value = [];
+ }
+ message.options !== undefined && (obj.options = message.options ? exports.EnumOptions.toJSON(message.options) : undefined);
+ if (message.reservedRange) {
+ obj.reservedRange = message.reservedRange.map((e) => e ? exports.EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined);
+ }
+ else {
+ obj.reservedRange = [];
+ }
+ if (message.reservedName) {
+ obj.reservedName = message.reservedName.map((e) => e);
+ }
+ else {
+ obj.reservedName = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumDescriptorProto_EnumReservedRange() {
+ return { start: 0, end: 0 };
+}
+exports.EnumDescriptorProto_EnumReservedRange = {
+ fromJSON(object) {
+ return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = Math.round(message.start));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+function createBaseEnumValueDescriptorProto() {
+ return { name: "", number: 0, options: undefined };
+}
+exports.EnumValueDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ number: isSet(object.number) ? Number(object.number) : 0,
+ options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.number !== undefined && (obj.number = Math.round(message.number));
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.EnumValueOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseServiceDescriptorProto() {
+ return { name: "", method: [], options: undefined };
+}
+exports.ServiceDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ method: Array.isArray(object?.method) ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) : [],
+ options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ if (message.method) {
+ obj.method = message.method.map((e) => e ? exports.MethodDescriptorProto.toJSON(e) : undefined);
+ }
+ else {
+ obj.method = [];
+ }
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.ServiceOptions.toJSON(message.options) : undefined);
+ return obj;
+ },
+};
+function createBaseMethodDescriptorProto() {
+ return {
+ name: "",
+ inputType: "",
+ outputType: "",
+ options: undefined,
+ clientStreaming: false,
+ serverStreaming: false,
+ };
+}
+exports.MethodDescriptorProto = {
+ fromJSON(object) {
+ return {
+ name: isSet(object.name) ? String(object.name) : "",
+ inputType: isSet(object.inputType) ? String(object.inputType) : "",
+ outputType: isSet(object.outputType) ? String(object.outputType) : "",
+ options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+ clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false,
+ serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.name !== undefined && (obj.name = message.name);
+ message.inputType !== undefined && (obj.inputType = message.inputType);
+ message.outputType !== undefined && (obj.outputType = message.outputType);
+ message.options !== undefined &&
+ (obj.options = message.options ? exports.MethodOptions.toJSON(message.options) : undefined);
+ message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming);
+ message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming);
+ return obj;
+ },
+};
+function createBaseFileOptions() {
+ return {
+ javaPackage: "",
+ javaOuterClassname: "",
+ javaMultipleFiles: false,
+ javaGenerateEqualsAndHash: false,
+ javaStringCheckUtf8: false,
+ optimizeFor: 1,
+ goPackage: "",
+ ccGenericServices: false,
+ javaGenericServices: false,
+ pyGenericServices: false,
+ phpGenericServices: false,
+ deprecated: false,
+ ccEnableArenas: false,
+ objcClassPrefix: "",
+ csharpNamespace: "",
+ swiftPrefix: "",
+ phpClassPrefix: "",
+ phpNamespace: "",
+ phpMetadataNamespace: "",
+ rubyPackage: "",
+ uninterpretedOption: [],
+ };
+}
+exports.FileOptions = {
+ fromJSON(object) {
+ return {
+ javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "",
+ javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "",
+ javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false,
+ javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+ ? Boolean(object.javaGenerateEqualsAndHash)
+ : false,
+ javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false,
+ optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+ goPackage: isSet(object.goPackage) ? String(object.goPackage) : "",
+ ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false,
+ javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false,
+ pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false,
+ phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false,
+ objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "",
+ csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "",
+ swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "",
+ phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "",
+ phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "",
+ phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "",
+ rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "",
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage);
+ message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname);
+ message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles);
+ message.javaGenerateEqualsAndHash !== undefined &&
+ (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash);
+ message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8);
+ message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor));
+ message.goPackage !== undefined && (obj.goPackage = message.goPackage);
+ message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices);
+ message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices);
+ message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices);
+ message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas);
+ message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix);
+ message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace);
+ message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix);
+ message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix);
+ message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace);
+ message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace);
+ message.rubyPackage !== undefined && (obj.rubyPackage = message.rubyPackage);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseMessageOptions() {
+ return {
+ messageSetWireFormat: false,
+ noStandardDescriptorAccessor: false,
+ deprecated: false,
+ mapEntry: false,
+ uninterpretedOption: [],
+ };
+}
+exports.MessageOptions = {
+ fromJSON(object) {
+ return {
+ messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false,
+ noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+ ? Boolean(object.noStandardDescriptorAccessor)
+ : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat);
+ message.noStandardDescriptorAccessor !== undefined &&
+ (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseFieldOptions() {
+ return {
+ ctype: 0,
+ packed: false,
+ jstype: 0,
+ lazy: false,
+ unverifiedLazy: false,
+ deprecated: false,
+ weak: false,
+ uninterpretedOption: [],
+ };
+}
+exports.FieldOptions = {
+ fromJSON(object) {
+ return {
+ ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+ packed: isSet(object.packed) ? Boolean(object.packed) : false,
+ jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+ lazy: isSet(object.lazy) ? Boolean(object.lazy) : false,
+ unverifiedLazy: isSet(object.unverifiedLazy) ? Boolean(object.unverifiedLazy) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ weak: isSet(object.weak) ? Boolean(object.weak) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype));
+ message.packed !== undefined && (obj.packed = message.packed);
+ message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype));
+ message.lazy !== undefined && (obj.lazy = message.lazy);
+ message.unverifiedLazy !== undefined && (obj.unverifiedLazy = message.unverifiedLazy);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.weak !== undefined && (obj.weak = message.weak);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseOneofOptions() {
+ return { uninterpretedOption: [] };
+}
+exports.OneofOptions = {
+ fromJSON(object) {
+ return {
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumOptions() {
+ return { allowAlias: false, deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumOptions = {
+ fromJSON(object) {
+ return {
+ allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false,
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias);
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseEnumValueOptions() {
+ return { deprecated: false, uninterpretedOption: [] };
+}
+exports.EnumValueOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseServiceOptions() {
+ return { deprecated: false, uninterpretedOption: [] };
+}
+exports.ServiceOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseMethodOptions() {
+ return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] };
+}
+exports.MethodOptions = {
+ fromJSON(object) {
+ return {
+ deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false,
+ idempotencyLevel: isSet(object.idempotencyLevel)
+ ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+ : 0,
+ uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+ ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.deprecated !== undefined && (obj.deprecated = message.deprecated);
+ message.idempotencyLevel !== undefined &&
+ (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel));
+ if (message.uninterpretedOption) {
+ obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? exports.UninterpretedOption.toJSON(e) : undefined);
+ }
+ else {
+ obj.uninterpretedOption = [];
+ }
+ return obj;
+ },
+};
+function createBaseUninterpretedOption() {
+ return {
+ name: [],
+ identifierValue: "",
+ positiveIntValue: "0",
+ negativeIntValue: "0",
+ doubleValue: 0,
+ stringValue: Buffer.alloc(0),
+ aggregateValue: "",
+ };
+}
+exports.UninterpretedOption = {
+ fromJSON(object) {
+ return {
+ name: Array.isArray(object?.name) ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) : [],
+ identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "",
+ positiveIntValue: isSet(object.positiveIntValue) ? String(object.positiveIntValue) : "0",
+ negativeIntValue: isSet(object.negativeIntValue) ? String(object.negativeIntValue) : "0",
+ doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0,
+ stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+ aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.name) {
+ obj.name = message.name.map((e) => e ? exports.UninterpretedOption_NamePart.toJSON(e) : undefined);
+ }
+ else {
+ obj.name = [];
+ }
+ message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue);
+ message.positiveIntValue !== undefined && (obj.positiveIntValue = message.positiveIntValue);
+ message.negativeIntValue !== undefined && (obj.negativeIntValue = message.negativeIntValue);
+ message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue);
+ message.stringValue !== undefined &&
+ (obj.stringValue = base64FromBytes(message.stringValue !== undefined ? message.stringValue : Buffer.alloc(0)));
+ message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue);
+ return obj;
+ },
+};
+function createBaseUninterpretedOption_NamePart() {
+ return { namePart: "", isExtension: false };
+}
+exports.UninterpretedOption_NamePart = {
+ fromJSON(object) {
+ return {
+ namePart: isSet(object.namePart) ? String(object.namePart) : "",
+ isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.namePart !== undefined && (obj.namePart = message.namePart);
+ message.isExtension !== undefined && (obj.isExtension = message.isExtension);
+ return obj;
+ },
+};
+function createBaseSourceCodeInfo() {
+ return { location: [] };
+}
+exports.SourceCodeInfo = {
+ fromJSON(object) {
+ return {
+ location: Array.isArray(object?.location)
+ ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.location) {
+ obj.location = message.location.map((e) => e ? exports.SourceCodeInfo_Location.toJSON(e) : undefined);
+ }
+ else {
+ obj.location = [];
+ }
+ return obj;
+ },
+};
+function createBaseSourceCodeInfo_Location() {
+ return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] };
+}
+exports.SourceCodeInfo_Location = {
+ fromJSON(object) {
+ return {
+ path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+ span: Array.isArray(object?.span) ? object.span.map((e) => Number(e)) : [],
+ leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "",
+ trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "",
+ leadingDetachedComments: Array.isArray(object?.leadingDetachedComments)
+ ? object.leadingDetachedComments.map((e) => String(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.path) {
+ obj.path = message.path.map((e) => Math.round(e));
+ }
+ else {
+ obj.path = [];
+ }
+ if (message.span) {
+ obj.span = message.span.map((e) => Math.round(e));
+ }
+ else {
+ obj.span = [];
+ }
+ message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments);
+ message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments);
+ if (message.leadingDetachedComments) {
+ obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e);
+ }
+ else {
+ obj.leadingDetachedComments = [];
+ }
+ return obj;
+ },
+};
+function createBaseGeneratedCodeInfo() {
+ return { annotation: [] };
+}
+exports.GeneratedCodeInfo = {
+ fromJSON(object) {
+ return {
+ annotation: Array.isArray(object?.annotation)
+ ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.annotation) {
+ obj.annotation = message.annotation.map((e) => e ? exports.GeneratedCodeInfo_Annotation.toJSON(e) : undefined);
+ }
+ else {
+ obj.annotation = [];
+ }
+ return obj;
+ },
+};
+function createBaseGeneratedCodeInfo_Annotation() {
+ return { path: [], sourceFile: "", begin: 0, end: 0 };
+}
+exports.GeneratedCodeInfo_Annotation = {
+ fromJSON(object) {
+ return {
+ path: Array.isArray(object?.path) ? object.path.map((e) => Number(e)) : [],
+ sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "",
+ begin: isSet(object.begin) ? Number(object.begin) : 0,
+ end: isSet(object.end) ? Number(object.end) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.path) {
+ obj.path = message.path.map((e) => Math.round(e));
+ }
+ else {
+ obj.path = [];
+ }
+ message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile);
+ message.begin !== undefined && (obj.begin = Math.round(message.begin));
+ message.end !== undefined && (obj.end = Math.round(message.end));
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..159135fe87172
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,24 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+function createBaseTimestamp() {
+ return { seconds: "0", nanos: 0 };
+}
+exports.Timestamp = {
+ fromJSON(object) {
+ return {
+ seconds: isSet(object.seconds) ? String(object.seconds) : "0",
+ nanos: isSet(object.nanos) ? Number(object.nanos) : 0,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.seconds !== undefined && (obj.seconds = message.seconds);
+ message.nanos !== undefined && (obj.nanos = Math.round(message.nanos));
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..3773867f5426a
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,112 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+function createBaseTimestampVerificationData() {
+ return { rfc3161Timestamps: [] };
+}
+exports.TimestampVerificationData = {
+ fromJSON(object) {
+ return {
+ rfc3161Timestamps: Array.isArray(object?.rfc3161Timestamps)
+ ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.rfc3161Timestamps) {
+ obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => e ? sigstore_common_1.RFC3161SignedTimestamp.toJSON(e) : undefined);
+ }
+ else {
+ obj.rfc3161Timestamps = [];
+ }
+ return obj;
+ },
+};
+function createBaseVerificationMaterial() {
+ return { content: undefined, tlogEntries: [], timestampVerificationData: undefined };
+}
+exports.VerificationMaterial = {
+ fromJSON(object) {
+ return {
+ content: isSet(object.publicKey)
+ ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+ : isSet(object.x509CertificateChain)
+ ? {
+ $case: "x509CertificateChain",
+ x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+ }
+ : isSet(object.certificate)
+ ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
+ : undefined,
+ tlogEntries: Array.isArray(object?.tlogEntries)
+ ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+ : [],
+ timestampVerificationData: isSet(object.timestampVerificationData)
+ ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.content?.$case === "publicKey" &&
+ (obj.publicKey = message.content?.publicKey ? sigstore_common_1.PublicKeyIdentifier.toJSON(message.content?.publicKey) : undefined);
+ message.content?.$case === "x509CertificateChain" &&
+ (obj.x509CertificateChain = message.content?.x509CertificateChain
+ ? sigstore_common_1.X509CertificateChain.toJSON(message.content?.x509CertificateChain)
+ : undefined);
+ message.content?.$case === "certificate" &&
+ (obj.certificate = message.content?.certificate
+ ? sigstore_common_1.X509Certificate.toJSON(message.content?.certificate)
+ : undefined);
+ if (message.tlogEntries) {
+ obj.tlogEntries = message.tlogEntries.map((e) => e ? sigstore_rekor_1.TransparencyLogEntry.toJSON(e) : undefined);
+ }
+ else {
+ obj.tlogEntries = [];
+ }
+ message.timestampVerificationData !== undefined &&
+ (obj.timestampVerificationData = message.timestampVerificationData
+ ? exports.TimestampVerificationData.toJSON(message.timestampVerificationData)
+ : undefined);
+ return obj;
+ },
+};
+function createBaseBundle() {
+ return { mediaType: "", verificationMaterial: undefined, content: undefined };
+}
+exports.Bundle = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ verificationMaterial: isSet(object.verificationMaterial)
+ ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+ : undefined,
+ content: isSet(object.messageSignature)
+ ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+ : isSet(object.dsseEnvelope)
+ ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+ message.verificationMaterial !== undefined && (obj.verificationMaterial = message.verificationMaterial
+ ? exports.VerificationMaterial.toJSON(message.verificationMaterial)
+ : undefined);
+ message.content?.$case === "messageSignature" && (obj.messageSignature = message.content?.messageSignature
+ ? sigstore_common_1.MessageSignature.toJSON(message.content?.messageSignature)
+ : undefined);
+ message.content?.$case === "dsseEnvelope" &&
+ (obj.dsseEnvelope = message.content?.dsseEnvelope ? envelope_1.Envelope.toJSON(message.content?.dsseEnvelope) : undefined);
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..c6f9baa91fff2
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,588 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.subjectAlternativeNameTypeToJSON = exports.subjectAlternativeNameTypeFromJSON = exports.SubjectAlternativeNameType = exports.publicKeyDetailsToJSON = exports.publicKeyDetailsFromJSON = exports.PublicKeyDetails = exports.hashAlgorithmToJSON = exports.hashAlgorithmFromJSON = exports.HashAlgorithm = void 0;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0'.
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+ HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+ HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+ HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+ HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+ HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+ HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm = exports.HashAlgorithm || (exports.HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "HASH_ALGORITHM_UNSPECIFIED":
+ return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+ case 1:
+ case "SHA2_256":
+ return HashAlgorithm.SHA2_256;
+ case 2:
+ case "SHA2_384":
+ return HashAlgorithm.SHA2_384;
+ case 3:
+ case "SHA2_512":
+ return HashAlgorithm.SHA2_512;
+ case 4:
+ case "SHA3_256":
+ return HashAlgorithm.SHA3_256;
+ case 5:
+ case "SHA3_384":
+ return HashAlgorithm.SHA3_384;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ }
+}
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+function hashAlgorithmToJSON(object) {
+ switch (object) {
+ case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+ return "HASH_ALGORITHM_UNSPECIFIED";
+ case HashAlgorithm.SHA2_256:
+ return "SHA2_256";
+ case HashAlgorithm.SHA2_384:
+ return "SHA2_384";
+ case HashAlgorithm.SHA2_512:
+ return "SHA2_512";
+ case HashAlgorithm.SHA3_256:
+ return "SHA3_256";
+ case HashAlgorithm.SHA3_384:
+ return "SHA3_384";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ }
+}
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+/**
+ * Details of a specific public key, capturing the the key encoding method,
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
+ * See: docs/algorithm-registry.md
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+ PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+ /**
+ * PKCS1_RSA_PKCS1V5 - RSA
+ *
+ * @deprecated
+ */
+ PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+ /**
+ * PKCS1_RSA_PSS - See RFC8017
+ *
+ * @deprecated
+ */
+ PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+ /** @deprecated */
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+ /** @deprecated */
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+ /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+ /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+ PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+ /**
+ * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+ *
+ * @deprecated
+ */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+ /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+ PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+ /** PKIX_ED25519 - Ed 25519 */
+ PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+ PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+ /**
+ * LMS_SHA256 - LMS and LM-OTS
+ *
+ * These keys and signatures may be used by private Sigstore
+ * deployments, but are not currently supported by the public
+ * good instance.
+ *
+ * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+ * Using them correctly requires discretion and careful consideration
+ * to ensure that individual secret keys are not used more than once.
+ * In addition, LM-OTS is a single-use scheme, meaning that it
+ * MUST NOT be used for more than one signature per LM-OTS key.
+ * If you cannot maintain these invariants, you MUST NOT use these
+ * schemes.
+ */
+ PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+ PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+})(PublicKeyDetails = exports.PublicKeyDetails || (exports.PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+ return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+ case 1:
+ case "PKCS1_RSA_PKCS1V5":
+ return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+ case 2:
+ case "PKCS1_RSA_PSS":
+ return PublicKeyDetails.PKCS1_RSA_PSS;
+ case 3:
+ case "PKIX_RSA_PKCS1V5":
+ return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+ case 4:
+ case "PKIX_RSA_PSS":
+ return PublicKeyDetails.PKIX_RSA_PSS;
+ case 9:
+ case "PKIX_RSA_PKCS1V15_2048_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+ case 10:
+ case "PKIX_RSA_PKCS1V15_3072_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+ case 11:
+ case "PKIX_RSA_PKCS1V15_4096_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+ case 16:
+ case "PKIX_RSA_PSS_2048_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+ case 17:
+ case "PKIX_RSA_PSS_3072_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+ case 18:
+ case "PKIX_RSA_PSS_4096_SHA256":
+ return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+ case 6:
+ case "PKIX_ECDSA_P256_HMAC_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+ case 5:
+ case "PKIX_ECDSA_P256_SHA_256":
+ return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+ case 12:
+ case "PKIX_ECDSA_P384_SHA_384":
+ return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+ case 13:
+ case "PKIX_ECDSA_P521_SHA_512":
+ return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+ case 7:
+ case "PKIX_ED25519":
+ return PublicKeyDetails.PKIX_ED25519;
+ case 8:
+ case "PKIX_ED25519_PH":
+ return PublicKeyDetails.PKIX_ED25519_PH;
+ case 14:
+ case "LMS_SHA256":
+ return PublicKeyDetails.LMS_SHA256;
+ case 15:
+ case "LMOTS_SHA256":
+ return PublicKeyDetails.LMOTS_SHA256;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ }
+}
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+function publicKeyDetailsToJSON(object) {
+ switch (object) {
+ case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+ return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+ case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+ return "PKCS1_RSA_PKCS1V5";
+ case PublicKeyDetails.PKCS1_RSA_PSS:
+ return "PKCS1_RSA_PSS";
+ case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+ return "PKIX_RSA_PKCS1V5";
+ case PublicKeyDetails.PKIX_RSA_PSS:
+ return "PKIX_RSA_PSS";
+ case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
+ return "PKIX_RSA_PKCS1V15_2048_SHA256";
+ case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
+ return "PKIX_RSA_PKCS1V15_3072_SHA256";
+ case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
+ return "PKIX_RSA_PKCS1V15_4096_SHA256";
+ case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
+ return "PKIX_RSA_PSS_2048_SHA256";
+ case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
+ return "PKIX_RSA_PSS_3072_SHA256";
+ case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
+ return "PKIX_RSA_PSS_4096_SHA256";
+ case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+ return "PKIX_ECDSA_P256_HMAC_SHA_256";
+ case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+ return "PKIX_ECDSA_P256_SHA_256";
+ case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
+ return "PKIX_ECDSA_P384_SHA_384";
+ case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
+ return "PKIX_ECDSA_P521_SHA_512";
+ case PublicKeyDetails.PKIX_ED25519:
+ return "PKIX_ED25519";
+ case PublicKeyDetails.PKIX_ED25519_PH:
+ return "PKIX_ED25519_PH";
+ case PublicKeyDetails.LMS_SHA256:
+ return "LMS_SHA256";
+ case PublicKeyDetails.LMOTS_SHA256:
+ return "LMOTS_SHA256";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ }
+}
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+ SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+ SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+ SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+ /**
+ * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+ * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+ * for more details.
+ */
+ SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType = exports.SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+ return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+ case 1:
+ case "EMAIL":
+ return SubjectAlternativeNameType.EMAIL;
+ case 2:
+ case "URI":
+ return SubjectAlternativeNameType.URI;
+ case 3:
+ case "OTHER_NAME":
+ return SubjectAlternativeNameType.OTHER_NAME;
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ }
+}
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+function subjectAlternativeNameTypeToJSON(object) {
+ switch (object) {
+ case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+ return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+ case SubjectAlternativeNameType.EMAIL:
+ return "EMAIL";
+ case SubjectAlternativeNameType.URI:
+ return "URI";
+ case SubjectAlternativeNameType.OTHER_NAME:
+ return "OTHER_NAME";
+ default:
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ }
+}
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+function createBaseHashOutput() {
+ return { algorithm: 0, digest: Buffer.alloc(0) };
+}
+exports.HashOutput = {
+ fromJSON(object) {
+ return {
+ algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+ digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.algorithm !== undefined && (obj.algorithm = hashAlgorithmToJSON(message.algorithm));
+ message.digest !== undefined &&
+ (obj.digest = base64FromBytes(message.digest !== undefined ? message.digest : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseMessageSignature() {
+ return { messageDigest: undefined, signature: Buffer.alloc(0) };
+}
+exports.MessageSignature = {
+ fromJSON(object) {
+ return {
+ messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+ signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.messageDigest !== undefined &&
+ (obj.messageDigest = message.messageDigest ? exports.HashOutput.toJSON(message.messageDigest) : undefined);
+ message.signature !== undefined &&
+ (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseLogId() {
+ return { keyId: Buffer.alloc(0) };
+}
+exports.LogId = {
+ fromJSON(object) {
+ return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.keyId !== undefined &&
+ (obj.keyId = base64FromBytes(message.keyId !== undefined ? message.keyId : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseRFC3161SignedTimestamp() {
+ return { signedTimestamp: Buffer.alloc(0) };
+}
+exports.RFC3161SignedTimestamp = {
+ fromJSON(object) {
+ return {
+ signedTimestamp: isSet(object.signedTimestamp)
+ ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signedTimestamp !== undefined &&
+ (obj.signedTimestamp = base64FromBytes(message.signedTimestamp !== undefined ? message.signedTimestamp : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBasePublicKey() {
+ return { rawBytes: undefined, keyDetails: 0, validFor: undefined };
+}
+exports.PublicKey = {
+ fromJSON(object) {
+ return {
+ rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+ keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+ validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.rawBytes !== undefined &&
+ (obj.rawBytes = message.rawBytes !== undefined ? base64FromBytes(message.rawBytes) : undefined);
+ message.keyDetails !== undefined && (obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails));
+ message.validFor !== undefined &&
+ (obj.validFor = message.validFor ? exports.TimeRange.toJSON(message.validFor) : undefined);
+ return obj;
+ },
+};
+function createBasePublicKeyIdentifier() {
+ return { hint: "" };
+}
+exports.PublicKeyIdentifier = {
+ fromJSON(object) {
+ return { hint: isSet(object.hint) ? String(object.hint) : "" };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.hint !== undefined && (obj.hint = message.hint);
+ return obj;
+ },
+};
+function createBaseObjectIdentifier() {
+ return { id: [] };
+}
+exports.ObjectIdentifier = {
+ fromJSON(object) {
+ return { id: Array.isArray(object?.id) ? object.id.map((e) => Number(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.id) {
+ obj.id = message.id.map((e) => Math.round(e));
+ }
+ else {
+ obj.id = [];
+ }
+ return obj;
+ },
+};
+function createBaseObjectIdentifierValuePair() {
+ return { oid: undefined, value: Buffer.alloc(0) };
+}
+exports.ObjectIdentifierValuePair = {
+ fromJSON(object) {
+ return {
+ oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+ value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.oid !== undefined && (obj.oid = message.oid ? exports.ObjectIdentifier.toJSON(message.oid) : undefined);
+ message.value !== undefined &&
+ (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseDistinguishedName() {
+ return { organization: "", commonName: "" };
+}
+exports.DistinguishedName = {
+ fromJSON(object) {
+ return {
+ organization: isSet(object.organization) ? String(object.organization) : "",
+ commonName: isSet(object.commonName) ? String(object.commonName) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.organization !== undefined && (obj.organization = message.organization);
+ message.commonName !== undefined && (obj.commonName = message.commonName);
+ return obj;
+ },
+};
+function createBaseX509Certificate() {
+ return { rawBytes: Buffer.alloc(0) };
+}
+exports.X509Certificate = {
+ fromJSON(object) {
+ return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.rawBytes !== undefined &&
+ (obj.rawBytes = base64FromBytes(message.rawBytes !== undefined ? message.rawBytes : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseSubjectAlternativeName() {
+ return { type: 0, identity: undefined };
+}
+exports.SubjectAlternativeName = {
+ fromJSON(object) {
+ return {
+ type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+ identity: isSet(object.regexp)
+ ? { $case: "regexp", regexp: String(object.regexp) }
+ : isSet(object.value)
+ ? { $case: "value", value: String(object.value) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.type !== undefined && (obj.type = subjectAlternativeNameTypeToJSON(message.type));
+ message.identity?.$case === "regexp" && (obj.regexp = message.identity?.regexp);
+ message.identity?.$case === "value" && (obj.value = message.identity?.value);
+ return obj;
+ },
+};
+function createBaseX509CertificateChain() {
+ return { certificates: [] };
+}
+exports.X509CertificateChain = {
+ fromJSON(object) {
+ return {
+ certificates: Array.isArray(object?.certificates)
+ ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.certificates) {
+ obj.certificates = message.certificates.map((e) => e ? exports.X509Certificate.toJSON(e) : undefined);
+ }
+ else {
+ obj.certificates = [];
+ }
+ return obj;
+ },
+};
+function createBaseTimeRange() {
+ return { start: undefined, end: undefined };
+}
+exports.TimeRange = {
+ fromJSON(object) {
+ return {
+ start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+ end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.start !== undefined && (obj.start = message.start.toISOString());
+ message.end !== undefined && (obj.end = message.end.toISOString());
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function fromTimestamp(t) {
+ let millis = Number(t.seconds) * 1000;
+ millis += t.nanos / 1000000;
+ return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+ if (o instanceof Date) {
+ return o;
+ }
+ else if (typeof o === "string") {
+ return new Date(o);
+ }
+ else {
+ return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..398193b2075a7
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseKindVersion() {
+ return { kind: "", version: "" };
+}
+exports.KindVersion = {
+ fromJSON(object) {
+ return {
+ kind: isSet(object.kind) ? String(object.kind) : "",
+ version: isSet(object.version) ? String(object.version) : "",
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.kind !== undefined && (obj.kind = message.kind);
+ message.version !== undefined && (obj.version = message.version);
+ return obj;
+ },
+};
+function createBaseCheckpoint() {
+ return { envelope: "" };
+}
+exports.Checkpoint = {
+ fromJSON(object) {
+ return { envelope: isSet(object.envelope) ? String(object.envelope) : "" };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.envelope !== undefined && (obj.envelope = message.envelope);
+ return obj;
+ },
+};
+function createBaseInclusionProof() {
+ return { logIndex: "0", rootHash: Buffer.alloc(0), treeSize: "0", hashes: [], checkpoint: undefined };
+}
+exports.InclusionProof = {
+ fromJSON(object) {
+ return {
+ logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+ treeSize: isSet(object.treeSize) ? String(object.treeSize) : "0",
+ hashes: Array.isArray(object?.hashes) ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) : [],
+ checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+ message.rootHash !== undefined &&
+ (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : Buffer.alloc(0)));
+ message.treeSize !== undefined && (obj.treeSize = message.treeSize);
+ if (message.hashes) {
+ obj.hashes = message.hashes.map((e) => base64FromBytes(e !== undefined ? e : Buffer.alloc(0)));
+ }
+ else {
+ obj.hashes = [];
+ }
+ message.checkpoint !== undefined &&
+ (obj.checkpoint = message.checkpoint ? exports.Checkpoint.toJSON(message.checkpoint) : undefined);
+ return obj;
+ },
+};
+function createBaseInclusionPromise() {
+ return { signedEntryTimestamp: Buffer.alloc(0) };
+}
+exports.InclusionPromise = {
+ fromJSON(object) {
+ return {
+ signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+ ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signedEntryTimestamp !== undefined &&
+ (obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp !== undefined ? message.signedEntryTimestamp : Buffer.alloc(0)));
+ return obj;
+ },
+};
+function createBaseTransparencyLogEntry() {
+ return {
+ logIndex: "0",
+ logId: undefined,
+ kindVersion: undefined,
+ integratedTime: "0",
+ inclusionPromise: undefined,
+ inclusionProof: undefined,
+ canonicalizedBody: Buffer.alloc(0),
+ };
+}
+exports.TransparencyLogEntry = {
+ fromJSON(object) {
+ return {
+ logIndex: isSet(object.logIndex) ? String(object.logIndex) : "0",
+ logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+ kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+ integratedTime: isSet(object.integratedTime) ? String(object.integratedTime) : "0",
+ inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+ inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+ canonicalizedBody: isSet(object.canonicalizedBody)
+ ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+ : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.logIndex !== undefined && (obj.logIndex = message.logIndex);
+ message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+ message.kindVersion !== undefined &&
+ (obj.kindVersion = message.kindVersion ? exports.KindVersion.toJSON(message.kindVersion) : undefined);
+ message.integratedTime !== undefined && (obj.integratedTime = message.integratedTime);
+ message.inclusionPromise !== undefined &&
+ (obj.inclusionPromise = message.inclusionPromise ? exports.InclusionPromise.toJSON(message.inclusionPromise) : undefined);
+ message.inclusionProof !== undefined &&
+ (obj.inclusionProof = message.inclusionProof ? exports.InclusionProof.toJSON(message.inclusionProof) : undefined);
+ message.canonicalizedBody !== undefined &&
+ (obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody !== undefined ? message.canonicalizedBody : Buffer.alloc(0)));
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..8791aba27044b
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,158 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClientTrustConfig = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+function createBaseTransparencyLogInstance() {
+ return { baseUrl: "", hashAlgorithm: 0, publicKey: undefined, logId: undefined, checkpointKeyId: undefined };
+}
+exports.TransparencyLogInstance = {
+ fromJSON(object) {
+ return {
+ baseUrl: isSet(object.baseUrl) ? String(object.baseUrl) : "",
+ hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+ publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+ logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+ checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.baseUrl !== undefined && (obj.baseUrl = message.baseUrl);
+ message.hashAlgorithm !== undefined && (obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm));
+ message.publicKey !== undefined &&
+ (obj.publicKey = message.publicKey ? sigstore_common_1.PublicKey.toJSON(message.publicKey) : undefined);
+ message.logId !== undefined && (obj.logId = message.logId ? sigstore_common_1.LogId.toJSON(message.logId) : undefined);
+ message.checkpointKeyId !== undefined &&
+ (obj.checkpointKeyId = message.checkpointKeyId ? sigstore_common_1.LogId.toJSON(message.checkpointKeyId) : undefined);
+ return obj;
+ },
+};
+function createBaseCertificateAuthority() {
+ return { subject: undefined, uri: "", certChain: undefined, validFor: undefined };
+}
+exports.CertificateAuthority = {
+ fromJSON(object) {
+ return {
+ subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+ uri: isSet(object.uri) ? String(object.uri) : "",
+ certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+ validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.subject !== undefined &&
+ (obj.subject = message.subject ? sigstore_common_1.DistinguishedName.toJSON(message.subject) : undefined);
+ message.uri !== undefined && (obj.uri = message.uri);
+ message.certChain !== undefined &&
+ (obj.certChain = message.certChain ? sigstore_common_1.X509CertificateChain.toJSON(message.certChain) : undefined);
+ message.validFor !== undefined &&
+ (obj.validFor = message.validFor ? sigstore_common_1.TimeRange.toJSON(message.validFor) : undefined);
+ return obj;
+ },
+};
+function createBaseTrustedRoot() {
+ return { mediaType: "", tlogs: [], certificateAuthorities: [], ctlogs: [], timestampAuthorities: [] };
+}
+exports.TrustedRoot = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ tlogs: Array.isArray(object?.tlogs) ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) : [],
+ certificateAuthorities: Array.isArray(object?.certificateAuthorities)
+ ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+ : [],
+ ctlogs: Array.isArray(object?.ctlogs)
+ ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+ : [],
+ timestampAuthorities: Array.isArray(object?.timestampAuthorities)
+ ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+ if (message.tlogs) {
+ obj.tlogs = message.tlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ }
+ else {
+ obj.tlogs = [];
+ }
+ if (message.certificateAuthorities) {
+ obj.certificateAuthorities = message.certificateAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ }
+ else {
+ obj.certificateAuthorities = [];
+ }
+ if (message.ctlogs) {
+ obj.ctlogs = message.ctlogs.map((e) => e ? exports.TransparencyLogInstance.toJSON(e) : undefined);
+ }
+ else {
+ obj.ctlogs = [];
+ }
+ if (message.timestampAuthorities) {
+ obj.timestampAuthorities = message.timestampAuthorities.map((e) => e ? exports.CertificateAuthority.toJSON(e) : undefined);
+ }
+ else {
+ obj.timestampAuthorities = [];
+ }
+ return obj;
+ },
+};
+function createBaseSigningConfig() {
+ return { caUrl: "", oidcUrl: "", tlogUrls: [], tsaUrls: [] };
+}
+exports.SigningConfig = {
+ fromJSON(object) {
+ return {
+ caUrl: isSet(object.caUrl) ? String(object.caUrl) : "",
+ oidcUrl: isSet(object.oidcUrl) ? String(object.oidcUrl) : "",
+ tlogUrls: Array.isArray(object?.tlogUrls) ? object.tlogUrls.map((e) => String(e)) : [],
+ tsaUrls: Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => String(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.caUrl !== undefined && (obj.caUrl = message.caUrl);
+ message.oidcUrl !== undefined && (obj.oidcUrl = message.oidcUrl);
+ if (message.tlogUrls) {
+ obj.tlogUrls = message.tlogUrls.map((e) => e);
+ }
+ else {
+ obj.tlogUrls = [];
+ }
+ if (message.tsaUrls) {
+ obj.tsaUrls = message.tsaUrls.map((e) => e);
+ }
+ else {
+ obj.tsaUrls = [];
+ }
+ return obj;
+ },
+};
+function createBaseClientTrustConfig() {
+ return { mediaType: "", trustedRoot: undefined, signingConfig: undefined };
+}
+exports.ClientTrustConfig = {
+ fromJSON(object) {
+ return {
+ mediaType: isSet(object.mediaType) ? String(object.mediaType) : "",
+ trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
+ signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.mediaType !== undefined && (obj.mediaType = message.mediaType);
+ message.trustedRoot !== undefined &&
+ (obj.trustedRoot = message.trustedRoot ? exports.TrustedRoot.toJSON(message.trustedRoot) : undefined);
+ message.signingConfig !== undefined &&
+ (obj.signingConfig = message.signingConfig ? exports.SigningConfig.toJSON(message.signingConfig) : undefined);
+ return obj;
+ },
+};
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..4af83c5a54660
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,324 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+function createBaseCertificateIdentity() {
+ return { issuer: "", san: undefined, oids: [] };
+}
+exports.CertificateIdentity = {
+ fromJSON(object) {
+ return {
+ issuer: isSet(object.issuer) ? String(object.issuer) : "",
+ san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+ oids: Array.isArray(object?.oids) ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.issuer !== undefined && (obj.issuer = message.issuer);
+ message.san !== undefined && (obj.san = message.san ? sigstore_common_1.SubjectAlternativeName.toJSON(message.san) : undefined);
+ if (message.oids) {
+ obj.oids = message.oids.map((e) => e ? sigstore_common_1.ObjectIdentifierValuePair.toJSON(e) : undefined);
+ }
+ else {
+ obj.oids = [];
+ }
+ return obj;
+ },
+};
+function createBaseCertificateIdentities() {
+ return { identities: [] };
+}
+exports.CertificateIdentities = {
+ fromJSON(object) {
+ return {
+ identities: Array.isArray(object?.identities)
+ ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+ : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.identities) {
+ obj.identities = message.identities.map((e) => e ? exports.CertificateIdentity.toJSON(e) : undefined);
+ }
+ else {
+ obj.identities = [];
+ }
+ return obj;
+ },
+};
+function createBasePublicKeyIdentities() {
+ return { publicKeys: [] };
+}
+exports.PublicKeyIdentities = {
+ fromJSON(object) {
+ return {
+ publicKeys: Array.isArray(object?.publicKeys) ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) : [],
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.publicKeys) {
+ obj.publicKeys = message.publicKeys.map((e) => e ? sigstore_common_1.PublicKey.toJSON(e) : undefined);
+ }
+ else {
+ obj.publicKeys = [];
+ }
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions() {
+ return {
+ signers: undefined,
+ tlogOptions: undefined,
+ ctlogOptions: undefined,
+ tsaOptions: undefined,
+ integratedTsOptions: undefined,
+ observerOptions: undefined,
+ };
+}
+exports.ArtifactVerificationOptions = {
+ fromJSON(object) {
+ return {
+ signers: isSet(object.certificateIdentities)
+ ? {
+ $case: "certificateIdentities",
+ certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+ }
+ : isSet(object.publicKeys)
+ ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+ : undefined,
+ tlogOptions: isSet(object.tlogOptions)
+ ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+ : undefined,
+ ctlogOptions: isSet(object.ctlogOptions)
+ ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+ : undefined,
+ tsaOptions: isSet(object.tsaOptions)
+ ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+ : undefined,
+ integratedTsOptions: isSet(object.integratedTsOptions)
+ ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
+ : undefined,
+ observerOptions: isSet(object.observerOptions)
+ ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.signers?.$case === "certificateIdentities" &&
+ (obj.certificateIdentities = message.signers?.certificateIdentities
+ ? exports.CertificateIdentities.toJSON(message.signers?.certificateIdentities)
+ : undefined);
+ message.signers?.$case === "publicKeys" && (obj.publicKeys = message.signers?.publicKeys
+ ? exports.PublicKeyIdentities.toJSON(message.signers?.publicKeys)
+ : undefined);
+ message.tlogOptions !== undefined && (obj.tlogOptions = message.tlogOptions
+ ? exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions)
+ : undefined);
+ message.ctlogOptions !== undefined && (obj.ctlogOptions = message.ctlogOptions
+ ? exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions)
+ : undefined);
+ message.tsaOptions !== undefined && (obj.tsaOptions = message.tsaOptions
+ ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions)
+ : undefined);
+ message.integratedTsOptions !== undefined && (obj.integratedTsOptions = message.integratedTsOptions
+ ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions)
+ : undefined);
+ message.observerOptions !== undefined && (obj.observerOptions = message.observerOptions
+ ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions)
+ : undefined);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_TlogOptions() {
+ return { threshold: 0, performOnlineVerification: false, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ performOnlineVerification: isSet(object.performOnlineVerification)
+ ? Boolean(object.performOnlineVerification)
+ : false,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.performOnlineVerification !== undefined &&
+ (obj.performOnlineVerification = message.performOnlineVerification);
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_CtlogOptions() {
+ return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_CtlogOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_TimestampAuthorityOptions() {
+ return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_TlogIntegratedTimestampOptions() {
+ return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifactVerificationOptions_ObserverTimestampOptions() {
+ return { threshold: 0, disable: false };
+}
+exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
+ fromJSON(object) {
+ return {
+ threshold: isSet(object.threshold) ? Number(object.threshold) : 0,
+ disable: isSet(object.disable) ? Boolean(object.disable) : false,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.threshold !== undefined && (obj.threshold = Math.round(message.threshold));
+ message.disable !== undefined && (obj.disable = message.disable);
+ return obj;
+ },
+};
+function createBaseArtifact() {
+ return { data: undefined };
+}
+exports.Artifact = {
+ fromJSON(object) {
+ return {
+ data: isSet(object.artifactUri)
+ ? { $case: "artifactUri", artifactUri: String(object.artifactUri) }
+ : isSet(object.artifact)
+ ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.data?.$case === "artifactUri" && (obj.artifactUri = message.data?.artifactUri);
+ message.data?.$case === "artifact" &&
+ (obj.artifact = message.data?.artifact !== undefined ? base64FromBytes(message.data?.artifact) : undefined);
+ return obj;
+ },
+};
+function createBaseInput() {
+ return {
+ artifactTrustRoot: undefined,
+ artifactVerificationOptions: undefined,
+ bundle: undefined,
+ artifact: undefined,
+ };
+}
+exports.Input = {
+ fromJSON(object) {
+ return {
+ artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+ artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+ ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+ : undefined,
+ bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+ artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.artifactTrustRoot !== undefined &&
+ (obj.artifactTrustRoot = message.artifactTrustRoot ? sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot) : undefined);
+ message.artifactVerificationOptions !== undefined &&
+ (obj.artifactVerificationOptions = message.artifactVerificationOptions
+ ? exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions)
+ : undefined);
+ message.bundle !== undefined && (obj.bundle = message.bundle ? sigstore_bundle_1.Bundle.toJSON(message.bundle) : undefined);
+ message.artifact !== undefined && (obj.artifact = message.artifact ? exports.Artifact.toJSON(message.artifact) : undefined);
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..92ae4acbd00ec
--- /dev/null
+++ b/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "@sigstore/protobuf-specs",
+ "version": "0.3.2",
+ "description": "code-signing for npm packages",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "build": "tsc"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/protobuf-specs.git"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/sigstore/protobuf-specs/issues"
+ },
+ "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+ "devDependencies": {
+ "@tsconfig/node16": "^16.1.1",
+ "@types/node": "^18.14.0",
+ "typescript": "^4.9.5"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@sigstore/sign/LICENSE b/node_modules/@sigstore/sign/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/sign/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/@sigstore/sign/dist/bundler/base.js
new file mode 100644
index 0000000000000..61d5eba4568a3
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/base.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseBundleBuilder = void 0;
+// BaseBundleBuilder is a base class for BundleBuilder implementations. It
+// provides the basic workflow for signing and witnessing an artifact.
+// Subclasses must implement the `package` method to assemble a valid bundle
+// with the generated signature and verification material.
+class BaseBundleBuilder {
+ constructor(options) {
+ this.signer = options.signer;
+ this.witnesses = options.witnesses;
+ }
+ // Executes the signing/witnessing process for the given artifact.
+ async create(artifact) {
+ const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
+ const bundle = await this.package(artifact, signature);
+ // Invoke all of the witnesses in parallel
+ const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
+ // Collect the verification material from all of the witnesses
+ const tlogEntryList = [];
+ const timestampList = [];
+ verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
+ tlogEntryList.push(...(tlogEntries ?? []));
+ timestampList.push(...(rfc3161Timestamps ?? []));
+ });
+ // Merge the collected verification material into the bundle
+ bundle.verificationMaterial.tlogEntries = tlogEntryList;
+ bundle.verificationMaterial.timestampVerificationData = {
+ rfc3161Timestamps: timestampList,
+ };
+ return bundle;
+ }
+ // Override this function to apply any pre-signing transformations to the
+ // artifact. The returned buffer will be signed by the signer. The default
+ // implementation simply returns the artifact data.
+ async prepare(artifact) {
+ return artifact.data;
+ }
+}
+exports.BaseBundleBuilder = BaseBundleBuilder;
+// Extracts the public key from a KeyMaterial. Returns either the public key
+// or the certificate, depending on the type of key material.
+function publicKey(key) {
+ switch (key.$case) {
+ case 'publicKey':
+ return key.publicKey;
+ case 'x509Certificate':
+ return key.certificate;
+ }
+}
diff --git a/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/@sigstore/sign/dist/bundler/bundle.js
new file mode 100644
index 0000000000000..7c2ca9164f0df
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -0,0 +1,71 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const sigstore = __importStar(require("@sigstore/bundle"));
+const util_1 = require("../util");
+// Helper functions for assembling the parts of a Sigstore bundle
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(artifact, signature) {
+ const digest = util_1.crypto.hash(artifact.data);
+ return sigstore.toMessageSignatureBundle({
+ digest,
+ signature: signature.signature,
+ certificate: signature.key.$case === 'x509Certificate'
+ ? util_1.pem.toDER(signature.key.certificate)
+ : undefined,
+ keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+ });
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(artifact, signature, singleCertificate) {
+ return sigstore.toDSSEBundle({
+ artifact: artifact.data,
+ artifactType: artifact.type,
+ signature: signature.signature,
+ certificate: signature.key.$case === 'x509Certificate'
+ ? util_1.pem.toDER(signature.key.certificate)
+ : undefined,
+ keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+ singleCertificate,
+ });
+}
+exports.toDSSEBundle = toDSSEBundle;
diff --git a/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/@sigstore/sign/dist/bundler/dsse.js
new file mode 100644
index 0000000000000..621700df93842
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/dsse.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSEBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../util");
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for DSSE wrapped attestations
+class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
+ constructor(options) {
+ super(options);
+ this.singleCertificate = options.singleCertificate ?? false;
+ }
+ // DSSE requires the artifact to be pre-encoded with the payload type
+ // before the signature is generated.
+ async prepare(artifact) {
+ const a = artifactDefaults(artifact);
+ return util_1.dsse.preAuthEncoding(a.type, a.data);
+ }
+ // Packages the artifact and signature into a DSSE bundle
+ async package(artifact, signature) {
+ return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.singleCertificate);
+ }
+}
+exports.DSSEBundleBuilder = DSSEBundleBuilder;
+// Defaults the artifact type to an empty string if not provided
+function artifactDefaults(artifact) {
+ return {
+ ...artifact,
+ type: artifact.type ?? '',
+ };
+}
diff --git a/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/@sigstore/sign/dist/bundler/index.js
new file mode 100644
index 0000000000000..d67c8c324a4f0
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/index.js
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var dsse_1 = require("./dsse");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } });
+var message_1 = require("./message");
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } });
diff --git a/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/@sigstore/sign/dist/bundler/message.js
new file mode 100644
index 0000000000000..e3991f42bab93
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/bundler/message.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for raw message signatures
+class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder {
+ constructor(options) {
+ super(options);
+ }
+ async package(artifact, signature) {
+ return (0, bundle_1.toMessageSignatureBundle)(artifact, signature);
+ }
+}
+exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder;
diff --git a/node_modules/@sigstore/sign/dist/error.js b/node_modules/@sigstore/sign/dist/error.js
new file mode 100644
index 0000000000000..d57e4567fb89e
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/error.js
@@ -0,0 +1,39 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.internalError = exports.InternalError = void 0;
+const error_1 = require("./external/error");
+class InternalError extends Error {
+ constructor({ code, message, cause, }) {
+ super(message);
+ this.name = this.constructor.name;
+ this.cause = cause;
+ this.code = code;
+ }
+}
+exports.InternalError = InternalError;
+function internalError(err, code, message) {
+ if (err instanceof error_1.HTTPError) {
+ message += ` - ${err.message}`;
+ }
+ throw new InternalError({
+ code: code,
+ message: message,
+ cause: err,
+ });
+}
+exports.internalError = internalError;
diff --git a/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/@sigstore/sign/dist/external/error.js
new file mode 100644
index 0000000000000..a6a65adebb176
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/error.js
@@ -0,0 +1,26 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HTTPError = void 0;
+class HTTPError extends Error {
+ constructor({ status, message, location, }) {
+ super(`(${status}) ${message}`);
+ this.statusCode = status;
+ this.location = location;
+ }
+}
+exports.HTTPError = HTTPError;
diff --git a/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/@sigstore/sign/dist/external/fetch.js
new file mode 100644
index 0000000000000..b2d81bde7be16
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/fetch.js
@@ -0,0 +1,99 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fetchWithRetry = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const http2_1 = require("http2");
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const proc_log_1 = require("proc-log");
+const promise_retry_1 = __importDefault(require("promise-retry"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants;
+async function fetchWithRetry(url, options) {
+ return (0, promise_retry_1.default)(async (retry, attemptNum) => {
+ const method = options.method || 'POST';
+ const headers = {
+ [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(),
+ ...options.headers,
+ };
+ const response = await (0, make_fetch_happen_1.default)(url, {
+ method,
+ headers,
+ body: options.body,
+ timeout: options.timeout,
+ retry: false, // We're handling retries ourselves
+ }).catch((reason) => {
+ proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`);
+ return retry(reason);
+ });
+ if (response.ok) {
+ return response;
+ }
+ else {
+ const error = await errorFromResponse(response);
+ proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`);
+ if (retryable(response.status)) {
+ return retry(error);
+ }
+ else {
+ throw error;
+ }
+ }
+ }, retryOpts(options.retry));
+}
+exports.fetchWithRetry = fetchWithRetry;
+// Translate a Response into an HTTPError instance. This will attempt to parse
+// the response body for a message, but will default to the statusText if none
+// is found.
+const errorFromResponse = async (response) => {
+ let message = response.statusText;
+ const location = response.headers?.get(HTTP2_HEADER_LOCATION) || undefined;
+ const contentType = response.headers?.get(HTTP2_HEADER_CONTENT_TYPE);
+ // If response type is JSON, try to parse the body for a message
+ if (contentType?.includes('application/json')) {
+ try {
+ const body = await response.json();
+ message = body.message || message;
+ }
+ catch (e) {
+ // ignore
+ }
+ }
+ return new error_1.HTTPError({
+ status: response.status,
+ message: message,
+ location: location,
+ });
+};
+// Determine if a status code is retryable. This includes 5xx errors, 408, and
+// 429.
+const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR;
+// Normalize the retry options to the format expected by promise-retry
+const retryOpts = (retry) => {
+ if (typeof retry === 'boolean') {
+ return { retries: retry ? 1 : 0 };
+ }
+ else if (typeof retry === 'number') {
+ return { retries: retry };
+ }
+ else {
+ return { retries: 0, ...retry };
+ }
+};
diff --git a/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/@sigstore/sign/dist/external/fulcio.js
new file mode 100644
index 0000000000000..de6a1ad9f9e79
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/fulcio.js
@@ -0,0 +1,41 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Fulcio = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+/**
+ * Fulcio API client.
+ */
+class Fulcio {
+ constructor(options) {
+ this.options = options;
+ }
+ async createSigningCertificate(request) {
+ const { baseURL, retry, timeout } = this.options;
+ const url = `${baseURL}/api/v2/signingCert`;
+ const response = await (0, fetch_1.fetchWithRetry)(url, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(request),
+ timeout,
+ retry,
+ });
+ return response.json();
+ }
+}
+exports.Fulcio = Fulcio;
diff --git a/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/@sigstore/sign/dist/external/rekor.js
new file mode 100644
index 0000000000000..bb59a126e032f
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -0,0 +1,80 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+ constructor(options) {
+ this.options = options;
+ }
+ /**
+ * Create a new entry in the Rekor log.
+ * @param propsedEntry {ProposedEntry} Data to create a new entry
+ * @returns {Promise} The created entry
+ */
+ async createEntry(propsedEntry) {
+ const { baseURL, timeout, retry } = this.options;
+ const url = `${baseURL}/api/v1/log/entries`;
+ const response = await (0, fetch_1.fetchWithRetry)(url, {
+ headers: {
+ 'Content-Type': 'application/json',
+ Accept: 'application/json',
+ },
+ body: JSON.stringify(propsedEntry),
+ timeout,
+ retry,
+ });
+ const data = await response.json();
+ return entryFromResponse(data);
+ }
+ /**
+ * Get an entry from the Rekor log.
+ * @param uuid {string} The UUID of the entry to retrieve
+ * @returns {Promise} The retrieved entry
+ */
+ async getEntry(uuid) {
+ const { baseURL, timeout, retry } = this.options;
+ const url = `${baseURL}/api/v1/log/entries/${uuid}`;
+ const response = await (0, fetch_1.fetchWithRetry)(url, {
+ method: 'GET',
+ headers: {
+ Accept: 'application/json',
+ },
+ timeout,
+ retry,
+ });
+ const data = await response.json();
+ return entryFromResponse(data);
+ }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
+function entryFromResponse(data) {
+ const entries = Object.entries(data);
+ if (entries.length != 1) {
+ throw new Error('Received multiple entries in Rekor response');
+ }
+ // Grab UUID and entry data from the response
+ const [uuid, entry] = entries[0];
+ return {
+ ...entry,
+ uuid,
+ };
+}
diff --git a/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/@sigstore/sign/dist/external/tsa.js
new file mode 100644
index 0000000000000..a948ba9cca2c7
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimestampAuthority = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+class TimestampAuthority {
+ constructor(options) {
+ this.options = options;
+ }
+ async createTimestamp(request) {
+ const { baseURL, timeout, retry } = this.options;
+ const url = `${baseURL}/api/v1/timestamp`;
+ const response = await (0, fetch_1.fetchWithRetry)(url, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify(request),
+ timeout,
+ retry,
+ });
+ return response.buffer();
+ }
+}
+exports.TimestampAuthority = TimestampAuthority;
diff --git a/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/@sigstore/sign/dist/identity/ci.js
new file mode 100644
index 0000000000000..d79133952b605
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/ci.js
@@ -0,0 +1,73 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+// Collection of all the CI-specific providers we have implemented
+const providers = [getGHAToken, getEnv];
+/**
+ * CIContextProvider is a composite identity provider which will iterate
+ * over all of the CI-specific providers and return the token from the first
+ * one that resolves.
+ */
+class CIContextProvider {
+ /* istanbul ignore next */
+ constructor(audience = 'sigstore') {
+ this.audience = audience;
+ }
+ // Invoke all registered ProviderFuncs and return the value of whichever one
+ // resolves first.
+ async getToken() {
+ return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available'));
+ }
+}
+exports.CIContextProvider = CIContextProvider;
+/**
+ * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
+ * workflow
+ */
+async function getGHAToken(audience) {
+ // Check to see if we're running in GitHub Actions
+ if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
+ !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
+ return Promise.reject('no token available');
+ }
+ // Construct URL to request token w/ appropriate audience
+ const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
+ url.searchParams.append('audience', audience);
+ const response = await (0, make_fetch_happen_1.default)(url.href, {
+ retry: 2,
+ headers: {
+ Accept: 'application/json',
+ Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
+ },
+ });
+ return response.json().then((data) => data.value);
+}
+/**
+ * getEnv can retrieve an OIDC token from an environment variable.
+ * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar
+ */
+async function getEnv() {
+ if (!process.env.SIGSTORE_ID_TOKEN) {
+ return Promise.reject('no token available');
+ }
+ return process.env.SIGSTORE_ID_TOKEN;
+}
diff --git a/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/@sigstore/sign/dist/identity/index.js
new file mode 100644
index 0000000000000..1c1223b443fab
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var ci_1 = require("./ci");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } });
diff --git a/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/@sigstore/sign/dist/identity/provider.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/identity/provider.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/index.js b/node_modules/@sigstore/sign/dist/index.js
new file mode 100644
index 0000000000000..383b76083361b
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/index.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var bundler_1 = require("./bundler");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
+var identity_1 = require("./identity");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
+var signer_1 = require("./signer");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
+var witness_1 = require("./witness");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
new file mode 100644
index 0000000000000..81b421eabadb2
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -0,0 +1,60 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const fulcio_1 = require("../../external/fulcio");
+class CAClient {
+ constructor(options) {
+ this.fulcio = new fulcio_1.Fulcio({
+ baseURL: options.fulcioBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async createSigningCertificate(identityToken, publicKey, challenge) {
+ const request = toCertificateRequest(identityToken, publicKey, challenge);
+ try {
+ const resp = await this.fulcio.createSigningCertificate(request);
+ // Account for the fact that the response may contain either a
+ // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
+ const cert = resp.signedCertificateEmbeddedSct
+ ? resp.signedCertificateEmbeddedSct
+ : resp.signedCertificateDetachedSct;
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ return cert.chain.certificates;
+ }
+ catch (err) {
+ (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate');
+ }
+ }
+}
+exports.CAClient = CAClient;
+function toCertificateRequest(identityToken, publicKey, challenge) {
+ return {
+ credentials: {
+ oidcIdentityToken: identityToken,
+ },
+ publicKeyRequest: {
+ publicKey: {
+ algorithm: 'ECDSA',
+ content: publicKey,
+ },
+ proofOfPossession: challenge.toString('base64'),
+ },
+ };
+}
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
new file mode 100644
index 0000000000000..481aa5c3579a2
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
@@ -0,0 +1,45 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EphemeralSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const EC_KEYPAIR_TYPE = 'ec';
+const P256_CURVE = 'P-256';
+// Signer implementation which uses an ephemeral keypair to sign artifacts.
+// The private key lives only in memory and is tied to the lifetime of the
+// EphemeralSigner instance.
+class EphemeralSigner {
+ constructor() {
+ this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
+ namedCurve: P256_CURVE,
+ });
+ }
+ async sign(data) {
+ const signature = crypto_1.default.sign(null, data, this.keypair.privateKey);
+ const publicKey = this.keypair.publicKey
+ .export({ format: 'pem', type: 'spki' })
+ .toString('ascii');
+ return {
+ signature: signature,
+ key: { $case: 'publicKey', publicKey },
+ };
+ }
+}
+exports.EphemeralSigner = EphemeralSigner;
diff --git a/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
new file mode 100644
index 0000000000000..89a432548d2b4
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
@@ -0,0 +1,87 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+const ca_1 = require("./ca");
+const ephemeral_1 = require("./ephemeral");
+exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
+// Signer implementation which can be used to decorate another signer
+// with a Fulcio-issued signing certificate for the signer's public key.
+// Must be instantiated with an identity provider which can provide a JWT
+// which represents the identity to be bound to the signing certificate.
+class FulcioSigner {
+ constructor(options) {
+ this.ca = new ca_1.CAClient({
+ ...options,
+ fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL,
+ });
+ this.identityProvider = options.identityProvider;
+ this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
+ }
+ async sign(data) {
+ // Retrieve identity token from the supplied identity provider
+ const identityToken = await this.getIdentityToken();
+ // Extract challenge claim from OIDC token
+ let subject;
+ try {
+ subject = util_1.oidc.extractJWTSubject(identityToken);
+ }
+ catch (err) {
+ throw new error_1.InternalError({
+ code: 'IDENTITY_TOKEN_PARSE_ERROR',
+ message: `invalid identity token: ${identityToken}`,
+ cause: err,
+ });
+ }
+ // Construct challenge value by signing the subject claim
+ const challenge = await this.keyHolder.sign(Buffer.from(subject));
+ if (challenge.key.$case !== 'publicKey') {
+ throw new error_1.InternalError({
+ code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
+ message: 'unexpected format for signing key',
+ });
+ }
+ // Create signing certificate
+ const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature);
+ // Generate artifact signature
+ const signature = await this.keyHolder.sign(data);
+ // Specifically returning only the first certificate in the chain
+ // as the key.
+ return {
+ signature: signature.signature,
+ key: {
+ $case: 'x509Certificate',
+ certificate: certificates[0],
+ },
+ };
+ }
+ async getIdentityToken() {
+ try {
+ return await this.identityProvider.getToken();
+ }
+ catch (err) {
+ throw new error_1.InternalError({
+ code: 'IDENTITY_TOKEN_READ_ERROR',
+ message: 'error retrieving identity token',
+ cause: err,
+ });
+ }
+ }
+}
+exports.FulcioSigner = FulcioSigner;
diff --git a/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/@sigstore/sign/dist/signer/index.js
new file mode 100644
index 0000000000000..e2087767b81c1
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/index.js
@@ -0,0 +1,22 @@
+"use strict";
+/* istanbul ignore file */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/@sigstore/sign/dist/signer/signer.js
new file mode 100644
index 0000000000000..b92c54183375d
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/signer/signer.js
@@ -0,0 +1,17 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/@sigstore/sign/dist/types/fetch.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/types/fetch.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/@sigstore/sign/dist/util/index.js
new file mode 100644
index 0000000000000..f467c9150c348
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/index.js
@@ -0,0 +1,49 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var core_1 = require("@sigstore/core");
+Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } });
+Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } });
+Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } });
+Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } });
+Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } });
+exports.oidc = __importStar(require("./oidc"));
+exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/@sigstore/sign/dist/util/oidc.js
new file mode 100644
index 0000000000000..2f5947d7b6b87
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/oidc.js
@@ -0,0 +1,31 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractJWTSubject = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+function extractJWTSubject(jwt) {
+ const parts = jwt.split('.', 3);
+ const payload = JSON.parse(core_1.encoding.base64Decode(parts[1]));
+ switch (payload.iss) {
+ case 'https://accounts.google.com':
+ case 'https://oauth2.sigstore.dev/auth':
+ return payload.email;
+ default:
+ return payload.sub;
+ }
+}
+exports.extractJWTSubject = extractJWTSubject;
diff --git a/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/@sigstore/sign/dist/util/ua.js
new file mode 100644
index 0000000000000..c142330eb8338
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/util/ua.js
@@ -0,0 +1,33 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getUserAgent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: / ()
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
+ const packageVersion = require('../../package.json').version;
+ const nodeVersion = process.version;
+ const platformName = os_1.default.platform();
+ const archName = os_1.default.arch();
+ return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 0000000000000..72677c399caa7
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,24 @@
+"use strict";
+/* istanbul ignore file */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
+var tsa_1 = require("./tsa");
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
new file mode 100644
index 0000000000000..22c895f2ca7ed
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TLogClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const error_2 = require("../../external/error");
+const rekor_1 = require("../../external/rekor");
+class TLogClient {
+ constructor(options) {
+ this.fetchOnConflict = options.fetchOnConflict ?? false;
+ this.rekor = new rekor_1.Rekor({
+ baseURL: options.rekorBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async createEntry(proposedEntry) {
+ let entry;
+ try {
+ entry = await this.rekor.createEntry(proposedEntry);
+ }
+ catch (err) {
+ // If the entry already exists, fetch it (if enabled)
+ if (entryExistsError(err) && this.fetchOnConflict) {
+ // Grab the UUID of the existing entry from the location header
+ /* istanbul ignore next */
+ const uuid = err.location.split('/').pop() || '';
+ try {
+ entry = await this.rekor.getEntry(uuid);
+ }
+ catch (err) {
+ (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry');
+ }
+ }
+ else {
+ (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry');
+ }
+ }
+ return entry;
+ }
+}
+exports.TLogClient = TLogClient;
+function entryExistsError(value) {
+ return (value instanceof error_2.HTTPError &&
+ value.statusCode === 409 &&
+ value.location !== undefined);
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
new file mode 100644
index 0000000000000..c237523a2c9b2
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
@@ -0,0 +1,136 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toProposedEntry = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+function toProposedEntry(content, publicKey,
+// TODO: Remove this parameter once have completely switched to 'dsse' entries
+entryType = 'intoto') {
+ switch (content.$case) {
+ case 'dsseEnvelope':
+ // TODO: Remove this conditional once have completely switched to 'dsse' entries
+ if (entryType === 'dsse') {
+ return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+ }
+ return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+ case 'messageSignature':
+ return toProposedHashedRekordEntry(content.messageSignature, publicKey);
+ }
+}
+exports.toProposedEntry = toProposedEntry;
+// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
+// and signature
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+ const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+ const b64Signature = messageSignature.signature.toString('base64');
+ const b64Key = util_1.encoding.base64Encode(publicKey);
+ return {
+ apiVersion: '0.0.1',
+ kind: 'hashedrekord',
+ spec: {
+ data: {
+ hash: {
+ algorithm: 'sha256',
+ value: hexDigest,
+ },
+ },
+ signature: {
+ content: b64Signature,
+ publicKey: {
+ content: b64Key,
+ },
+ },
+ },
+ };
+}
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+ const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+ const encodedKey = util_1.encoding.base64Encode(publicKey);
+ return {
+ apiVersion: '0.0.1',
+ kind: 'dsse',
+ spec: {
+ proposedContent: {
+ envelope: envelopeJSON,
+ verifiers: [encodedKey],
+ },
+ },
+ };
+}
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
+ // Calculate the value for the payloadHash field in the Rekor entry
+ const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
+ // Calculate the value for the hash field in the Rekor entry
+ const envelopeHash = calculateDSSEHash(envelope, publicKey);
+ // Collect values for re-creating the DSSE envelope.
+ // Double-encode payload and signature cause that's what Rekor expects
+ const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
+ const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
+ const keyid = envelope.signatures[0].keyid;
+ const encodedKey = util_1.encoding.base64Encode(publicKey);
+ // Create the envelope portion of the entry. Note the inclusion of the
+ // publicKey in the signature struct is not a standard part of a DSSE
+ // envelope, but is required by Rekor.
+ const dsse = {
+ payloadType: envelope.payloadType,
+ payload: payload,
+ signatures: [{ sig, publicKey: encodedKey }],
+ };
+ // If the keyid is an empty string, Rekor seems to remove it altogether. We
+ // need to do the same here so that we can properly recreate the entry for
+ // verification.
+ if (keyid.length > 0) {
+ dsse.signatures[0].keyid = keyid;
+ }
+ return {
+ apiVersion: '0.0.2',
+ kind: 'intoto',
+ spec: {
+ content: {
+ envelope: dsse,
+ hash: { algorithm: 'sha256', value: envelopeHash },
+ payloadHash: { algorithm: 'sha256', value: payloadHash },
+ },
+ },
+ };
+}
+// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
+// There is no standard way to do this, so the scheme we're using as as
+// follows:
+// * payload is base64 encoded
+// * signature is base64 encoded (only the first signature is used)
+// * keyid is included ONLY if it is NOT an empty string
+// * The resulting JSON is canonicalized and hashed to a hex string
+function calculateDSSEHash(envelope, publicKey) {
+ const dsse = {
+ payloadType: envelope.payloadType,
+ payload: envelope.payload.toString('base64'),
+ signatures: [
+ { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
+ ],
+ };
+ // If the keyid is an empty string, Rekor seems to remove it altogether.
+ if (envelope.signatures[0].keyid.length > 0) {
+ dsse.signatures[0].keyid = envelope.signatures[0].keyid;
+ }
+ return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 0000000000000..6197b09d4cdd9
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,82 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
+class RekorWitness {
+ constructor(options) {
+ this.entryType = options.entryType;
+ this.tlog = new client_1.TLogClient({
+ ...options,
+ rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
+ });
+ }
+ async testify(content, publicKey) {
+ const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType);
+ const entry = await this.tlog.createEntry(proposedEntry);
+ return toTransparencyLogEntry(entry);
+ }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+ const logID = Buffer.from(entry.logID, 'hex');
+ // Parse entry body so we can extract the kind and version.
+ const bodyJSON = util_1.encoding.base64Decode(entry.body);
+ const entryBody = JSON.parse(bodyJSON);
+ const promise = entry?.verification?.signedEntryTimestamp
+ ? inclusionPromise(entry.verification.signedEntryTimestamp)
+ : undefined;
+ const proof = entry?.verification?.inclusionProof
+ ? inclusionProof(entry.verification.inclusionProof)
+ : undefined;
+ const tlogEntry = {
+ logIndex: entry.logIndex.toString(),
+ logId: {
+ keyId: logID,
+ },
+ integratedTime: entry.integratedTime.toString(),
+ kindVersion: {
+ kind: entryBody.kind,
+ version: entryBody.apiVersion,
+ },
+ inclusionPromise: promise,
+ inclusionProof: proof,
+ canonicalizedBody: Buffer.from(entry.body, 'base64'),
+ };
+ return {
+ tlogEntries: [tlogEntry],
+ };
+}
+function inclusionPromise(promise) {
+ return {
+ signedEntryTimestamp: Buffer.from(promise, 'base64'),
+ };
+}
+function inclusionProof(proof) {
+ return {
+ logIndex: proof.logIndex.toString(),
+ treeSize: proof.treeSize.toString(),
+ rootHash: Buffer.from(proof.rootHash, 'hex'),
+ hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
+ checkpoint: {
+ envelope: proof.checkpoint,
+ },
+ };
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
new file mode 100644
index 0000000000000..a334deb00b775
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const tsa_1 = require("../../external/tsa");
+const util_1 = require("../../util");
+class TSAClient {
+ constructor(options) {
+ this.tsa = new tsa_1.TimestampAuthority({
+ baseURL: options.tsaBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async createTimestamp(signature) {
+ const request = {
+ artifactHash: util_1.crypto.hash(signature).toString('base64'),
+ hashAlgorithm: 'sha256',
+ };
+ try {
+ return await this.tsa.createTimestamp(request);
+ }
+ catch (err) {
+ (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp');
+ }
+ }
+}
+exports.TSAClient = TSAClient;
diff --git a/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/@sigstore/sign/dist/witness/tsa/index.js
new file mode 100644
index 0000000000000..d4f5c7c859d10
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/tsa/index.js
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const client_1 = require("./client");
+class TSAWitness {
+ constructor(options) {
+ this.tsa = new client_1.TSAClient({
+ tsaBaseURL: options.tsaBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async testify(content) {
+ const signature = extractSignature(content);
+ const timestamp = await this.tsa.createTimestamp(signature);
+ return {
+ rfc3161Timestamps: [{ signedTimestamp: timestamp }],
+ };
+ }
+}
+exports.TSAWitness = TSAWitness;
+function extractSignature(content) {
+ switch (content.$case) {
+ case 'dsseEnvelope':
+ return content.dsseEnvelope.signatures[0].sig;
+ case 'messageSignature':
+ return content.messageSignature.signature;
+ }
+}
diff --git a/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/@sigstore/sign/dist/witness/witness.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/sign/dist/witness/witness.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json
new file mode 100644
index 0000000000000..4adb3d24c6fa6
--- /dev/null
+++ b/node_modules/@sigstore/sign/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "@sigstore/sign",
+ "version": "2.3.2",
+ "description": "Sigstore signing library",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "devDependencies": {
+ "@sigstore/jest": "^0.0.0",
+ "@sigstore/mock": "^0.7.4",
+ "@sigstore/rekor-types": "^2.0.0",
+ "@types/make-fetch-happen": "^10.0.4",
+ "@types/promise-retry": "^1.1.6"
+ },
+ "dependencies": {
+ "@sigstore/bundle": "^2.3.2",
+ "@sigstore/core": "^1.0.0",
+ "@sigstore/protobuf-specs": "^0.3.2",
+ "make-fetch-happen": "^13.0.1",
+ "proc-log": "^4.2.0",
+ "promise-retry": "^2.0.1"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@sigstore/tuf/LICENSE b/node_modules/@sigstore/tuf/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/@sigstore/tuf/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/@sigstore/tuf/dist/appdata.js
new file mode 100644
index 0000000000000..c9a8ee92b531e
--- /dev/null
+++ b/node_modules/@sigstore/tuf/dist/appdata.js
@@ -0,0 +1,44 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.appDataPath = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+function appDataPath(name) {
+ const homedir = os_1.default.homedir();
+ switch (process.platform) {
+ /* istanbul ignore next */
+ case 'darwin': {
+ const appSupport = path_1.default.join(homedir, 'Library', 'Application Support');
+ return path_1.default.join(appSupport, name);
+ }
+ /* istanbul ignore next */
+ case 'win32': {
+ const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local');
+ return path_1.default.join(localAppData, name, 'Data');
+ }
+ /* istanbul ignore next */
+ default: {
+ const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share');
+ return path_1.default.join(localData, name);
+ }
+ }
+}
+exports.appDataPath = appDataPath;
diff --git a/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@sigstore/tuf/dist/client.js
new file mode 100644
index 0000000000000..2019c1fd30f88
--- /dev/null
+++ b/node_modules/@sigstore/tuf/dist/client.js
@@ -0,0 +1,112 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+const tuf_js_1 = require("tuf-js");
+const _1 = require(".");
+const target_1 = require("./target");
+const TARGETS_DIR_NAME = 'targets';
+class TUFClient {
+ constructor(options) {
+ const url = new URL(options.mirrorURL);
+ const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, ''));
+ const cachePath = path_1.default.join(options.cachePath, repoName);
+ initTufCache(cachePath);
+ seedCache({
+ cachePath,
+ mirrorURL: options.mirrorURL,
+ tufRootPath: options.rootPath,
+ forceInit: options.forceInit,
+ });
+ this.updater = initClient({
+ mirrorURL: options.mirrorURL,
+ cachePath,
+ forceCache: options.forceCache,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async refresh() {
+ return this.updater.refresh();
+ }
+ getTarget(targetName) {
+ return (0, target_1.readTarget)(this.updater, targetName);
+ }
+}
+exports.TUFClient = TUFClient;
+// Initializes the TUF cache directory structure including the initial
+// root.json file. If the cache directory does not exist, it will be
+// created. If the targets directory does not exist, it will be created.
+// If the root.json file does not exist, it will be copied from the
+// rootPath argument.
+function initTufCache(cachePath) {
+ const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME);
+ if (!fs_1.default.existsSync(cachePath)) {
+ fs_1.default.mkdirSync(cachePath, { recursive: true });
+ }
+ if (!fs_1.default.existsSync(targetsPath)) {
+ fs_1.default.mkdirSync(targetsPath);
+ }
+}
+// Populates the TUF cache with the initial root.json file. If the root.json
+// file does not exist (or we're forcing re-initialization), copy it from either
+// the rootPath argument or from one of the repo seeds.
+function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) {
+ const cachedRootPath = path_1.default.join(cachePath, 'root.json');
+ // If the root.json file does not exist (or we're forcing re-initialization),
+ // populate it either from the supplied rootPath or from one of the repo seeds.
+ if (!fs_1.default.existsSync(cachedRootPath) || forceInit) {
+ if (tufRootPath) {
+ fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
+ }
+ else {
+ /* eslint-disable @typescript-eslint/no-var-requires */
+ const seeds = require('../seeds.json');
+ const repoSeed = seeds[mirrorURL];
+ if (!repoSeed) {
+ throw new _1.TUFError({
+ code: 'TUF_INIT_CACHE_ERROR',
+ message: `No root.json found for mirror: ${mirrorURL}`,
+ });
+ }
+ fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64'));
+ // Copy any seed targets into the cache
+ Object.entries(repoSeed.targets).forEach(([targetName, target]) => {
+ fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64'));
+ });
+ }
+ }
+}
+function initClient(options) {
+ const config = {
+ fetchTimeout: options.timeout,
+ fetchRetry: options.retry,
+ };
+ return new tuf_js_1.Updater({
+ metadataBaseUrl: options.mirrorURL,
+ targetBaseUrl: `${options.mirrorURL}/targets`,
+ metadataDir: options.cachePath,
+ targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME),
+ forceCache: options.forceCache,
+ config,
+ });
+}
diff --git a/node_modules/@sigstore/tuf/dist/error.js b/node_modules/@sigstore/tuf/dist/error.js
new file mode 100644
index 0000000000000..e13971b289ff2
--- /dev/null
+++ b/node_modules/@sigstore/tuf/dist/error.js
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = void 0;
+class TUFError extends Error {
+ constructor({ code, message, cause, }) {
+ super(message);
+ this.code = code;
+ this.cause = cause;
+ this.name = this.constructor.name;
+ }
+}
+exports.TUFError = TUFError;
diff --git a/node_modules/@sigstore/tuf/dist/index.js b/node_modules/@sigstore/tuf/dist/index.js
new file mode 100644
index 0000000000000..678c81d45d21e
--- /dev/null
+++ b/node_modules/@sigstore/tuf/dist/index.js
@@ -0,0 +1,56 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = exports.initTUF = exports.getTrustedRoot = exports.DEFAULT_MIRROR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const appdata_1 = require("./appdata");
+const client_1 = require("./client");
+exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev';
+const DEFAULT_CACHE_DIR = 'sigstore-js';
+const DEFAULT_RETRY = { retries: 2 };
+const DEFAULT_TIMEOUT = 5000;
+const TRUSTED_ROOT_TARGET = 'trusted_root.json';
+async function getTrustedRoot(
+/* istanbul ignore next */
+options = {}) {
+ const client = createClient(options);
+ const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
+ return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
+}
+exports.getTrustedRoot = getTrustedRoot;
+async function initTUF(
+/* istanbul ignore next */
+options = {}) {
+ const client = createClient(options);
+ return client.refresh().then(() => client);
+}
+exports.initTUF = initTUF;
+// Create a TUF client with default options
+function createClient(options) {
+ /* istanbul ignore next */
+ return new client_1.TUFClient({
+ cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR),
+ rootPath: options.rootPath,
+ mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL,
+ retry: options.retry ?? DEFAULT_RETRY,
+ timeout: options.timeout ?? DEFAULT_TIMEOUT,
+ forceCache: options.forceCache ?? false,
+ forceInit: options.forceInit ?? options.force ?? false,
+ });
+}
+var error_1 = require("./error");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } });
diff --git a/node_modules/@sigstore/tuf/dist/target.js b/node_modules/@sigstore/tuf/dist/target.js
new file mode 100644
index 0000000000000..29eaf99a7e721
--- /dev/null
+++ b/node_modules/@sigstore/tuf/dist/target.js
@@ -0,0 +1,80 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readTarget = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const error_1 = require("./error");
+// Downloads and returns the specified target from the provided TUF Updater.
+async function readTarget(tuf, targetPath) {
+ const path = await getTargetPath(tuf, targetPath);
+ return new Promise((resolve, reject) => {
+ fs_1.default.readFile(path, 'utf-8', (err, data) => {
+ if (err) {
+ reject(new error_1.TUFError({
+ code: 'TUF_READ_TARGET_ERROR',
+ message: `error reading target ${path}`,
+ cause: err,
+ }));
+ }
+ else {
+ resolve(data);
+ }
+ });
+ });
+}
+exports.readTarget = readTarget;
+// Returns the local path to the specified target. If the target is not yet
+// cached locally, the provided TUF Updater will be used to download and
+// cache the target.
+async function getTargetPath(tuf, target) {
+ let targetInfo;
+ try {
+ targetInfo = await tuf.getTargetInfo(target);
+ }
+ catch (err) {
+ throw new error_1.TUFError({
+ code: 'TUF_REFRESH_METADATA_ERROR',
+ message: 'error refreshing TUF metadata',
+ cause: err,
+ });
+ }
+ if (!targetInfo) {
+ throw new error_1.TUFError({
+ code: 'TUF_FIND_TARGET_ERROR',
+ message: `target ${target} not found`,
+ });
+ }
+ let path = await tuf.findCachedTarget(targetInfo);
+ // An empty path here means the target has not been cached locally, or is
+ // out of date. In either case, we need to download it.
+ if (!path) {
+ try {
+ path = await tuf.downloadTarget(targetInfo);
+ }
+ catch (err) {
+ throw new error_1.TUFError({
+ code: 'TUF_DOWNLOAD_TARGET_ERROR',
+ message: `error downloading target ${path}`,
+ cause: err,
+ });
+ }
+ }
+ return path;
+}
diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json
new file mode 100644
index 0000000000000..b7fd34ac9674e
--- /dev/null
+++ b/node_modules/@sigstore/tuf/package.json
@@ -0,0 +1,41 @@
+{
+ "name": "@sigstore/tuf",
+ "version": "2.3.4",
+ "description": "Client for the Sigstore TUF repository",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist",
+ "seeds.json"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "devDependencies": {
+ "@sigstore/jest": "^0.0.0",
+ "@tufjs/repo-mock": "^2.0.1",
+ "@types/make-fetch-happen": "^10.0.4"
+ },
+ "dependencies": {
+ "@sigstore/protobuf-specs": "^0.3.2",
+ "tuf-js": "^2.2.1"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@sigstore/tuf/seeds.json b/node_modules/@sigstore/tuf/seeds.json
new file mode 100644
index 0000000000000..e8d97d5fa7a67
--- /dev/null
+++ b/node_modules/@sigstore/tuf/seeds.json
@@ -0,0 +1 @@
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"{
	"signed": {
		"_type": "root",
		"spec_version": "1.0",
		"version": 9,
		"expires": "2024-09-12T06:53:10Z",
		"keys": {
			"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBzVOmHCPojMVLSI364WiiV8NPrD\n6IgRxVliskz/v+y3JER5mcVGcONliDcWMC5J2lfHmjPNPhb4H7xm8LzfSA==\n-----END PUBLIC KEY-----\n"
				}
			},
			"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAELrWvNt94v4R085ELeeCMxHp7PldF\n0/T1GxukUh2ODuggLGJE0pc1e8CSBf6CS91Fwo9FUOuRsjBUld+VqSyCdQ==\n-----END PUBLIC KEY-----\n"
				}
			},
			"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEy8XKsmhBYDI8Jc0GwzBxeKax0cm5\nSTKEU65HPFunUn41sT8pi0FjM4IkHz/YUmwmLUO0Wt7lxhj6BkLIK4qYAw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEWRiGr5+j+3J5SsH+Ztr5nE2H2wO7\nBV+nO3s93gLca18qTOzHY1oWyAGDykMSsGTUBSt9D+An0KfKsD2mfSM42Q==\n-----END PUBLIC KEY-----\n"
				}
			},
			"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEinikSsAQmYkNeH5eYq/CnIzLaacO\nxlSaawQDOwqKy/tCqxq5xxPSJc21K4WIhs9GyOkKfzueY3GILzcMJZ4cWw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEEXsz3SZXFb8jMV42j6pJlyjbjR8K\nN3Bwocexq6LMIb5qsWKOQvLN16NUefLc4HswOoumRsVVaajSpQS6fobkRw==\n-----END PUBLIC KEY-----\n"
				}
			},
			"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f": {
				"keytype": "ecdsa",
				"scheme": "ecdsa-sha2-nistp256",
				"keyid_hash_algorithms": [
					"sha256",
					"sha512"
				],
				"keyval": {
					"public": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE0ghrh92Lw1Yr3idGV5WqCtMDB8Cx\n+D8hdC4w2ZLNIplVRoVGLskYa3gheMyOjiJ8kPi15aQ2//7P+oj7UvJPGw==\n-----END PUBLIC KEY-----\n"
				}
			}
		},
		"roles": {
			"root": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"snapshot": {
				"keyids": [
					"230e212616274a4195cdc28e9fce782c20e6c720f1a811b40f98228376bdd3ac"
				],
				"threshold": 1
			},
			"targets": {
				"keyids": [
					"3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
					"ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
					"1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
					"e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
					"fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f"
				],
				"threshold": 3
			},
			"timestamp": {
				"keyids": [
					"923bb39e60dd6fa2c31e6ea55473aa93b64dd4e53e16fbe42f6a207d3f97de2d"
				],
				"threshold": 1
			}
		},
		"consistent_snapshot": true
	},
	"signatures": [
		{
			"keyid": "ff51e17fcf253119b7033f6f57512631da4a0969442afcf9fc8b141c7f2be99c",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "25a0eb450fd3ee2bd79218c963dce3f1cc6118badf251bf149f0bd07d5cabe99",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "f5312f542c21273d9485a49394386c4575804770667f2ddb59b3bf0669fddd2f",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "3c344aa068fd4cc4e87dc50b612c02431fbc771e95003993683a2b0bf260cf0e",
			"sig": "30450221008b78f894c3cfed3bd486379c4e0e0dfb3e7dd8cbc4d5598d2818eea1ba3c7550022029d3d06e89d04d37849985dc46c0e10dc5b1fc68dc70af1ec9910303a1f3ee2f"
		},
		{
			"keyid": "ec81669734e017996c5b85f3d02c3de1dd4637a152019fe1af125d2f9368b95e",
			"sig": "30450221009e6b90b935e09b837a90d4402eaa27d5ea26eb7891948ba0ed7090841248f436022003dc2251c4d4a7999b91e9ad0868765ae09ac7269279f2a7899bafef7a2d9260"
		},
		{
			"keyid": "e2f59acb9488519407e18cbfc9329510be03c04aca9929d2f0301343fec85523",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		},
		{
			"keyid": "2e61cd0cbf4a8f45809bda9f7f78c0d33ad11842ff94ae340873e2664dc843de",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "1e1d65ce98b10addad4764febf7dda2d0436b3d3a3893579c0dddaea20e54849",
			"sig": "30440220099e907dcf90b7b6e109fd1d6e442006fccbb48894aaaff47ab824b03fb35d0d02202aa0a06c21a4233f37900a48bc8777d3b47f59e3a38616ce631a04df57f96736"
		},
		{
			"keyid": "fdfa83a07b5a83589b87ded41f77f39d232ad91f7cce52868dacd06ba089849f",
			"sig": "304502202cff44f2215d7a47b28b8f5f580c2cfbbd1bfcfcbbe78de323045b2c0badc5e9022100c743949eb3f4ea5a4b9ae27ac6eddea1f0ff9bfd004f8a9a9d18c6e4142b6e75"
		},
		{
			"keyid": "7f7513b25429a64473e10ce3ad2f3da372bbdd14b65d07bbaf547e7c8bbbe62b",
			"sig": "304502200e5613b901e0f3e08eceabddc73f98b50ddf892e998d0b369c6e3d451ac48875022100940cf92d1f43ee2e5cdbb22572bb52925ed3863a688f7ffdd4bd2e2e56f028b3"
		}
	]
}","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27.000Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29.000Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15.000Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00.000Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00.000Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "GitHub, Inc.",
        "commonName": "Internal Services Root"
      },
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB3DCCAWKgAwIBAgIUchkNsH36Xa04b1LqIc+qr9DVecMwCgYIKoZIzj0EAwMwMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMB4XDTIzMDQxNDAwMDAwMFoXDTI0MDQxMzAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgVGltZXN0YW1waW5nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEUD5ZNbSqYMd6r8qpOOEX9ibGnZT9GsuXOhr/f8U9FJugBGExKYp40OULS0erjZW7xV9xV52NnJf5OeDq4e5ZKqNWMFQwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMIMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgwFoAUaW1RudOgVt0leqY0WKYbuPr47wAwCgYIKoZIzj0EAwMDaAAwZQIwbUH9HvD4ejCZJOWQnqAlkqURllvu9M8+VqLbiRK+zSfZCZwsiljRn8MQQRSkXEE5AjEAg+VxqtojfVfu8DhzzhCx9GKETbJHb19iV72mMKUbDAFmzZ6bQ8b54Zb8tidy5aWe"
          },
          {
            "rawBytes": "MIICEDCCAZWgAwIBAgIUX8ZO5QXP7vN4dMQ5e9sU3nub8OgwCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTI4MDQxMjAwMDAwMFowMjEVMBMGA1UEChMMR2l0SHViLCBJbmMuMRkwFwYDVQQDExBUU0EgaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEvMLY/dTVbvIJYANAuszEwJnQE1llftynyMKIMhh48HmqbVr5ygybzsLRLVKbBWOdZ21aeJz+gZiytZetqcyF9WlER5NEMf6JV7ZNojQpxHq4RHGoGSceQv/qvTiZxEDKo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUaW1RudOgVt0leqY0WKYbuPr47wAwHwYDVR0jBBgwFoAU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaQAwZgIxAK1B185ygCrIYFlIs3GjswjnwSMG6LY8woLVdakKDZxVa8f8cqMs1DhcxJ0+09w95QIxAO+tBzZk7vjUJ9iJgD4R6ZWTxQWKqNm74jO99o+o9sv4FI/SZTZTFyMn0IJEHdNmyA=="
          },
          {
            "rawBytes": "MIIB9DCCAXqgAwIBAgIUa/JAkdUjK4JUwsqtaiRJGWhqLSowCgYIKoZIzj0EAwMwODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MB4XDTIzMDQxNDAwMDAwMFoXDTMzMDQxMTAwMDAwMFowODEVMBMGA1UEChMMR2l0SHViLCBJbmMuMR8wHQYDVQQDExZJbnRlcm5hbCBTZXJ2aWNlcyBSb290MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEf9jFAXxz4kx68AHRMOkFBhflDcMTvzaXz4x/FCcXjJ/1qEKon/qPIGnaURskDtyNbNDOpeJTDDFqt48iMPrnzpx6IZwqemfUJN4xBEZfza+pYt/iyod+9tZr20RRWSv/o0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBAjAdBgNVHQ4EFgQU9NYYlobnAG4c0/qjxyH/lq/wz+QwCgYIKoZIzj0EAwMDaAAwZQIxALZLZ8BgRXzKxLMMN9VIlO+e4hrBnNBgF7tz7Hnrowv2NetZErIACKFymBlvWDvtMAIwZO+ki6ssQ1bsZo98O8mEAf2NZ7iiCgDDU0Vwjeco6zyeh0zBTs9/7gV6AHNQ53xD"
          }
        ]
      },
      "validFor": {
        "start": "2023-04-14T00:00:00.000Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpqbDNid3N3dTgwUGpqb2tDZ2gwbzJ3NWMyVTRMaFFBRTU3Z2o5Y3oxa3pBIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEyLTAxVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/@sigstore/verify/dist/bundle/dsse.js
new file mode 100644
index 0000000000000..193f875fd1014
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/bundle/dsse.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSESignatureContent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+class DSSESignatureContent {
+ constructor(env) {
+ this.env = env;
+ }
+ compareDigest(digest) {
+ return core_1.crypto.bufferEqual(digest, core_1.crypto.hash(this.env.payload));
+ }
+ compareSignature(signature) {
+ return core_1.crypto.bufferEqual(signature, this.signature);
+ }
+ verifySignature(key) {
+ return core_1.crypto.verify(this.preAuthEncoding, key, this.signature);
+ }
+ get signature() {
+ return this.env.signatures.length > 0
+ ? this.env.signatures[0].sig
+ : Buffer.from('');
+ }
+ // DSSE Pre-Authentication Encoding
+ get preAuthEncoding() {
+ return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload);
+ }
+}
+exports.DSSESignatureContent = DSSESignatureContent;
diff --git a/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/@sigstore/verify/dist/bundle/index.js
new file mode 100644
index 0000000000000..63f8d4c499881
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/bundle/index.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.signatureContent = exports.toSignedEntity = void 0;
+const core_1 = require("@sigstore/core");
+const dsse_1 = require("./dsse");
+const message_1 = require("./message");
+function toSignedEntity(bundle, artifact) {
+ const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial;
+ const timestamps = [];
+ for (const entry of tlogEntries) {
+ timestamps.push({
+ $case: 'transparency-log',
+ tlogEntry: entry,
+ });
+ }
+ for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) {
+ timestamps.push({
+ $case: 'timestamp-authority',
+ timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp),
+ });
+ }
+ return {
+ signature: signatureContent(bundle, artifact),
+ key: key(bundle),
+ tlogEntries,
+ timestamps,
+ };
+}
+exports.toSignedEntity = toSignedEntity;
+function signatureContent(bundle, artifact) {
+ switch (bundle.content.$case) {
+ case 'dsseEnvelope':
+ return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope);
+ case 'messageSignature':
+ return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact);
+ }
+}
+exports.signatureContent = signatureContent;
+function key(bundle) {
+ switch (bundle.verificationMaterial.content.$case) {
+ case 'publicKey':
+ return {
+ $case: 'public-key',
+ hint: bundle.verificationMaterial.content.publicKey.hint,
+ };
+ case 'x509CertificateChain':
+ return {
+ $case: 'certificate',
+ certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain
+ .certificates[0].rawBytes),
+ };
+ case 'certificate':
+ return {
+ $case: 'certificate',
+ certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes),
+ };
+ }
+}
diff --git a/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/@sigstore/verify/dist/bundle/message.js
new file mode 100644
index 0000000000000..836148c68a8b6
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/bundle/message.js
@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureContent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+class MessageSignatureContent {
+ constructor(messageSignature, artifact) {
+ this.signature = messageSignature.signature;
+ this.messageDigest = messageSignature.messageDigest.digest;
+ this.artifact = artifact;
+ }
+ compareSignature(signature) {
+ return core_1.crypto.bufferEqual(signature, this.signature);
+ }
+ compareDigest(digest) {
+ return core_1.crypto.bufferEqual(digest, this.messageDigest);
+ }
+ verifySignature(key) {
+ return core_1.crypto.verify(this.artifact, key, this.signature);
+ }
+}
+exports.MessageSignatureContent = MessageSignatureContent;
diff --git a/node_modules/@sigstore/verify/dist/error.js b/node_modules/@sigstore/verify/dist/error.js
new file mode 100644
index 0000000000000..6cb1cd4121343
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/error.js
@@ -0,0 +1,32 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PolicyError = exports.VerificationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class BaseError extends Error {
+ constructor({ code, message, cause, }) {
+ super(message);
+ this.code = code;
+ this.cause = cause;
+ this.name = this.constructor.name;
+ }
+}
+class VerificationError extends BaseError {
+}
+exports.VerificationError = VerificationError;
+class PolicyError extends BaseError {
+}
+exports.PolicyError = PolicyError;
diff --git a/node_modules/@sigstore/verify/dist/index.js b/node_modules/@sigstore/verify/dist/index.js
new file mode 100644
index 0000000000000..3222876fcd68b
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/index.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0;
+/* istanbul ignore file */
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
+Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
+var trust_1 = require("./trust");
+Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } });
+var verifier_1 = require("./verifier");
+Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } });
diff --git a/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/@sigstore/verify/dist/key/certificate.js
new file mode 100644
index 0000000000000..c9140dd98d58a
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/key/certificate.js
@@ -0,0 +1,205 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CertificateChainVerifier = exports.verifyCertificateChain = void 0;
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+function verifyCertificateChain(leaf, certificateAuthorities) {
+ // Filter list of trusted CAs to those which are valid for the given
+ // leaf certificate.
+ const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, {
+ start: leaf.notBefore,
+ end: leaf.notAfter,
+ });
+ /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
+ let error;
+ for (const ca of cas) {
+ try {
+ const verifier = new CertificateChainVerifier({
+ trustedCerts: ca.certChain,
+ untrustedCert: leaf,
+ });
+ return verifier.verify();
+ }
+ catch (err) {
+ error = err;
+ }
+ }
+ // If we failed to verify the certificate chain for all of the trusted
+ // CAs, throw the last error we encountered.
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'Failed to verify certificate chain',
+ cause: error,
+ });
+}
+exports.verifyCertificateChain = verifyCertificateChain;
+class CertificateChainVerifier {
+ constructor(opts) {
+ this.untrustedCert = opts.untrustedCert;
+ this.trustedCerts = opts.trustedCerts;
+ this.localCerts = dedupeCertificates([
+ ...opts.trustedCerts,
+ opts.untrustedCert,
+ ]);
+ }
+ verify() {
+ // Construct certificate path from leaf to root
+ const certificatePath = this.sort();
+ // Perform validation checks on each certificate in the path
+ this.checkPath(certificatePath);
+ // Return verified certificate path
+ return certificatePath;
+ }
+ sort() {
+ const leafCert = this.untrustedCert;
+ // Construct all possible paths from the leaf
+ let paths = this.buildPaths(leafCert);
+ // Filter for paths which contain a trusted certificate
+ paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert)));
+ if (paths.length === 0) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'no trusted certificate path found',
+ });
+ }
+ // Find the shortest of possible paths
+ /* istanbul ignore next */
+ const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr);
+ // Construct chain from shortest path
+ // Removes the last certificate in the path, which will be a second copy
+ // of the root certificate given that the root is self-signed.
+ return [leafCert, ...path].slice(0, -1);
+ }
+ // Recursively build all possible paths from the leaf to the root
+ buildPaths(certificate) {
+ const paths = [];
+ const issuers = this.findIssuer(certificate);
+ if (issuers.length === 0) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'no valid certificate path found',
+ });
+ }
+ for (let i = 0; i < issuers.length; i++) {
+ const issuer = issuers[i];
+ // Base case - issuer is self
+ if (issuer.equals(certificate)) {
+ paths.push([certificate]);
+ continue;
+ }
+ // Recursively build path for the issuer
+ const subPaths = this.buildPaths(issuer);
+ // Construct paths by appending the issuer to each subpath
+ for (let j = 0; j < subPaths.length; j++) {
+ paths.push([issuer, ...subPaths[j]]);
+ }
+ }
+ return paths;
+ }
+ // Return all possible issuers for the given certificate
+ findIssuer(certificate) {
+ let issuers = [];
+ let keyIdentifier;
+ // Exit early if the certificate is self-signed
+ if (certificate.subject.equals(certificate.issuer)) {
+ if (certificate.verify()) {
+ return [certificate];
+ }
+ }
+ // If the certificate has an authority key identifier, use that
+ // to find the issuer
+ if (certificate.extAuthorityKeyID) {
+ keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier;
+ // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber
+ // though Fulcio doesn't appear to use these
+ }
+ // Find possible issuers by comparing the authorityKeyID/subjectKeyID
+ // or issuer/subject. Potential issuers are added to the result array.
+ this.localCerts.forEach((possibleIssuer) => {
+ if (keyIdentifier) {
+ if (possibleIssuer.extSubjectKeyID) {
+ if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) {
+ issuers.push(possibleIssuer);
+ }
+ return;
+ }
+ }
+ // Fallback to comparing certificate issuer and subject if
+ // subjectKey/authorityKey extensions are not present
+ if (possibleIssuer.subject.equals(certificate.issuer)) {
+ issuers.push(possibleIssuer);
+ }
+ });
+ // Remove any issuers which fail to verify the certificate
+ issuers = issuers.filter((issuer) => {
+ try {
+ return certificate.verify(issuer);
+ }
+ catch (ex) {
+ /* istanbul ignore next - should never error */
+ return false;
+ }
+ });
+ return issuers;
+ }
+ checkPath(path) {
+ /* istanbul ignore if */
+ if (path.length < 1) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'certificate chain must contain at least one certificate',
+ });
+ }
+ // Ensure that all certificates beyond the leaf are CAs
+ const validCAs = path.slice(1).every((cert) => cert.isCA);
+ if (!validCAs) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'intermediate certificate is not a CA',
+ });
+ }
+ // Certificate's issuer must match the subject of the next certificate
+ // in the chain
+ for (let i = path.length - 2; i >= 0; i--) {
+ /* istanbul ignore if */
+ if (!path[i].issuer.equals(path[i + 1].subject)) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'incorrect certificate name chaining',
+ });
+ }
+ }
+ // Check pathlength constraints
+ for (let i = 0; i < path.length; i++) {
+ const cert = path[i];
+ // If the certificate is a CA, check the path length
+ if (cert.extBasicConstraints?.isCA) {
+ const pathLength = cert.extBasicConstraints.pathLenConstraint;
+ // The path length, if set, indicates how many intermediate
+ // certificates (NOT including the leaf) are allowed to follow. The
+ // pathLength constraint of any intermediate CA certificate MUST be
+ // greater than or equal to it's own depth in the chain (with an
+ // adjustment for the leaf certificate)
+ if (pathLength !== undefined && pathLength < i - 1) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'path length constraint exceeded',
+ });
+ }
+ }
+ }
+ }
+}
+exports.CertificateChainVerifier = CertificateChainVerifier;
+// Remove duplicate certificates from the array
+function dedupeCertificates(certs) {
+ for (let i = 0; i < certs.length; i++) {
+ for (let j = i + 1; j < certs.length; j++) {
+ if (certs[i].equals(certs[j])) {
+ certs.splice(j, 1);
+ j--;
+ }
+ }
+ }
+ return certs;
+}
diff --git a/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/@sigstore/verify/dist/key/index.js
new file mode 100644
index 0000000000000..682a306803a99
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/key/index.js
@@ -0,0 +1,72 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCertificate = exports.verifyPublicKey = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const certificate_1 = require("./certificate");
+const sct_1 = require("./sct");
+const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1';
+const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8';
+function verifyPublicKey(hint, timestamps, trustMaterial) {
+ const key = trustMaterial.publicKey(hint);
+ timestamps.forEach((timestamp) => {
+ if (!key.validFor(timestamp)) {
+ throw new error_1.VerificationError({
+ code: 'PUBLIC_KEY_ERROR',
+ message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`,
+ });
+ }
+ });
+ return { key: key.publicKey };
+}
+exports.verifyPublicKey = verifyPublicKey;
+function verifyCertificate(leaf, timestamps, trustMaterial) {
+ // Check that leaf certificate chains to a trusted CA
+ const path = (0, certificate_1.verifyCertificateChain)(leaf, trustMaterial.certificateAuthorities);
+ // Check that ALL certificates are valid for ALL of the timestamps
+ const validForDate = timestamps.every((timestamp) => path.every((cert) => cert.validForDate(timestamp)));
+ if (!validForDate) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'certificate is not valid or expired at the specified date',
+ });
+ }
+ return {
+ scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs),
+ signer: getSigner(path[0]),
+ };
+}
+exports.verifyCertificate = verifyCertificate;
+function getSigner(cert) {
+ let issuer;
+ const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2);
+ if (issuerExtension) {
+ issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii');
+ }
+ else {
+ issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii');
+ }
+ const identity = {
+ extensions: { issuer },
+ subjectAlternativeName: cert.subjectAltName,
+ };
+ return {
+ key: core_1.crypto.createPublicKey(cert.publicKey),
+ identity,
+ };
+}
diff --git a/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/@sigstore/verify/dist/key/sct.js
new file mode 100644
index 0000000000000..aea412840e103
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/key/sct.js
@@ -0,0 +1,79 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySCTs = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+function verifySCTs(cert, issuer, ctlogs) {
+ let extSCT;
+ // Verifying the SCT requires that we remove the SCT extension and
+ // re-encode the TBS structure to DER -- this value is part of the data
+ // over which the signature is calculated. Since this is a destructive action
+ // we create a copy of the certificate so we can remove the SCT extension
+ // without affecting the original certificate.
+ const clone = cert.clone();
+ // Intentionally not using the findExtension method here because we want to
+ // remove the the SCT extension from the certificate before calculating the
+ // PreCert structure
+ for (let i = 0; i < clone.extensions.length; i++) {
+ const ext = clone.extensions[i];
+ if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) {
+ extSCT = new core_1.X509SCTExtension(ext);
+ // Remove the extension from the certificate
+ clone.extensions.splice(i, 1);
+ break;
+ }
+ }
+ // No SCT extension found to verify
+ if (!extSCT) {
+ return [];
+ }
+ // Found an SCT extension but it has no SCTs
+ /* istanbul ignore if -- too difficult to fabricate test case for this */
+ if (extSCT.signedCertificateTimestamps.length === 0) {
+ return [];
+ }
+ // Construct the PreCert structure
+ // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+ const preCert = new core_1.ByteStream();
+ // Calculate hash of the issuer's public key
+ const issuerId = core_1.crypto.hash(issuer.publicKey);
+ preCert.appendView(issuerId);
+ // Re-encodes the certificate to DER after removing the SCT extension
+ const tbs = clone.tbsCertificate.toDER();
+ preCert.appendUint24(tbs.length);
+ preCert.appendView(tbs);
+ // Calculate and return the verification results for each SCT
+ return extSCT.signedCertificateTimestamps.map((sct) => {
+ // Find the ctlog instance that corresponds to the SCT's logID
+ const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, {
+ logID: sct.logID,
+ targetDate: sct.datetime,
+ });
+ // See if the SCT is valid for any of the CT logs
+ const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey));
+ if (!verified) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'SCT verification failed',
+ });
+ }
+ return sct.logID;
+ });
+}
+exports.verifySCTs = verifySCTs;
diff --git a/node_modules/@sigstore/verify/dist/policy.js b/node_modules/@sigstore/verify/dist/policy.js
new file mode 100644
index 0000000000000..731e5c8332847
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/policy.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyExtensions = exports.verifySubjectAlternativeName = void 0;
+const error_1 = require("./error");
+function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
+ if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) {
+ throw new error_1.PolicyError({
+ code: 'UNTRUSTED_SIGNER_ERROR',
+ message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`,
+ });
+ }
+}
+exports.verifySubjectAlternativeName = verifySubjectAlternativeName;
+function verifyExtensions(policyExtensions, signerExtensions = {}) {
+ let key;
+ for (key in policyExtensions) {
+ if (signerExtensions[key] !== policyExtensions[key]) {
+ throw new error_1.PolicyError({
+ code: 'UNTRUSTED_SIGNER_ERROR',
+ message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`,
+ });
+ }
+ }
+}
+exports.verifyExtensions = verifyExtensions;
diff --git a/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/@sigstore/verify/dist/shared.types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/shared.types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
new file mode 100644
index 0000000000000..04a87383f0fd1
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
@@ -0,0 +1,158 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCheckpoint = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+// Separator between the note and the signatures in a checkpoint
+const CHECKPOINT_SEPARATOR = '\n\n';
+// Checkpoint signatures are of the following form:
+// "– <identity> <key_hint+signature>\n"
+// where:
+// - the prefix is an emdash (U+2014).
+// - <identity> gives a human-readable representation of the signing ID.
+// - <key_hint+signature> is the first 4 bytes of the SHA256 hash of the
+// associated public key followed by the signature bytes.
+const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
+// Verifies the checkpoint value in the given tlog entry. There are two steps
+// to the verification:
+// 1. Verify that all signatures in the checkpoint can be verified against a
+// trusted public key
+// 2. Verify that the root hash in the checkpoint matches the root hash in the
+// inclusion proof
+// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
+function verifyCheckpoint(entry, tlogs) {
+ // Filter tlog instances to just those which were valid at the time of the
+ // entry
+ const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
+ targetDate: new Date(Number(entry.integratedTime) * 1000),
+ });
+ const inclusionProof = entry.inclusionProof;
+ const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
+ const checkpoint = LogCheckpoint.fromString(signedNote.note);
+ // Verify that the signatures in the checkpoint are all valid
+ if (!verifySignedNote(signedNote, validTLogs)) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'invalid checkpoint signature',
+ });
+ }
+ // Verify that the root hash from the checkpoint matches the root hash in the
+ // inclusion proof
+ if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'root hash mismatch',
+ });
+ }
+}
+exports.verifyCheckpoint = verifyCheckpoint;
+// Verifies the signatures in the SignedNote. For each signature, the
+// corresponding transparency log is looked up by the key hint and the
+// signature is verified against the public key in the transparency log.
+// Throws an error if any of the signatures are invalid.
+function verifySignedNote(signedNote, tlogs) {
+ const data = Buffer.from(signedNote.note, 'utf-8');
+ return signedNote.signatures.every((signature) => {
+ // Find the transparency log instance with the matching key hint
+ const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint));
+ if (!tlog) {
+ return false;
+ }
+ return core_1.crypto.verify(data, tlog.publicKey, signature.signature);
+ });
+}
+// SignedNote represents a signed note from a transparency log checkpoint. Consists
+// of a body (or note) and one or more signatures calculated over the body. See
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
+class SignedNote {
+ constructor(note, signatures) {
+ this.note = note;
+ this.signatures = signatures;
+ }
+ // Deserialize a SignedNote from a string
+ static fromString(envelope) {
+ if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'missing checkpoint separator',
+ });
+ }
+ // Split the note into the header and the data portions at the separator
+ const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
+ const header = envelope.slice(0, split + 1);
+ const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
+ // Find all the signature lines in the data portion
+ const matches = data.matchAll(SIGNATURE_REGEX);
+ // Parse each of the matched signature lines into the name and signature.
+ // The first four bytes of the signature are the key hint (should match the
+ // first four bytes of the log ID), and the rest is the signature itself.
+ const signatures = Array.from(matches, (match) => {
+ const [, name, signature] = match;
+ const sigBytes = Buffer.from(signature, 'base64');
+ if (sigBytes.length < 5) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'malformed checkpoint signature',
+ });
+ }
+ return {
+ name,
+ keyHint: sigBytes.subarray(0, 4),
+ signature: sigBytes.subarray(4),
+ };
+ });
+ if (signatures.length === 0) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'no signatures found in checkpoint',
+ });
+ }
+ return new SignedNote(header, signatures);
+ }
+}
+// LogCheckpoint represents a transparency log checkpoint. Consists of the
+// following:
+// - origin: the name of the transparency log
+// - logSize: the size of the log at the time of the checkpoint
+// - logHash: the root hash of the log at the time of the checkpoint
+// - rest: the rest of the checkpoint body, which is a list of log entries
+// See:
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
+class LogCheckpoint {
+ constructor(origin, logSize, logHash, rest) {
+ this.origin = origin;
+ this.logSize = logSize;
+ this.logHash = logHash;
+ this.rest = rest;
+ }
+ static fromString(note) {
+ const lines = note.trimEnd().split('\n');
+ if (lines.length < 3) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'too few lines in checkpoint header',
+ });
+ }
+ const origin = lines[0];
+ const logSize = BigInt(lines[1]);
+ const rootHash = Buffer.from(lines[2], 'base64');
+ const rest = lines.slice(3);
+ return new LogCheckpoint(origin, logSize, rootHash, rest);
+ }
+}
diff --git a/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/@sigstore/verify/dist/timestamp/index.js
new file mode 100644
index 0000000000000..0da554f648d25
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/timestamp/index.js
@@ -0,0 +1,47 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogTimestamp = exports.verifyTSATimestamp = void 0;
+const error_1 = require("../error");
+const checkpoint_1 = require("./checkpoint");
+const merkle_1 = require("./merkle");
+const set_1 = require("./set");
+const tsa_1 = require("./tsa");
+function verifyTSATimestamp(timestamp, data, timestampAuthorities) {
+ (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities);
+ return {
+ type: 'timestamp-authority',
+ logID: timestamp.signerSerialNumber,
+ timestamp: timestamp.signingTime,
+ };
+}
+exports.verifyTSATimestamp = verifyTSATimestamp;
+function verifyTLogTimestamp(entry, tlogAuthorities) {
+ let inclusionVerified = false;
+ if (isTLogEntryWithInclusionPromise(entry)) {
+ (0, set_1.verifyTLogSET)(entry, tlogAuthorities);
+ inclusionVerified = true;
+ }
+ if (isTLogEntryWithInclusionProof(entry)) {
+ (0, merkle_1.verifyMerkleInclusion)(entry);
+ (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities);
+ inclusionVerified = true;
+ }
+ if (!inclusionVerified) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_MISSING_INCLUSION_ERROR',
+ message: 'inclusion could not be verified',
+ });
+ }
+ return {
+ type: 'transparency-log',
+ logID: entry.logId.keyId,
+ timestamp: new Date(Number(entry.integratedTime) * 1000),
+ };
+}
+exports.verifyTLogTimestamp = verifyTLogTimestamp;
+function isTLogEntryWithInclusionPromise(entry) {
+ return entry.inclusionPromise !== undefined;
+}
+function isTLogEntryWithInclusionProof(entry) {
+ return entry.inclusionProof !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/@sigstore/verify/dist/timestamp/merkle.js
new file mode 100644
index 0000000000000..9895d01b7abc0
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/timestamp/merkle.js
@@ -0,0 +1,105 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyMerkleInclusion = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
+const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
+function verifyMerkleInclusion(entry) {
+ const inclusionProof = entry.inclusionProof;
+ const logIndex = BigInt(inclusionProof.logIndex);
+ const treeSize = BigInt(inclusionProof.treeSize);
+ if (logIndex < 0n || logIndex >= treeSize) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: `invalid index: ${logIndex}`,
+ });
+ }
+ // Figure out which subset of hashes corresponds to the inner and border
+ // nodes
+ const { inner, border } = decompInclProof(logIndex, treeSize);
+ if (inclusionProof.hashes.length !== inner + border) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'invalid hash count',
+ });
+ }
+ const innerHashes = inclusionProof.hashes.slice(0, inner);
+ const borderHashes = inclusionProof.hashes.slice(inner);
+ // The entry's hash is the leaf hash
+ const leafHash = hashLeaf(entry.canonicalizedBody);
+ // Chain the hashes belonging to the inner and border portions
+ const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes);
+ // Calculated hash should match the root hash in the inclusion proof
+ if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROOF_ERROR',
+ message: 'calculated root hash does not match inclusion proof',
+ });
+ }
+}
+exports.verifyMerkleInclusion = verifyMerkleInclusion;
+// Breaks down inclusion proof for a leaf at the specified index in a tree of
+// the specified size. The split point is where paths to the index leaf and
+// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
+// parts.
+function decompInclProof(index, size) {
+ const inner = innerProofSize(index, size);
+ const border = onesCount(index >> BigInt(inner));
+ return { inner, border };
+}
+// Computes a subtree hash for a node on or below the tree's right border.
+// Assumes the provided proof hashes are ordered from lower to higher levels
+// and seed is the initial hash of the node specified by the index.
+function chainInner(seed, hashes, index) {
+ return hashes.reduce((acc, h, i) => {
+ if ((index >> BigInt(i)) & BigInt(1)) {
+ return hashChildren(h, acc);
+ }
+ else {
+ return hashChildren(acc, h);
+ }
+ }, seed);
+}
+// Computes a subtree hash for nodes along the tree's right border.
+function chainBorderRight(seed, hashes) {
+ return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
+}
+function innerProofSize(index, size) {
+ return bitLength(index ^ (size - BigInt(1)));
+}
+// Counts the number of ones in the binary representation of the given number.
+// https://en.wikipedia.org/wiki/Hamming_weight
+function onesCount(num) {
+ return num.toString(2).split('1').length - 1;
+}
+// Returns the number of bits necessary to represent an integer in binary.
+function bitLength(n) {
+ if (n === 0n) {
+ return 0;
+ }
+ return n.toString(2).length;
+}
+// Hashing logic according to RFC6962.
+// https://datatracker.ietf.org/doc/html/rfc6962#section-2
+function hashChildren(left, right) {
+ return core_1.crypto.hash(RFC6962_NODE_HASH_PREFIX, left, right);
+}
+function hashLeaf(leaf) {
+ return core_1.crypto.hash(RFC6962_LEAF_HASH_PREFIX, leaf);
+}
diff --git a/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/@sigstore/verify/dist/timestamp/set.js
new file mode 100644
index 0000000000000..a6357c06999cb
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/timestamp/set.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogSET = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+// Verifies the SET for the given entry against the list of trusted
+// transparency logs. Returns true if the SET can be verified against at least
+// one of the trusted logs; otherwise, returns false.
+function verifyTLogSET(entry, tlogs) {
+ // Filter the list of tlog instances to only those which might be able to
+ // verify the SET
+ const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
+ logID: entry.logId.keyId,
+ targetDate: new Date(Number(entry.integratedTime) * 1000),
+ });
+ // Check to see if we can verify the SET against any of the valid tlogs
+ const verified = validTLogs.some((tlog) => {
+ // Re-create the original Rekor verification payload
+ const payload = toVerificationPayload(entry);
+ // Canonicalize the payload and turn into a buffer for verification
+ const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8');
+ // Extract the SET from the tlog entry
+ const signature = entry.inclusionPromise.signedEntryTimestamp;
+ return core_1.crypto.verify(data, tlog.publicKey, signature);
+ });
+ if (!verified) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_INCLUSION_PROMISE_ERROR',
+ message: 'inclusion promise could not be verified',
+ });
+ }
+}
+exports.verifyTLogSET = verifyTLogSET;
+// Returns a properly formatted "VerificationPayload" for one of the
+// transaction log entries in the given bundle which can be used for SET
+// verification.
+function toVerificationPayload(entry) {
+ const { integratedTime, logIndex, logId, canonicalizedBody } = entry;
+ return {
+ body: canonicalizedBody.toString('base64'),
+ integratedTime: Number(integratedTime),
+ logIndex: Number(logIndex),
+ logID: logId.keyId.toString('hex'),
+ };
+}
diff --git a/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/@sigstore/verify/dist/timestamp/tsa.js
new file mode 100644
index 0000000000000..7b095bc3a7f90
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/timestamp/tsa.js
@@ -0,0 +1,74 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyRFC3161Timestamp = void 0;
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const certificate_1 = require("../key/certificate");
+const trust_1 = require("../trust");
+function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
+ const signingTime = timestamp.signingTime;
+ // Filter for CAs which were valid at the time of signing
+ timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, {
+ start: signingTime,
+ end: signingTime,
+ });
+ // Filter for CAs which match serial and issuer embedded in the timestamp
+ timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, {
+ serialNumber: timestamp.signerSerialNumber,
+ issuer: timestamp.signerIssuer,
+ });
+ // Check that we can verify the timestamp with AT LEAST ONE of the remaining
+ // CAs
+ const verified = timestampAuthorities.some((ca) => {
+ try {
+ verifyTimestampForCA(timestamp, data, ca);
+ return true;
+ }
+ catch (e) {
+ return false;
+ }
+ });
+ if (!verified) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: 'timestamp could not be verified',
+ });
+ }
+}
+exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
+function verifyTimestampForCA(timestamp, data, ca) {
+ const [leaf, ...cas] = ca.certChain;
+ const signingKey = core_1.crypto.createPublicKey(leaf.publicKey);
+ const signingTime = timestamp.signingTime;
+ // Verify the certificate chain for the provided CA
+ try {
+ new certificate_1.CertificateChainVerifier({
+ untrustedCert: leaf,
+ trustedCerts: cas,
+ }).verify();
+ }
+ catch (e) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: 'invalid certificate chain',
+ });
+ }
+ // Check that all of the CA certs were valid at the time of signing
+ const validAtSigningTime = ca.certChain.every((cert) => cert.validForDate(signingTime));
+ if (!validAtSigningTime) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: 'timestamp was signed with an expired certificate',
+ });
+ }
+ // Check that the signing certificate's key can be used to verify the
+ // timestamp signature.
+ timestamp.verify(data, signingKey);
+}
+// Filters the list of CAs to those which have a leaf signing certificate which
+// matches the given serial number and issuer.
+function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) {
+ return timestampAuthorities.filter((ca) => ca.certChain.length > 0 &&
+ core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) &&
+ core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer));
+}
diff --git a/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/@sigstore/verify/dist/tlog/dsse.js
new file mode 100644
index 0000000000000..bf430e61dde56
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/tlog/dsse.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyDSSETLogBody = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given intoto tlog entry to the given bundle
+function verifyDSSETLogBody(tlogEntry, content) {
+ switch (tlogEntry.apiVersion) {
+ case '0.0.1':
+ return verifyDSSE001TLogBody(tlogEntry, content);
+ default:
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: `unsupported dsse version: ${tlogEntry.apiVersion}`,
+ });
+ }
+}
+exports.verifyDSSETLogBody = verifyDSSETLogBody;
+// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope.
+function verifyDSSE001TLogBody(tlogEntry, content) {
+ // Ensure the bundle's DSSE only contains a single signature
+ if (tlogEntry.spec.signatures?.length !== 1) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'signature count mismatch',
+ });
+ }
+ const tlogSig = tlogEntry.spec.signatures[0].signature;
+ // Ensure that the signature in the bundle's DSSE matches tlog entry
+ if (!content.compareSignature(Buffer.from(tlogSig, 'base64')))
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'tlog entry signature mismatch',
+ });
+ // Ensure the digest of the bundle's DSSE payload matches the digest in the
+ // tlog entry
+ const tlogHash = tlogEntry.spec.payloadHash?.value || '';
+ if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'DSSE payload hash mismatch',
+ });
+ }
+}
diff --git a/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
new file mode 100644
index 0000000000000..d1758858f030d
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
@@ -0,0 +1,52 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyHashedRekordTLogBody = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given hashedrekord tlog entry to the given bundle
+function verifyHashedRekordTLogBody(tlogEntry, content) {
+ switch (tlogEntry.apiVersion) {
+ case '0.0.1':
+ return verifyHashedrekord001TLogBody(tlogEntry, content);
+ default:
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`,
+ });
+ }
+}
+exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody;
+// Compare the given hashedrekord v0.0.1 tlog entry to the given message
+// signature
+function verifyHashedrekord001TLogBody(tlogEntry, content) {
+    // Ensure that the bundle's message signature matches the tlog entry
+ const tlogSig = tlogEntry.spec.signature.content || '';
+ if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'signature mismatch',
+ });
+ }
+ // Ensure that the bundle's message digest matches the tlog entry
+ const tlogDigest = tlogEntry.spec.data.hash?.value || '';
+ if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'digest mismatch',
+ });
+ }
+}
diff --git a/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/@sigstore/verify/dist/tlog/index.js
new file mode 100644
index 0000000000000..adfc70ed51ad0
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/tlog/index.js
@@ -0,0 +1,48 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogBody = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+const intoto_1 = require("./intoto");
+// Verifies that the given tlog entry matches the supplied signature content.
+function verifyTLogBody(entry, sigContent) {
+ const { kind, version } = entry.kindVersion;
+ const body = JSON.parse(entry.canonicalizedBody.toString('utf8'));
+ if (kind !== body.kind || version !== body.apiVersion) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`,
+ });
+ }
+ switch (body.kind) {
+ case 'dsse':
+ return (0, dsse_1.verifyDSSETLogBody)(body, sigContent);
+ case 'intoto':
+ return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent);
+ case 'hashedrekord':
+ return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent);
+ /* istanbul ignore next */
+ default:
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: `unsupported kind: ${kind}`,
+ });
+ }
+}
+exports.verifyTLogBody = verifyTLogBody;
diff --git a/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/@sigstore/verify/dist/tlog/intoto.js
new file mode 100644
index 0000000000000..74c7f50d763e1
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/tlog/intoto.js
@@ -0,0 +1,63 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyIntotoTLogBody = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given intoto tlog entry to the given bundle
+function verifyIntotoTLogBody(tlogEntry, content) {
+ switch (tlogEntry.apiVersion) {
+ case '0.0.2':
+ return verifyIntoto002TLogBody(tlogEntry, content);
+ default:
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: `unsupported intoto version: ${tlogEntry.apiVersion}`,
+ });
+ }
+}
+exports.verifyIntotoTLogBody = verifyIntotoTLogBody;
+// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope.
+function verifyIntoto002TLogBody(tlogEntry, content) {
+ // Ensure the bundle's DSSE contains a single signature
+ if (tlogEntry.spec.content.envelope.signatures?.length !== 1) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'signature count mismatch',
+ });
+ }
+ // Signature is double-base64-encoded in the tlog entry
+ const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig);
+ // Ensure that the signature in the bundle's DSSE matches tlog entry
+ if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'tlog entry signature mismatch',
+ });
+ }
+ // Ensure the digest of the bundle's DSSE payload matches the digest in the
+ // tlog entry
+ const tlogHash = tlogEntry.spec.content.payloadHash?.value || '';
+ if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
+ throw new error_1.VerificationError({
+ code: 'TLOG_BODY_ERROR',
+ message: 'DSSE payload hash mismatch',
+ });
+ }
+}
+function base64Decode(str) {
+ return Buffer.from(str, 'base64').toString('utf-8');
+}
diff --git a/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/@sigstore/verify/dist/trust/filter.js
new file mode 100644
index 0000000000000..c09d055913c4c
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/trust/filter.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+function filterCertAuthorities(certAuthorities, criteria) {
+ return certAuthorities.filter((ca) => {
+ return (ca.validFor.start <= criteria.start && ca.validFor.end >= criteria.end);
+ });
+}
+exports.filterCertAuthorities = filterCertAuthorities;
+// Filter the list of tlog instances to only those which match the given log
+// ID and have public keys which are valid for the given integrated time.
+function filterTLogAuthorities(tlogAuthorities, criteria) {
+ return tlogAuthorities.filter((tlog) => {
+ // If we're filtering by log ID and the log IDs don't match, we can't use
+ // this tlog
+ if (criteria.logID && !tlog.logID.equals(criteria.logID)) {
+ return false;
+ }
+ // Check that the integrated time is within the validFor range
+ return (tlog.validFor.start <= criteria.targetDate &&
+ criteria.targetDate <= tlog.validFor.end);
+ });
+}
+exports.filterTLogAuthorities = filterTLogAuthorities;
diff --git a/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/@sigstore/verify/dist/trust/index.js
new file mode 100644
index 0000000000000..954de55841590
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/trust/index.js
@@ -0,0 +1,84 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toTrustMaterial = exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const error_1 = require("../error");
+const BEGINNING_OF_TIME = new Date(0);
+const END_OF_TIME = new Date(8640000000000000);
+var filter_1 = require("./filter");
+Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } });
+Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } });
+function toTrustMaterial(root, keys) {
+ const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys);
+ return {
+ certificateAuthorities: root.certificateAuthorities.map(createCertAuthority),
+ timestampAuthorities: root.timestampAuthorities.map(createCertAuthority),
+ tlogs: root.tlogs.map(createTLogAuthority),
+ ctlogs: root.ctlogs.map(createTLogAuthority),
+ publicKey: keyFinder,
+ };
+}
+exports.toTrustMaterial = toTrustMaterial;
+function createTLogAuthority(tlogInstance) {
+ const keyDetails = tlogInstance.publicKey.keyDetails;
+ const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 ||
+ keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 ||
+ keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 ||
+ keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 ||
+ keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256
+ ? 'pkcs1'
+ : 'spki';
+ return {
+ logID: tlogInstance.logId.keyId,
+ publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType),
+ validFor: {
+ start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME,
+ end: tlogInstance.publicKey.validFor?.end || END_OF_TIME,
+ },
+ };
+}
+function createCertAuthority(ca) {
+ return {
+ certChain: ca.certChain.certificates.map((cert) => {
+ return core_1.X509Certificate.parse(cert.rawBytes);
+ }),
+ validFor: {
+ start: ca.validFor?.start || BEGINNING_OF_TIME,
+ end: ca.validFor?.end || END_OF_TIME,
+ },
+ };
+}
+function keyLocator(keys) {
+ return (hint) => {
+ const key = (keys || {})[hint];
+ if (!key) {
+ throw new error_1.VerificationError({
+ code: 'PUBLIC_KEY_ERROR',
+ message: `key not found: ${hint}`,
+ });
+ }
+ return {
+ publicKey: core_1.crypto.createPublicKey(key.rawBytes),
+ validFor: (date) => {
+ return ((key.validFor?.start || BEGINNING_OF_TIME) <= date &&
+ (key.validFor?.end || END_OF_TIME) >= date);
+ },
+ };
+ };
+}
diff --git a/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/@sigstore/verify/dist/trust/trust.types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/trust/trust.types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/@sigstore/verify/dist/verifier.js
new file mode 100644
index 0000000000000..829727cd1d40a
--- /dev/null
+++ b/node_modules/@sigstore/verify/dist/verifier.js
@@ -0,0 +1,141 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Verifier = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("util");
+const error_1 = require("./error");
+const key_1 = require("./key");
+const policy_1 = require("./policy");
+const timestamp_1 = require("./timestamp");
+const tlog_1 = require("./tlog");
+class Verifier {
+ constructor(trustMaterial, options = {}) {
+ this.trustMaterial = trustMaterial;
+ this.options = {
+ ctlogThreshold: options.ctlogThreshold ?? 1,
+ tlogThreshold: options.tlogThreshold ?? 1,
+ tsaThreshold: options.tsaThreshold ?? 0,
+ };
+ }
+ verify(entity, policy) {
+ const timestamps = this.verifyTimestamps(entity);
+ const signer = this.verifySigningKey(entity, timestamps);
+ this.verifyTLogs(entity);
+ this.verifySignature(entity, signer);
+ if (policy) {
+ this.verifyPolicy(policy, signer.identity || {});
+ }
+ return signer;
+ }
+ // Checks that all of the timestamps in the entity are valid and returns them
+ verifyTimestamps(entity) {
+ let tlogCount = 0;
+ let tsaCount = 0;
+ const timestamps = entity.timestamps.map((timestamp) => {
+ switch (timestamp.$case) {
+ case 'timestamp-authority':
+ tsaCount++;
+ return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities);
+ case 'transparency-log':
+ tlogCount++;
+ return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs);
+ }
+ });
+ // Check for duplicate timestamps
+ if (containsDupes(timestamps)) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: 'duplicate timestamp',
+ });
+ }
+ if (tlogCount < this.options.tlogThreshold) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`,
+ });
+ }
+ if (tsaCount < this.options.tsaThreshold) {
+ throw new error_1.VerificationError({
+ code: 'TIMESTAMP_ERROR',
+ message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`,
+ });
+ }
+ return timestamps.map((t) => t.timestamp);
+ }
+ // Checks that the signing key is valid for all of the the supplied timestamps
+ // and returns the signer.
+ verifySigningKey({ key }, timestamps) {
+ switch (key.$case) {
+ case 'public-key': {
+ return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial);
+ }
+ case 'certificate': {
+ const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial);
+ /* istanbul ignore next - no fixture */
+ if (containsDupes(result.scts)) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: 'duplicate SCT',
+ });
+ }
+ if (result.scts.length < this.options.ctlogThreshold) {
+ throw new error_1.VerificationError({
+ code: 'CERTIFICATE_ERROR',
+ message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`,
+ });
+ }
+ return result.signer;
+ }
+ }
+ }
+ // Checks that the tlog entries are valid for the supplied content
+ verifyTLogs({ signature: content, tlogEntries }) {
+ tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content));
+ }
+ // Checks that the signature is valid for the supplied content
+ verifySignature(entity, signer) {
+ if (!entity.signature.verifySignature(signer.key)) {
+ throw new error_1.VerificationError({
+ code: 'SIGNATURE_ERROR',
+ message: 'signature verification failed',
+ });
+ }
+ }
+ verifyPolicy(policy, identity) {
+ // Check the subject alternative name of the signer matches the policy
+ if (policy.subjectAlternativeName) {
+ (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName);
+ }
+ // Check that the extensions of the signer match the policy
+ if (policy.extensions) {
+ (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions);
+ }
+ }
+}
+exports.Verifier = Verifier;
+// Checks for duplicate items in the array. Objects are compared using
+// deep equality.
+function containsDupes(arr) {
+ for (let i = 0; i < arr.length; i++) {
+ for (let j = i + 1; j < arr.length; j++) {
+ if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
diff --git a/node_modules/@sigstore/verify/package.json b/node_modules/@sigstore/verify/package.json
new file mode 100644
index 0000000000000..cd0c845a797e4
--- /dev/null
+++ b/node_modules/@sigstore/verify/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "@sigstore/verify",
+ "version": "1.2.1",
+ "description": "Verification of Sigstore signatures",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "dependencies": {
+ "@sigstore/protobuf-specs": "^0.3.2",
+ "@sigstore/bundle": "^2.3.2",
+ "@sigstore/core": "^1.1.0"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@tootallnate/once/LICENSE b/node_modules/@tootallnate/once/LICENSE
deleted file mode 100644
index c4c56a2a53b2f..0000000000000
--- a/node_modules/@tootallnate/once/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2020 Nathan Rajlich
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/@tootallnate/once/dist/index.d.ts
deleted file mode 100644
index 93d02a9a348b5..0000000000000
--- a/node_modules/@tootallnate/once/dist/index.d.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-///
-import { EventEmitter } from 'events';
-import { EventNames, EventListenerParameters, AbortSignal } from './types';
-export interface OnceOptions {
- signal?: AbortSignal;
-}
-export default function once>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise>;
diff --git a/node_modules/@tootallnate/once/dist/index.js b/node_modules/@tootallnate/once/dist/index.js
deleted file mode 100644
index ca6385b1b82f8..0000000000000
--- a/node_modules/@tootallnate/once/dist/index.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-function once(emitter, name, { signal } = {}) {
- return new Promise((resolve, reject) => {
- function cleanup() {
- signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);
- emitter.removeListener(name, onEvent);
- emitter.removeListener('error', onError);
- }
- function onEvent(...args) {
- cleanup();
- resolve(args);
- }
- function onError(err) {
- cleanup();
- reject(err);
- }
- signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup);
- emitter.on(name, onEvent);
- emitter.on('error', onError);
- });
-}
-exports.default = once;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/@tootallnate/once/dist/index.js.map
deleted file mode 100644
index 61708ca07f1b0..0000000000000
--- a/node_modules/@tootallnate/once/dist/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts
deleted file mode 100644
index eb2bbc6c6275e..0000000000000
--- a/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts
+++ /dev/null
@@ -1,231 +0,0 @@
-export declare type OverloadedParameters = T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
- (...args: infer A16): any;
- (...args: infer A17): any;
- (...args: infer A18): any;
- (...args: infer A19): any;
- (...args: infer A20): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
- (...args: infer A16): any;
- (...args: infer A17): any;
- (...args: infer A18): any;
- (...args: infer A19): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
- (...args: infer A16): any;
- (...args: infer A17): any;
- (...args: infer A18): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
- (...args: infer A16): any;
- (...args: infer A17): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
- (...args: infer A16): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
- (...args: infer A15): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
- (...args: infer A14): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
- (...args: infer A13): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
- (...args: infer A12): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
- (...args: infer A11): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
- (...args: infer A10): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
- (...args: infer A9): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
- (...args: infer A8): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
- (...args: infer A7): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
- (...args: infer A6): any;
-} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
- (...args: infer A5): any;
-} ? A1 | A2 | A3 | A4 | A5 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
- (...args: infer A4): any;
-} ? A1 | A2 | A3 | A4 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
- (...args: infer A3): any;
-} ? A1 | A2 | A3 : T extends {
- (...args: infer A1): any;
- (...args: infer A2): any;
-} ? A1 | A2 : T extends {
- (...args: infer A1): any;
-} ? A1 : any;
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/node_modules/@tootallnate/once/dist/overloaded-parameters.js
deleted file mode 100644
index 207186d9e7cca..0000000000000
--- a/node_modules/@tootallnate/once/dist/overloaded-parameters.js
+++ /dev/null
@@ -1,3 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=overloaded-parameters.js.map
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map
deleted file mode 100644
index 863f146d625f6..0000000000000
--- a/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/types.d.ts b/node_modules/@tootallnate/once/dist/types.d.ts
deleted file mode 100644
index 58be8284ab8d3..0000000000000
--- a/node_modules/@tootallnate/once/dist/types.d.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-///
-import { EventEmitter } from 'events';
-import { OverloadedParameters } from './overloaded-parameters';
-export declare type FirstParameter = T extends [infer R, ...any[]] ? R : never;
-export declare type EventListener = F extends [
- T,
- infer R,
- ...any[]
-] ? R : never;
-export declare type EventParameters = OverloadedParameters;
-export declare type EventNames = FirstParameter>;
-export declare type EventListenerParameters> = WithDefault, Event>>, unknown[]>;
-export declare type WithDefault = [T] extends [never] ? D : T;
-export interface AbortSignal {
- addEventListener: (name: string, listener: (...args: any[]) => any) => void;
- removeEventListener: (name: string, listener: (...args: any[]) => any) => void;
-}
diff --git a/node_modules/@tootallnate/once/dist/types.js b/node_modules/@tootallnate/once/dist/types.js
deleted file mode 100644
index 11e638d1ee44a..0000000000000
--- a/node_modules/@tootallnate/once/dist/types.js
+++ /dev/null
@@ -1,3 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/types.js.map b/node_modules/@tootallnate/once/dist/types.js.map
deleted file mode 100644
index c768b79002615..0000000000000
--- a/node_modules/@tootallnate/once/dist/types.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/package.json b/node_modules/@tootallnate/once/package.json
deleted file mode 100644
index 69ce947d9c310..0000000000000
--- a/node_modules/@tootallnate/once/package.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "name": "@tootallnate/once",
- "version": "2.0.0",
- "description": "Creates a Promise that waits for a single event",
- "main": "./dist/index.js",
- "types": "./dist/index.d.ts",
- "files": [
- "dist"
- ],
- "scripts": {
- "prebuild": "rimraf dist",
- "build": "tsc",
- "test": "jest",
- "prepublishOnly": "npm run build"
- },
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/once.git"
- },
- "keywords": [],
- "author": "Nathan Rajlich (http://n8.io/)",
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/TooTallNate/once/issues"
- },
- "devDependencies": {
- "@types/jest": "^27.0.2",
- "@types/node": "^12.12.11",
- "abort-controller": "^3.0.0",
- "jest": "^27.2.1",
- "rimraf": "^3.0.0",
- "ts-jest": "^27.0.5",
- "typescript": "^4.4.3"
- },
- "engines": {
- "node": ">= 10"
- },
- "jest": {
- "preset": "ts-jest",
- "globals": {
- "ts-jest": {
- "diagnostics": false,
- "isolatedModules": true
- }
- },
- "verbose": false,
- "testEnvironment": "node",
- "testMatch": [
- "/test/**/*.test.ts"
- ]
- }
-}
diff --git a/node_modules/@tufjs/canonical-json/LICENSE b/node_modules/@tufjs/canonical-json/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/@tufjs/canonical-json/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@tufjs/canonical-json/lib/index.js b/node_modules/@tufjs/canonical-json/lib/index.js
new file mode 100644
index 0000000000000..d480696de1f6c
--- /dev/null
+++ b/node_modules/@tufjs/canonical-json/lib/index.js
@@ -0,0 +1,64 @@
+const COMMA = ',';
+const COLON = ':';
+const LEFT_SQUARE_BRACKET = '[';
+const RIGHT_SQUARE_BRACKET = ']';
+const LEFT_CURLY_BRACKET = '{';
+const RIGHT_CURLY_BRACKET = '}';
+
+// Recursively encodes the supplied object according to the canonical JSON form
+// as specified at http://wiki.laptop.org/go/Canonical_JSON. It's a restricted
+// dialect of JSON in which keys are lexically sorted, floats are not allowed,
+// and only double quotes and backslashes are escaped.
+function canonicalize(object) {
+ const buffer = [];
+ if (typeof object === 'string') {
+ buffer.push(canonicalizeString(object));
+ } else if (typeof object === 'boolean') {
+ buffer.push(JSON.stringify(object));
+ } else if (Number.isInteger(object)) {
+ buffer.push(JSON.stringify(object));
+ } else if (object === null) {
+ buffer.push(JSON.stringify(object));
+ } else if (Array.isArray(object)) {
+ buffer.push(LEFT_SQUARE_BRACKET);
+ let first = true;
+ object.forEach((element) => {
+ if (!first) {
+ buffer.push(COMMA);
+ }
+ first = false;
+ buffer.push(canonicalize(element));
+ });
+ buffer.push(RIGHT_SQUARE_BRACKET);
+ } else if (typeof object === 'object') {
+ buffer.push(LEFT_CURLY_BRACKET);
+ let first = true;
+ Object.keys(object)
+ .sort()
+ .forEach((property) => {
+ if (!first) {
+ buffer.push(COMMA);
+ }
+ first = false;
+ buffer.push(canonicalizeString(property));
+ buffer.push(COLON);
+ buffer.push(canonicalize(object[property]));
+ });
+ buffer.push(RIGHT_CURLY_BRACKET);
+ } else {
+ throw new TypeError('cannot encode ' + object.toString());
+ }
+
+ return buffer.join('');
+}
+
+// String canonicalization consists of escaping backslash (\) and double
+// quote (") characters and wrapping the resulting string in double quotes.
+function canonicalizeString(string) {
+ const escapedString = string.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
+ return '"' + escapedString + '"';
+}
+
+module.exports = {
+ canonicalize,
+};
diff --git a/node_modules/@tufjs/canonical-json/package.json b/node_modules/@tufjs/canonical-json/package.json
new file mode 100644
index 0000000000000..886c0c3969225
--- /dev/null
+++ b/node_modules/@tufjs/canonical-json/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@tufjs/canonical-json",
+ "version": "2.0.0",
+ "description": "OLPC JSON canonicalization",
+ "main": "lib/index.js",
+ "typings": "lib/index.d.ts",
+ "license": "MIT",
+ "keywords": [
+ "json",
+ "canonical",
+ "canonicalize",
+ "canonicalization",
+ "crypto",
+ "signature",
+ "olpc"
+ ],
+ "author": "bdehamer@github.com",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/theupdateframework/tuf-js.git"
+ },
+ "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme",
+ "bugs": {
+ "url": "https://github.com/theupdateframework/tuf-js/issues"
+ },
+ "files": [
+ "lib/"
+ ],
+ "scripts": {
+ "test": "jest"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/@tufjs/models/LICENSE b/node_modules/@tufjs/models/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/@tufjs/models/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js
new file mode 100644
index 0000000000000..259f6799c13a0
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/base.js
@@ -0,0 +1,83 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signed = exports.isMetadataKind = exports.MetadataKind = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const SPECIFICATION_VERSION = ['1', '0', '31'];
+var MetadataKind;
+(function (MetadataKind) {
+ MetadataKind["Root"] = "root";
+ MetadataKind["Timestamp"] = "timestamp";
+ MetadataKind["Snapshot"] = "snapshot";
+ MetadataKind["Targets"] = "targets";
+})(MetadataKind || (exports.MetadataKind = MetadataKind = {}));
+function isMetadataKind(value) {
+ return (typeof value === 'string' &&
+ Object.values(MetadataKind).includes(value));
+}
+exports.isMetadataKind = isMetadataKind;
+/***
+ * A base class for the signed part of TUF metadata.
+ *
+ * Objects with base class Signed are usually included in a ``Metadata`` object
+ * on the signed attribute. This class provides attributes and methods that
+ * are common for all TUF metadata types (roles).
+ */
+class Signed {
+ constructor(options) {
+ this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
+ const specList = this.specVersion.split('.');
+ if (!(specList.length === 2 || specList.length === 3) ||
+ !specList.every((item) => isNumeric(item))) {
+ throw new error_1.ValueError('Failed to parse specVersion');
+ }
+ // major version must match
+ if (specList[0] != SPECIFICATION_VERSION[0]) {
+ throw new error_1.ValueError('Unsupported specVersion');
+ }
+ this.expires = options.expires || new Date().toISOString();
+ this.version = options.version || 1;
+ this.unrecognizedFields = options.unrecognizedFields || {};
+ }
+ equals(other) {
+ if (!(other instanceof Signed)) {
+ return false;
+ }
+ return (this.specVersion === other.specVersion &&
+ this.expires === other.expires &&
+ this.version === other.version &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ isExpired(referenceTime) {
+ if (!referenceTime) {
+ referenceTime = new Date();
+ }
+ return referenceTime >= new Date(this.expires);
+ }
+ static commonFieldsFromJSON(data) {
+ const { spec_version, expires, version, ...rest } = data;
+ if (utils_1.guard.isDefined(spec_version) && !(typeof spec_version === 'string')) {
+ throw new TypeError('spec_version must be a string');
+ }
+ if (utils_1.guard.isDefined(expires) && !(typeof expires === 'string')) {
+ throw new TypeError('expires must be a string');
+ }
+ if (utils_1.guard.isDefined(version) && !(typeof version === 'number')) {
+ throw new TypeError('version must be a number');
+ }
+ return {
+ specVersion: spec_version,
+ expires,
+ version,
+ unrecognizedFields: rest,
+ };
+ }
+}
+exports.Signed = Signed;
+function isNumeric(str) {
+ return !isNaN(Number(str));
+}
diff --git a/node_modules/@tufjs/models/dist/delegations.js b/node_modules/@tufjs/models/dist/delegations.js
new file mode 100644
index 0000000000000..7165f1e244393
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/delegations.js
@@ -0,0 +1,115 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Delegations = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container object storing information about all delegations.
+ *
+ * Targets roles that are trusted to provide signed metadata files
+ * describing targets with designated pathnames and/or further delegations.
+ */
+class Delegations {
+ constructor(options) {
+ this.keys = options.keys;
+ this.unrecognizedFields = options.unrecognizedFields || {};
+ if (options.roles) {
+ if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
+ throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
+ }
+ }
+ this.succinctRoles = options.succinctRoles;
+ this.roles = options.roles;
+ }
+ equals(other) {
+ if (!(other instanceof Delegations)) {
+ return false;
+ }
+ return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+ util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
+ util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
+ }
+ *rolesForTarget(targetPath) {
+ if (this.roles) {
+ for (const role of Object.values(this.roles)) {
+ if (role.isDelegatedPath(targetPath)) {
+ yield { role: role.name, terminating: role.terminating };
+ }
+ }
+ }
+ else if (this.succinctRoles) {
+ yield {
+ role: this.succinctRoles.getRoleForTarget(targetPath),
+ terminating: true,
+ };
+ }
+ }
+ toJSON() {
+ const json = {
+ keys: keysToJSON(this.keys),
+ ...this.unrecognizedFields,
+ };
+ if (this.roles) {
+ json.roles = rolesToJSON(this.roles);
+ }
+ else if (this.succinctRoles) {
+ json.succinct_roles = this.succinctRoles.toJSON();
+ }
+ return json;
+ }
+ static fromJSON(data) {
+ const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
+ let succinctRoles;
+ if (utils_1.guard.isObject(succinct_roles)) {
+ succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
+ }
+ return new Delegations({
+ keys: keysFromJSON(keys),
+ roles: rolesFromJSON(roles),
+ unrecognizedFields,
+ succinctRoles,
+ });
+ }
+}
+exports.Delegations = Delegations;
+function keysToJSON(keys) {
+ return Object.entries(keys).reduce((acc, [keyId, key]) => ({
+ ...acc,
+ [keyId]: key.toJSON(),
+ }), {});
+}
+function rolesToJSON(roles) {
+ return Object.values(roles).map((role) => role.toJSON());
+}
+function keysFromJSON(data) {
+ if (!utils_1.guard.isObjectRecord(data)) {
+ throw new TypeError('keys is malformed');
+ }
+ return Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+ ...acc,
+ [keyID]: key_1.Key.fromJSON(keyID, keyData),
+ }), {});
+}
+function rolesFromJSON(data) {
+ let roleMap;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObjectArray(data)) {
+ throw new TypeError('roles is malformed');
+ }
+ roleMap = data.reduce((acc, role) => {
+ const delegatedRole = role_1.DelegatedRole.fromJSON(role);
+ return {
+ ...acc,
+ [delegatedRole.name]: delegatedRole,
+ };
+ }, {});
+ }
+ return roleMap;
+}
diff --git a/node_modules/@tufjs/models/dist/error.js b/node_modules/@tufjs/models/dist/error.js
new file mode 100644
index 0000000000000..ba80698747ba0
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/error.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0;
+// An error about insufficient values
+class ValueError extends Error {
+}
+exports.ValueError = ValueError;
+// An error with a repository's state, such as a missing file.
+// It covers all exceptions that come from the repository side when
+// looking from the perspective of users of metadata API or ngclient.
+class RepositoryError extends Error {
+}
+exports.RepositoryError = RepositoryError;
+// An error about metadata object with insufficient threshold of signatures.
+class UnsignedMetadataError extends RepositoryError {
+}
+exports.UnsignedMetadataError = UnsignedMetadataError;
+// An error while checking the length and hash values of an object.
+class LengthOrHashMismatchError extends RepositoryError {
+}
+exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
+class CryptoError extends Error {
+}
+exports.CryptoError = CryptoError;
+class UnsupportedAlgorithmError extends CryptoError {
+}
+exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
diff --git a/node_modules/@tufjs/models/dist/file.js b/node_modules/@tufjs/models/dist/file.js
new file mode 100644
index 0000000000000..b35fe5950bbb7
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/file.js
@@ -0,0 +1,183 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TargetFile = exports.MetaFile = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+// A container with information about a particular metadata file.
+//
+// This class is used for Timestamp and Snapshot metadata.
+class MetaFile {
+ constructor(opts) {
+ if (opts.version <= 0) {
+ throw new error_1.ValueError('Metafile version must be at least 1');
+ }
+ if (opts.length !== undefined) {
+ validateLength(opts.length);
+ }
+ this.version = opts.version;
+ this.length = opts.length;
+ this.hashes = opts.hashes;
+ this.unrecognizedFields = opts.unrecognizedFields || {};
+ }
+ equals(other) {
+ if (!(other instanceof MetaFile)) {
+ return false;
+ }
+ return (this.version === other.version &&
+ this.length === other.length &&
+ util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ verify(data) {
+ // Verifies that the given data matches the expected length.
+ if (this.length !== undefined) {
+ if (data.length !== this.length) {
+ throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
+ }
+ }
+ // Verifies that the given data matches the supplied hashes.
+ if (this.hashes) {
+ Object.entries(this.hashes).forEach(([key, value]) => {
+ let hash;
+ try {
+ hash = crypto_1.default.createHash(key);
+ }
+ catch (e) {
+ throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+ }
+ const observedHash = hash.update(data).digest('hex');
+ if (observedHash !== value) {
+ throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
+ }
+ });
+ }
+ }
+ toJSON() {
+ const json = {
+ version: this.version,
+ ...this.unrecognizedFields,
+ };
+ if (this.length !== undefined) {
+ json.length = this.length;
+ }
+ if (this.hashes) {
+ json.hashes = this.hashes;
+ }
+ return json;
+ }
+ static fromJSON(data) {
+ const { version, length, hashes, ...rest } = data;
+ if (typeof version !== 'number') {
+ throw new TypeError('version must be a number');
+ }
+ if (utils_1.guard.isDefined(length) && typeof length !== 'number') {
+ throw new TypeError('length must be a number');
+ }
+ if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) {
+ throw new TypeError('hashes must be string keys and values');
+ }
+ return new MetaFile({
+ version,
+ length,
+ hashes,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.MetaFile = MetaFile;
+// Container for info about a particular target file.
+//
+// This class is used for Target metadata.
+class TargetFile {
+ constructor(opts) {
+ validateLength(opts.length);
+ this.length = opts.length;
+ this.path = opts.path;
+ this.hashes = opts.hashes;
+ this.unrecognizedFields = opts.unrecognizedFields || {};
+ }
+ get custom() {
+ const custom = this.unrecognizedFields['custom'];
+ if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
+ return {};
+ }
+ return custom;
+ }
+ equals(other) {
+ if (!(other instanceof TargetFile)) {
+ return false;
+ }
+ return (this.length === other.length &&
+ this.path === other.path &&
+ util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ async verify(stream) {
+ let observedLength = 0;
+ // Create a digest for each hash algorithm
+ const digests = Object.keys(this.hashes).reduce((acc, key) => {
+ try {
+ acc[key] = crypto_1.default.createHash(key);
+ }
+ catch (e) {
+ throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+ }
+ return acc;
+ }, {});
+ // Read stream chunk by chunk
+ for await (const chunk of stream) {
+ // Keep running tally of stream length
+ observedLength += chunk.length;
+ // Append chunk to each digest
+ Object.values(digests).forEach((digest) => {
+ digest.update(chunk);
+ });
+ }
+ // Verify length matches expected value
+ if (observedLength !== this.length) {
+ throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
+ }
+ // Verify each digest matches expected value
+ Object.entries(digests).forEach(([key, value]) => {
+ const expected = this.hashes[key];
+ const actual = value.digest('hex');
+ if (actual !== expected) {
+ throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
+ }
+ });
+ }
+ toJSON() {
+ return {
+ length: this.length,
+ hashes: this.hashes,
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(path, data) {
+ const { length, hashes, ...rest } = data;
+ if (typeof length !== 'number') {
+ throw new TypeError('length must be a number');
+ }
+ if (!utils_1.guard.isStringRecord(hashes)) {
+ throw new TypeError('hashes must have string keys and values');
+ }
+ return new TargetFile({
+ length,
+ path,
+ hashes,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.TargetFile = TargetFile;
+// Check that supplied length if valid
+function validateLength(length) {
+ if (length < 0) {
+ throw new error_1.ValueError('Length must be at least 0');
+ }
+}
diff --git a/node_modules/@tufjs/models/dist/index.js b/node_modules/@tufjs/models/dist/index.js
new file mode 100644
index 0000000000000..a4dc783659f04
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/index.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0;
+var base_1 = require("./base");
+Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } });
+var file_1 = require("./file");
+Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } });
+Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
+var key_1 = require("./key");
+Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } });
+var metadata_1 = require("./metadata");
+Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
+var root_1 = require("./root");
+Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
+var signature_1 = require("./signature");
+Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } });
+var snapshot_1 = require("./snapshot");
+Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
+var targets_1 = require("./targets");
+Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/@tufjs/models/dist/key.js b/node_modules/@tufjs/models/dist/key.js
new file mode 100644
index 0000000000000..5e55b09d7c6dd
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/key.js
@@ -0,0 +1,85 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Key = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const key_1 = require("./utils/key");
+// A container class representing the public portion of a Key.
+class Key {
+ constructor(options) {
+ const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
+ this.keyID = keyID;
+ this.keyType = keyType;
+ this.scheme = scheme;
+ this.keyVal = keyVal;
+ this.unrecognizedFields = unrecognizedFields || {};
+ }
+ // Verifies the that the metadata.signatures contains a signature made with
+ // this key and is correctly signed.
+ verifySignature(metadata) {
+ const signature = metadata.signatures[this.keyID];
+ if (!signature)
+ throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
+ if (!this.keyVal.public)
+ throw new error_1.UnsignedMetadataError('no public key found');
+ const publicKey = (0, key_1.getPublicKey)({
+ keyType: this.keyType,
+ scheme: this.scheme,
+ keyVal: this.keyVal.public,
+ });
+ const signedData = metadata.signed.toJSON();
+ try {
+ if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) {
+ throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+ }
+ }
+ catch (error) {
+ if (error instanceof error_1.UnsignedMetadataError) {
+ throw error;
+ }
+ throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+ }
+ }
+ equals(other) {
+ if (!(other instanceof Key)) {
+ return false;
+ }
+ return (this.keyID === other.keyID &&
+ this.keyType === other.keyType &&
+ this.scheme === other.scheme &&
+ util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ toJSON() {
+ return {
+ keytype: this.keyType,
+ scheme: this.scheme,
+ keyval: this.keyVal,
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(keyID, data) {
+ const { keytype, scheme, keyval, ...rest } = data;
+ if (typeof keytype !== 'string') {
+ throw new TypeError('keytype must be a string');
+ }
+ if (typeof scheme !== 'string') {
+ throw new TypeError('scheme must be a string');
+ }
+ if (!utils_1.guard.isStringRecord(keyval)) {
+ throw new TypeError('keyval must be a string record');
+ }
+ return new Key({
+ keyID,
+ keyType: keytype,
+ scheme,
+ keyVal: keyval,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Key = Key;
diff --git a/node_modules/@tufjs/models/dist/metadata.js b/node_modules/@tufjs/models/dist/metadata.js
new file mode 100644
index 0000000000000..9668b6f14fa70
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/metadata.js
@@ -0,0 +1,158 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Metadata = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const root_1 = require("./root");
+const signature_1 = require("./signature");
+const snapshot_1 = require("./snapshot");
+const targets_1 = require("./targets");
+const timestamp_1 = require("./timestamp");
+const utils_1 = require("./utils");
+/***
+ * A container for signed TUF metadata.
+ *
+ * Provides methods to convert to and from json, read and write to and
+ * from JSON and to create and verify metadata signatures.
+ *
+ * ``Metadata[T]`` is a generic container type where T can be any one type of
+ * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
+ * is to allow static type checking of the signed attribute in code using
+ * Metadata::
+ *
+ * root_md = Metadata[Root].fromJSON("root.json")
+ * # root_md type is now Metadata[Root]. This means signed and its
+ * # attributes like consistent_snapshot are now statically typed and the
+ * # types can be verified by static type checkers and shown by IDEs
+ *
+ * Using a type constraint is not required but not doing so means T is not a
+ * specific type so static typing cannot happen. Note that the type constraint
+ * ``[Root]`` is not validated at runtime (as pure annotations are not available
+ * then).
+ *
+ * Apart from ``expires`` all of the arguments to the inner constructors have
+ * reasonable default values for new metadata.
+ */
+class Metadata {
+ constructor(signed, signatures, unrecognizedFields) {
+ this.signed = signed;
+ this.signatures = signatures || {};
+ this.unrecognizedFields = unrecognizedFields || {};
+ }
+ sign(signer, append = true) {
+ const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON()));
+ const signature = signer(bytes);
+ if (!append) {
+ this.signatures = {};
+ }
+ this.signatures[signature.keyID] = signature;
+ }
+ verifyDelegate(delegatedRole, delegatedMetadata) {
+ let role;
+ let keys = {};
+ switch (this.signed.type) {
+ case base_1.MetadataKind.Root:
+ keys = this.signed.keys;
+ role = this.signed.roles[delegatedRole];
+ break;
+ case base_1.MetadataKind.Targets:
+ if (!this.signed.delegations) {
+ throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
+ }
+ keys = this.signed.delegations.keys;
+ if (this.signed.delegations.roles) {
+ role = this.signed.delegations.roles[delegatedRole];
+ }
+ else if (this.signed.delegations.succinctRoles) {
+ if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
+ role = this.signed.delegations.succinctRoles;
+ }
+ }
+ break;
+ default:
+ throw new TypeError('invalid metadata type');
+ }
+ if (!role) {
+ throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
+ }
+ const signingKeys = new Set();
+ role.keyIDs.forEach((keyID) => {
+ const key = keys[keyID];
+ // If we dont' have the key, continue checking other keys
+ if (!key) {
+ return;
+ }
+ try {
+ key.verifySignature(delegatedMetadata);
+ signingKeys.add(key.keyID);
+ }
+ catch (error) {
+ // continue
+ }
+ });
+ if (signingKeys.size < role.threshold) {
+ throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
+ }
+ }
+ equals(other) {
+ if (!(other instanceof Metadata)) {
+ return false;
+ }
+ return (this.signed.equals(other.signed) &&
+ util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ toJSON() {
+ const signatures = Object.values(this.signatures).map((signature) => {
+ return signature.toJSON();
+ });
+ return {
+ signatures,
+ signed: this.signed.toJSON(),
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(type, data) {
+ const { signed, signatures, ...rest } = data;
+ if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) {
+ throw new TypeError('signed is not defined');
+ }
+ if (type !== signed._type) {
+ throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
+ }
+ let signedObj;
+ switch (type) {
+ case base_1.MetadataKind.Root:
+ signedObj = root_1.Root.fromJSON(signed);
+ break;
+ case base_1.MetadataKind.Timestamp:
+ signedObj = timestamp_1.Timestamp.fromJSON(signed);
+ break;
+ case base_1.MetadataKind.Snapshot:
+ signedObj = snapshot_1.Snapshot.fromJSON(signed);
+ break;
+ case base_1.MetadataKind.Targets:
+ signedObj = targets_1.Targets.fromJSON(signed);
+ break;
+ default:
+ throw new TypeError('invalid metadata type');
+ }
+ const sigMap = signaturesFromJSON(signatures);
+ return new Metadata(signedObj, sigMap, rest);
+ }
+}
+exports.Metadata = Metadata;
+function signaturesFromJSON(data) {
+ if (!utils_1.guard.isObjectArray(data)) {
+ throw new TypeError('signatures is not an array');
+ }
+ return data.reduce((acc, sigData) => {
+ const signature = signature_1.Signature.fromJSON(sigData);
+ return { ...acc, [signature.keyID]: signature };
+ }, {});
+}
diff --git a/node_modules/@tufjs/models/dist/role.js b/node_modules/@tufjs/models/dist/role.js
new file mode 100644
index 0000000000000..f7ddbc6fe3f38
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/role.js
@@ -0,0 +1,299 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const minimatch_1 = require("minimatch");
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+exports.TOP_LEVEL_ROLE_NAMES = [
+ 'root',
+ 'targets',
+ 'snapshot',
+ 'timestamp',
+];
+/**
+ * Container that defines which keys are required to sign roles metadata.
+ *
+ * Role defines how many keys are required to successfully sign the roles
+ * metadata, and which keys are accepted.
+ */
+class Role {
+ constructor(options) {
+ const { keyIDs, threshold, unrecognizedFields } = options;
+ if (hasDuplicates(keyIDs)) {
+ throw new error_1.ValueError('duplicate key IDs found');
+ }
+ if (threshold < 1) {
+ throw new error_1.ValueError('threshold must be at least 1');
+ }
+ this.keyIDs = keyIDs;
+ this.threshold = threshold;
+ this.unrecognizedFields = unrecognizedFields || {};
+ }
+ equals(other) {
+ if (!(other instanceof Role)) {
+ return false;
+ }
+ return (this.threshold === other.threshold &&
+ util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
+ util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+ }
+ toJSON() {
+ return {
+ keyids: this.keyIDs,
+ threshold: this.threshold,
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(data) {
+ const { keyids, threshold, ...rest } = data;
+ if (!utils_1.guard.isStringArray(keyids)) {
+ throw new TypeError('keyids must be an array');
+ }
+ if (typeof threshold !== 'number') {
+ throw new TypeError('threshold must be a number');
+ }
+ return new Role({
+ keyIDs: keyids,
+ threshold,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Role = Role;
+function hasDuplicates(array) {
+ return new Set(array).size !== array.length;
+}
+/**
+ * A container with information about a delegated role.
+ *
+ * A delegation can happen in two ways:
+ * - ``paths`` is set: delegates targets matching any path pattern in ``paths``
+ * - ``pathHashPrefixes`` is set: delegates targets whose target path hash
+ * starts with any of the prefixes in ``pathHashPrefixes``
+ *
+ * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be
+ * set, at least one of them must be set.
+ */
+class DelegatedRole extends Role {
+ constructor(opts) {
+ super(opts);
+ const { name, terminating, paths, pathHashPrefixes } = opts;
+ this.name = name;
+ this.terminating = terminating;
+ if (opts.paths && opts.pathHashPrefixes) {
+ throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
+ }
+ this.paths = paths;
+ this.pathHashPrefixes = pathHashPrefixes;
+ }
+ equals(other) {
+ if (!(other instanceof DelegatedRole)) {
+ return false;
+ }
+ return (super.equals(other) &&
+ this.name === other.name &&
+ this.terminating === other.terminating &&
+ util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
+ util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
+ }
+ isDelegatedPath(targetFilepath) {
+ if (this.paths) {
+ return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
+ }
+ if (this.pathHashPrefixes) {
+ const hasher = crypto_1.default.createHash('sha256');
+ const pathHash = hasher.update(targetFilepath).digest('hex');
+ return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
+ }
+ return false;
+ }
+ toJSON() {
+ const json = {
+ ...super.toJSON(),
+ name: this.name,
+ terminating: this.terminating,
+ };
+ if (this.paths) {
+ json.paths = this.paths;
+ }
+ if (this.pathHashPrefixes) {
+ json.path_hash_prefixes = this.pathHashPrefixes;
+ }
+ return json;
+ }
+ static fromJSON(data) {
+ const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
+ if (!utils_1.guard.isStringArray(keyids)) {
+ throw new TypeError('keyids must be an array of strings');
+ }
+ if (typeof threshold !== 'number') {
+ throw new TypeError('threshold must be a number');
+ }
+ if (typeof name !== 'string') {
+ throw new TypeError('name must be a string');
+ }
+ if (typeof terminating !== 'boolean') {
+ throw new TypeError('terminating must be a boolean');
+ }
+ if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) {
+ throw new TypeError('paths must be an array of strings');
+ }
+ if (utils_1.guard.isDefined(path_hash_prefixes) &&
+ !utils_1.guard.isStringArray(path_hash_prefixes)) {
+ throw new TypeError('path_hash_prefixes must be an array of strings');
+ }
+ return new DelegatedRole({
+ keyIDs: keyids,
+ threshold,
+ name,
+ terminating,
+ paths,
+ pathHashPrefixes: path_hash_prefixes,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.DelegatedRole = DelegatedRole;
+// JS version of Ruby's Array#zip
+const zip = (a, b) => a.map((k, i) => [k, b[i]]);
+function isTargetInPathPattern(target, pattern) {
+ const targetParts = target.split('/');
+ const patternParts = pattern.split('/');
+ if (patternParts.length != targetParts.length) {
+ return false;
+ }
+ return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart));
+}
+/**
+ * Succinctly defines a hash bin delegation graph.
+ *
+ * A ``SuccinctRoles`` object describes a delegation graph that covers all
+ * targets, distributing them uniformly over the delegated roles (i.e. bins)
+ * in the graph.
+ *
+ * The total number of bins is 2 to the power of the passed ``bit_length``.
+ *
+ * Bin names are the concatenation of the passed ``name_prefix`` and a
+ * zero-padded hex representation of the bin index separated by a hyphen.
+ *
+ * The passed ``keyids`` and ``threshold`` is used for each bin, and each bin
+ * is 'terminating'.
+ *
+ * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
+ */
+class SuccinctRoles extends Role {
+ constructor(opts) {
+ super(opts);
+ const { bitLength, namePrefix } = opts;
+ if (bitLength <= 0 || bitLength > 32) {
+ throw new error_1.ValueError('bitLength must be between 1 and 32');
+ }
+ this.bitLength = bitLength;
+ this.namePrefix = namePrefix;
+ // Calculate the suffix_len value based on the total number of bins in
+ // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will
+ // have a suffix between "000" and "3ff" in hex and suffix_len will be 3
+ // meaning the third bin will have a suffix of "003".
+ this.numberOfBins = Math.pow(2, bitLength);
+ // suffix_len is calculated based on "number_of_bins - 1" as the name
+ // of the last bin contains the number "number_of_bins -1" as a suffix.
+ this.suffixLen = (this.numberOfBins - 1).toString(16).length;
+ }
+ equals(other) {
+ if (!(other instanceof SuccinctRoles)) {
+ return false;
+ }
+ return (super.equals(other) &&
+ this.bitLength === other.bitLength &&
+ this.namePrefix === other.namePrefix);
+ }
+ /***
+ * Calculates the name of the delegated role responsible for 'target_filepath'.
+ *
+ * The target at path ''target_filepath' is assigned to a bin by casting
+ * the left-most 'bit_length' of bits of the file path hash digest to
+ * int, using it as bin index between 0 and '2**bit_length - 1'.
+ *
+ * Args:
+ * target_filepath: URL path to a target file, relative to a base
+ * targets URL.
+ */
+ getRoleForTarget(targetFilepath) {
+ const hasher = crypto_1.default.createHash('sha256');
+ const hasherBuffer = hasher.update(targetFilepath).digest();
+ // can't ever need more than 4 bytes (32 bits).
+ const hashBytes = hasherBuffer.subarray(0, 4);
+ // Right shift hash bytes, so that we only have the leftmost
+ // bit_length bits that we care about.
+ const shiftValue = 32 - this.bitLength;
+ const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
+ // Add zero padding if necessary and cast to hex the suffix.
+ const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
+ return `${this.namePrefix}-${suffix}`;
+ }
+ *getRoles() {
+ for (let i = 0; i < this.numberOfBins; i++) {
+ const suffix = i.toString(16).padStart(this.suffixLen, '0');
+ yield `${this.namePrefix}-${suffix}`;
+ }
+ }
+ /***
+ * Determines whether the given ``role_name`` is in one of
+ * the delegated roles that ``SuccinctRoles`` represents.
+ *
+ * Args:
+ * role_name: The name of the role to check against.
+ */
+ isDelegatedRole(roleName) {
+ const desiredPrefix = this.namePrefix + '-';
+ if (!roleName.startsWith(desiredPrefix)) {
+ return false;
+ }
+ const suffix = roleName.slice(desiredPrefix.length, roleName.length);
+ if (suffix.length != this.suffixLen) {
+ return false;
+ }
+ // make sure the suffix is a hex string
+ if (!suffix.match(/^[0-9a-fA-F]+$/)) {
+ return false;
+ }
+ const num = parseInt(suffix, 16);
+ return 0 <= num && num < this.numberOfBins;
+ }
+ toJSON() {
+ const json = {
+ ...super.toJSON(),
+ bit_length: this.bitLength,
+ name_prefix: this.namePrefix,
+ };
+ return json;
+ }
+ static fromJSON(data) {
+ const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
+ if (!utils_1.guard.isStringArray(keyids)) {
+ throw new TypeError('keyids must be an array of strings');
+ }
+ if (typeof threshold !== 'number') {
+ throw new TypeError('threshold must be a number');
+ }
+ if (typeof bit_length !== 'number') {
+ throw new TypeError('bit_length must be a number');
+ }
+ if (typeof name_prefix !== 'string') {
+ throw new TypeError('name_prefix must be a string');
+ }
+ return new SuccinctRoles({
+ keyIDs: keyids,
+ threshold,
+ bitLength: bit_length,
+ namePrefix: name_prefix,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.SuccinctRoles = SuccinctRoles;
diff --git a/node_modules/@tufjs/models/dist/root.js b/node_modules/@tufjs/models/dist/root.js
new file mode 100644
index 0000000000000..36d0ef0f186d1
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/root.js
@@ -0,0 +1,116 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Root = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of root metadata.
+ *
+ * The top-level role and metadata file signed by the root keys.
+ * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
+ */
+class Root extends base_1.Signed {
+ constructor(options) {
+ super(options);
+ this.type = base_1.MetadataKind.Root;
+ this.keys = options.keys || {};
+ this.consistentSnapshot = options.consistentSnapshot ?? true;
+ if (!options.roles) {
+ this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
+ ...acc,
+ [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
+ }), {});
+ }
+ else {
+ const roleNames = new Set(Object.keys(options.roles));
+ if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
+ throw new error_1.ValueError('missing top-level role');
+ }
+ this.roles = options.roles;
+ }
+ }
+ addKey(key, role) {
+ if (!this.roles[role]) {
+ throw new error_1.ValueError(`role ${role} does not exist`);
+ }
+ if (!this.roles[role].keyIDs.includes(key.keyID)) {
+ this.roles[role].keyIDs.push(key.keyID);
+ }
+ this.keys[key.keyID] = key;
+ }
+ equals(other) {
+ if (!(other instanceof Root)) {
+ return false;
+ }
+ return (super.equals(other) &&
+ this.consistentSnapshot === other.consistentSnapshot &&
+ util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+ util_1.default.isDeepStrictEqual(this.roles, other.roles));
+ }
+ toJSON() {
+ return {
+ _type: this.type,
+ spec_version: this.specVersion,
+ version: this.version,
+ expires: this.expires,
+ keys: keysToJSON(this.keys),
+ roles: rolesToJSON(this.roles),
+ consistent_snapshot: this.consistentSnapshot,
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(data) {
+ const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+ const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
+ if (typeof consistent_snapshot !== 'boolean') {
+ throw new TypeError('consistent_snapshot must be a boolean');
+ }
+ return new Root({
+ ...commonFields,
+ keys: keysFromJSON(keys),
+ roles: rolesFromJSON(roles),
+ consistentSnapshot: consistent_snapshot,
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Root = Root;
+function keysToJSON(keys) {
+ return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
+}
+function rolesToJSON(roles) {
+ return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
+}
+function keysFromJSON(data) {
+ let keys;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObjectRecord(data)) {
+ throw new TypeError('keys must be an object');
+ }
+ keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+ ...acc,
+ [keyID]: key_1.Key.fromJSON(keyID, keyData),
+ }), {});
+ }
+ return keys;
+}
+function rolesFromJSON(data) {
+ let roles;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObjectRecord(data)) {
+ throw new TypeError('roles must be an object');
+ }
+ roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
+ ...acc,
+ [roleName]: role_1.Role.fromJSON(roleData),
+ }), {});
+ }
+ return roles;
+}
diff --git a/node_modules/@tufjs/models/dist/signature.js b/node_modules/@tufjs/models/dist/signature.js
new file mode 100644
index 0000000000000..33eb204eb0835
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/signature.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = void 0;
+/**
+ * A container class containing information about a signature.
+ *
+ * Contains a signature and the keyid uniquely identifying the key used
+ * to generate the signature.
+ *
+ * Provide a `fromJSON` method to create a Signature from a JSON object.
+ */
+class Signature {
+ constructor(options) {
+ const { keyID, sig } = options;
+ this.keyID = keyID;
+ this.sig = sig;
+ }
+ toJSON() {
+ return {
+ keyid: this.keyID,
+ sig: this.sig,
+ };
+ }
+ static fromJSON(data) {
+ const { keyid, sig } = data;
+ if (typeof keyid !== 'string') {
+ throw new TypeError('keyid must be a string');
+ }
+ if (typeof sig !== 'string') {
+ throw new TypeError('sig must be a string');
+ }
+ return new Signature({
+ keyID: keyid,
+ sig: sig,
+ });
+ }
+}
+exports.Signature = Signature;
diff --git a/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/@tufjs/models/dist/snapshot.js
new file mode 100644
index 0000000000000..e90ea8e729e4e
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/snapshot.js
@@ -0,0 +1,71 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Snapshot = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of snapshot metadata.
+ *
+ * Snapshot contains information about all target Metadata files.
+ * A top-level role that specifies the latest versions of all targets metadata files,
+ * and hence the latest versions of all targets (including any dependencies between them) on the repository.
+ */
+class Snapshot extends base_1.Signed {
+ constructor(opts) {
+ super(opts);
+ this.type = base_1.MetadataKind.Snapshot;
+ this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
+ }
+ equals(other) {
+ if (!(other instanceof Snapshot)) {
+ return false;
+ }
+ return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
+ }
+ toJSON() {
+ return {
+ _type: this.type,
+ meta: metaToJSON(this.meta),
+ spec_version: this.specVersion,
+ version: this.version,
+ expires: this.expires,
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(data) {
+ const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+ const { meta, ...rest } = unrecognizedFields;
+ return new Snapshot({
+ ...commonFields,
+ meta: metaFromJSON(meta),
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Snapshot = Snapshot;
+function metaToJSON(meta) {
+ return Object.entries(meta).reduce((acc, [path, metadata]) => ({
+ ...acc,
+ [path]: metadata.toJSON(),
+ }), {});
+}
+function metaFromJSON(data) {
+ let meta;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObjectRecord(data)) {
+ throw new TypeError('meta field is malformed');
+ }
+ else {
+ meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
+ ...acc,
+ [path]: file_1.MetaFile.fromJSON(metadata),
+ }), {});
+ }
+ }
+ return meta;
+}
diff --git a/node_modules/@tufjs/models/dist/targets.js b/node_modules/@tufjs/models/dist/targets.js
new file mode 100644
index 0000000000000..54bd8f8c554af
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/targets.js
@@ -0,0 +1,92 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Targets = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const delegations_1 = require("./delegations");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+// Container for the signed part of targets metadata.
+//
+// Targets contains verifying information about target files and also delegates
+// responsible to other Targets roles.
+class Targets extends base_1.Signed {
+ constructor(options) {
+ super(options);
+ this.type = base_1.MetadataKind.Targets;
+ this.targets = options.targets || {};
+ this.delegations = options.delegations;
+ }
+ addTarget(target) {
+ this.targets[target.path] = target;
+ }
+ equals(other) {
+ if (!(other instanceof Targets)) {
+ return false;
+ }
+ return (super.equals(other) &&
+ util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
+ util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
+ }
+ toJSON() {
+ const json = {
+ _type: this.type,
+ spec_version: this.specVersion,
+ version: this.version,
+ expires: this.expires,
+ targets: targetsToJSON(this.targets),
+ ...this.unrecognizedFields,
+ };
+ if (this.delegations) {
+ json.delegations = this.delegations.toJSON();
+ }
+ return json;
+ }
+ static fromJSON(data) {
+ const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+ const { targets, delegations, ...rest } = unrecognizedFields;
+ return new Targets({
+ ...commonFields,
+ targets: targetsFromJSON(targets),
+ delegations: delegationsFromJSON(delegations),
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Targets = Targets;
+function targetsToJSON(targets) {
+ return Object.entries(targets).reduce((acc, [path, target]) => ({
+ ...acc,
+ [path]: target.toJSON(),
+ }), {});
+}
+function targetsFromJSON(data) {
+ let targets;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObjectRecord(data)) {
+ throw new TypeError('targets must be an object');
+ }
+ else {
+ targets = Object.entries(data).reduce((acc, [path, target]) => ({
+ ...acc,
+ [path]: file_1.TargetFile.fromJSON(path, target),
+ }), {});
+ }
+ }
+ return targets;
+}
+function delegationsFromJSON(data) {
+ let delegations;
+ if (utils_1.guard.isDefined(data)) {
+ if (!utils_1.guard.isObject(data)) {
+ throw new TypeError('delegations must be an object');
+ }
+ else {
+ delegations = delegations_1.Delegations.fromJSON(data);
+ }
+ }
+ return delegations;
+}
diff --git a/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/@tufjs/models/dist/timestamp.js
new file mode 100644
index 0000000000000..9880c4c9fc254
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/timestamp.js
@@ -0,0 +1,58 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of timestamp metadata.
+ *
+ * A top-level that specifies the latest version of the snapshot role metadata file,
+ * and hence the latest versions of all metadata and targets on the repository.
+ */
+class Timestamp extends base_1.Signed {
+ constructor(options) {
+ super(options);
+ this.type = base_1.MetadataKind.Timestamp;
+ this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
+ }
+ equals(other) {
+ if (!(other instanceof Timestamp)) {
+ return false;
+ }
+ return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
+ }
+ toJSON() {
+ return {
+ _type: this.type,
+ spec_version: this.specVersion,
+ version: this.version,
+ expires: this.expires,
+ meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
+ ...this.unrecognizedFields,
+ };
+ }
+ static fromJSON(data) {
+ const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+ const { meta, ...rest } = unrecognizedFields;
+ return new Timestamp({
+ ...commonFields,
+ snapshotMeta: snapshotMetaFromJSON(meta),
+ unrecognizedFields: rest,
+ });
+ }
+}
+exports.Timestamp = Timestamp;
+function snapshotMetaFromJSON(data) {
+ let snapshotMeta;
+ if (utils_1.guard.isDefined(data)) {
+ const snapshotData = data['snapshot.json'];
+ if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) {
+ throw new TypeError('missing snapshot.json in meta');
+ }
+ else {
+ snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
+ }
+ }
+ return snapshotMeta;
+}
diff --git a/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/@tufjs/models/dist/utils/guard.js
new file mode 100644
index 0000000000000..efe558852303c
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/guard.js
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isObjectRecord = exports.isStringRecord = exports.isObjectArray = exports.isStringArray = exports.isObject = exports.isDefined = void 0;
+function isDefined(val) {
+ return val !== undefined;
+}
+exports.isDefined = isDefined;
+function isObject(value) {
+ return typeof value === 'object' && value !== null;
+}
+exports.isObject = isObject;
+function isStringArray(value) {
+ return Array.isArray(value) && value.every((v) => typeof v === 'string');
+}
+exports.isStringArray = isStringArray;
+function isObjectArray(value) {
+ return Array.isArray(value) && value.every(isObject);
+}
+exports.isObjectArray = isObjectArray;
+function isStringRecord(value) {
+ return (typeof value === 'object' &&
+ value !== null &&
+ Object.keys(value).every((k) => typeof k === 'string') &&
+ Object.values(value).every((v) => typeof v === 'string'));
+}
+exports.isStringRecord = isStringRecord;
+function isObjectRecord(value) {
+ return (typeof value === 'object' &&
+ value !== null &&
+ Object.keys(value).every((k) => typeof k === 'string') &&
+ Object.values(value).every((v) => typeof v === 'object' && v !== null));
+}
+exports.isObjectRecord = isObjectRecord;
diff --git a/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/@tufjs/models/dist/utils/index.js
new file mode 100644
index 0000000000000..872aae28049c9
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/index.js
@@ -0,0 +1,28 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.crypto = exports.guard = void 0;
+exports.guard = __importStar(require("./guard"));
+exports.crypto = __importStar(require("./verify"));
diff --git a/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/@tufjs/models/dist/utils/key.js
new file mode 100644
index 0000000000000..1f795ba1a2733
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/key.js
@@ -0,0 +1,143 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPublicKey = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const error_1 = require("../error");
+const oid_1 = require("./oid");
+const ASN1_TAG_SEQUENCE = 0x30;
+const ANS1_TAG_BIT_STRING = 0x03;
+const NULL_BYTE = 0x00;
+const OID_EDDSA = '1.3.101.112';
+const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
+const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
+const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
+function getPublicKey(keyInfo) {
+ switch (keyInfo.keyType) {
+ case 'rsa':
+ return getRSAPublicKey(keyInfo);
+ case 'ed25519':
+ return getED25519PublicKey(keyInfo);
+ case 'ecdsa':
+ case 'ecdsa-sha2-nistp256':
+ case 'ecdsa-sha2-nistp384':
+ return getECDCSAPublicKey(keyInfo);
+ default:
+ throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
+ }
+}
+exports.getPublicKey = getPublicKey;
+function getRSAPublicKey(keyInfo) {
+ // Only support PEM-encoded RSA keys
+ if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
+ throw new error_1.CryptoError('Invalid key format');
+ }
+ const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+ switch (keyInfo.scheme) {
+ case 'rsassa-pss-sha256':
+ return {
+ key: key,
+ padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
+ };
+ default:
+ throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
+ }
+}
+function getED25519PublicKey(keyInfo) {
+ let key;
+ // If key is already PEM-encoded we can just parse it
+ if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+ key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+ }
+ else {
+ // If key is not PEM-encoded it had better be hex
+ if (!isHex(keyInfo.keyVal)) {
+ throw new error_1.CryptoError('Invalid key format');
+ }
+ key = crypto_1.default.createPublicKey({
+ key: ed25519.hexToDER(keyInfo.keyVal),
+ format: 'der',
+ type: 'spki',
+ });
+ }
+ return { key };
+}
+function getECDCSAPublicKey(keyInfo) {
+ let key;
+ // If key is already PEM-encoded we can just parse it
+ if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+ key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+ }
+ else {
+ // If key is not PEM-encoded it had better be hex
+ if (!isHex(keyInfo.keyVal)) {
+ throw new error_1.CryptoError('Invalid key format');
+ }
+ key = crypto_1.default.createPublicKey({
+ key: ecdsa.hexToDER(keyInfo.keyVal),
+ format: 'der',
+ type: 'spki',
+ });
+ }
+ return { key };
+}
+const ed25519 = {
+ // Translates a hex key into a crypto KeyObject
+ // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
+ hexToDER: (hex) => {
+ const key = Buffer.from(hex, 'hex');
+ const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
+ // Create a byte sequence containing the OID and key
+ const elements = Buffer.concat([
+ Buffer.concat([
+ Buffer.from([ASN1_TAG_SEQUENCE]),
+ Buffer.from([oid.length]),
+ oid,
+ ]),
+ Buffer.concat([
+ Buffer.from([ANS1_TAG_BIT_STRING]),
+ Buffer.from([key.length + 1]),
+ Buffer.from([NULL_BYTE]),
+ key,
+ ]),
+ ]);
+ // Wrap up by creating a sequence of elements
+ const der = Buffer.concat([
+ Buffer.from([ASN1_TAG_SEQUENCE]),
+ Buffer.from([elements.length]),
+ elements,
+ ]);
+ return der;
+ },
+};
+const ecdsa = {
+ hexToDER: (hex) => {
+ const key = Buffer.from(hex, 'hex');
+ const bitString = Buffer.concat([
+ Buffer.from([ANS1_TAG_BIT_STRING]),
+ Buffer.from([key.length + 1]),
+ Buffer.from([NULL_BYTE]),
+ key,
+ ]);
+ const oids = Buffer.concat([
+ (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
+ (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
+ ]);
+ const oidSequence = Buffer.concat([
+ Buffer.from([ASN1_TAG_SEQUENCE]),
+ Buffer.from([oids.length]),
+ oids,
+ ]);
+ // Wrap up by creating a sequence of elements
+ const der = Buffer.concat([
+ Buffer.from([ASN1_TAG_SEQUENCE]),
+ Buffer.from([oidSequence.length + bitString.length]),
+ oidSequence,
+ bitString,
+ ]);
+ return der;
+ },
+};
+const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
diff --git a/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/@tufjs/models/dist/utils/oid.js
new file mode 100644
index 0000000000000..e1bb7af5e54fb
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/oid.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeOIDString = void 0;
+const ANS1_TAG_OID = 0x06;
+function encodeOIDString(oid) {
+ const parts = oid.split('.');
+ // The first two subidentifiers are encoded into the first byte
+ const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
+ const rest = [];
+ parts.slice(2).forEach((part) => {
+ const bytes = encodeVariableLengthInteger(parseInt(part, 10));
+ rest.push(...bytes);
+ });
+ const der = Buffer.from([first, ...rest]);
+ return Buffer.from([ANS1_TAG_OID, der.length, ...der]);
+}
+exports.encodeOIDString = encodeOIDString;
+function encodeVariableLengthInteger(value) {
+ const bytes = [];
+ let mask = 0x00;
+ while (value > 0) {
+ bytes.unshift((value & 0x7f) | mask);
+ value >>= 7;
+ mask = 0x80;
+ }
+ return bytes;
+}
diff --git a/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/@tufjs/models/dist/utils/types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/@tufjs/models/dist/utils/verify.js
new file mode 100644
index 0000000000000..8232b6f6a97ab
--- /dev/null
+++ b/node_modules/@tufjs/models/dist/utils/verify.js
@@ -0,0 +1,13 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySignature = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const crypto_1 = __importDefault(require("crypto"));
+const verifySignature = (metaDataSignedData, key, signature) => {
+ const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData));
+ return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
+};
+exports.verifySignature = verifySignature;
diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json
new file mode 100644
index 0000000000000..be581591a0f3a
--- /dev/null
+++ b/node_modules/@tufjs/models/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "@tufjs/models",
+ "version": "2.0.1",
+ "description": "TUF metadata models",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "build": "tsc --build",
+ "clean": "rm -rf dist && rm tsconfig.tsbuildinfo",
+ "test": "jest"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/theupdateframework/tuf-js.git"
+ },
+ "keywords": [
+ "tuf",
+ "security",
+ "update"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/theupdateframework/tuf-js/issues"
+ },
+ "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
+ "dependencies": {
+ "@tufjs/canonical-json": "2.0.0",
+ "minimatch": "^9.0.4"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/node_modules/abort-controller/LICENSE b/node_modules/abort-controller/LICENSE
deleted file mode 100644
index c914149a6f845..0000000000000
--- a/node_modules/abort-controller/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2017 Toru Nagashima
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/abort-controller/browser.js b/node_modules/abort-controller/browser.js
deleted file mode 100644
index b0c5ec37d9b76..0000000000000
--- a/node_modules/abort-controller/browser.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/*globals self, window */
-"use strict"
-
-/*eslint-disable @mysticatea/prettier */
-const { AbortController, AbortSignal } =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-module.exports = AbortController
-module.exports.AbortSignal = AbortSignal
-module.exports.default = AbortController
diff --git a/node_modules/abort-controller/browser.mjs b/node_modules/abort-controller/browser.mjs
deleted file mode 100644
index a8f321afed675..0000000000000
--- a/node_modules/abort-controller/browser.mjs
+++ /dev/null
@@ -1,11 +0,0 @@
-/*globals self, window */
-
-/*eslint-disable @mysticatea/prettier */
-const { AbortController, AbortSignal } =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-export default AbortController
-export { AbortController, AbortSignal }
diff --git a/node_modules/abort-controller/dist/abort-controller.d.ts b/node_modules/abort-controller/dist/abort-controller.d.ts
deleted file mode 100644
index 75852fb59952d..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.d.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-import { EventTarget } from "event-target-shim"
-
-type Events = {
- abort: any
-}
-type EventAttributes = {
- onabort: any
-}
-/**
- * The signal class.
- * @see https://dom.spec.whatwg.org/#abortsignal
- */
-declare class AbortSignal extends EventTarget {
- /**
- * AbortSignal cannot be constructed directly.
- */
- constructor()
- /**
- * Returns `true` if this `AbortSignal`"s `AbortController` has signaled to abort, and `false` otherwise.
- */
- readonly aborted: boolean
-}
-/**
- * The AbortController.
- * @see https://dom.spec.whatwg.org/#abortcontroller
- */
-declare class AbortController {
- /**
- * Initialize this controller.
- */
- constructor()
- /**
- * Returns the `AbortSignal` object associated with this object.
- */
- readonly signal: AbortSignal
- /**
- * Abort and signal to any observers that the associated activity is to be aborted.
- */
- abort(): void
-}
-
-export default AbortController
-export { AbortController, AbortSignal }
diff --git a/node_modules/abort-controller/dist/abort-controller.js b/node_modules/abort-controller/dist/abort-controller.js
deleted file mode 100644
index 49af73955859f..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.js
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */
-'use strict';
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-var eventTargetShim = require('event-target-shim');
-
-/**
- * The signal class.
- * @see https://dom.spec.whatwg.org/#abortsignal
- */
-class AbortSignal extends eventTargetShim.EventTarget {
- /**
- * AbortSignal cannot be constructed directly.
- */
- constructor() {
- super();
- throw new TypeError("AbortSignal cannot be constructed directly");
- }
- /**
- * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.
- */
- get aborted() {
- const aborted = abortedFlags.get(this);
- if (typeof aborted !== "boolean") {
- throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`);
- }
- return aborted;
- }
-}
-eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort");
-/**
- * Create an AbortSignal object.
- */
-function createAbortSignal() {
- const signal = Object.create(AbortSignal.prototype);
- eventTargetShim.EventTarget.call(signal);
- abortedFlags.set(signal, false);
- return signal;
-}
-/**
- * Abort a given signal.
- */
-function abortSignal(signal) {
- if (abortedFlags.get(signal) !== false) {
- return;
- }
- abortedFlags.set(signal, true);
- signal.dispatchEvent({ type: "abort" });
-}
-/**
- * Aborted flag for each instances.
- */
-const abortedFlags = new WeakMap();
-// Properties should be enumerable.
-Object.defineProperties(AbortSignal.prototype, {
- aborted: { enumerable: true },
-});
-// `toString()` should return `"[object AbortSignal]"`
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortSignal",
- });
-}
-
-/**
- * The AbortController.
- * @see https://dom.spec.whatwg.org/#abortcontroller
- */
-class AbortController {
- /**
- * Initialize this controller.
- */
- constructor() {
- signals.set(this, createAbortSignal());
- }
- /**
- * Returns the `AbortSignal` object associated with this object.
- */
- get signal() {
- return getSignal(this);
- }
- /**
- * Abort and signal to any observers that the associated activity is to be aborted.
- */
- abort() {
- abortSignal(getSignal(this));
- }
-}
-/**
- * Associated signals.
- */
-const signals = new WeakMap();
-/**
- * Get the associated signal of a given controller.
- */
-function getSignal(controller) {
- const signal = signals.get(controller);
- if (signal == null) {
- throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`);
- }
- return signal;
-}
-// Properties should be enumerable.
-Object.defineProperties(AbortController.prototype, {
- signal: { enumerable: true },
- abort: { enumerable: true },
-});
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortController",
- });
-}
-
-exports.AbortController = AbortController;
-exports.AbortSignal = AbortSignal;
-exports.default = AbortController;
-
-module.exports = AbortController
-module.exports.AbortController = module.exports["default"] = AbortController
-module.exports.AbortSignal = AbortSignal
-//# sourceMappingURL=abort-controller.js.map
diff --git a/node_modules/abort-controller/dist/abort-controller.js.map b/node_modules/abort-controller/dist/abort-controller.js.map
deleted file mode 100644
index cfdcafdc61167..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"abort-controller.js","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n 
`Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["EventTarget","defineEventAttribute"],"mappings":";;;;;;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQA,2BAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACDC,oCAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnDD,2BAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAA
W,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;;;;;;;;;"}
\ No newline at end of file
diff --git a/node_modules/abort-controller/dist/abort-controller.mjs b/node_modules/abort-controller/dist/abort-controller.mjs
deleted file mode 100644
index 88ba22d5574ed..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.mjs
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */
-import { EventTarget, defineEventAttribute } from 'event-target-shim';
-
-/**
- * The signal class.
- * @see https://dom.spec.whatwg.org/#abortsignal
- */
-class AbortSignal extends EventTarget {
- /**
- * AbortSignal cannot be constructed directly.
- */
- constructor() {
- super();
- throw new TypeError("AbortSignal cannot be constructed directly");
- }
- /**
- * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.
- */
- get aborted() {
- const aborted = abortedFlags.get(this);
- if (typeof aborted !== "boolean") {
- throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`);
- }
- return aborted;
- }
-}
-defineEventAttribute(AbortSignal.prototype, "abort");
-/**
- * Create an AbortSignal object.
- */
-function createAbortSignal() {
- const signal = Object.create(AbortSignal.prototype);
- EventTarget.call(signal);
- abortedFlags.set(signal, false);
- return signal;
-}
-/**
- * Abort a given signal.
- */
-function abortSignal(signal) {
- if (abortedFlags.get(signal) !== false) {
- return;
- }
- abortedFlags.set(signal, true);
- signal.dispatchEvent({ type: "abort" });
-}
-/**
- * Aborted flag for each instances.
- */
-const abortedFlags = new WeakMap();
-// Properties should be enumerable.
-Object.defineProperties(AbortSignal.prototype, {
- aborted: { enumerable: true },
-});
-// `toString()` should return `"[object AbortSignal]"`
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortSignal",
- });
-}
-
-/**
- * The AbortController.
- * @see https://dom.spec.whatwg.org/#abortcontroller
- */
-class AbortController {
- /**
- * Initialize this controller.
- */
- constructor() {
- signals.set(this, createAbortSignal());
- }
- /**
- * Returns the `AbortSignal` object associated with this object.
- */
- get signal() {
- return getSignal(this);
- }
- /**
- * Abort and signal to any observers that the associated activity is to be aborted.
- */
- abort() {
- abortSignal(getSignal(this));
- }
-}
-/**
- * Associated signals.
- */
-const signals = new WeakMap();
-/**
- * Get the associated signal of a given controller.
- */
-function getSignal(controller) {
- const signal = signals.get(controller);
- if (signal == null) {
- throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`);
- }
- return signal;
-}
-// Properties should be enumerable.
-Object.defineProperties(AbortController.prototype, {
- signal: { enumerable: true },
- abort: { enumerable: true },
-});
-if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") {
- Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {
- configurable: true,
- value: "AbortController",
- });
-}
-
-export default AbortController;
-export { AbortController, AbortSignal };
-//# sourceMappingURL=abort-controller.mjs.map
diff --git a/node_modules/abort-controller/dist/abort-controller.mjs.map b/node_modules/abort-controller/dist/abort-controller.mjs.map
deleted file mode 100644
index 1e8fa6b00f6ef..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.mjs.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"abort-controller.mjs","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n 
`Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":[],"mappings":";;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CA
CJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;"}
\ No newline at end of file
diff --git a/node_modules/abort-controller/dist/abort-controller.umd.js b/node_modules/abort-controller/dist/abort-controller.umd.js
deleted file mode 100644
index f643cfd6b6711..0000000000000
--- a/node_modules/abort-controller/dist/abort-controller.umd.js
+++ /dev/null
@@ -1,5 +0,0 @@
-/**
- * @author Toru Nagashima
- * See LICENSE file in root directory for full license.
- */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d\n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < 
keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be 
cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n 
function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n 
*/\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n 
*/\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable 
consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? 
Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexport default EventTarget;\nexport { defineEventAttribute, EventTarget };\n//# sourceMappingURL=event-target-shim.mjs.map\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed 
directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? \"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n 
public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","createAbortSignal","signal","AbortSignal","EventTarget","abortedFlags","abortSignal","dispatchEvent","type","getSignal","controller","signals","TypeError","WeakMap","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","r
eturnValue","initEvent","window","setPrototypeOf","aborted","defineEventAttribute","defineProperties","Symbol","_typeof","toStringTag","AbortController","abort"],"mappings":";;;+3CAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZ,CAAgBH,CAAhB,QACbI,CAAAA,OAAO,CAACC,MAAR,CACY,IAAR,EAAAJ,CADJ,CAEI,6CAFJ,CAGID,CAHJ,EAKOC,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxB,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAAL,CAAWS,UAbS,GAiBzBF,CAAI,CAACG,QAAL,GAjByB,CAkBgB,UAArC,QAAOH,CAAAA,CAAI,CAACP,KAAL,CAAWW,cAlBG,EAmBrBJ,CAAI,CAACP,KAAL,CAAWW,cAAX,EAnBqB,QAGE,WAAnB,QAAOP,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAAR,CACI,oEADJ,CAEIL,CAAI,CAACC,eAFT,CANiB,EAiC7B,QAASK,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZ,CAAgB,IAAhB,CAAsB,CAClBD,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,SAAN,EAAmBC,IAAI,CAACC,GAAL,EATZ,CAAtB,CAD+B,CAc/BC,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4B,WAA5B,CAAyC,CAAEC,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzC,CAd+B,QAmBrBC,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAY5B,CAAZ,EACJ6B,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4BG,CAA5B,CAAiCI,CAAwB,CAACJ,CAAD,CAAzD,EAyOZ,QAASI,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,CAFR,CAAA,CAIHZ,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,EAAsBF,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAF,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL,CAAWO,KAAX,CAAiBlC,CAAjB,CAAwBmC,SAAxB,CAHR,CAAA,CAKHH,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAV,CAAe,IAAf,CAAq
B1B,CAArB,CAAkCd,CAAlC,KAPE4B,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAYU,CAAZ,KACO,CAAhB,GAAAV,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZ,CAAwBlB,MAAM,CAACmB,MAAP,CAAcL,CAAS,CAACI,SAAxB,CAAmC,CACvDE,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnC,CAXa,KAgBhC,GACKjB,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAP,CAAgCR,CAAhC,CAAuCX,CAAvC,CADY,CAEzBoB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAP,CACIe,CAAW,CAACE,SADhB,CAEId,CAFJ,CAGIoB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlC,QAUDY,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAAT,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT,CAAamC,CAAb,QACC,KAAX,EAAAW,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBb,CAAtB,CAAD,CAAX,CAA2CA,CAA3C,EACvBY,CAAQ,CAACnC,GAAT,CAAauB,CAAb,CAAoBW,CAApB,GAEGA,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBnD,CAAtB,CAAD,QACnB,IAAIqD,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAF,CAAUmB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAF,CAAUgB,UAAV,CAAuBA,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAF,CAAUiB,aAAV,CAA0BA,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAF,CAAUQ,eAAV,CAA4BA,ysCC1ahBkD,CAAAA,OACNC,CAAAA,CAAM,CAAGpC,MAAM,CAACmB,MAAPnB,CAAcqC,CAAW,CAACnB,SAA1BlB,QACfsC,CAAAA,CAAW,CAACrB,IAAZqB,CAAiBF,CAAjBE,EACAC,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACOH,UAMKI,CAAAA,EAAYJ,GACpBG,KAAAA,CAAY,CAAC3D,GAAb2D,CAAiBH,CAAjBG,IAIJA,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACAH,CAAM,CAACK,aAAPL,CAA8B,CAAEM,IAAI,CAAE,OAAR,CAA9BN,GC9BJ,QAASO,CAAAA,CAAT,CAAmBC,CAAnB,KACUR,CAAAA,CAAM,
CAAGS,CAAO,CAACjE,GAARiE,CAAYD,CAAZC,KACD,IAAVT,EAAAA,OACM,IAAIU,CAAAA,SAAJ,sEAEiB,IAAfF,GAAAA,CAAU,CAAY,MAAZ,GAA4BA,GAFxC,QAMHR,CAAAA,KF3BLzD,CAAAA,CAAW,CAAG,GAAIoE,CAAAA,QAOlBpB,CAAQ,CAAG,GAAIoB,CAAAA,QAkFrBzD,CAAK,CAAC4B,SAAN,CAAkB,IAKVwB,CAAAA,MAAO,OACAlE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeiE,IANZ,CAAA,IAaVM,CAAAA,QAAS,OACFxE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASkB,aAtBN,CAAA,CA4BduD,YA5Bc,WA4BC,IACLvD,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAF,CAASkB,cADpB,MAEU,KAAjB,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVwD,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV5D,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASiB,UAzEN,CAAA,CAgFd6D,eAhFc,WAgFI,IACRtE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHc,CAI4B,UAAtC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAW6E,eAJR,EAKVtE,CAAI,CAACP,KAAL,CAAW6E,eAAX,EArFM,CAAA,CA6FdC,wBA7Fc,WA6Fa,IACjBvE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHuB,CAIvBX,CAAI,CAACY,gBAAL,GAJuB,CAK4B,UAA/C,QAAOZ,CAAAA,CAAI,CAACP,KAAL,CAAW8E,wBALC,EAMnBvE,CAAI,CAACP,KAAL,CAAW8E,wBAAX,EAnGM,CAAA,IA2GVC,CAAAA,SAAU,SACKhF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe+E,OA5GpB,CAAA,IAmHVtE,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeS,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIViF,CAAAA,kBAAmB,OACZjF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASW,QApIN,CAAA,IA2IVuE,CAAAA,UAAW,SACIlF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeiF,QA5IpB,CAAA,IAmJV7D,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASqB,SApJN,CAAA,IA4JV8D,CAAAA,YAAa,OACNnF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WA7JN,CAAA,IAqKVqE,CAAAA,cAAe,OACRpF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASmB,OAtKN,CAAA,IAwKViE,CAAAA,aAAa1D,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,IACuC,SAAnC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAWmF,eAClB5E,CAAI,CAACP,KAAL,CAAWmF,YAAX,KAhLM,CAAA,IAyLVC,CAAAA,aAAc,OACP,CAACrF,CAAE,CAAC,
IAAD,CAAF,CAASW,QA1LP,CAAA,IA4LV0E,CAAAA,YAAY3D,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMdsF,SAzMc,WAyMF,EAzME,EA+MlB9D,MAAM,CAACC,cAAP,CAAsBX,CAAK,CAAC4B,SAA5B,CAAuC,aAAvC,CAAsD,CAClDhB,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtD,EAOsB,WAAlB,QAAO0C,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAACzE,QAC/CU,MAAM,CAACgE,cAAP,CAAsB1E,CAAK,CAAC4B,SAA5B,CAAuC6C,MAAM,CAACzE,KAAP,CAAa4B,SAApD,EAGAS,CAAQ,CAACnC,GAAT,CAAauE,MAAM,CAACzE,KAAP,CAAa4B,SAA1B,CAAqC5B,CAArC,wiDChTiB+C,CAAAA,2EAMP,GAAIS,CAAAA,SAAJ,CAAc,4CAAd,sDAOAmB,CAAAA,CAAO,CAAG1B,CAAY,CAAC3D,GAAb2D,CAAiB,IAAjBA,KACO,SAAnB,QAAO0B,CAAAA,OACD,IAAInB,CAAAA,SAAJ,kEAEW,IAAT,QAAgB,MAAhB,GAAgC,MAFlC,QAMHmB,CAAAA,SArB0B3B,GAwBzC4B,CAAoB,CAAC7B,CAAW,CAACnB,SAAb,CAAwB,OAAxB,EA2BpB,GAAMqB,CAAAA,CAAY,CAAG,GAAIQ,CAAAA,OAAzB,CAGA/C,MAAM,CAACmE,gBAAPnE,CAAwBqC,CAAW,CAACnB,SAApClB,CAA+C,CAC3CiE,OAAO,CAAE,CAAE9D,UAAU,GAAZ,CADkC,CAA/CH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBqC,CAAW,CAACnB,SAAlClB,CAA6CoE,MAAM,CAACE,WAApDtE,CAAiE,CAC7DS,YAAY,GADiD,CAE7DP,KAAK,CAAE,aAFsD,CAAjEF,KC5EiBuE,CAAAA,oCAKb1B,CAAO,CAACrD,GAARqD,CAAY,IAAZA,CAAkBV,CAAiB,EAAnCU,4CAcAL,CAAW,CAACG,CAAS,CAAC,IAAD,CAAV,uCAPJA,CAAAA,CAAS,CAAC,IAAD,WAclBE,CAAO,CAAG,GAAIE,CAAAA,WAkBpB/C,MAAM,CAACmE,gBAAPnE,CAAwBuE,CAAe,CAACrD,SAAxClB,CAAmD,CAC/CoC,MAAM,CAAE,CAAEjC,UAAU,GAAZ,CADuC,CAE/CqE,KAAK,CAAE,CAAErE,UAAU,GAAZ,CAFwC,CAAnDH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBuE,CAAe,CAACrD,SAAtClB,CAAiDoE,MAAM,CAACE,WAAxDtE,CAAqE,CACjES,YAAY,GADqD,CAEjEP,KAAK,CAAE,iBAF0D,CAArEF"}
\ No newline at end of file
diff --git a/node_modules/abort-controller/package.json b/node_modules/abort-controller/package.json
deleted file mode 100644
index fc705e03f3539..0000000000000
--- a/node_modules/abort-controller/package.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
- "name": "abort-controller",
- "version": "3.0.0",
- "description": "An implementation of WHATWG AbortController interface.",
- "main": "dist/abort-controller",
- "files": [
- "dist",
- "polyfill.*",
- "browser.*"
- ],
- "engines": {
- "node": ">=6.5"
- },
- "dependencies": {
- "event-target-shim": "^5.0.0"
- },
- "browser": "./browser.js",
- "devDependencies": {
- "@babel/core": "^7.2.2",
- "@babel/plugin-transform-modules-commonjs": "^7.2.0",
- "@babel/preset-env": "^7.3.0",
- "@babel/register": "^7.0.0",
- "@mysticatea/eslint-plugin": "^8.0.1",
- "@mysticatea/spy": "^0.1.2",
- "@types/mocha": "^5.2.5",
- "@types/node": "^10.12.18",
- "assert": "^1.4.1",
- "codecov": "^3.1.0",
- "dts-bundle-generator": "^2.0.0",
- "eslint": "^5.12.1",
- "karma": "^3.1.4",
- "karma-chrome-launcher": "^2.2.0",
- "karma-coverage": "^1.1.2",
- "karma-firefox-launcher": "^1.1.0",
- "karma-growl-reporter": "^1.0.0",
- "karma-ie-launcher": "^1.0.0",
- "karma-mocha": "^1.3.0",
- "karma-rollup-preprocessor": "^7.0.0-rc.2",
- "mocha": "^5.2.0",
- "npm-run-all": "^4.1.5",
- "nyc": "^13.1.0",
- "opener": "^1.5.1",
- "rimraf": "^2.6.3",
- "rollup": "^1.1.2",
- "rollup-plugin-babel": "^4.3.2",
- "rollup-plugin-babel-minify": "^7.0.0",
- "rollup-plugin-commonjs": "^9.2.0",
- "rollup-plugin-node-resolve": "^4.0.0",
- "rollup-plugin-sourcemaps": "^0.4.2",
- "rollup-plugin-typescript": "^1.0.0",
- "rollup-watch": "^4.3.1",
- "ts-node": "^8.0.1",
- "type-tester": "^1.0.0",
- "typescript": "^3.2.4"
- },
- "scripts": {
- "preversion": "npm test",
- "version": "npm run -s build && git add dist/*",
- "postversion": "git push && git push --tags",
- "clean": "rimraf .nyc_output coverage",
- "coverage": "opener coverage/lcov-report/index.html",
- "lint": "eslint . --ext .ts",
- "build": "run-s -s build:*",
- "build:rollup": "rollup -c",
- "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts",
- "test": "run-s -s lint test:*",
- "test:mocha": "nyc mocha test/*.ts",
- "test:karma": "karma start --single-run",
- "watch": "run-p -s watch:*",
- "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl",
- "watch:karma": "karma start --watch",
- "codecov": "codecov"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mysticatea/abort-controller.git"
- },
- "keywords": [
- "w3c",
- "whatwg",
- "event",
- "events",
- "abort",
- "cancel",
- "abortcontroller",
- "abortsignal",
- "controller",
- "signal",
- "shim"
- ],
- "author": "Toru Nagashima (https://github.com/mysticatea)",
- "license": "MIT",
- "bugs": {
- "url": "https://github.com/mysticatea/abort-controller/issues"
- },
- "homepage": "https://github.com/mysticatea/abort-controller#readme"
-}
diff --git a/node_modules/abort-controller/polyfill.js b/node_modules/abort-controller/polyfill.js
deleted file mode 100644
index 3ca892330b1e5..0000000000000
--- a/node_modules/abort-controller/polyfill.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*globals require, self, window */
-"use strict"
-
-const ac = require("./dist/abort-controller")
-
-/*eslint-disable @mysticatea/prettier */
-const g =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- typeof global !== "undefined" ? global :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-if (g) {
- if (typeof g.AbortController === "undefined") {
- g.AbortController = ac.AbortController
- }
- if (typeof g.AbortSignal === "undefined") {
- g.AbortSignal = ac.AbortSignal
- }
-}
diff --git a/node_modules/abort-controller/polyfill.mjs b/node_modules/abort-controller/polyfill.mjs
deleted file mode 100644
index 0602a64dddfd2..0000000000000
--- a/node_modules/abort-controller/polyfill.mjs
+++ /dev/null
@@ -1,19 +0,0 @@
-/*globals self, window */
-import * as ac from "./dist/abort-controller"
-
-/*eslint-disable @mysticatea/prettier */
-const g =
- typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- typeof global !== "undefined" ? global :
- /* otherwise */ undefined
-/*eslint-enable @mysticatea/prettier */
-
-if (g) {
- if (typeof g.AbortController === "undefined") {
- g.AbortController = ac.AbortController
- }
- if (typeof g.AbortSignal === "undefined") {
- g.AbortSignal = ac.AbortSignal
- }
-}
diff --git a/node_modules/agent-base/LICENSE b/node_modules/agent-base/LICENSE
new file mode 100644
index 0000000000000..008728cb51847
--- /dev/null
+++ b/node_modules/agent-base/LICENSE
@@ -0,0 +1,22 @@
+(The MIT License)
+
+Copyright (c) 2013 Nathan Rajlich
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/helpers.js b/node_modules/agent-base/dist/helpers.js
new file mode 100644
index 0000000000000..ef3f92022d455
--- /dev/null
+++ b/node_modules/agent-base/dist/helpers.js
@@ -0,0 +1,66 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.req = exports.json = exports.toBuffer = void 0;
+const http = __importStar(require("http"));
+const https = __importStar(require("https"));
+async function toBuffer(stream) {
+ let length = 0;
+ const chunks = [];
+ for await (const chunk of stream) {
+ length += chunk.length;
+ chunks.push(chunk);
+ }
+ return Buffer.concat(chunks, length);
+}
+exports.toBuffer = toBuffer;
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async function json(stream) {
+ const buf = await toBuffer(stream);
+ const str = buf.toString('utf8');
+ try {
+ return JSON.parse(str);
+ }
+ catch (_err) {
+ const err = _err;
+ err.message += ` (input: ${str})`;
+ throw err;
+ }
+}
+exports.json = json;
+function req(url, opts = {}) {
+ const href = typeof url === 'string' ? url : url.href;
+ const req = (href.startsWith('https:') ? https : http).request(url, opts);
+ const promise = new Promise((resolve, reject) => {
+ req
+ .once('response', resolve)
+ .once('error', reject)
+ .end();
+ });
+ req.then = promise.then.bind(promise);
+ return req;
+}
+exports.req = req;
+//# sourceMappingURL=helpers.js.map
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/index.js b/node_modules/agent-base/dist/index.js
new file mode 100644
index 0000000000000..69396356e74db
--- /dev/null
+++ b/node_modules/agent-base/dist/index.js
@@ -0,0 +1,175 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Agent = void 0;
+const net = __importStar(require("net"));
+const http = __importStar(require("http"));
+const https_1 = require("https");
+__exportStar(require("./helpers"), exports);
+const INTERNAL = Symbol('AgentBaseInternalState');
+class Agent extends http.Agent {
+ constructor(opts) {
+ super(opts);
+ this[INTERNAL] = {};
+ }
+ /**
+ * Determine whether this is an `http` or `https` request.
+ */
+ isSecureEndpoint(options) {
+ if (options) {
+ // First check the `secureEndpoint` property explicitly, since this
+ // means that a parent `Agent` is "passing through" to this instance.
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ if (typeof options.secureEndpoint === 'boolean') {
+ return options.secureEndpoint;
+ }
+ // If no explicit `secure` endpoint, check if `protocol` property is
+ // set. This will usually be the case since using a full string URL
+ // or `URL` instance should be the most common usage.
+ if (typeof options.protocol === 'string') {
+ return options.protocol === 'https:';
+ }
+ }
+ // Finally, if no `protocol` property was set, then fall back to
+ // checking the stack trace of the current call stack, and try to
+ // detect the "https" module.
+ const { stack } = new Error();
+ if (typeof stack !== 'string')
+ return false;
+ return stack
+ .split('\n')
+ .some((l) => l.indexOf('(https.js:') !== -1 ||
+ l.indexOf('node:https:') !== -1);
+ }
+ // In order to support async signatures in `connect()` and Node's native
+ // connection pooling in `http.Agent`, the array of sockets for each origin
+ // has to be updated synchronously. This is so the length of the array is
+ // accurate when `addRequest()` is next called. We achieve this by creating a
+ // fake socket and adding it to `sockets[origin]` and incrementing
+ // `totalSocketCount`.
+ incrementSockets(name) {
+ // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no
+ // need to create a fake socket because Node.js native connection pooling
+ // will never be invoked.
+ if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) {
+ return null;
+ }
+ // All instances of `sockets` are expected TypeScript errors. The
+ // alternative is to add it as a private property of this class but that
+ // will break TypeScript subclassing.
+ if (!this.sockets[name]) {
+ // @ts-expect-error `sockets` is readonly in `@types/node`
+ this.sockets[name] = [];
+ }
+ const fakeSocket = new net.Socket({ writable: false });
+ this.sockets[name].push(fakeSocket);
+ // @ts-expect-error `totalSocketCount` isn't defined in `@types/node`
+ this.totalSocketCount++;
+ return fakeSocket;
+ }
+ decrementSockets(name, socket) {
+ if (!this.sockets[name] || socket === null) {
+ return;
+ }
+ const sockets = this.sockets[name];
+ const index = sockets.indexOf(socket);
+ if (index !== -1) {
+ sockets.splice(index, 1);
+ // @ts-expect-error `totalSocketCount` isn't defined in `@types/node`
+ this.totalSocketCount--;
+ if (sockets.length === 0) {
+ // @ts-expect-error `sockets` is readonly in `@types/node`
+ delete this.sockets[name];
+ }
+ }
+ }
+ // In order to properly update the socket pool, we need to call `getName()` on
+ // the core `https.Agent` if it is a secureEndpoint.
+ getName(options) {
+ const secureEndpoint = typeof options.secureEndpoint === 'boolean'
+ ? options.secureEndpoint
+ : this.isSecureEndpoint(options);
+ if (secureEndpoint) {
+ // @ts-expect-error `getName()` isn't defined in `@types/node`
+ return https_1.Agent.prototype.getName.call(this, options);
+ }
+ // @ts-expect-error `getName()` isn't defined in `@types/node`
+ return super.getName(options);
+ }
+ createSocket(req, options, cb) {
+ const connectOpts = {
+ ...options,
+ secureEndpoint: this.isSecureEndpoint(options),
+ };
+ const name = this.getName(connectOpts);
+ const fakeSocket = this.incrementSockets(name);
+ Promise.resolve()
+ .then(() => this.connect(req, connectOpts))
+ .then((socket) => {
+ this.decrementSockets(name, fakeSocket);
+ if (socket instanceof http.Agent) {
+ // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+ return socket.addRequest(req, connectOpts);
+ }
+ this[INTERNAL].currentSocket = socket;
+ // @ts-expect-error `createSocket()` isn't defined in `@types/node`
+ super.createSocket(req, options, cb);
+ }, (err) => {
+ this.decrementSockets(name, fakeSocket);
+ cb(err);
+ });
+ }
+ createConnection() {
+ const socket = this[INTERNAL].currentSocket;
+ this[INTERNAL].currentSocket = undefined;
+ if (!socket) {
+ throw new Error('No socket was returned in the `connect()` function');
+ }
+ return socket;
+ }
+ get defaultPort() {
+ return (this[INTERNAL].defaultPort ??
+ (this.protocol === 'https:' ? 443 : 80));
+ }
+ set defaultPort(v) {
+ if (this[INTERNAL]) {
+ this[INTERNAL].defaultPort = v;
+ }
+ }
+ get protocol() {
+ return (this[INTERNAL].protocol ??
+ (this.isSecureEndpoint() ? 'https:' : 'http:'));
+ }
+ set protocol(v) {
+ if (this[INTERNAL]) {
+ this[INTERNAL].protocol = v;
+ }
+ }
+}
+exports.Agent = Agent;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/src/index.d.ts b/node_modules/agent-base/dist/src/index.d.ts
deleted file mode 100644
index bc4ab744c59c8..0000000000000
--- a/node_modules/agent-base/dist/src/index.d.ts
+++ /dev/null
@@ -1,78 +0,0 @@
-///
-import net from 'net';
-import http from 'http';
-import https from 'https';
-import { Duplex } from 'stream';
-import { EventEmitter } from 'events';
-declare function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent;
-declare function createAgent(callback: createAgent.AgentCallback, opts?: createAgent.AgentOptions): createAgent.Agent;
-declare namespace createAgent {
- interface ClientRequest extends http.ClientRequest {
- _last?: boolean;
- _hadError?: boolean;
- method: string;
- }
- interface AgentRequestOptions {
- host?: string;
- path?: string;
- port: number;
- }
- interface HttpRequestOptions extends AgentRequestOptions, Omit {
- secureEndpoint: false;
- }
- interface HttpsRequestOptions extends AgentRequestOptions, Omit {
- secureEndpoint: true;
- }
- type RequestOptions = HttpRequestOptions | HttpsRequestOptions;
- type AgentLike = Pick | http.Agent;
- type AgentCallbackReturn = Duplex | AgentLike;
- type AgentCallbackCallback = (err?: Error | null, socket?: createAgent.AgentCallbackReturn) => void;
- type AgentCallbackPromise = (req: createAgent.ClientRequest, opts: createAgent.RequestOptions) => createAgent.AgentCallbackReturn | Promise;
- type AgentCallback = typeof Agent.prototype.callback;
- type AgentOptions = {
- timeout?: number;
- };
- /**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- *
- * @param {Function} callback
- * @api public
- */
- class Agent extends EventEmitter {
- timeout: number | null;
- maxFreeSockets: number;
- maxTotalSockets: number;
- maxSockets: number;
- sockets: {
- [key: string]: net.Socket[];
- };
- freeSockets: {
- [key: string]: net.Socket[];
- };
- requests: {
- [key: string]: http.IncomingMessage[];
- };
- options: https.AgentOptions;
- private promisifiedCallback?;
- private explicitDefaultPort?;
- private explicitProtocol?;
- constructor(callback?: createAgent.AgentCallback | createAgent.AgentOptions, _opts?: createAgent.AgentOptions);
- get defaultPort(): number;
- set defaultPort(v: number);
- get protocol(): string;
- set protocol(v: string);
- callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions, fn: createAgent.AgentCallbackCallback): void;
- callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions): createAgent.AgentCallbackReturn | Promise;
- /**
- * Called by node-core's "_http_client.js" module when creating
- * a new HTTP request with this Agent instance.
- *
- * @api public
- */
- addRequest(req: ClientRequest, _opts: RequestOptions): void;
- freeSocket(socket: net.Socket, opts: AgentOptions): void;
- destroy(): void;
- }
-}
-export = createAgent;
diff --git a/node_modules/agent-base/dist/src/index.js b/node_modules/agent-base/dist/src/index.js
deleted file mode 100644
index bfd9e22071e7e..0000000000000
--- a/node_modules/agent-base/dist/src/index.js
+++ /dev/null
@@ -1,203 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-const events_1 = require("events");
-const debug_1 = __importDefault(require("debug"));
-const promisify_1 = __importDefault(require("./promisify"));
-const debug = debug_1.default('agent-base');
-function isAgent(v) {
- return Boolean(v) && typeof v.addRequest === 'function';
-}
-function isSecureEndpoint() {
- const { stack } = new Error();
- if (typeof stack !== 'string')
- return false;
- return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);
-}
-function createAgent(callback, opts) {
- return new createAgent.Agent(callback, opts);
-}
-(function (createAgent) {
- /**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- *
- * @param {Function} callback
- * @api public
- */
- class Agent extends events_1.EventEmitter {
- constructor(callback, _opts) {
- super();
- let opts = _opts;
- if (typeof callback === 'function') {
- this.callback = callback;
- }
- else if (callback) {
- opts = callback;
- }
- // Timeout for the socket to be returned from the callback
- this.timeout = null;
- if (opts && typeof opts.timeout === 'number') {
- this.timeout = opts.timeout;
- }
- // These aren't actually used by `agent-base`, but are required
- // for the TypeScript definition files in `@types/node` :/
- this.maxFreeSockets = 1;
- this.maxSockets = 1;
- this.maxTotalSockets = Infinity;
- this.sockets = {};
- this.freeSockets = {};
- this.requests = {};
- this.options = {};
- }
- get defaultPort() {
- if (typeof this.explicitDefaultPort === 'number') {
- return this.explicitDefaultPort;
- }
- return isSecureEndpoint() ? 443 : 80;
- }
- set defaultPort(v) {
- this.explicitDefaultPort = v;
- }
- get protocol() {
- if (typeof this.explicitProtocol === 'string') {
- return this.explicitProtocol;
- }
- return isSecureEndpoint() ? 'https:' : 'http:';
- }
- set protocol(v) {
- this.explicitProtocol = v;
- }
- callback(req, opts, fn) {
- throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`');
- }
- /**
- * Called by node-core's "_http_client.js" module when creating
- * a new HTTP request with this Agent instance.
- *
- * @api public
- */
- addRequest(req, _opts) {
- const opts = Object.assign({}, _opts);
- if (typeof opts.secureEndpoint !== 'boolean') {
- opts.secureEndpoint = isSecureEndpoint();
- }
- if (opts.host == null) {
- opts.host = 'localhost';
- }
- if (opts.port == null) {
- opts.port = opts.secureEndpoint ? 443 : 80;
- }
- if (opts.protocol == null) {
- opts.protocol = opts.secureEndpoint ? 'https:' : 'http:';
- }
- if (opts.host && opts.path) {
- // If both a `host` and `path` are specified then it's most
- // likely the result of a `url.parse()` call... we need to
- // remove the `path` portion so that `net.connect()` doesn't
- // attempt to open that as a unix socket file.
- delete opts.path;
- }
- delete opts.agent;
- delete opts.hostname;
- delete opts._defaultAgent;
- delete opts.defaultPort;
- delete opts.createConnection;
- // Hint to use "Connection: close"
- // XXX: non-documented `http` module API :(
- req._last = true;
- req.shouldKeepAlive = false;
- let timedOut = false;
- let timeoutId = null;
- const timeoutMs = opts.timeout || this.timeout;
- const onerror = (err) => {
- if (req._hadError)
- return;
- req.emit('error', err);
- // For Safety. Some additional errors might fire later on
- // and we need to make sure we don't double-fire the error event.
- req._hadError = true;
- };
- const ontimeout = () => {
- timeoutId = null;
- timedOut = true;
- const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`);
- err.code = 'ETIMEOUT';
- onerror(err);
- };
- const callbackError = (err) => {
- if (timedOut)
- return;
- if (timeoutId !== null) {
- clearTimeout(timeoutId);
- timeoutId = null;
- }
- onerror(err);
- };
- const onsocket = (socket) => {
- if (timedOut)
- return;
- if (timeoutId != null) {
- clearTimeout(timeoutId);
- timeoutId = null;
- }
- if (isAgent(socket)) {
- // `socket` is actually an `http.Agent` instance, so
- // relinquish responsibility for this `req` to the Agent
- // from here on
- debug('Callback returned another Agent instance %o', socket.constructor.name);
- socket.addRequest(req, opts);
- return;
- }
- if (socket) {
- socket.once('free', () => {
- this.freeSocket(socket, opts);
- });
- req.onSocket(socket);
- return;
- }
- const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``);
- onerror(err);
- };
- if (typeof this.callback !== 'function') {
- onerror(new Error('`callback` is not defined'));
- return;
- }
- if (!this.promisifiedCallback) {
- if (this.callback.length >= 3) {
- debug('Converting legacy callback function to promise');
- this.promisifiedCallback = promisify_1.default(this.callback);
- }
- else {
- this.promisifiedCallback = this.callback;
- }
- }
- if (typeof timeoutMs === 'number' && timeoutMs > 0) {
- timeoutId = setTimeout(ontimeout, timeoutMs);
- }
- if ('port' in opts && typeof opts.port !== 'number') {
- opts.port = Number(opts.port);
- }
- try {
- debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`);
- Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError);
- }
- catch (err) {
- Promise.reject(err).catch(callbackError);
- }
- }
- freeSocket(socket, opts) {
- debug('Freeing socket %o %o', socket.constructor.name, opts);
- socket.destroy();
- }
- destroy() {
- debug('Destroying agent %o', this.constructor.name);
- }
- }
- createAgent.Agent = Agent;
- // So that `instanceof` works correctly
- createAgent.prototype = createAgent.Agent.prototype;
-})(createAgent || (createAgent = {}));
-module.exports = createAgent;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/src/index.js.map b/node_modules/agent-base/dist/src/index.js.map
deleted file mode 100644
index bd118ab6bb1ce..0000000000000
--- a/node_modules/agent-base/dist/src/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAIA,mCAAsC;AACtC,kDAAgC;AAChC,4DAAoC;AAEpC,MAAM,KAAK,GAAG,eAAW,CAAC,YAAY,CAAC,CAAC;AAExC,SAAS,OAAO,CAAC,CAAM;IACtB,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,UAAU,CAAC;AACzD,CAAC;AAED,SAAS,gBAAgB;IACxB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,CAAC;IAC9B,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,OAAO,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,IAAK,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACxG,CAAC;AAOD,SAAS,WAAW,CACnB,QAA+D,EAC/D,IAA+B;IAE/B,OAAO,IAAI,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,WAAU,WAAW;IAmDpB;;;;;;OAMG;IACH,MAAa,KAAM,SAAQ,qBAAY;QAmBtC,YACC,QAA+D,EAC/D,KAAgC;YAEhC,KAAK,EAAE,CAAC;YAER,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;gBACnC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;aACzB;iBAAM,IAAI,QAAQ,EAAE;gBACpB,IAAI,GAAG,QAAQ,CAAC;aAChB;YAED,0DAA0D;YAC1D,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;aAC5B;YAED,+DAA+D;YAC/D,0DAA0D;YAC1D,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;YACxB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YACpB,IAAI,CAAC,eAAe,GAAG,QAAQ,CAAC;YAChC,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;YAClB,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;YACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QACnB,CAAC;QAED,IAAI,WAAW;YACd,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;gBACjD,OAAO,IAAI,CAAC,mBAAmB,CAAC;aAChC;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QAED,IAAI,WAAW,CAAC,CAAS;YACxB,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,QAAQ;YACX,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,QAAQ,EAAE;gBAC9C,OAAO,IAAI,CAAC,gBAAgB,CAAC;aAC7B;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;QAChD,CAAC;QAED,IAAI,QAAQ,CAAC,CAAS;YACrB,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC;QAC3B,CAAC;QAaD,QAAQ,CACP,GAA8B,EAC9B,IAA8B,EAC9B,EAAsC;YAKtC,MA
AM,IAAI,KAAK,CACd,yFAAyF,CACzF,CAAC;QACH,CAAC;QAED;;;;;WAKG;QACH,UAAU,CAAC,GAAkB,EAAE,KAAqB;YACnD,MAAM,IAAI,qBAAwB,KAAK,CAAE,CAAC;YAE1C,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;gBAC7C,IAAI,CAAC,cAAc,GAAG,gBAAgB,EAAE,CAAC;aACzC;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;aACxB;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;aACzD;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE;gBAC3B,2DAA2D;gBAC3D,0DAA0D;gBAC1D,4DAA4D;gBAC5D,8CAA8C;gBAC9C,OAAO,IAAI,CAAC,IAAI,CAAC;aACjB;YAED,OAAO,IAAI,CAAC,KAAK,CAAC;YAClB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACrB,OAAO,IAAI,CAAC,aAAa,CAAC;YAC1B,OAAO,IAAI,CAAC,WAAW,CAAC;YACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC;YAE7B,kCAAkC;YAClC,2CAA2C;YAC3C,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;YACjB,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAE5B,IAAI,QAAQ,GAAG,KAAK,CAAC;YACrB,IAAI,SAAS,GAAyC,IAAI,CAAC;YAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAE/C,MAAM,OAAO,GAAG,CAAC,GAA0B,EAAE,EAAE;gBAC9C,IAAI,GAAG,CAAC,SAAS;oBAAE,OAAO;gBAC1B,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBACvB,yDAAyD;gBACzD,iEAAiE;gBACjE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACtB,CAAC,CAAC;YAEF,MAAM,SAAS,GAAG,GAAG,EAAE;gBACtB,SAAS,GAAG,IAAI,CAAC;gBACjB,QAAQ,GAAG,IAAI,CAAC;gBAChB,MAAM,GAAG,GAA0B,IAAI,KAAK,CAC3C,sDAAsD,SAAS,IAAI,CACnE,CAAC;gBACF,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,aAAa,GAAG,CAAC,GAA0B,EAAE,EAAE;gBACpD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,KAAK,IAAI,EAAE;oBACvB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBACD,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,QAAQ,GAAG,CAAC,MAA2B,EAAE,EAAE;gBAChD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,IAAI,IAAI,EAAE;oBACtB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBAED,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE;oBACpB,oDAAoD;oBACpD,wDAAwD;oBACxD,eAAe;
oBACf,KAAK,CACJ,6CAA6C,EAC7C,MAAM,CAAC,WAAW,CAAC,IAAI,CACvB,CAAC;oBACD,MAA4B,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpD,OAAO;iBACP;gBAED,IAAI,MAAM,EAAE;oBACX,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE;wBACxB,IAAI,CAAC,UAAU,CAAC,MAAoB,EAAE,IAAI,CAAC,CAAC;oBAC7C,CAAC,CAAC,CAAC;oBACH,GAAG,CAAC,QAAQ,CAAC,MAAoB,CAAC,CAAC;oBACnC,OAAO;iBACP;gBAED,MAAM,GAAG,GAAG,IAAI,KAAK,CACpB,qDAAqD,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,IAAI,CAC/E,CAAC;gBACF,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACxC,OAAO,CAAC,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC,CAAC;gBAChD,OAAO;aACP;YAED,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;gBAC9B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE;oBAC9B,KAAK,CAAC,gDAAgD,CAAC,CAAC;oBACxD,IAAI,CAAC,mBAAmB,GAAG,mBAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;iBACpD;qBAAM;oBACN,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,QAAQ,CAAC;iBACzC;aACD;YAED,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,GAAG,CAAC,EAAE;gBACnD,SAAS,GAAG,UAAU,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;aAC7C;YAED,IAAI,MAAM,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACpD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI;gBACH,KAAK,CACJ,qCAAqC,EACrC,IAAI,CAAC,QAAQ,EACb,GAAG,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,CAC3B,CAAC;gBACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CACxD,QAAQ,EACR,aAAa,CACb,CAAC;aACF;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;aACzC;QACF,CAAC;QAED,UAAU,CAAC,MAAkB,EAAE,IAAkB;YAChD,KAAK,CAAC,sBAAsB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAC7D,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,CAAC;QAED,OAAO;YACN,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACrD,CAAC;KACD;IAxPY,iBAAK,QAwPjB,CAAA;IAED,uCAAuC;IACvC,WAAW,CAAC,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC;AACrD,CAAC,EAtTS,WAAW,KAAX,WAAW,QAsTpB;AAED,iBAAS,WAAW,CAAC"}
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/src/promisify.d.ts b/node_modules/agent-base/dist/src/promisify.d.ts
deleted file mode 100644
index 02688696fb4c1..0000000000000
--- a/node_modules/agent-base/dist/src/promisify.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { ClientRequest, RequestOptions, AgentCallbackCallback, AgentCallbackPromise } from './index';
-declare type LegacyCallback = (req: ClientRequest, opts: RequestOptions, fn: AgentCallbackCallback) => void;
-export default function promisify(fn: LegacyCallback): AgentCallbackPromise;
-export {};
diff --git a/node_modules/agent-base/dist/src/promisify.js b/node_modules/agent-base/dist/src/promisify.js
deleted file mode 100644
index b2f6132a7beaa..0000000000000
--- a/node_modules/agent-base/dist/src/promisify.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-function promisify(fn) {
- return function (req, opts) {
- return new Promise((resolve, reject) => {
- fn.call(this, req, opts, (err, rtn) => {
- if (err) {
- reject(err);
- }
- else {
- resolve(rtn);
- }
- });
- });
- };
-}
-exports.default = promisify;
-//# sourceMappingURL=promisify.js.map
\ No newline at end of file
diff --git a/node_modules/agent-base/dist/src/promisify.js.map b/node_modules/agent-base/dist/src/promisify.js.map
deleted file mode 100644
index 4bff9bfcfa289..0000000000000
--- a/node_modules/agent-base/dist/src/promisify.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"promisify.js","sourceRoot":"","sources":["../../src/promisify.ts"],"names":[],"mappings":";;AAeA,SAAwB,SAAS,CAAC,EAAkB;IACnD,OAAO,UAAsB,GAAkB,EAAE,IAAoB;QACpE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,EAAE,CAAC,IAAI,CACN,IAAI,EACJ,GAAG,EACH,IAAI,EACJ,CAAC,GAA6B,EAAE,GAAyB,EAAE,EAAE;gBAC5D,IAAI,GAAG,EAAE;oBACR,MAAM,CAAC,GAAG,CAAC,CAAC;iBACZ;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,CAAC;iBACb;YACF,CAAC,CACD,CAAC;QACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;AACH,CAAC;AAjBD,4BAiBC"}
\ No newline at end of file
diff --git a/node_modules/agent-base/package.json b/node_modules/agent-base/package.json
index fadce3ad99f22..8e95171707fef 100644
--- a/node_modules/agent-base/package.json
+++ b/node_modules/agent-base/package.json
@@ -1,24 +1,16 @@
{
"name": "agent-base",
- "version": "6.0.2",
+ "version": "7.1.1",
"description": "Turn a function into an `http.Agent` instance",
- "main": "dist/src/index",
- "typings": "dist/src/index",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
"files": [
- "dist/src",
- "src"
+ "dist"
],
- "scripts": {
- "prebuild": "rimraf dist",
- "build": "tsc",
- "postbuild": "cpy --parents src test '!**/*.ts' dist",
- "test": "mocha --reporter spec dist/test/*.js",
- "test-lint": "eslint src --ext .js,.ts",
- "prepublishOnly": "npm run build"
- },
"repository": {
"type": "git",
- "url": "git://github.com/TooTallNate/node-agent-base.git"
+ "url": "https://github.com/TooTallNate/proxy-agents.git",
+ "directory": "packages/agent-base"
},
"keywords": [
"http",
@@ -29,36 +21,29 @@
],
"author": "Nathan Rajlich (http://n8.io/)",
"license": "MIT",
- "bugs": {
- "url": "https://github.com/TooTallNate/node-agent-base/issues"
- },
"dependencies": {
- "debug": "4"
+ "debug": "^4.3.4"
},
"devDependencies": {
- "@types/debug": "4",
- "@types/mocha": "^5.2.7",
- "@types/node": "^14.0.20",
- "@types/semver": "^7.1.0",
- "@types/ws": "^6.0.3",
- "@typescript-eslint/eslint-plugin": "1.6.0",
- "@typescript-eslint/parser": "1.1.0",
- "async-listen": "^1.2.0",
- "cpy-cli": "^2.0.0",
- "eslint": "5.16.0",
- "eslint-config-airbnb": "17.1.0",
- "eslint-config-prettier": "4.1.0",
- "eslint-import-resolver-typescript": "1.1.1",
- "eslint-plugin-import": "2.16.0",
- "eslint-plugin-jsx-a11y": "6.2.1",
- "eslint-plugin-react": "7.12.4",
- "mocha": "^6.2.0",
- "rimraf": "^3.0.0",
- "semver": "^7.1.2",
- "typescript": "^3.5.3",
- "ws": "^3.0.0"
+ "@types/debug": "^4.1.7",
+ "@types/jest": "^29.5.1",
+ "@types/node": "^14.18.45",
+ "@types/semver": "^7.3.13",
+ "@types/ws": "^6.0.4",
+ "async-listen": "^3.0.0",
+ "jest": "^29.5.0",
+ "ts-jest": "^29.1.0",
+ "typescript": "^5.0.4",
+ "ws": "^3.3.3",
+ "tsconfig": "0.0.0"
},
"engines": {
- "node": ">= 6.0.0"
+ "node": ">= 14"
+ },
+ "scripts": {
+ "build": "tsc",
+ "test": "jest --env node --verbose --bail",
+ "lint": "eslint . --ext .ts",
+ "pack": "node ../../scripts/pack.mjs"
}
-}
+}
\ No newline at end of file
diff --git a/node_modules/agent-base/src/index.ts b/node_modules/agent-base/src/index.ts
deleted file mode 100644
index a47ccd493f90a..0000000000000
--- a/node_modules/agent-base/src/index.ts
+++ /dev/null
@@ -1,345 +0,0 @@
-import net from 'net';
-import http from 'http';
-import https from 'https';
-import { Duplex } from 'stream';
-import { EventEmitter } from 'events';
-import createDebug from 'debug';
-import promisify from './promisify';
-
-const debug = createDebug('agent-base');
-
-function isAgent(v: any): v is createAgent.AgentLike {
- return Boolean(v) && typeof v.addRequest === 'function';
-}
-
-function isSecureEndpoint(): boolean {
- const { stack } = new Error();
- if (typeof stack !== 'string') return false;
- return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1);
-}
-
-function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent;
-function createAgent(
- callback: createAgent.AgentCallback,
- opts?: createAgent.AgentOptions
-): createAgent.Agent;
-function createAgent(
- callback?: createAgent.AgentCallback | createAgent.AgentOptions,
- opts?: createAgent.AgentOptions
-) {
- return new createAgent.Agent(callback, opts);
-}
-
-namespace createAgent {
- export interface ClientRequest extends http.ClientRequest {
- _last?: boolean;
- _hadError?: boolean;
- method: string;
- }
-
- export interface AgentRequestOptions {
- host?: string;
- path?: string;
- // `port` on `http.RequestOptions` can be a string or undefined,
- // but `net.TcpNetConnectOpts` expects only a number
- port: number;
- }
-
- export interface HttpRequestOptions
- extends AgentRequestOptions,
- Omit {
- secureEndpoint: false;
- }
-
- export interface HttpsRequestOptions
- extends AgentRequestOptions,
- Omit {
- secureEndpoint: true;
- }
-
- export type RequestOptions = HttpRequestOptions | HttpsRequestOptions;
-
- export type AgentLike = Pick | http.Agent;
-
- export type AgentCallbackReturn = Duplex | AgentLike;
-
- export type AgentCallbackCallback = (
- err?: Error | null,
- socket?: createAgent.AgentCallbackReturn
- ) => void;
-
- export type AgentCallbackPromise = (
- req: createAgent.ClientRequest,
- opts: createAgent.RequestOptions
- ) =>
- | createAgent.AgentCallbackReturn
- | Promise