Removed the Requirement to Install Python and NodeJS (Now Bundled with Borealis)

2025-04-24 00:42:19 -06:00
parent 785265d3e7
commit 9c68cdea84
7786 changed files with 2386458 additions and 217 deletions


@@ -0,0 +1,40 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
// Print an error, or nothing, if the audit report has an error.
// This is called by the audit command, and by the reify-output util.
// Prints a JSON version of the error if --json is set.
// Returns false if there was no error, returns true if there was an error
// but this isn't the `audit` command, and throws if `npm audit` itself failed.
const auditError = (npm, report) => {
if (!report || !report.error) {
return false
}
if (npm.command !== 'audit') {
return true
}
const { error } = report
// ok, we care about it, then
log.warn('audit', error.message)
const { body: errBody } = error
const body = Buffer.isBuffer(errBody) ? errBody.toString() : errBody
if (npm.flatOptions.json) {
output.buffer({
message: error.message,
method: error.method,
uri: replaceInfo(error.uri),
headers: error.headers,
statusCode: error.statusCode,
body,
})
} else {
output.standard(body)
}
// XXX: a string (not an Error) is thrown here on purpose; getError() in
// error-message.js treats thrown strings as already-reported errors
// (exit code 1, suppressError) since the details were printed above
throw 'audit endpoint returned an error'
}
module.exports = auditError
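
A minimal sketch of that contract, assuming a hypothetical npm-like object that only carries the fields auditError actually reads (the names fakeNpm and report are illustrative, as is the require path):

const auditError = require('./audit-error.js')

// stand-in for the real npm object: just command and flatOptions
const fakeNpm = { command: 'install', flatOptions: { json: false } }

// report with no error: nothing printed, returns false
auditError(fakeNpm, { error: null }) // false

// error outside of `npm audit`: nothing printed, returns true
auditError(fakeNpm, { error: new Error('boom') }) // true

// inside `npm audit` the error is logged/printed and a string is thrown
// auditError({ ...fakeNpm, command: 'audit' }, { error: someError })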

Dependencies/NodeJS/node_modules/npm/lib/utils/auth.js (generated, vendored)

@@ -0,0 +1,109 @@
const { webAuthOpener, adduserWeb, loginWeb, loginCouch, adduserCouch } = require('npm-profile')
const { log } = require('proc-log')
const { createOpener } = require('../utils/open-url.js')
const read = require('../utils/read-user-info.js')
const otplease = async (npm, opts, fn) => {
try {
return await fn(opts)
} catch (err) {
if (!process.stdin.isTTY || !process.stdout.isTTY) {
throw err
}
// web otp
if (err.code === 'EOTP' && err.body?.authUrl && err.body?.doneUrl) {
const { token: otp } = await webAuthOpener(
createOpener(npm, 'Authenticate your account at'),
err.body.authUrl,
err.body.doneUrl,
opts
)
return await fn({ ...opts, otp })
}
// classic otp
if (err.code === 'EOTP' || (err.code === 'E401' && /one-time pass/.test(err.body))) {
const otp = await read.otp('This operation requires a one-time password.\nEnter OTP:')
return await fn({ ...opts, otp })
}
throw err
}
}
const adduser = async (npm, { creds, ...opts }) => {
const authType = npm.config.get('auth-type')
let res
if (authType === 'web') {
try {
res = await adduserWeb(createOpener(npm, 'Create your account at'), opts)
} catch (err) {
if (err.code === 'ENYI') {
log.verbose('web add user not supported, trying couch')
} else {
throw err
}
}
}
// auth type !== web or ENYI error w/ web adduser
if (!res) {
const username = await read.username('Username:', creds.username)
const password = await read.password('Password:', creds.password)
const email = await read.email('Email: (this IS public) ', creds.email)
// npm registry quirk: If you "add" an existing user with their current
// password, it's effectively a login, and if that account has otp you'll
// be prompted for it.
res = await otplease(npm, opts, (reqOpts) => adduserCouch(username, email, password, reqOpts))
}
// We don't know the username if it was a web login; all we can reliably log is scope and registry
const message = `Logged in${opts.scope ? ` to scope ${opts.scope}` : ''} on ${opts.registry}.`
log.info('adduser', message)
return {
message,
newCreds: { token: res.token },
}
}
const login = async (npm, { creds, ...opts }) => {
const authType = npm.config.get('auth-type')
let res
if (authType === 'web') {
try {
res = await loginWeb(createOpener(npm, 'Login at'), opts)
} catch (err) {
if (err.code === 'ENYI') {
log.verbose('web login not supported, trying couch')
} else {
throw err
}
}
}
// auth type !== web or ENYI error w/ web login
if (!res) {
const username = await read.username('Username:', creds.username)
const password = await read.password('Password:', creds.password)
res = await otplease(npm, opts, (reqOpts) => loginCouch(username, password, reqOpts))
}
// We don't know the username if it was a web login; all we can reliably log is scope and registry
const message = `Logged in${opts.scope ? ` to scope ${opts.scope}` : ''} on ${opts.registry}.`
log.info('login', message)
return {
message,
newCreds: { token: res.token },
}
}
module.exports = {
adduser,
login,
otplease,
}
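
otplease is the piece most callers reuse: it wraps any registry operation that might be rejected with an OTP challenge and retries it once with the one-time password merged into the options. A sketch of the calling convention, where publishWithOtp and registryCall are hypothetical names standing in for a real caller and an authenticated request:

const { otplease } = require('./auth.js')

async function publishWithOtp (npm, opts, registryCall) {
  // if registryCall rejects with code EOTP (or an E401 mentioning a
  // one-time pass), otplease prompts, or opens the web flow, and then
  // retries once as fn({ ...opts, otp })
  return otplease(npm, opts, (reqOpts) => registryCall(reqOpts))
}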


@@ -0,0 +1,178 @@
const abbrev = require('abbrev')
// These correspond to filenames in lib/commands
// Please keep this list sorted alphabetically
const commands = [
'access',
'adduser',
'audit',
'bugs',
'cache',
'ci',
'completion',
'config',
'dedupe',
'deprecate',
'diff',
'dist-tag',
'docs',
'doctor',
'edit',
'exec',
'explain',
'explore',
'find-dupes',
'fund',
'get',
'help',
'help-search',
'hook',
'init',
'install',
'install-ci-test',
'install-test',
'link',
'll',
'login',
'logout',
'ls',
'org',
'outdated',
'owner',
'pack',
'ping',
'pkg',
'prefix',
'profile',
'prune',
'publish',
'query',
'rebuild',
'repo',
'restart',
'root',
'run-script',
'sbom',
'search',
'set',
'shrinkwrap',
'star',
'stars',
'start',
'stop',
'team',
'test',
'token',
'uninstall',
'unpublish',
'unstar',
'update',
'version',
'view',
'whoami',
]
// These must resolve to an entry in commands
const aliases = {
// aliases
author: 'owner',
home: 'docs',
issues: 'bugs',
info: 'view',
show: 'view',
find: 'search',
add: 'install',
unlink: 'uninstall',
remove: 'uninstall',
rm: 'uninstall',
r: 'uninstall',
// short names for common things
un: 'uninstall',
rb: 'rebuild',
list: 'ls',
ln: 'link',
create: 'init',
i: 'install',
it: 'install-test',
cit: 'install-ci-test',
up: 'update',
c: 'config',
s: 'search',
se: 'search',
tst: 'test',
t: 'test',
ddp: 'dedupe',
v: 'view',
run: 'run-script',
'clean-install': 'ci',
'clean-install-test': 'install-ci-test',
x: 'exec',
why: 'explain',
la: 'll',
verison: 'version',
ic: 'ci',
// typos
innit: 'init',
// manually abbrev so that install-test doesn't make insta stop working
in: 'install',
ins: 'install',
inst: 'install',
insta: 'install',
instal: 'install',
isnt: 'install',
isnta: 'install',
isntal: 'install',
isntall: 'install',
'install-clean': 'ci',
'isntall-clean': 'ci',
hlep: 'help',
'dist-tags': 'dist-tag',
upgrade: 'update',
udpate: 'update',
rum: 'run-script',
sit: 'install-ci-test',
urn: 'run-script',
ogr: 'org',
'add-user': 'adduser',
}
const deref = (c) => {
if (!c) {
return
}
// Translate camelCase to kebab-case (e.g. installTest to install-test)
if (c.match(/[A-Z]/)) {
c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
}
// if they asked for something exactly we are done
if (commands.includes(c)) {
return c
}
// if they asked for a direct alias
if (aliases[c]) {
return aliases[c]
}
const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
// first deref the abbrev, if there is one
// then resolve any aliases
// so `npm install-cl` will resolve to `install-clean` then to `ci`
let a = abbrevs[c]
while (aliases[a]) {
a = aliases[a]
}
return a
}
module.exports = {
aliases,
commands,
deref,
}
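
deref resolves a user-typed name by translating camelCase, then checking exact commands, then direct aliases, and finally abbreviations, following alias chains to a canonical command. A few concrete resolutions (the require path is illustrative):

const { deref } = require('./cmd-list.js')

deref('install')     // 'install'      exact command
deref('verison')     // 'version'      typo alias
deref('installTest') // 'install-test' camelCase translated first
deref('install-cl')  // 'ci'           abbrev -> 'install-clean' -> alias -> 'ci'
deref('bogus')       // undefined      no command, alias, or abbreviation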


@@ -0,0 +1,40 @@
# npm completions for Fish shell
# This script is a work in progress and does not fall under the same semver contract as the rest of npm.
# __fish_npm_needs_command taken from:
# https://stackoverflow.com/questions/16657803/creating-autocomplete-script-with-sub-commands
function __fish_npm_needs_command
set -l cmd (commandline -opc)
if test (count $cmd) -eq 1
return 0
end
return 1
end
# Taken from https://github.com/fish-shell/fish-shell/blob/HEAD/share/completions/npm.fish
function __fish_complete_npm -d "Complete the commandline using npm's 'completion' tool"
# tell npm we are fish shell
set -lx COMP_FISH true
if command -sq npm
# npm completion is bash-centric, so we need to translate fish's "commandline" stuff to bash's $COMP_* stuff
# COMP_LINE is an array with the words in the commandline
set -lx COMP_LINE (commandline -opc)
# COMP_CWORD is the index of the current word in COMP_LINE
# bash starts arrays with 0, so subtract 1
set -lx COMP_CWORD (math (count $COMP_LINE) - 1)
# COMP_POINT is the index of point/cursor when the commandline is viewed as a string
set -lx COMP_POINT (commandline -C)
# If the cursor is after the last word, the empty token will disappear in the expansion
# Re-add it
if test (commandline -ct) = ""
set COMP_CWORD (math $COMP_CWORD + 1)
set COMP_LINE $COMP_LINE ""
end
command npm completion -- $COMP_LINE 2>/dev/null
end
end
# flush out what ships with fish
complete -e npm


@@ -0,0 +1,70 @@
#!/bin/bash
###-begin-npm-completion-###
#
# npm command completion script
#
# Installation: npm completion >> ~/.bashrc (or ~/.zshrc)
# Or, maybe: npm completion > /usr/local/etc/bash_completion.d/npm
#
if type complete &>/dev/null; then
_npm_completion () {
local words cword
if type _get_comp_words_by_ref &>/dev/null; then
_get_comp_words_by_ref -n = -n @ -n : -w words -i cword
else
cword="$COMP_CWORD"
words=("${COMP_WORDS[@]}")
fi
local si="$IFS"
if ! IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \
COMP_LINE="$COMP_LINE" \
COMP_POINT="$COMP_POINT" \
npm completion -- "${words[@]}" \
2>/dev/null)); then
local ret=$?
IFS="$si"
return $ret
fi
IFS="$si"
if type __ltrim_colon_completions &>/dev/null; then
__ltrim_colon_completions "${words[cword]}"
fi
}
complete -o default -F _npm_completion npm
elif type compdef &>/dev/null; then
_npm_completion() {
local si=$IFS
compadd -- $(COMP_CWORD=$((CURRENT-1)) \
COMP_LINE=$BUFFER \
COMP_POINT=0 \
npm completion -- "${words[@]}" \
2>/dev/null)
IFS=$si
}
compdef _npm_completion npm
elif type compctl &>/dev/null; then
_npm_completion () {
local cword line point words si
read -Ac words
read -cn cword
let cword-=1
read -l line
read -ln point
si="$IFS"
if ! IFS=$'\n' reply=($(COMP_CWORD="$cword" \
COMP_LINE="$line" \
COMP_POINT="$point" \
npm completion -- "${words[@]}" \
2>/dev/null)); then
local ret=$?
IFS="$si"
return $ret
fi
IFS="$si"
}
compctl -K _npm_completion npm
fi
###-end-npm-completion-###
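
Both completion scripts speak the same protocol: they export bash-style COMP_* variables and invoke `npm completion -- <words>`, which prints one candidate per line. A sketch of what the Node side sees for `npm ins<TAB>` (values illustrative):

// environment set by the shell script for `npm ins<TAB>`
process.env.COMP_CWORD // '1'       index of the word being completed
process.env.COMP_LINE  // 'npm ins' the whole command line
process.env.COMP_POINT // '7'       cursor offset into COMP_LINE
// arguments after `--`: ['npm', 'ins']
// npm writes matching candidates, e.g. 'install', to stdout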


@@ -0,0 +1,34 @@
const Npm = require('../npm')
const { distance } = require('fastest-levenshtein')
const { commands } = require('./cmd-list.js')
const runScripts = ['stop', 'start', 'test', 'restart']
const isClose = (scmd, cmd) => distance(scmd, cmd) < scmd.length * 0.4
const didYouMean = (pkg, scmd) => {
const { scripts = {}, bin = {} } = pkg || {}
const best = [
...commands
.filter(cmd => isClose(scmd, cmd) && scmd !== cmd)
.map(str => [str, Npm.cmd(str).description]),
...Object.keys(scripts)
// We would already be suggesting this in `npm x` so omit them here
.filter(cmd => isClose(scmd, cmd) && !runScripts.includes(cmd))
.map(str => [`run ${str}`, `run the "${str}" package script`]),
...Object.keys(bin)
.filter(cmd => isClose(scmd, cmd))
/* eslint-disable-next-line max-len */
.map(str => [`exec ${str}`, `run the "${str}" command from either this or a remote npm package`]),
]
if (best.length === 0) {
return ''
}
return `\n\nDid you mean ${best.length === 1 ? 'this' : 'one of these'}?\n` +
best.slice(0, 3).map(([msg, comment]) => ` npm ${msg} # ${comment}`).join('\n')
}
module.exports = didYouMean
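
The closeness cutoff scales with what was typed: a candidate is suggested when its Levenshtein distance is under 40% of the typed command's length. Two worked checks of isClose, assuming fastest-levenshtein is available:

const { distance } = require('fastest-levenshtein')

// 'instal' vs 'install': distance 1 < 6 * 0.4 = 2.4, so it is suggested
distance('instal', 'install') // 1

// 'ix' vs 'init': distance 3 >= 2 * 0.4 = 0.8, so it is not
distance('ix', 'init') // 3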


@@ -0,0 +1,539 @@
const { log, output, input, META } = require('proc-log')
const { explain } = require('./explain-eresolve.js')
const { formatWithOptions } = require('./format')
// This is the general approach to color:
// Eventually this will be exposed somewhere we can refer to these by name.
// Foreground colors only. Never set the background color.
/*
* Black # (Don't use)
* Red # Danger
* Green # Success
* Yellow # Warning
* Blue # Accent
* Magenta # Done
* Cyan # Emphasis
* White # (Don't use)
*/
// Translates log levels to chalk colors
const COLOR_PALETTE = ({ chalk: c }) => ({
heading: c.bold,
title: c.blueBright,
timing: c.magentaBright,
// loglevels
error: c.red,
warn: c.yellow,
notice: c.cyanBright,
http: c.green,
info: c.cyan,
verbose: c.blue,
silly: c.blue.dim,
})
const LEVEL_OPTIONS = {
silent: {
index: 0,
},
error: {
index: 1,
},
warn: {
index: 2,
},
notice: {
index: 3,
},
http: {
index: 4,
},
info: {
index: 5,
},
verbose: {
index: 6,
},
silly: {
index: 7,
},
}
const LEVEL_METHODS = {
...LEVEL_OPTIONS,
[log.KEYS.timing]: {
show: ({ timing, index }) => !!timing && index !== 0,
},
}
const setBlocking = (stream) => {
// Copied from https://github.com/yargs/set-blocking
// https://raw.githubusercontent.com/yargs/set-blocking/master/LICENSE.txt
/* istanbul ignore next - we trust that this works */
if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') {
stream._handle.setBlocking(true)
}
return stream
}
// These are important
// This is the key that is returned to the user for errors
const ERROR_KEY = 'error'
// This is the key producers use to indicate that there
// is a json error that should be merged into the finished output
const JSON_ERROR_KEY = 'jsonError'
const isPlainObject = (v) => v && typeof v === 'object' && !Array.isArray(v)
const getArrayOrObject = (items) => {
if (items.length) {
const foundNonObject = items.find(o => !isPlainObject(o))
// Non-objects and arrays can't be merged, so just return the first item
if (foundNonObject) {
return foundNonObject
}
// Objects whose keys are 0,1,2,etc get merged into an array
if (items.every((o, i) => Object.hasOwn(o, i))) {
return Object.assign([], ...items)
}
}
// Otherwise it's an object with all object items merged together
return Object.assign({}, ...items.filter(o => isPlainObject(o)))
}
const getJsonBuffer = ({ [JSON_ERROR_KEY]: metaError }, buffer) => {
const items = []
// meta also contains the meta object passed to flush
const errors = metaError ? [metaError] : []
// index 1 is the meta, 2 is the logged argument
for (const [, { [JSON_ERROR_KEY]: error }, obj] of buffer) {
if (obj) {
items.push(obj)
}
if (error) {
errors.push(error)
}
}
if (!items.length && !errors.length) {
return null
}
const res = getArrayOrObject(items)
// This skips any error checking since we can only set an error property
// on an object that can be stringified
// XXX(BREAKING_CHANGE): remove this in favor of always returning an object with result and error keys
if (isPlainObject(res) && errors.length) {
// This is not ideal. JSON output has always been keyed at the root with an `error`
// key, so we can't change that without it being a breaking change. At the same time
// some commands output arbitrary keys at the top level of the output, such as package
// names. So the output could already have the same key. The choice here is to overwrite
// it with our error since that is (probably?) more important.
// XXX(BREAKING_CHANGE): all json output should be keyed under well known keys, eg `result` and `error`
if (res[ERROR_KEY]) {
log.warn('', `overwriting existing ${ERROR_KEY} on json output`)
}
res[ERROR_KEY] = getArrayOrObject(errors)
}
return res
}
const withMeta = (handler) => (level, ...args) => {
let meta = {}
const last = args.at(-1)
if (last && typeof last === 'object' && Object.hasOwn(last, META)) {
meta = args.pop()
}
return handler(level, meta, ...args)
}
class Display {
#logState = {
buffering: true,
buffer: [],
}
#outputState = {
buffering: true,
buffer: [],
}
// colors
#noColorChalk
#stdoutChalk
#stdoutColor
#stderrChalk
#stderrColor
#logColors
// progress
#progress
// options
#command
#levelIndex
#timing
#json
#heading
#silent
// display streams
#stdout
#stderr
constructor ({ stdout, stderr }) {
this.#stdout = setBlocking(stdout)
this.#stderr = setBlocking(stderr)
// Handlers are set immediately so they can buffer all events
process.on('log', this.#logHandler)
process.on('output', this.#outputHandler)
process.on('input', this.#inputHandler)
this.#progress = new Progress({ stream: stderr })
}
off () {
process.off('log', this.#logHandler)
this.#logState.buffer.length = 0
process.off('output', this.#outputHandler)
this.#outputState.buffer.length = 0
process.off('input', this.#inputHandler)
this.#progress.off()
}
get chalk () {
return {
noColor: this.#noColorChalk,
stdout: this.#stdoutChalk,
stderr: this.#stderrChalk,
}
}
async load ({
command,
heading,
json,
loglevel,
progress,
stderrColor,
stdoutColor,
timing,
unicode,
}) {
// get createSupportsColor from chalk directly if this lands
// https://github.com/chalk/chalk/pull/600
const [{ Chalk }, { createSupportsColor }] = await Promise.all([
import('chalk'),
import('supports-color'),
])
// We get the chalk level based on a null stream, meaning chalk will only use
// what it knows about the environment to determine color support, since we
// already decided in our definitions that we want to show colors.
const level = Math.max(createSupportsColor(null).level, 1)
this.#noColorChalk = new Chalk({ level: 0 })
this.#stdoutColor = stdoutColor
this.#stdoutChalk = stdoutColor ? new Chalk({ level }) : this.#noColorChalk
this.#stderrColor = stderrColor
this.#stderrChalk = stderrColor ? new Chalk({ level }) : this.#noColorChalk
this.#logColors = COLOR_PALETTE({ chalk: this.#stderrChalk })
this.#command = command
this.#levelIndex = LEVEL_OPTIONS[loglevel].index
this.#timing = timing
this.#json = json
this.#heading = heading
this.#silent = this.#levelIndex <= 0
// Emit resume event on the logs which will flush output
log.resume()
output.flush()
this.#progress.load({
unicode,
enabled: !!progress && !this.#silent,
})
}
// STREAM WRITES
// Write formatted and (non-)colorized output to streams
#write (stream, options, ...args) {
const colors = stream === this.#stdout ? this.#stdoutColor : this.#stderrColor
const value = formatWithOptions({ colors, ...options }, ...args)
this.#progress.write(() => stream.write(value))
}
// HANDLERS
// Arrow function assigned to a private class field so it can be passed
// directly as a listener and still reference "this"
#logHandler = withMeta((level, meta, ...args) => {
switch (level) {
case log.KEYS.resume:
this.#logState.buffering = false
this.#logState.buffer.forEach((item) => this.#tryWriteLog(...item))
this.#logState.buffer.length = 0
break
case log.KEYS.pause:
this.#logState.buffering = true
break
default:
if (this.#logState.buffering) {
this.#logState.buffer.push([level, meta, ...args])
} else {
this.#tryWriteLog(level, meta, ...args)
}
break
}
})
// Arrow function assigned to a private class field so it can be passed
// directly as a listener and still reference "this"
#outputHandler = withMeta((level, meta, ...args) => {
this.#json = typeof meta.json === 'boolean' ? meta.json : this.#json
switch (level) {
case output.KEYS.flush: {
this.#outputState.buffering = false
if (this.#json) {
const json = getJsonBuffer(meta, this.#outputState.buffer)
if (json) {
this.#writeOutput(output.KEYS.standard, meta, JSON.stringify(json, null, 2))
}
} else {
this.#outputState.buffer.forEach((item) => this.#writeOutput(...item))
}
this.#outputState.buffer.length = 0
break
}
case output.KEYS.buffer:
this.#outputState.buffer.push([output.KEYS.standard, meta, ...args])
break
default:
if (this.#outputState.buffering) {
this.#outputState.buffer.push([level, meta, ...args])
} else {
// HACK: Check if the argument looks like a run-script banner. This can be
// replaced with proc-log.META in @npmcli/run-script
if (typeof args[0] === 'string' && args[0].startsWith('\n> ') && args[0].endsWith('\n')) {
if (this.#silent || ['exec', 'explore'].includes(this.#command)) {
// Silent mode and some specific commands always hide run script banners
break
} else if (this.#json) {
// In json mode, change output to stderr since we don't want to break json
// parsing on stdout if the user is piping to jq or something.
// XXX: in a future (breaking?) change it might make sense for run-script to
// always output these banners with proc-log.output.error if we think they
// align closer with "logging" instead of "output"
level = output.KEYS.error
}
}
this.#writeOutput(level, meta, ...args)
}
break
}
})
#inputHandler = withMeta((level, meta, ...args) => {
switch (level) {
case input.KEYS.start:
log.pause()
this.#outputState.buffering = true
this.#progress.off()
break
case input.KEYS.end:
log.resume()
output.flush()
this.#progress.resume()
break
case input.KEYS.read: {
// The convention when calling input.read is to pass in a single fn that returns
// the promise to await. resolve and reject are provided by proc-log
const [res, rej, p] = args
return input.start(() => p()
.then(res)
.catch(rej)
// Any call to procLog.input.read will render a prompt to the user, so we always
// add a single newline of output to stdout to move the cursor to the next line
.finally(() => output.standard('')))
}
}
})
// OUTPUT
#writeOutput (level, meta, ...args) {
switch (level) {
case output.KEYS.standard:
this.#write(this.#stdout, {}, ...args)
break
case output.KEYS.error:
this.#write(this.#stderr, {}, ...args)
break
}
}
// LOGS
#tryWriteLog (level, meta, ...args) {
try {
// Also (and this is a really inexcusable kludge), we patch the
// log.warn() method so that when we see a peerDep override
// explanation from Arborist, we can replace the object with a
// highly abbreviated explanation of what's being overridden.
// TODO: this could probably be moved to arborist now that display is refactored
const [heading, message, expl] = args
if (level === log.KEYS.warn && heading === 'ERESOLVE' && expl && typeof expl === 'object') {
this.#writeLog(level, meta, heading, message)
this.#writeLog(level, meta, '', explain(expl, this.#stderrChalk, 2))
return
}
this.#writeLog(level, meta, ...args)
} catch (ex) {
try {
// if it crashed once, it might again!
this.#writeLog(log.KEYS.verbose, meta, '', `attempt to log crashed`, ...args, ex)
} catch (ex2) {
// This happens if the object has an inspect method that crashes so just console.error
// with the errors but don't do anything else that might error again.
// eslint-disable-next-line no-console
console.error(`attempt to log crashed`, ex, ex2)
}
}
}
#writeLog (level, meta, ...args) {
const levelOpts = LEVEL_METHODS[level]
const show = levelOpts.show ?? (({ index }) => levelOpts.index <= index)
const force = meta.force && !this.#silent
if (force || show({ index: this.#levelIndex, timing: this.#timing })) {
// this mutates the array so we can pass args directly to format later
const title = args.shift()
const prefix = [
this.#logColors.heading(this.#heading),
this.#logColors[level](level),
title ? this.#logColors.title(title) : null,
]
this.#write(this.#stderr, { prefix }, ...args)
}
}
}
class Progress {
// Taken from https://github.com/sindresorhus/cli-spinners
// MIT License
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
static dots = { duration: 80, frames: ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'] }
static lines = { duration: 130, frames: ['-', '\\', '|', '/'] }
#stream
#spinner
#enabled = false
#frameIndex = 0
#lastUpdate = 0
#interval
#timeout
// We are rendering if the enabled option is set and we are not waiting for the render timeout
get #rendering () {
return this.#enabled && !this.#timeout
}
// We are spinning if the enabled option is set and the render interval has been set
get #spinning () {
return this.#enabled && this.#interval
}
constructor ({ stream }) {
this.#stream = stream
}
load ({ enabled, unicode }) {
this.#enabled = enabled
this.#spinner = unicode ? Progress.dots : Progress.lines
// Don't render the spinner for short durations
this.#render(200)
}
off () {
if (!this.#enabled) {
return
}
clearTimeout(this.#timeout)
this.#timeout = null
clearInterval(this.#interval)
this.#interval = null
this.#frameIndex = 0
this.#lastUpdate = 0
this.#clearSpinner()
}
resume () {
this.#render()
}
// If we are currently rendering the spinner we clear it
// before writing our line and then re-render the spinner after.
// If not then all we need to do is write the line
write (write) {
if (this.#spinning) {
this.#clearSpinner()
}
write()
if (this.#spinning) {
this.#render()
}
}
#render (ms) {
if (ms) {
this.#timeout = setTimeout(() => {
this.#timeout = null
this.#renderSpinner()
}, ms)
// Make sure this timeout does not keep the process open
this.#timeout.unref()
} else {
this.#renderSpinner()
}
}
#renderSpinner () {
if (!this.#rendering) {
return
}
// We always attempt to render immediately but we only request to move to the next
// frame if it has been longer than our spinner frame duration since our last update
this.#renderFrame(Date.now() - this.#lastUpdate >= this.#spinner.duration)
clearInterval(this.#interval)
this.#interval = setInterval(() => this.#renderFrame(true), this.#spinner.duration)
}
#renderFrame (next) {
if (next) {
this.#lastUpdate = Date.now()
this.#frameIndex++
if (this.#frameIndex >= this.#spinner.frames.length) {
this.#frameIndex = 0
}
}
this.#clearSpinner()
this.#stream.write(this.#spinner.frames[this.#frameIndex])
}
#clearSpinner () {
// Move to the start of the line and clear the rest of the line
this.#stream.cursorTo(0)
this.#stream.clearLine(1)
}
}
module.exports = Display
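
The JSON buffering above hinges on one merging rule in getArrayOrObject: chunks keyed 0,1,2,etc collapse into an array, plain objects merge key by key, and the first non-object wins outright. getArrayOrObject is module-private, so this just restates the two merge cases with plain Object.assign:

// index-keyed chunks become an array
Object.assign([], { 0: 'a' }, { 1: 'b' }) // [ 'a', 'b' ]

// plain objects merge key by key, later writes winning
Object.assign({}, { name: 'x' }, { version: '1.0.0' })
// { name: 'x', version: '1.0.0' }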


@@ -0,0 +1,447 @@
const { format } = require('node:util')
const { resolve } = require('node:path')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const { log } = require('proc-log')
const errorMessage = (er, npm) => {
const summary = []
const detail = []
const files = []
er.message &&= replaceInfo(er.message)
er.stack &&= replaceInfo(er.stack)
switch (er.code) {
case 'ERESOLVE': {
const { report } = require('./explain-eresolve.js')
summary.push(['ERESOLVE', er.message])
detail.push(['', ''])
// XXX(display): error messages are logged so we use the logColor since that is based
// on stderr. This should be handled solely by the display layer so it could also be
// printed to stdout if necessary.
const { explanation, file } = report(er, npm.logChalk, npm.noColorChalk)
detail.push(['', explanation])
files.push(['eresolve-report.txt', file])
break
}
case 'ENOLOCK': {
const cmd = npm.command || ''
summary.push([cmd, 'This command requires an existing lockfile.'])
detail.push([cmd, 'Try creating one first with: npm i --package-lock-only'])
detail.push([cmd, `Original error: ${er.message}`])
break
}
case 'ENOAUDIT':
summary.push(['audit', er.message])
break
case 'ECONNREFUSED':
summary.push(['', er])
detail.push(['', [
'',
'If you are behind a proxy, please make sure that the',
"'proxy' config is set properly. See: 'npm help config'",
].join('\n')])
break
case 'EACCES':
case 'EPERM': {
const isCachePath =
typeof er.path === 'string' && npm.loaded && er.path.startsWith(npm.config.get('cache'))
const isCacheDest =
typeof er.dest === 'string' && npm.loaded && er.dest.startsWith(npm.config.get('cache'))
if (process.platform !== 'win32' && (isCachePath || isCacheDest)) {
// user probably doesn't need this, but still add it to the debug log
log.verbose(er.stack)
summary.push(['', [
'',
'Your cache folder contains root-owned files, due to a bug in',
'previous versions of npm which has since been addressed.',
'',
'To permanently fix this problem, please run:',
` sudo chown -R ${process.getuid()}:${process.getgid()} "${npm.config.get('cache')}"`,
].join('\n')])
} else {
summary.push(['', er])
detail.push(['', [
'',
'The operation was rejected by your operating system.',
...process.platform === 'win32' ? [
"It's possible that the file was already in use (by a text editor or antivirus),",
'or that you lack permissions to access it.',
] : [
'It is likely you do not have the permissions to access this file as the current user',
],
'',
'If you believe this might be a permissions issue, please double-check the',
'permissions of the file and its containing directories, or try running',
'the command again as root/Administrator.',
].join('\n')])
}
break
}
case 'ENOGIT':
summary.push(['', er.message])
detail.push(['', [
'',
'Failed using git.',
'Please check if you have git installed and in your PATH.',
].join('\n')])
break
case 'EJSONPARSE':
// Check whether we ran into a conflict in our own package.json
if (er.path === resolve(npm.prefix, 'package.json')) {
const { isDiff } = require('parse-conflict-json')
const txt = require('node:fs').readFileSync(er.path, 'utf8').replace(/\r\n/g, '\n')
if (isDiff(txt)) {
detail.push(['', [
'Merge conflict detected in your package.json.',
'',
'Please resolve the package.json conflict and retry.',
].join('\n')])
break
}
}
summary.push(['JSON.parse', er.message])
detail.push(['JSON.parse', [
'Failed to parse JSON data.',
'Note: package.json must be actual JSON, not just JavaScript.',
].join('\n')])
break
case 'EOTP':
case 'E401':
// E401 is for places where we accidentally neglect OTP stuff
if (er.code === 'EOTP' || /one-time pass/.test(er.message)) {
summary.push(['', 'This operation requires a one-time password from your authenticator.'])
detail.push(['', [
'You can provide a one-time password by passing --otp=<code> to the command you ran.',
'If you already provided a one-time password then it is likely that you either typoed',
'it, or it timed out. Please try again.',
].join('\n')])
} else {
// npm ERR! code E401
// npm ERR! Unable to authenticate, need: Basic
const auth = !er.headers || !er.headers['www-authenticate']
? []
: er.headers['www-authenticate'].map(au => au.split(/[,\s]+/))[0]
if (auth.includes('Bearer')) {
summary.push(['',
'Unable to authenticate, your authentication token seems to be invalid.',
])
detail.push(['', [
'To correct this please try logging in again with:',
' npm login',
].join('\n')])
} else if (auth.includes('Basic')) {
summary.push(['', 'Incorrect or missing password.'])
detail.push(['', [
'If you were trying to login, change your password, create an',
'authentication token or enable two-factor authentication then',
'that means you likely typed your password in incorrectly.',
'Please try again, or recover your password at:',
' https://www.npmjs.com/forgot',
'',
'If you were doing some other operation then your saved credentials are',
'probably out of date. To correct this please try logging in again with:',
' npm login',
].join('\n')])
} else {
summary.push(['', er.message || er])
}
}
break
case 'E404':
// There's no need to have 404 in the message as well.
summary.push(['404', er.message.replace(/^404\s+/, '')])
if (er.pkgid && er.pkgid !== '-') {
const pkg = er.pkgid.replace(/(?!^)@.*$/, '')
detail.push(['404', ''])
detail.push(['404', '', `'${replaceInfo(er.pkgid)}' is not in this registry.`])
const nameValidator = require('validate-npm-package-name')
const valResult = nameValidator(pkg)
if (!valResult.validForNewPackages) {
detail.push(['404', 'This package name is not valid, because', ''])
const errorsArray = [...(valResult.errors || []), ...(valResult.warnings || [])]
errorsArray.forEach((item, idx) => detail.push(['404', ' ' + (idx + 1) + '. ' + item]))
}
detail.push(['404', ''])
detail.push(['404', 'Note that you can also install from a'])
detail.push(['404', 'tarball, folder, http url, or git url.'])
}
break
case 'EPUBLISHCONFLICT':
summary.push(['publish fail', 'Cannot publish over existing version.'])
detail.push(['publish fail', "Update the 'version' field in package.json and try again."])
detail.push(['publish fail', ''])
detail.push(['publish fail', 'To automatically increment version numbers, see:'])
detail.push(['publish fail', ' npm help version'])
break
case 'EISGIT':
summary.push(['git', er.message])
summary.push(['git', ` ${er.path}`])
detail.push(['git', [
'Refusing to remove it. Update manually,',
'or move it out of the way first.',
].join('\n')])
break
case 'EBADDEVENGINES': {
const { current, required } = er
summary.push(['EBADDEVENGINES', er.message])
detail.push(['EBADDEVENGINES', { current, required }])
break
}
case 'EBADPLATFORM': {
const actual = er.current
const expected = { ...er.required }
const checkedKeys = []
for (const key in expected) {
if (Array.isArray(expected[key]) && expected[key].length > 0) {
expected[key] = expected[key].join(',')
checkedKeys.push(key)
} else if (expected[key] === undefined ||
Array.isArray(expected[key]) && expected[key].length === 0) {
delete expected[key]
delete actual[key]
} else {
checkedKeys.push(key)
}
}
const longestKey = Math.max(...checkedKeys.map((key) => key.length))
const detailEntry = []
for (const key of checkedKeys) {
const padding = key.length === longestKey
? 1
: 1 + (longestKey - key.length)
// padding + 1 because 'actual' is longer than 'valid'
detailEntry.push(`Valid ${key}:${' '.repeat(padding + 1)}${expected[key]}`)
detailEntry.push(`Actual ${key}:${' '.repeat(padding)}${actual[key]}`)
}
summary.push(['notsup', format(
'Unsupported platform for %s: wanted %j (current: %j)',
er.pkgid,
expected,
actual
)])
detail.push(['notsup', detailEntry.join('\n')])
break
}
case 'EEXIST':
summary.push(['', er.message])
summary.push(['', 'File exists: ' + (er.dest || er.path)])
detail.push(['', 'Remove the existing file and try again, or run npm'])
detail.push(['', 'with --force to overwrite files recklessly.'])
break
case 'ENEEDAUTH':
summary.push(['need auth', er.message])
detail.push(['need auth', 'You need to authorize this machine using `npm adduser`'])
break
case 'ECONNRESET':
case 'ENOTFOUND':
case 'ETIMEDOUT':
case 'ERR_SOCKET_TIMEOUT':
case 'EAI_FAIL':
summary.push(['network', er.message])
detail.push(['network', [
'This is a problem related to network connectivity.',
'In most cases you are behind a proxy or have bad network settings.',
'',
'If you are behind a proxy, please make sure that the',
"'proxy' config is set properly. See: 'npm help config'",
].join('\n')])
break
case 'ETARGET':
summary.push(['notarget', er.message])
detail.push(['notarget', [
'In most cases you or one of your dependencies are requesting',
"a package version that doesn't exist.",
].join('\n')])
break
case 'E403':
summary.push(['403', er.message])
detail.push(['403', [
'In most cases, you or one of your dependencies are requesting',
'a package version that is forbidden by your security policy, or',
'on a server you do not have access to.',
].join('\n')])
break
case 'EBADENGINE':
summary.push(['engine', er.message])
summary.push(['engine', 'Not compatible with your version of node/npm: ' + er.pkgid])
detail.push(['notsup', [
'Not compatible with your version of node/npm: ' + er.pkgid,
'Required: ' + JSON.stringify(er.required),
'Actual: ' +
JSON.stringify({ npm: npm.version, node: process.version }),
].join('\n')])
break
case 'ENOSPC':
summary.push(['nospc', er.message])
detail.push(['nospc', [
'There appears to be insufficient space on your system to finish.',
'Clear up some disk space and try again.',
].join('\n')])
break
case 'EROFS':
summary.push(['rofs', er.message])
detail.push(['rofs', [
'Often virtualized file systems, or other file systems',
"that don't support symlinks, give this error.",
].join('\n')])
break
case 'ENOENT':
summary.push(['enoent', er.message])
detail.push(['enoent', [
'This is related to npm not being able to find a file.',
er.file ? `\nCheck if the file '${er.file}' is present.` : '',
].join('\n')])
break
case 'EMISSINGARG':
case 'EUNKNOWNTYPE':
case 'EINVALIDTYPE':
case 'ETOOMANYARGS':
summary.push(['typeerror', er.stack])
detail.push(['typeerror', [
'This is an error with npm itself. Please report this error at:',
' https://github.com/npm/cli/issues',
].join('\n')])
break
default:
summary.push(['', er.message || er])
if (er.cause) {
detail.push(['cause', er.cause.message])
}
if (er.signal) {
detail.push(['signal', er.signal])
}
if (er.cmd && Array.isArray(er.args)) {
detail.push(['command', ...[er.cmd, ...er.args.map(replaceInfo)]])
}
if (er.stdout) {
detail.push(['', er.stdout.trim()])
}
if (er.stderr) {
detail.push(['', er.stderr.trim()])
}
break
}
return {
summary,
detail,
files,
}
}
const getExitCodeFromError = (err) => {
if (typeof err?.errno === 'number') {
return err.errno
} else if (typeof err?.code === 'number') {
return err.code
}
}
const getError = (err, { npm, command, pkg }) => {
// if we got a command that just shells out to something else, then it
// will presumably print its own errors and exit with a proper status
// code if there's a problem. If we got an error with a code=0, then...
// something else went wrong along the way, so maybe an npm problem?
if (command?.constructor?.isShellout && typeof err.code === 'number' && err.code) {
return {
exitCode: err.code,
suppressError: true,
}
}
// XXX: we should stop throwing strings
if (typeof err === 'string') {
return {
exitCode: 1,
suppressError: true,
summary: [['', err]],
}
}
// XXX: we should stop throwing other non-errors
if (!(err instanceof Error)) {
return {
exitCode: 1,
suppressError: true,
summary: [['weird error', err]],
}
}
if (err.code === 'EUNKNOWNCOMMAND') {
const suggestions = require('./did-you-mean.js')(pkg, err.command)
return {
exitCode: 1,
suppressError: true,
standard: [
`Unknown command: "${err.command}"`,
suggestions,
'To see a list of supported npm commands, run:',
' npm help',
],
}
}
// Anything after this is not suppressed and gets more logged information.
// Add a code to the error if it doesn't have one, and mutate some properties
// so they contain redacted information
err.code ??= err.message.match(/^(?:Error: )?(E[A-Z]+)/)?.[1]
// this mutates the error and redacts stack/message
const { summary, detail, files } = errorMessage(err, npm)
return {
err,
code: err.code,
exitCode: getExitCodeFromError(err) || 1,
suppressError: false,
summary,
detail,
files,
verbose: ['type', 'stack', 'statusCode', 'pkgid']
.filter(k => err[k])
.map(k => [k, replaceInfo(err[k])]),
error: ['code', 'syscall', 'file', 'path', 'dest', 'errno']
.filter(k => err[k])
.map(k => [k, err[k]]),
}
}
module.exports = {
getExitCodeFromError,
errorMessage,
getError,
}
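
Two details here are easy to miss: an error without a code gets one sniffed from the start of its message, and a numeric errno (or numeric code) becomes the process exit code. A quick check of both, using the exported helper and the same regex as above (require path illustrative):

const { getExitCodeFromError } = require('./error-message.js')

// the code-sniffing regex applied to a bare error message
'Error: EACCES: permission denied'.match(/^(?:Error: )?(E[A-Z]+)/)?.[1]
// 'EACCES'

getExitCodeFromError({ errno: 217 })   // 217
getExitCodeFromError({ code: 'E404' }) // undefined, so callers fall back to 1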


@@ -0,0 +1,103 @@
const { relative } = require('node:path')
const explainNode = (node, depth, chalk) =>
printNode(node, chalk) +
explainDependents(node, depth, chalk) +
explainLinksIn(node, depth, chalk)
const colorType = (type, chalk) => {
const style = type === 'extraneous' ? chalk.red
: type === 'dev' ? chalk.blue
: type === 'optional' ? chalk.magenta
: type === 'peer' ? chalk.magentaBright
: type === 'bundled' ? chalk.underline.cyan
: type === 'workspace' ? chalk.blueBright
: type === 'overridden' ? chalk.dim
: /* istanbul ignore next */ s => s
return style(type)
}
const printNode = (node, chalk) => {
const extra = []
for (const meta of ['extraneous', 'dev', 'optional', 'peer', 'bundled', 'overridden']) {
if (node[meta]) {
extra.push(` ${colorType(meta, chalk)}`)
}
}
const pkgid = node.isWorkspace
? chalk.blueBright(`${node.name}@${node.version}`)
: `${node.name}@${node.version}`
return `${pkgid}${extra.join('')}` +
(node.location ? chalk.dim(`\n${node.location}`) : '')
}
const explainLinksIn = ({ linksIn }, depth, chalk) => {
if (!linksIn || !linksIn.length || depth <= 0) {
return ''
}
const messages = linksIn.map(link => explainNode(link, depth - 1, chalk))
const str = '\n' + messages.join('\n')
return str.split('\n').join('\n ')
}
const explainDependents = ({ dependents }, depth, chalk) => {
if (!dependents || !dependents.length || depth <= 0) {
return ''
}
const max = Math.ceil(depth / 2)
const messages = dependents.slice(0, max)
.map(edge => explainEdge(edge, depth, chalk))
// show just the names of the deps that overflowed the list, up to about 50 characters
if (dependents.length > max) {
let len = 0
const maxLen = 50
const showNames = []
for (let i = max; i < dependents.length; i++) {
const { from: { name: depName = 'the root project' } } = dependents[i]
len += depName.length
if (len >= maxLen && i < dependents.length - 1) {
showNames.push('...')
break
}
showNames.push(depName)
}
const show = `(${showNames.join(', ')})`
messages.push(`${dependents.length - max} more ${show}`)
}
const str = '\n' + messages.join('\n')
return str.split('\n').join('\n ')
}
const explainEdge = ({ name, type, bundled, from, spec, rawSpec, overridden }, depth, chalk) => {
let dep = type === 'workspace'
? chalk.bold(relative(from.location, spec.slice('file:'.length)))
: `${name}@"${spec}"`
if (overridden) {
dep = `${colorType('overridden', chalk)} ${dep} (was "${rawSpec}")`
}
const fromMsg = ` from ${explainFrom(from, depth, chalk)}`
return (type === 'prod' ? '' : `${colorType(type, chalk)} `) +
(bundled ? `${colorType('bundled', chalk)} ` : '') +
`${dep}${fromMsg}`
}
const explainFrom = (from, depth, chalk) => {
if (!from.name && !from.version) {
return 'the root project'
}
return printNode(from, chalk) +
explainDependents(from, depth - 1, chalk) +
explainLinksIn(from, depth - 1, chalk)
}
module.exports = { explainNode, printNode, explainEdge }
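
printNode only touches the chalk styles it actually uses, so an identity chalk is enough to see the output shape. A sketch with a hand-built node (all values illustrative):

const { printNode } = require('./explain-dep.js')

// pass-through stand-in for the chalk styles used above
const id = (s) => s
const noChalk = {
  red: id, blue: id, magenta: id, magentaBright: id,
  blueBright: id, dim: id, underline: { cyan: id },
}

const node = { name: 'foo', version: '1.2.3', dev: true, location: 'node_modules/foo' }
printNode(node, noChalk)
// 'foo@1.2.3 dev\nnode_modules/foo'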


@@ -0,0 +1,71 @@
// this is called when an ERESOLVE error is caught in the exit-handler,
// or when there's a log.warn('eresolve', msg, explanation), to turn it
// into a human-intelligible explanation of what's wrong and how to fix.
const { explainEdge, explainNode, printNode } = require('./explain-dep.js')
// expl is an explanation object that comes from Arborist.
// Depth is how far we want to descend into the object when making a report.
// The full report (ie, depth=Infinity) is always written to the cache folder
// at ${cache}/eresolve-report.txt along with the full json.
const explain = (expl, chalk, depth) => {
const { edge, dep, current, peerConflict, currentEdge } = expl
const out = []
const whileInstalling = dep && dep.whileInstalling ||
current && current.whileInstalling ||
edge && edge.from && edge.from.whileInstalling
if (whileInstalling) {
out.push('While resolving: ' + printNode(whileInstalling, chalk))
}
// it "should" be impossible for an ERESOLVE explanation to lack both
// current and currentEdge, but better to have a less helpful error
// than a crashing failure.
if (current) {
out.push('Found: ' + explainNode(current, depth, chalk))
} else if (peerConflict && peerConflict.current) {
out.push('Found: ' + explainNode(peerConflict.current, depth, chalk))
} else if (currentEdge) {
out.push('Found: ' + explainEdge(currentEdge, depth, chalk))
} else /* istanbul ignore else - should always have one */ if (edge) {
out.push('Found: ' + explainEdge(edge, depth, chalk))
}
out.push('\nCould not resolve dependency:\n' +
explainEdge(edge, depth, chalk))
if (peerConflict) {
const heading = '\nConflicting peer dependency:'
const pc = explainNode(peerConflict.peer, depth, chalk)
out.push(heading + ' ' + pc)
}
return out.join('\n')
}
// generate a full verbose report and tell the user how to fix it
const report = (expl, chalk, noColorChalk) => {
const flags = [
expl.strictPeerDeps ? '--no-strict-peer-deps' : '',
'--force',
'--legacy-peer-deps',
].filter(Boolean)
const or = (arr) => arr.length <= 2
? arr.join(' or ') :
arr.map((v, i, l) => i + 1 === l.length ? `or ${v}` : v).join(', ')
const fix = `Fix the upstream dependency conflict, or retry
this command with ${or(flags)}
to accept an incorrect (and potentially broken) dependency resolution.`
return {
explanation: `${explain(expl, chalk, 4)}\n\n${fix}`,
file: `# npm resolution error report\n\n${explain(expl, noColorChalk, Infinity)}\n\n${fix}`,
}
}
module.exports = {
explain,
report,
}
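
The retry hint joins the applicable flags with a serial "or". Since or() is local to report(), the same behavior restated standalone:

const or = (arr) => arr.length <= 2
  ? arr.join(' or ')
  : arr.map((v, i, l) => i + 1 === l.length ? `or ${v}` : v).join(', ')

or(['--force', '--legacy-peer-deps'])
// '--force or --legacy-peer-deps'
or(['--no-strict-peer-deps', '--force', '--legacy-peer-deps'])
// '--no-strict-peer-deps, --force, or --legacy-peer-deps'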


@@ -0,0 +1,30 @@
// Convert bytes to printable output, for file reporting in tarballs
// Only supports up to GB because that's way larger than anything the registry
// supports anyways.
const formatBytes = (bytes, space = true) => {
let spacer = ''
if (space) {
spacer = ' '
}
if (bytes < 1000) {
// B
return `${bytes}${spacer}B`
}
if (bytes < 1000000) {
// kB
return `${(bytes / 1000).toFixed(1)}${spacer}kB`
}
if (bytes < 1000000000) {
// MB
return `${(bytes / 1000000).toFixed(1)}${spacer}MB`
}
// GB
return `${(bytes / 1000000000).toFixed(1)}${spacer}GB`
}
module.exports = formatBytes
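
Note the thresholds are decimal (powers of 1000, not 1024). A few sample values, following directly from the branches above (require path illustrative):

const formatBytes = require('./format-bytes.js')

formatBytes(999)         // '999 B'
formatBytes(1234)        // '1.2 kB'
formatBytes(5600000)     // '5.6 MB'
formatBytes(1234, false) // '1.2kB' (no space before the unit)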


@@ -0,0 +1,174 @@
/* eslint-disable max-len */
const { stripVTControlCharacters: strip } = require('node:util')
const { Minipass } = require('minipass')
// This module consumes package data in the following format:
//
// {
// name: String,
// description: String,
// maintainers: [{ username: String, email: String }],
// keywords: String | [String],
// version: String,
// date: Date // can be null,
// }
//
// The returned stream will format this package data
// into a byte stream of formatted, displayable output.
function filter (data, exclude) {
const words = [data.name]
.concat(data.maintainers.map(m => m.username))
.concat(data.keywords || [])
.map(f => f?.trim?.())
.filter(Boolean)
.join(' ')
.toLowerCase()
if (exclude.find(pattern => {
// Treats both /foo and /foo/ as regex searches
if (pattern.startsWith('/')) {
if (pattern.endsWith('/')) {
pattern = pattern.slice(0, -1)
}
return words.match(new RegExp(pattern.slice(1)))
}
return words.includes(pattern)
})) {
return false
}
return true
}
module.exports = (opts) => {
return opts.json ? new JSONOutputStream(opts) : new TextOutputStream(opts)
}
class JSONOutputStream extends Minipass {
#didFirst = false
#exclude
constructor (opts) {
super()
this.#exclude = opts.exclude
}
write (obj) {
if (!filter(obj, this.#exclude)) {
return
}
if (!this.#didFirst) {
super.write('[\n')
this.#didFirst = true
} else {
super.write('\n,\n')
}
return super.write(JSON.stringify(obj))
}
end () {
super.write(this.#didFirst ? ']\n' : '\n[]\n')
super.end()
}
}
class TextOutputStream extends Minipass {
#args
#chalk
#exclude
#parseable
constructor (opts) {
super()
this.#args = opts.args.map(s => s.toLowerCase()).filter(Boolean)
this.#chalk = opts.npm.chalk
this.#exclude = opts.exclude
this.#parseable = opts.parseable
}
write (data) {
if (!filter(data, this.#exclude)) {
return
}
// Normalize
const pkg = {
authors: data.maintainers.map((m) => `${strip(m.username)}`).join(' '),
publisher: strip(data.publisher?.username || ''),
date: data.date ? data.date.toISOString().slice(0, 10) : 'prehistoric',
description: strip(data.description ?? ''),
keywords: [],
name: strip(data.name),
version: data.version,
}
if (Array.isArray(data.keywords)) {
pkg.keywords = data.keywords.map(strip)
} else if (typeof data.keywords === 'string') {
pkg.keywords = strip(data.keywords.replace(/[,\s]+/, ' ')).split(' ')
}
let output
if (this.#parseable) {
// note: the normalized object above uses `authors` (plural); `pkg.author`
// would be undefined and silently dropped by the filter(Boolean) below
output = [pkg.name, pkg.description, pkg.authors, pkg.date, pkg.version, pkg.keywords]
.filter(Boolean)
.map(col => ('' + col).replace(/\t/g, ' ')).join('\t')
return super.write(output)
}
const keywords = pkg.keywords.map(k => {
if (this.#args.includes(k)) {
return this.#chalk.cyan(k)
} else {
return k
}
}).join(' ')
let description = []
for (const arg of this.#args) {
const finder = pkg.description.toLowerCase().split(arg.toLowerCase())
let p = 0
for (const f of finder) {
description.push(pkg.description.slice(p, p + f.length))
const word = pkg.description.slice(p + f.length, p + f.length + arg.length)
description.push(this.#chalk.cyan(word))
p += f.length + arg.length
}
}
description = description.filter(Boolean)
let name = pkg.name
if (this.#args.includes(pkg.name)) {
name = this.#chalk.cyan(pkg.name)
} else {
name = []
for (const arg of this.#args) {
const finder = pkg.name.toLowerCase().split(arg.toLowerCase())
let p = 0
for (const f of finder) {
name.push(pkg.name.slice(p, p + f.length))
const word = pkg.name.slice(p + f.length, p + f.length + arg.length)
name.push(this.#chalk.cyan(word))
p += f.length + arg.length
}
}
name = this.#chalk.blue(name.join(''))
}
if (description.length) {
output = `${name}\n${description.join('')}\n`
} else {
output = `${name}\n`
}
if (pkg.publisher) {
output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.blue(pkg.publisher)}\n`
} else {
output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.yellow('???')}\n`
}
output += `Maintainers: ${pkg.authors}\n`
if (keywords) {
output += `Keywords: ${keywords}\n`
}
output += `${this.#chalk.blue(`https://npm.im/${pkg.name}`)}\n`
return super.write(output)
}
}
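
The exclude matching in filter() treats a pattern starting with "/" as a regular expression (with an optional trailing "/") and anything else as a plain substring test against the lowercased name, maintainer usernames, and keywords. filter is not exported, so the rule is restated standalone (values illustrative):

// the haystack is 'name maintainers... keywords', joined and lowercased
const words = 'webpack alice bundler build'

// '/bundl/' and '/bundl' are both regex searches
new RegExp('bundl').test(words) // true, so the package is excluded

// any other pattern is a literal substring check
words.includes('build') // true, excluded
words.includes('gulp')  // false, kept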


@@ -0,0 +1,50 @@
const { formatWithOptions: baseFormatWithOptions } = require('node:util')
// These are most assuredly not a mistake
// https://eslint.org/docs/latest/rules/no-control-regex
// \x00 through \x1f, \x7f through \x9f, not including \x09 \x0a \x0b \x0d
/* eslint-disable-next-line no-control-regex */
const HAS_C01 = /[\x00-\x08\x0c\x0e-\x1f\x7f-\x9f]/
// Allows everything up to '[38;5;255m' in 8 bit notation
const ALLOWED_SGR = /^\[[0-9;]{0,8}m/
// '[38;5;255m'.length
const SGR_MAX_LEN = 10
// Strips all ANSI C0 and C1 control characters (except for SGR up to 8 bit)
function STRIP_C01 (str) {
if (!HAS_C01.test(str)) {
return str
}
let result = ''
for (let i = 0; i < str.length; i++) {
const char = str[i]
const code = char.charCodeAt(0)
if (!HAS_C01.test(char)) {
// Most characters are in this set so continue early if we can
result = `${result}${char}`
} else if (code === 27 && ALLOWED_SGR.test(str.slice(i + 1, i + SGR_MAX_LEN + 1))) {
// \x1b with allowed SGR
result = `${result}\x1b`
} else if (code <= 31) {
// escape all other C0 control characters besides \x7f
result = `${result}^${String.fromCharCode(code + 64)}`
} else {
// hasC01 ensures this is now a C1 control character or \x7f
result = `${result}^${String.fromCharCode(code - 64)}`
}
}
return result
}
const formatWithOptions = ({ prefix: prefixes = [], eol = '\n', ...options }, ...args) => {
const prefix = prefixes.filter(p => p != null).join(' ')
const formatted = STRIP_C01(baseFormatWithOptions(options, ...args))
// Splitting could be changed to only `\n` once we are sure we only emit unix newlines.
// The eol param to this function will put the correct newlines in place for the returned string.
const lines = formatted.split(/\r?\n/)
return lines.reduce((acc, l) => `${acc}${prefix}${prefix && l ? ' ' : ''}${l}${eol}`, '')
}
module.exports = { formatWithOptions }
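
A quick check of the two things this adds over util.formatWithOptions: a joined prefix on every line, and caret-escaping of C0/C1 control characters (SGR color sequences are allowed through). Both results follow directly from the code above (require path illustrative):

const { formatWithOptions } = require('./format.js')

formatWithOptions({ prefix: ['npm', 'warn'] }, 'two\nlines')
// 'npm warn two\nnpm warn lines\n'

formatWithOptions({}, 'bell\x07')
// 'bell^G\n' (\x07 is a C0 control character, escaped as ^G)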


@@ -0,0 +1,26 @@
const npmFetch = require('npm-registry-fetch')
module.exports = async (npm, opts) => {
const { registry } = opts
// First, check if we have a user/pass-based auth
const creds = npm.config.getCredentialsByURI(registry)
if (creds.username) {
return creds.username
}
// No username, but we have other credentials; fetch the username from registry
if (creds.token || creds.certfile && creds.keyfile) {
const registryData = await npmFetch.json('/-/whoami', { ...opts })
if (typeof registryData?.username === 'string') {
return registryData.username
}
}
// At this point, even if they have a credentials object, it doesn't have a
// valid token.
throw Object.assign(
new Error('This command requires you to be logged in.'),
{ code: 'ENEEDAUTH' }
)
}


@@ -0,0 +1,54 @@
const { resolve, relative } = require('node:path')
const mapWorkspaces = require('@npmcli/map-workspaces')
const { minimatch } = require('minimatch')
const pkgJson = require('@npmcli/package-json')
// minimatch wants forward slashes only for glob patterns
const globify = pattern => pattern.split('\\').join('/')
// Returns a Map of workspace paths indexed by workspace name
// { foo => '/path/to/foo' }
const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom }) => {
// TODO we need a better error to be bubbled up here if this call fails
const { content: pkg } = await pkgJson.normalize(path)
const workspaces = await mapWorkspaces({ cwd: path, pkg })
let res = new Map()
if (includeWorkspaceRoot) {
res.set(pkg.name, path)
}
if (!filters.length) {
res = new Map([...res, ...workspaces])
}
for (const filterArg of filters) {
for (const [workspaceName, workspacePath] of workspaces.entries()) {
let relativePath = relative(relativeFrom, workspacePath)
if (filterArg.startsWith('./')) {
relativePath = `./${relativePath}`
}
const relativeFilter = relative(path, filterArg)
if (filterArg === workspaceName
|| resolve(relativeFrom, filterArg) === workspacePath
|| minimatch(relativePath, `${globify(relativeFilter)}/*`)
|| minimatch(relativePath, `${globify(filterArg)}/*`)
) {
res.set(workspaceName, workspacePath)
}
}
}
if (!res.size) {
let msg = '!'
if (filters.length) {
msg = `:\n ${filters.reduce(
(acc, filterArg) => `${acc} --workspace=${filterArg}`, '')}`
}
throw new Error(`No workspaces found${msg}`)
}
return res
}
module.exports = getWorkspaces
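
A workspace survives the filter when the argument matches its name, resolves to its exact path, or glob-matches its relative path with "/*" appended. The path rule restated with minimatch (paths illustrative):

const { minimatch } = require('minimatch')

// --workspace=packages keeps every workspace directly under packages/;
// the code appends '/*' to the filter before matching relative paths
minimatch('packages/foo', 'packages/*')     // true, kept
minimatch('packages/foo/bar', 'packages/*') // false, one level only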


@@ -0,0 +1,45 @@
const { resolve } = require('node:path')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const installedDeep = async (npm) => {
const Arborist = require('@npmcli/arborist')
const {
depth,
global,
prefix,
workspacesEnabled,
} = npm.flatOptions
const getValues = (tree) =>
[...tree.inventory.values()]
.filter(i => i.location !== '' && !i.isRoot)
.filter(i => (i.depth - 1) <= depth)
.sort((a, b) => (a.depth - b.depth) || localeCompare(a.name, b.name))
const res = new Set()
const gArb = new Arborist({
global: true,
path: resolve(npm.globalDir, '..'),
workspacesEnabled,
})
const gTree = await gArb.loadActual({ global: true })
for (const node of getValues(gTree)) {
res.add(global ? node.name : [node.name, '-g'])
}
if (!global) {
const arb = new Arborist({ global: false, path: prefix, workspacesEnabled })
const tree = await arb.loadActual()
for (const node of getValues(tree)) {
res.add(node.name)
}
}
return [...res]
}
module.exports = installedDeep


@@ -0,0 +1,19 @@
const { readdirScoped } = require('@npmcli/fs')
const installedShallow = async (npm, opts) => {
const names = async global => {
const paths = await readdirScoped(global ? npm.globalDir : npm.localDir)
return paths.map(p => p.replace(/\\/g, '/'))
}
const { conf: { argv: { remain } } } = opts
if (remain.length > 3) {
return null
}
const { global } = npm.flatOptions
const locals = global ? [] : await names(false)
const globals = (await names(true)).map(n => global ? n : `${n} -g`)
return [...locals, ...globals]
}
module.exports = installedShallow


@@ -0,0 +1,4 @@
const isWindowsShell = (process.platform === 'win32') &&
!/^MINGW(32|64)$/.test(process.env.MSYSTEM) && process.env.TERM !== 'cygwin'
exports.isWindowsShell = isWindowsShell


@@ -0,0 +1,261 @@
const os = require('node:os')
const { join, dirname, basename } = require('node:path')
const fsMiniPass = require('fs-minipass')
const fs = require('node:fs/promises')
const { log } = require('proc-log')
const { formatWithOptions } = require('./format')
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
class LogFiles {
// Default to an array so we can buffer
// initial writes before we know the cache location
#logStream = []
// We cap log files at a certain number of log events per file.
// Note that each log event can write more than one line to the
// file. Then we rotate log files once this number of events is reached
#MAX_LOGS_PER_FILE = null
// Now that we write logs continuously we need to have a backstop
// here for infinite loops that still log. This is also partially handled
// by the config.get('max-files') option, but this is a failsafe to
// prevent runaway log file creation
#MAX_FILES_PER_PROCESS = null
#fileLogCount = 0
#totalLogCount = 0
#path = null
#logsMax = null
#files = []
#timing = false
constructor ({
maxLogsPerFile = 50_000,
maxFilesPerProcess = 5,
} = {}) {
this.#MAX_LOGS_PER_FILE = maxLogsPerFile
this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
this.on()
}
on () {
process.on('log', this.#logHandler)
}
off () {
process.off('log', this.#logHandler)
this.#endStream()
}
load ({ command, path, logsMax = Infinity, timing } = {}) {
if (['completion'].includes(command)) {
return
}
// dir is user configurable and is required to exist so
// this can error if the dir is missing or not configured correctly
this.#path = path
this.#logsMax = logsMax
this.#timing = timing
// Log stream has already ended
if (!this.#logStream) {
return
}
log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)
// Write the contents of our array buffer to our new file stream and
// set that as the new log stream for future writes
// if logs max is 0 then the user does not want a log file
if (this.#logsMax > 0) {
const initialFile = this.#openLogFile()
if (initialFile) {
for (const item of this.#logStream) {
const formatted = this.#formatLogItem(...item)
if (formatted !== null) {
initialFile.write(formatted)
}
}
this.#logStream = initialFile
}
}
log.verbose('logfile', this.files[0] || 'no logfile created')
// Kick off the cleaning process, even if we aren't writing a logfile.
// This is async but it will always ignore the current logfile
// Return the result so it can be awaited in tests
return this.#cleanLogs()
}
get files () {
return this.#files
}
get #isBuffered () {
return Array.isArray(this.#logStream)
}
#endStream (output) {
if (this.#logStream && !this.#isBuffered) {
this.#logStream.end(output)
this.#logStream = null
}
}
#logHandler = (level, ...args) => {
// Ignore pause and resume events since we
// write everything to the log file
if (level === 'pause' || level === 'resume') {
return
}
// If the stream is ended then do nothing
if (!this.#logStream) {
return
}
if (this.#isBuffered) {
// Can't do anything but buffer the output if we don't
// have a file stream yet
this.#logStream.push([level, ...args])
return
}
const logOutput = this.#formatLogItem(level, ...args)
if (logOutput === null) {
return
}
// Open a new log file if we've written too many logs to this one
if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
// Write last chunk to the file and close it
this.#endStream(logOutput)
if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
// but if it's way too many then we just stop listening
this.off()
} else {
// otherwise we are ready for a new file for the next event
this.#logStream = this.#openLogFile()
}
} else {
this.#logStream.write(logOutput)
}
}
#formatLogItem (level, title, ...args) {
// Only write timing logs to the logfile if explicitly requested
if (level === log.KEYS.timing && !this.#timing) {
return null
}
this.#fileLogCount += 1
const prefix = [this.#totalLogCount++, level, title || null]
return formatWithOptions({ prefix, eol: os.EOL, colors: false }, ...args)
}
#getLogFilePath (count = '') {
return `${this.#path}debug-${count}.log`
}
#openLogFile () {
// Count in filename will be 0 indexed
const count = this.#files.length
try {
// Pad with zeros so that our log files are always sorted properly
// We never want to write files ending in `-9.log` and `-10.log` because
// log file cleaning is done by deleting the oldest so in this example
// `-10.log` would be deleted next
const f = this.#getLogFilePath(padZero(count, this.#MAX_FILES_PER_PROCESS))
// Some effort was made to make this async, but we need to write logs
// during process.on('exit') which has to be synchronous. So in order
// to never drop log messages, it is easiest to make it sync all the time,
// and this was measured to be about 1.5% slower for 40k lines of output
const logStream = new fsMiniPass.WriteStreamSync(f, { flags: 'a' })
if (count > 0) {
// Reset file log count if we are opening
// after our first file
this.#fileLogCount = 0
}
this.#files.push(logStream.path)
return logStream
} catch (e) {
// If the user has a readonly logdir then we don't want to
// warn this on every command so it should be verbose
log.verbose('logfile', `could not be created: ${e}`)
}
}
async #cleanLogs () {
// module to clean out the old log files
// this is a best-effort attempt. if a rm fails, we just
// log a message about it and move on. We do return a
// Promise that succeeds when we've tried to delete everything,
// just for the benefit of testing this function properly.
try {
const logPath = this.#getLogFilePath()
const patternFileName = basename(logPath)
// normalize digits so differently numbered log files map to the same pattern
.replace(/\d/g, 'd')
// Handle the old (prior to 8.2.0) log file names which did not have a
// counter suffix
.replace('-.log', '')
let files = await fs.readdir(
dirname(logPath), {
withFileTypes: true,
encoding: 'utf-8',
})
files = files.sort((a, b) => basename(a.name).localeCompare(basename(b.name), 'en'))
const logFiles = []
for (const file of files) {
if (!file.isFile()) {
continue
}
const genericFileName = file.name.replace(/\d/g, 'd')
const filePath = join(dirname(logPath), basename(file.name))
// Always ignore the currently written files
if (
genericFileName.includes(patternFileName)
&& genericFileName.endsWith('.log')
&& !this.#files.includes(filePath)
) {
logFiles.push(filePath)
}
}
const toDelete = logFiles.length - this.#logsMax
if (toDelete <= 0) {
return
}
log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)
for (const file of logFiles.slice(0, toDelete)) {
try {
await fs.rm(file, { force: true })
} catch (e) {
log.silly('logfile', 'error removing log file', file, e)
}
}
} catch (e) {
// Disable cleanup failure warnings when log writing is disabled
if (this.#logsMax > 0) {
log.verbose('logfile', 'error cleaning log files', e)
}
} finally {
log.silly('logfile', 'done cleaning log files')
}
}
}
module.exports = LogFiles
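// Hedged usage sketch (illustrative only, not part of npm): the CLI creates
// one LogFiles instance early, then calls load() once the log directory is
// known; `logDir` below is a hypothetical path prefix.
//
//   const logFile = new LogFiles()
//   logFile.load({ command: 'install', path: logDir, logsMax: 10, timing: false })
//   process.emit('log', 'verbose', 'title', 'hello') // written (or buffered)
//   logFile.off() // stop listening and close the stream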

View File

@ -0,0 +1,74 @@
const { commands } = require('./cmd-list')
const COL_MAX = 60
const COL_MIN = 24
const COL_GUTTER = 16
const INDENT = 4
const indent = (repeat = INDENT) => ' '.repeat(repeat)
const indentNewline = (repeat) => `\n${indent(repeat)}`
module.exports = (npm) => {
const browser = npm.config.get('viewer') === 'browser' ? ' (in a browser)' : ''
const allCommands = npm.config.get('long') ? cmdUsages(npm.constructor) : cmdNames()
return `npm <command>
Usage:
npm install install all the dependencies in your project
npm install <foo> add the <foo> dependency to your project
npm test run this project's tests
npm run <foo> run the script named <foo>
npm <command> -h quick help on <command>
npm -l display usage info for all commands
npm help <term> search for help on <term>${browser}
npm help npm more involved overview${browser}
All commands:
${allCommands}
Specify configs in the ini-formatted file:
${indent() + npm.config.get('userconfig')}
or on the command line via: npm <command> --key=value
More configuration info: npm help config
Configuration fields: npm help 7 config
npm@${npm.version} ${npm.npmRoot}`
}
const cmdNames = () => {
const out = ['']
const line = !process.stdout.columns ? COL_MAX
: Math.min(COL_MAX, Math.max(process.stdout.columns - COL_GUTTER, COL_MIN))
let l = 0
for (const c of commands) {
if (out[l].length + c.length + 2 < line) {
out[l] += ', ' + c
} else {
out[l++] += ','
out[l] = c
}
}
return indentNewline() + out.join(indentNewline()).slice(2)
}
const cmdUsages = (Npm) => {
// return a string of <command>: <usage>
let maxLen = 0
const set = []
for (const c of commands) {
set.push([c, Npm.cmd(c).describeUsage.split('\n')])
maxLen = Math.max(maxLen, c.length)
}
return set.map(([name, usageLines]) => {
const gutter = indent(maxLen - name.length + 1)
const usage = usageLines.join(indentNewline(INDENT + maxLen + 1))
return indentNewline() + name + gutter + usage
}).join('\n')
}

View File

@ -0,0 +1,98 @@
const { open } = require('@npmcli/promise-spawn')
const { output, input } = require('proc-log')
const { URL } = require('node:url')
const readline = require('node:readline/promises')
const { once } = require('node:events')
const assertValidUrl = (url) => {
try {
if (!/^https?:$/.test(new URL(url).protocol)) {
throw new Error()
}
} catch {
throw new Error('Invalid URL: ' + url)
}
}
const outputMsg = (json, title, url) => {
if (json) {
output.buffer({ title, url })
} else {
output.standard(`${title}:\n${url}`)
}
}
// attempt to open URL in web-browser, print address otherwise:
const openUrl = async (npm, url, title, isFile) => {
url = encodeURI(url)
const browser = npm.config.get('browser')
const json = npm.config.get('json')
if (browser === false) {
outputMsg(json, title, url)
return
}
// We pass this in as true from the help command so we know we don't have to
// check the protocol
if (!isFile) {
assertValidUrl(url)
}
try {
await input.start(() => open(url, {
command: browser === true ? null : browser,
}))
} catch (err) {
if (err.code !== 127) {
throw err
}
outputMsg(json, title, url)
}
}
// Prompt to open URL in browser if possible
const openUrlPrompt = async (npm, url, title, prompt, { signal }) => {
const browser = npm.config.get('browser')
const json = npm.config.get('json')
assertValidUrl(url)
outputMsg(json, title, url)
if (browser === false || !process.stdin.isTTY || !process.stdout.isTTY) {
return
}
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
})
try {
await input.read(() => Promise.race([
rl.question(prompt, { signal }),
once(rl, 'error'),
once(rl, 'SIGINT').then(() => {
throw new Error('canceled')
}),
]))
rl.close()
await openUrl(npm, url, 'Browser unavailable. Please open the URL manually')
} catch (err) {
rl.close()
if (err.name !== 'AbortError') {
throw err
}
}
}
// Rearrange arguments and return a function that takes the two arguments
// returned from the npm-profile methods that take an opener
const createOpener = (npm, title, prompt = 'Press ENTER to open in the browser...') =>
(url, opts) => openUrlPrompt(npm, url, title, prompt, opts)
module.exports = {
openUrl,
openUrlPrompt,
createOpener,
}
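// Illustrative sketch (an assumption, not npm source): createOpener partially
// applies npm and a title, producing the (url, opts) opener signature that the
// npm-profile callers expect; `signal` is an assumed AbortSignal.
//
//   const opener = createOpener(npm, 'Authenticate your account at')
//   await opener('https://example.com/auth', { signal })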

View File

@ -0,0 +1,29 @@
const { log, output } = require('proc-log')
const outputError = ({ standard = [], verbose = [], error = [], summary = [], detail = [] }) => {
for (const line of standard) {
// Each output line is just a single string
output.standard(line)
}
for (const line of verbose) {
log.verbose(...line)
}
for (const line of [...error, ...summary, ...detail]) {
log.error(...line)
}
}
const jsonError = (error, npm) => {
if (error && npm?.loaded && npm?.config.get('json')) {
return {
code: error.code,
summary: (error.summary || []).map(l => l.slice(1).join(' ')).join('\n').trim(),
detail: (error.detail || []).map(l => l.slice(1).join(' ')).join('\n').trim(),
}
}
}
module.exports = {
outputError,
jsonError,
}

View File

@ -0,0 +1,7 @@
// ping the npm registry
// used by the ping and doctor commands
const fetch = require('npm-registry-fetch')
module.exports = async (flatOptions) => {
const res = await fetch('/-/ping', { ...flatOptions, cache: false })
return res.json().catch(() => ({}))
}
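// Hypothetical caller (added for illustration), assuming flatOptions carries
// the registry configuration:
//
//   const ping = require('./ping.js')
//   const details = await ping(npm.flatOptions) // {} if the body is not JSON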

View File

@ -0,0 +1,310 @@
const util = require('node:util')
const _delete = Symbol('delete')
const _append = Symbol('append')
const sqBracketsMatcher = str => str.match(/(.+)\[([^\]]+)\]\.?(.*)$/)
// replaces any occurrence of empty brackets (e.g.: []) with a special
// Symbol(append) to represent it; this is useful for the setter
// method, which will push values to the end of the array when it finds these
const replaceAppendSymbols = str => {
const matchEmptyBracket = str.match(/^(.*)\[\]\.?(.*)$/)
if (matchEmptyBracket) {
const [, pre, post] = matchEmptyBracket
return [...replaceAppendSymbols(pre), _append, post].filter(Boolean)
}
return [str]
}
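// For example (illustration added for clarity):
// replaceAppendSymbols('foo[].bar') -> ['foo', Symbol(append), 'bar']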
const parseKeys = key => {
const sqBracketItems = new Set()
sqBracketItems.add(_append)
const parseSqBrackets = str => {
const index = sqBracketsMatcher(str)
// once we find square brackets, we recursively parse all these
if (index) {
const preSqBracketPortion = index[1]
// we want to have a `new String` wrapper here in order to differentiate
// between multiple occurrences of the same string, e.g:
// foo.bar[foo.bar] should split into { foo: { bar: { 'foo.bar': {} } } }
/* eslint-disable-next-line no-new-wrappers */
const foundKey = new String(index[2])
const postSqBracketPortion = index[3]
// we keep track of items found during this step to make sure
// we don't try to dot-split keys that were defined within
// square brackets, since the key name itself might contain dots
sqBracketItems.add(foundKey)
// returns an array that contains either dot-separated items (that will
// be split apart during the next step) OR the fully parsed keys
// read from square brackets, e.g:
// foo.bar[1.0.0].a.b -> ['foo.bar', '1.0.0', 'a.b']
return [
...parseSqBrackets(preSqBracketPortion),
foundKey,
...(postSqBracketPortion ? parseSqBrackets(postSqBracketPortion) : []),
]
}
// at the end of parsing, any usage of the special empty-bracket syntax
// (e.g: foo.array[]) has not yet been parsed, here we'll take care
// of parsing it and adding a special symbol to represent it in
// the resulting list of keys
return replaceAppendSymbols(str)
}
const res = []
// starts by parsing items defined as square brackets, those might be
// representing properties that have a dot in the name or just array
// indexes, e.g: foo[1.0.0] or list[0]
const sqBracketKeys = parseSqBrackets(key.trim())
for (const k of sqBracketKeys) {
// keys parsed from square brackets should just be added to list of
// resulting keys as they might have dots as part of the key
if (sqBracketItems.has(k)) {
res.push(k)
} else {
// splits the dot-separated property names and adds them to the list of keys
/* eslint-disable-next-line no-new-wrappers */
for (const splitKey of k.split('.')) {
res.push(String(splitKey))
}
}
}
// returns an ordered list of strings in which each entry
// represents a key in an object defined by the previous entry
return res
}
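// Worked examples (added for clarity, derived from the logic above):
// parseKeys('foo.bar') -> ['foo', 'bar']
// parseKeys('foo[1.0.0].bar') -> ['foo', '1.0.0', 'bar']
// parseKeys('foo.array[]') -> ['foo', 'array', Symbol(append)]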
const getter = ({ data, key }, { unwrapSingleItemArrays = true } = {}) => {
// keys are a list in which each entry represents the name of
// a property that should be walked through the object in order to
// return the final found value
const keys = parseKeys(key)
let _data = data
let label = ''
for (const k of keys) {
// empty-bracket-shortcut-syntax is not supported on getter
if (k === _append) {
throw Object.assign(new Error('Empty brackets are not valid syntax for retrieving values.'), {
code: 'EINVALIDSYNTAX',
})
}
// extra logic to take into account printing arrays, along with their
// special syntax in which using a dot-separated property name after an
// array will expand its results, e.g:
// arr.name -> arr[0].name=value, arr[1].name=value, ...
const maybeIndex = Number(k)
if (Array.isArray(_data) && !Number.isInteger(maybeIndex)) {
_data = _data.reduce((acc, i, index) => {
acc[`${label}[${index}].${k}`] = i[k]
return acc
}, {})
return _data
} else {
if (!Object.hasOwn(_data, k)) {
return undefined
}
_data = _data[k]
}
label += k
}
// these are some legacy expectations from
// the old API consumed by lib/view.js
if (unwrapSingleItemArrays && Array.isArray(_data) && _data.length <= 1) {
_data = _data[0]
}
return {
[key]: _data,
}
}
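// Examples (added for illustration):
// getter({ data: { foo: { bar: 'baz' } }, key: 'foo.bar' }) -> { 'foo.bar': 'baz' }
// getter({ data: { foo: ['a'] }, key: 'foo' }) -> { foo: 'a' }
// (single-item arrays unwrap unless unwrapSingleItemArrays is false)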
const setter = ({ data, key, value, force }) => {
// setter recursively transforms the provided data obj,
// setting properties from the list of parsed keys, e.g:
// ['foo', 'bar', 'baz'] -> { foo: { bar: { baz: {} } } }
const keys = parseKeys(key)
const setKeys = (_data, _key) => {
// handles array indexes, converting valid integers to numbers,
// note that occurrences of Symbol(append) will throw,
// so we just ignore these for now
let maybeIndex = Number.NaN
try {
maybeIndex = Number(_key)
} catch {
// leave it NaN
}
if (!Number.isNaN(maybeIndex)) {
_key = maybeIndex
}
// creates new array in case key is an index
// and the array obj is not yet defined
const keyIsAnArrayIndex = _key === maybeIndex || _key === _append
const dataHasNoItems = !Object.keys(_data).length
if (keyIsAnArrayIndex && dataHasNoItems && !Array.isArray(_data)) {
_data = []
}
// converting from an array to an object is also possible: in force
// mode we convert an existing array to a plain object (preserving its
// indexed entries) when the next key is not an array index
if (force && Array.isArray(_data) && !keyIsAnArrayIndex) {
_data = { ..._data }
}
// the _append key is a special key that is used to represent
// the empty-bracket notation, e.g: arr[] -> arr[arr.length]
if (_key === _append) {
if (!Array.isArray(_data)) {
throw Object.assign(new Error(`Can't use append syntax in non-Array element`), {
code: 'ENOAPPEND',
})
}
_key = _data.length
}
// retrieves the next data object to recursively iterate on,
// throws if trying to override a literal value or add props to an array
const next = () => {
const haveContents = !force && _data[_key] != null && value !== _delete
const shouldNotOverrideLiteralValue = !(typeof _data[_key] === 'object')
// if the next obj to recurse is an array and the next key to be
// appended to the resulting obj is not an array index, then it
// should throw since we can't append arbitrary props to arrays
const shouldNotAddPropsToArrays =
typeof keys[0] !== 'symbol' && Array.isArray(_data[_key]) && Number.isNaN(Number(keys[0]))
const overrideError = haveContents && shouldNotOverrideLiteralValue
if (overrideError) {
throw Object.assign(
new Error(`Property ${_key} already exists and is not an Array or Object.`),
{ code: 'EOVERRIDEVALUE' }
)
}
const addPropsToArrayError = haveContents && shouldNotAddPropsToArrays
if (addPropsToArrayError) {
throw Object.assign(new Error(`Can't add property ${key} to an Array.`), {
code: 'ENOADDPROP',
})
}
return typeof _data[_key] === 'object' ? _data[_key] || {} : {}
}
// sets items from the parsed array of keys as objects, recurses to
// setKeys in case there are still items to be handled, otherwise it
// just sets the original value set by the user
if (keys.length) {
_data[_key] = setKeys(next(), keys.shift())
} else {
// handles special deletion cases for obj props / array items
if (value === _delete) {
if (Array.isArray(_data)) {
_data.splice(_key, 1)
} else {
delete _data[_key]
}
} else {
// finally, sets the value in its right place
_data[_key] = value
}
}
return _data
}
setKeys(data, keys.shift())
}
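// Worked examples (added for illustration):
// const data = {}
// setter({ data, key: 'foo.bar', value: 1 }) // data -> { foo: { bar: 1 } }
// setter({ data, key: 'foo.list[]', value: 'a' }) // data -> { foo: { bar: 1, list: ['a'] } }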
class Queryable {
static ALL = ''
#data = null
constructor (obj) {
if (!obj || typeof obj !== 'object') {
throw Object.assign(new Error('Queryable needs an object to query properties from.'), {
code: 'ENOQUERYABLEOBJ',
})
}
this.#data = obj
}
query (queries, opts) {
// this ugly interface here is meant to be a compatibility layer
// with the legacy API lib/view.js is consuming; if at some point
// we refactor that command then we can revisit making this nicer
if (queries === Queryable.ALL) {
return { [Queryable.ALL]: this.#data }
}
const q = query =>
getter({
data: this.#data,
key: query,
}, opts)
if (Array.isArray(queries)) {
let res = {}
for (const query of queries) {
res = { ...res, ...q(query) }
}
return res
} else {
return q(queries)
}
}
// returns the value for a single query if found, otherwise undefined
get (query) {
const obj = this.query(query)
if (obj) {
return obj[query]
}
}
// creates objects along the way for the provided `query` parameter
// and assigns `value` to the last property of the query chain
set (query, value, { force } = {}) {
setter({
data: this.#data,
key: query,
value,
force,
})
}
// deletes the value of the property found at `query`
delete (query) {
setter({
data: this.#data,
key: query,
value: _delete,
})
}
toJSON () {
return this.#data
}
[util.inspect.custom] () {
return this.toJSON()
}
}
module.exports = Queryable
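// Minimal usage sketch (illustrative only; the values are made up):
//
//   const q = new Queryable({ node: { version: '1.0.0' } })
//   q.get('node.version') // '1.0.0'
//   q.set('node.name', 'example') // hypothetical value
//   q.toJSON() // { node: { version: '1.0.0', name: 'example' } }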

View File

@ -0,0 +1,67 @@
const { read: _read } = require('read')
const userValidate = require('npm-user-validate')
const { log, input } = require('proc-log')
const otpPrompt = `This command requires a one-time password (OTP) from your authenticator app.
Enter one below. You can also pass one on the command line by appending --otp=123456.
For more information, see:
https://docs.npmjs.com/getting-started/using-two-factor-authentication
Enter OTP: `
const passwordPrompt = 'npm password: '
const usernamePrompt = 'npm username: '
const emailPrompt = 'email (this IS public): '
const read = (...args) => input.read(() => _read(...args))
function readOTP (msg = otpPrompt, otp, isRetry) {
if (isRetry && otp && /^[\d ]+$|^[A-Fa-f0-9]{64}$/.test(otp)) {
return otp.replace(/\s+/g, '')
}
return read({ prompt: msg, default: otp || '' })
.then((rOtp) => readOTP(msg, rOtp, true))
}
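// Examples of what the retry guard above accepts (added for clarity):
// '123 456' -> '123456' (authenticator codes, spaces stripped)
// a 64-character hex string is returned unchanged (recovery codes)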
function readPassword (msg = passwordPrompt, password, isRetry) {
if (isRetry && password) {
return password
}
return read({ prompt: msg, silent: true, default: password || '' })
.then((rPassword) => readPassword(msg, rPassword, true))
}
function readUsername (msg = usernamePrompt, username, isRetry) {
if (isRetry && username) {
const error = userValidate.username(username)
if (error) {
log.warn(error.message)
} else {
return Promise.resolve(username.trim())
}
}
return read({ prompt: msg, default: username || '' })
.then((rUsername) => readUsername(msg, rUsername, true))
}
function readEmail (msg = emailPrompt, email, isRetry) {
if (isRetry && email) {
const error = userValidate.email(email)
if (error) {
log.warn(error.message)
} else {
return email.trim()
}
}
return read({ prompt: msg, default: email || '' })
.then((rEmail) => readEmail(msg, rEmail, true))
}
module.exports = {
otp: readOTP,
password: readPassword,
username: readUsername,
email: readEmail,
}

View File

@ -0,0 +1,33 @@
const reifyOutput = require('./reify-output.js')
const ini = require('ini')
const { writeFile } = require('node:fs/promises')
const { resolve } = require('node:path')
const reifyFinish = async (npm, arb) => {
await saveBuiltinConfig(npm, arb)
reifyOutput(npm, arb)
}
const saveBuiltinConfig = async (npm, arb) => {
const { options: { global }, actualTree } = arb
if (!global) {
return
}
// if we are using a builtin config, and just installed npm as
// a top-level global package, we have to preserve that config.
const npmNode = actualTree.inventory.get('node_modules/npm')
if (!npmNode) {
return
}
const builtinConf = npm.config.data.get('builtin')
if (builtinConf.loadError) {
return
}
const content = ini.stringify(builtinConf.raw).trim() + '\n'
await writeFile(resolve(npmNode.path, 'npmrc'), content)
}
module.exports = reifyFinish

View File

@ -0,0 +1,199 @@
// pass in an arborist object, and it'll output the data about what
// was done, what was audited, etc.
//
// added ## packages, removed ## packages, and audited ## packages in 19.157s
//
// 1 package is looking for funding
// run `npm fund` for details
//
// found 37 vulnerabilities (5 low, 7 moderate, 25 high)
// run `npm audit fix` to fix them, or `npm audit` for details
const { log, output } = require('proc-log')
const { depth } = require('treeverse')
const ms = require('ms')
const npmAuditReport = require('npm-audit-report')
const { readTree: getFundingInfo } = require('libnpmfund')
const auditError = require('./audit-error.js')
// TODO: output JSON if flatOptions.json is true
const reifyOutput = (npm, arb) => {
const { diff, actualTree } = arb
// note: fails and crashes if we're running audit fix and there was an error
// which is a good thing, because there's no point printing all this other
// stuff in that case!
const auditReport = auditError(npm, arb.auditReport) ? null : arb.auditReport
// don't print any info in --silent mode, but we still need to
// set the exitCode properly from the audit report, if we have one.
if (npm.silent) {
getAuditReport(npm, auditReport)
return
}
const summary = {
added: 0,
removed: 0,
changed: 0,
audited: auditReport && !auditReport.error ? actualTree.inventory.size : 0,
funding: 0,
}
if (diff) {
const showDiff = npm.config.get('dry-run') || npm.config.get('long')
const chalk = npm.chalk
depth({
tree: diff,
visit: d => {
switch (d.action) {
case 'REMOVE':
if (showDiff) {
/* eslint-disable-next-line max-len */
output.standard(`${chalk.blue('remove')} ${d.actual.name} ${d.actual.package.version}`)
}
summary.removed++
break
case 'ADD':
if (showDiff) {
output.standard(`${chalk.green('add')} ${d.ideal.name} ${d.ideal.package.version}`)
}
actualTree.inventory.has(d.ideal) && summary.added++
break
case 'CHANGE':
if (showDiff) {
/* eslint-disable-next-line max-len */
output.standard(`${chalk.cyan('change')} ${d.actual.name} ${d.actual.package.version} => ${d.ideal.package.version}`)
}
summary.changed++
break
default:
return
}
const node = d.actual || d.ideal
log.silly(d.action, node.location)
},
getChildren: d => d.children,
})
}
if (npm.flatOptions.fund) {
const fundingInfo = getFundingInfo(actualTree, { countOnly: true })
summary.funding = fundingInfo.length
}
if (npm.flatOptions.json) {
if (auditReport) {
// call this to set the exit code properly
getAuditReport(npm, auditReport)
summary.audit = npm.command === 'audit' ? auditReport
: auditReport.toJSON().metadata
}
output.buffer(summary)
} else {
packagesChangedMessage(npm, summary)
packagesFundingMessage(npm, summary)
printAuditReport(npm, auditReport)
}
}
// if we're running `npm audit fix`, then we print the full audit report
// at the end if there's still stuff, because it's silly for `npm audit fix`
// to tell you to run `npm audit` for details. otherwise, use the summary
// report. if we get here, we know it's not quiet or json.
// If the loglevel is silent, then we just run the report
// to get the exitCode set appropriately.
const printAuditReport = (npm, report) => {
const res = getAuditReport(npm, report)
if (!res || !res.report) {
return
}
output.standard(`\n${res.report}`)
}
const getAuditReport = (npm, report) => {
if (!report) {
return
}
// when in silent mode, we print nothing. the JSON output is
// going to just JSON.stringify() the report object.
const reporter = npm.silent ? 'quiet'
: npm.flatOptions.json ? 'quiet'
: npm.command !== 'audit' ? 'install'
: 'detail'
const defaultAuditLevel = npm.command !== 'audit' ? 'none' : 'low'
const auditLevel = npm.flatOptions.auditLevel || defaultAuditLevel
const res = npmAuditReport(report, {
reporter,
...npm.flatOptions,
auditLevel,
chalk: npm.chalk,
})
if (npm.command === 'audit') {
process.exitCode = process.exitCode || res.exitCode
}
return res
}
const packagesChangedMessage = (npm, { added, removed, changed, audited }) => {
const msg = ['\n']
if (added === 0 && removed === 0 && changed === 0) {
msg.push('up to date')
if (audited) {
msg.push(', ')
}
} else {
if (added) {
msg.push(`added ${added} package${added === 1 ? '' : 's'}`)
}
if (removed) {
if (added) {
msg.push(', ')
}
if (added && !audited && !changed) {
msg.push('and ')
}
msg.push(`removed ${removed} package${removed === 1 ? '' : 's'}`)
}
if (changed) {
if (added || removed) {
msg.push(', ')
}
if (!audited && (added || removed)) {
msg.push('and ')
}
msg.push(`changed ${changed} package${changed === 1 ? '' : 's'}`)
}
if (audited) {
msg.push(', and ')
}
}
if (audited) {
msg.push(`audited ${audited} package${audited === 1 ? '' : 's'}`)
}
msg.push(` in ${ms(Date.now() - npm.started)}`)
output.standard(msg.join(''))
}
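// Sample renderings of the summary above (illustrative, not exhaustive):
// "added 2 packages, and audited 120 packages in 3s"
// "up to date, audited 120 packages in 1s"
// "removed 1 package, and changed 3 packages in 2s"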
const packagesFundingMessage = (npm, { funding }) => {
if (!funding) {
return
}
output.standard('')
const pkg = funding === 1 ? 'package' : 'packages'
const is = funding === 1 ? 'is' : 'are'
output.standard(`${funding} ${pkg} ${is} looking for funding`)
output.standard(' run `npm fund` for details')
}
module.exports = reifyOutput

View File

@ -0,0 +1,201 @@
const crypto = require('node:crypto')
const normalizeData = require('normalize-package-data')
const parseLicense = require('spdx-expression-parse')
const npa = require('npm-package-arg')
const ssri = require('ssri')
const CYCLONEDX_SCHEMA = 'http://cyclonedx.org/schema/bom-1.5.schema.json'
const CYCLONEDX_FORMAT = 'CycloneDX'
const CYCLONEDX_SCHEMA_VERSION = '1.5'
const PROP_PATH = 'cdx:npm:package:path'
const PROP_BUNDLED = 'cdx:npm:package:bundled'
const PROP_DEVELOPMENT = 'cdx:npm:package:development'
const PROP_EXTRANEOUS = 'cdx:npm:package:extraneous'
const PROP_PRIVATE = 'cdx:npm:package:private'
const REF_VCS = 'vcs'
const REF_WEBSITE = 'website'
const REF_ISSUE_TRACKER = 'issue-tracker'
const REF_DISTRIBUTION = 'distribution'
const ALGO_MAP = {
sha1: 'SHA-1',
sha256: 'SHA-256',
sha384: 'SHA-384',
sha512: 'SHA-512',
}
const cyclonedxOutput = ({ npm, nodes, packageType, packageLockOnly }) => {
const rootNode = nodes.find(node => node.isRoot)
const childNodes = nodes.filter(node => !node.isRoot && !node.isLink)
const uuid = crypto.randomUUID()
const deps = []
const seen = new Set()
for (let node of nodes) {
if (node.isLink) {
node = node.target
}
if (seen.has(node)) {
continue
}
seen.add(node)
deps.push(toCyclonedxDependency(node, nodes))
}
const bom = {
$schema: CYCLONEDX_SCHEMA,
bomFormat: CYCLONEDX_FORMAT,
specVersion: CYCLONEDX_SCHEMA_VERSION,
serialNumber: `urn:uuid:${uuid}`,
version: 1,
metadata: {
timestamp: new Date().toISOString(),
lifecycles: [
{ phase: packageLockOnly ? 'pre-build' : 'build' },
],
tools: [
{
vendor: 'npm',
name: 'cli',
version: npm.version,
},
],
component: toCyclonedxItem(rootNode, { packageType }),
},
components: childNodes.map(toCyclonedxItem),
dependencies: deps,
}
return bom
}
const toCyclonedxItem = (node, { packageType }) => {
packageType = packageType || 'library'
// Calculate purl from package spec
let spec = npa(node.pkgid)
spec = (spec.type === 'alias') ? spec.subSpec : spec
const purl = npa.toPurl(spec) + (isGitNode(node) ? `?vcs_url=${node.resolved}` : '')
if (node.package) {
normalizeData(node.package)
}
let parsedLicense
try {
let license = node.package?.license
if (license) {
if (typeof license === 'object') {
license = license.type
}
}
parsedLicense = parseLicense(license)
} catch (err) {
parsedLicense = null
}
const component = {
'bom-ref': toCyclonedxID(node),
type: packageType,
name: node.name,
version: node.version,
scope: (node.optional || node.devOptional) ? 'optional' : 'required',
author: (typeof node.package?.author === 'object')
? node.package.author.name
: (node.package?.author || undefined),
description: node.package?.description || undefined,
purl: purl,
properties: [{
name: PROP_PATH,
value: node.location,
}],
externalReferences: [],
}
if (node.integrity) {
const integrity = ssri.parse(node.integrity, { single: true })
component.hashes = [{
alg: ALGO_MAP[integrity.algorithm] || /* istanbul ignore next */ 'SHA-512',
content: integrity.hexDigest(),
}]
}
if (node.dev === true) {
component.properties.push(prop(PROP_DEVELOPMENT))
}
if (node.package?.private === true) {
component.properties.push(prop(PROP_PRIVATE))
}
if (node.extraneous === true) {
component.properties.push(prop(PROP_EXTRANEOUS))
}
if (node.inBundle === true) {
component.properties.push(prop(PROP_BUNDLED))
}
if (!node.isLink && node.resolved) {
component.externalReferences.push(extRef(REF_DISTRIBUTION, node.resolved))
}
if (node.package?.repository?.url) {
component.externalReferences.push(extRef(REF_VCS, node.package.repository.url))
}
if (node.package?.homepage) {
component.externalReferences.push(extRef(REF_WEBSITE, node.package.homepage))
}
if (node.package?.bugs?.url) {
component.externalReferences.push(extRef(REF_ISSUE_TRACKER, node.package.bugs.url))
}
// If license is a single SPDX license, use the license field
if (parsedLicense?.license) {
component.licenses = [{ license: { id: parsedLicense.license } }]
// If license is a conjunction, use the expression field
} else if (parsedLicense?.conjunction) {
component.licenses = [{ expression: node.package.license }]
}
return component
}
const toCyclonedxDependency = (node, nodes) => {
return {
ref: toCyclonedxID(node),
dependsOn: [...node.edgesOut.values()]
// Filter out edges that are linking to nodes not in the list
.filter(edge => nodes.find(n => n === edge.to))
.map(edge => toCyclonedxID(edge.to))
.filter(id => id),
}
}
const toCyclonedxID = (node) => `${node.packageName}@${node.version}`
const prop = (name) => ({ name, value: 'true' })
const extRef = (type, url) => ({ type, url })
const isGitNode = (node) => {
if (!node.resolved) {
return
}
try {
const { type } = npa(node.resolved)
return type === 'git' || type === 'hosted'
} catch (err) {
/* istanbul ignore next */
return false
}
}
module.exports = { cyclonedxOutput }

View File

@ -0,0 +1,182 @@
const crypto = require('node:crypto')
const normalizeData = require('normalize-package-data')
const npa = require('npm-package-arg')
const ssri = require('ssri')
const SPDX_SCHEMA_VERSION = 'SPDX-2.3'
const SPDX_DATA_LICENSE = 'CC0-1.0'
const SPDX_IDENTIFIER = 'SPDXRef-DOCUMENT'
const NO_ASSERTION = 'NOASSERTION'
const REL_DESCRIBES = 'DESCRIBES'
const REL_PREREQ = 'PREREQUISITE_FOR'
const REL_OPTIONAL = 'OPTIONAL_DEPENDENCY_OF'
const REL_DEV = 'DEV_DEPENDENCY_OF'
const REL_DEP = 'DEPENDENCY_OF'
const REF_CAT_PACKAGE_MANAGER = 'PACKAGE-MANAGER'
const REF_TYPE_PURL = 'purl'
const spdxOutput = ({ npm, nodes, packageType }) => {
const rootNode = nodes.find(node => node.isRoot)
const childNodes = nodes.filter(node => !node.isRoot && !node.isLink)
const rootID = rootNode.pkgid
const uuid = crypto.randomUUID()
const ns = `http://spdx.org/spdxdocs/${npa(rootID).escapedName}-${rootNode.version}-${uuid}`
const relationships = []
const seen = new Set()
for (let node of nodes) {
if (node.isLink) {
node = node.target
}
if (seen.has(node)) {
continue
}
seen.add(node)
const rels = [...node.edgesOut.values()]
// Filter out edges that are linking to nodes not in the list
.filter(edge => nodes.find(n => n === edge.to))
.map(edge => toSpdxRelationship(node, edge))
.filter(rel => rel)
relationships.push(...rels)
}
const extraRelationships = nodes.filter(node => node.extraneous)
.map(node => toSpdxRelationship(rootNode, { to: node, type: 'optional' }))
relationships.push(...extraRelationships)
const bom = {
spdxVersion: SPDX_SCHEMA_VERSION,
dataLicense: SPDX_DATA_LICENSE,
SPDXID: SPDX_IDENTIFIER,
name: rootID,
documentNamespace: ns,
creationInfo: {
created: new Date().toISOString(),
creators: [
`Tool: npm/cli-${npm.version}`,
],
},
documentDescribes: [toSpdxID(rootNode)],
packages: [toSpdxItem(rootNode, { packageType }), ...childNodes.map(toSpdxItem)],
relationships: [
{
spdxElementId: SPDX_IDENTIFIER,
relatedSpdxElement: toSpdxID(rootNode),
relationshipType: REL_DESCRIBES,
},
...relationships,
],
}
return bom
}
const toSpdxItem = (node, { packageType }) => {
normalizeData(node.package)
// Calculate purl from package spec
let spec = npa(node.pkgid)
spec = (spec.type === 'alias') ? spec.subSpec : spec
const purl = npa.toPurl(spec) + (isGitNode(node) ? `?vcs_url=${node.resolved}` : '')
/* For workspace nodes, use the location from their linkNode */
let location = node.location
if (node.isWorkspace && node.linksIn.size > 0) {
location = node.linksIn.values().next().value.location
}
let license = node.package?.license
if (license) {
if (typeof license === 'object') {
license = license.type
}
}
const pkg = {
name: node.packageName,
SPDXID: toSpdxID(node),
versionInfo: node.version,
packageFileName: location,
description: node.package?.description || undefined,
primaryPackagePurpose: packageType ? packageType.toUpperCase() : undefined,
downloadLocation: (node.isLink ? undefined : node.resolved) || NO_ASSERTION,
filesAnalyzed: false,
homepage: node.package?.homepage || NO_ASSERTION,
licenseDeclared: license || NO_ASSERTION,
externalRefs: [
{
referenceCategory: REF_CAT_PACKAGE_MANAGER,
referenceType: REF_TYPE_PURL,
referenceLocator: purl,
},
],
}
if (node.integrity) {
const integrity = ssri.parse(node.integrity, { single: true })
pkg.checksums = [{
algorithm: integrity.algorithm.toUpperCase(),
checksumValue: integrity.hexDigest(),
}]
}
return pkg
}
const toSpdxRelationship = (node, edge) => {
let type
switch (edge.type) {
case 'peer':
type = REL_PREREQ
break
case 'optional':
type = REL_OPTIONAL
break
case 'dev':
type = REL_DEV
break
default:
type = REL_DEP
}
return {
spdxElementId: toSpdxID(edge.to),
relatedSpdxElement: toSpdxID(node),
relationshipType: type,
}
}
const toSpdxID = (node) => {
let name = node.packageName
// Strip leading @ for scoped packages
name = name.replace(/^@/, '')
// Replace slashes with dots
name = name.replace(/\//g, '.')
return `SPDXRef-Package-${name}-${node.version}`
}
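// Example (added for illustration): a node for @npmcli/arborist@6.0.0
// becomes 'SPDXRef-Package-npmcli.arborist-6.0.0'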
const isGitNode = (node) => {
if (!node.resolved) {
return
}
try {
const { type } = npa(node.resolved)
return type === 'git' || type === 'hosted'
} catch (err) {
/* istanbul ignore next */
return false
}
}
module.exports = { spdxOutput }

110
Dependencies/NodeJS/node_modules/npm/lib/utils/tar.js generated vendored Normal file
View File

@ -0,0 +1,110 @@
const tar = require('tar')
const ssri = require('ssri')
const { log, output } = require('proc-log')
const formatBytes = require('./format-bytes.js')
const localeCompare = require('@isaacs/string-locale-compare')('en', {
sensitivity: 'case',
numeric: true,
})
const logTar = (tarball, { unicode = false, json, key } = {}) => {
if (json) {
output.buffer(key == null ? tarball : { [key]: tarball })
return
}
log.notice('')
log.notice('', `${unicode ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
log.notice('Tarball Contents')
if (tarball.files.length) {
log.notice(
'',
tarball.files.map(f =>
/^node_modules\//.test(f.path) ? null : `${formatBytes(f.size, false)} ${f.path}`
).filter(f => f).join('\n')
)
}
if (tarball.bundled.length) {
log.notice('Bundled Dependencies')
tarball.bundled.forEach(name => log.notice('', name))
}
log.notice('Tarball Details')
log.notice('', `name: ${tarball.name}`)
log.notice('', `version: ${tarball.version}`)
if (tarball.filename) {
log.notice('', `filename: ${tarball.filename}`)
}
log.notice('', `package size: ${formatBytes(tarball.size)}`)
log.notice('', `unpacked size: ${formatBytes(tarball.unpackedSize)}`)
log.notice('', `shasum: ${tarball.shasum}`)
/* eslint-disable-next-line max-len */
log.notice('', `integrity: ${tarball.integrity.toString().slice(0, 20)}[...]${tarball.integrity.toString().slice(80)}`)
if (tarball.bundled.length) {
log.notice('', `bundled deps: ${tarball.bundled.length}`)
log.notice('', `bundled files: ${tarball.entryCount - tarball.files.length}`)
log.notice('', `own files: ${tarball.files.length}`)
}
log.notice('', `total files: ${tarball.entryCount}`)
log.notice('', '')
}
const getContents = async (manifest, tarball) => {
const files = []
const bundled = new Set()
let totalEntries = 0
let totalEntrySize = 0
// reads contents of tarball
const stream = tar.t({
onentry (entry) {
totalEntries++
totalEntrySize += entry.size
const p = entry.path
if (p.startsWith('package/node_modules/') && p !== 'package/node_modules/') {
const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
bundled.add(name)
}
files.push({
path: entry.path.replace(/^package\//, ''),
size: entry.size,
mode: entry.mode,
})
},
})
stream.end(tarball)
const integrity = ssri.fromData(tarball, {
algorithms: ['sha1', 'sha512'],
})
const comparator = ({ path: a }, { path: b }) => localeCompare(a, b)
const isUpper = str => {
const ch = str.charAt(0)
return ch === ch.toUpperCase()
}
const uppers = files.filter(file => isUpper(file.path))
const others = files.filter(file => !isUpper(file.path))
uppers.sort(comparator)
others.sort(comparator)
const shasum = integrity.sha1[0].hexDigest()
return {
id: manifest._id || `${manifest.name}@${manifest.version}`,
name: manifest.name,
version: manifest.version,
size: tarball.length,
unpackedSize: totalEntrySize,
shasum,
integrity: ssri.parse(integrity.sha512[0]),
// @scope/packagename.tgz => scope-packagename.tgz
// we can safely use these global replace rules due to npm package naming rules
filename: `${manifest.name.replace('@', '').replace('/', '-')}-${manifest.version}.tgz`,
files: uppers.concat(others),
entryCount: totalEntries,
bundled: Array.from(bundled),
}
}
module.exports = { logTar, getContents }
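// Hedged usage sketch (not npm source; assumes pacote is available, and the
// spec 'abbrev@latest' is only an example):
//
//   const pacote = require('pacote')
//   const { getContents, logTar } = require('./tar.js')
//   const manifest = await pacote.manifest('abbrev@latest')
//   const tarball = await pacote.tarball('abbrev@latest') // Buffer
//   logTar(await getContents(manifest, tarball), { unicode: true })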

View File

@ -0,0 +1,89 @@
const EE = require('node:events')
const fs = require('node:fs')
const { log, time } = require('proc-log')
const INITIAL_TIMER = 'npm'
class Timers extends EE {
#file
#timing
#unfinished = new Map()
#finished = {}
constructor () {
super()
this.on()
time.start(INITIAL_TIMER)
this.started = this.#unfinished.get(INITIAL_TIMER)
}
on () {
process.on('time', this.#timeHandler)
}
off () {
process.off('time', this.#timeHandler)
}
load ({ path, timing } = {}) {
this.#timing = timing
this.#file = `${path}timing.json`
}
finish (metadata) {
time.end(INITIAL_TIMER)
for (const [name, timer] of this.#unfinished) {
log.silly('unfinished npm timer', name, timer)
}
if (!this.#timing) {
// Not in timing mode, nothing else to do here
return
}
try {
this.#writeFile(metadata)
log.info('timing', `Timing info written to: ${this.#file}`)
} catch (e) {
log.warn('timing', `could not write timing file: ${e}`)
}
}
#writeFile (metadata) {
const globalStart = this.started
const globalEnd = this.#finished[INITIAL_TIMER]
const content = {
metadata,
timers: this.#finished,
// add any unfinished timers with their relative start/end
unfinishedTimers: [...this.#unfinished.entries()].reduce((acc, [name, start]) => {
acc[name] = [start - globalStart, globalEnd - globalStart]
return acc
}, {}),
}
fs.writeFileSync(this.#file, JSON.stringify(content) + '\n')
}
#timeHandler = (level, name) => {
const now = Date.now()
switch (level) {
case time.KEYS.start:
this.#unfinished.set(name, now)
break
case time.KEYS.end: {
if (this.#unfinished.has(name)) {
const ms = now - this.#unfinished.get(name)
this.#finished[name] = ms
this.#unfinished.delete(name)
log.timing(name, `Completed in ${ms}ms`)
} else {
log.silly('timing', `Tried to end timer that doesn't exist: ${name}`)
}
}
}
}
}
module.exports = Timers

View File

@ -0,0 +1,40 @@
'use strict'
const reifyFinish = require('../utils/reify-finish.js')
async function updateWorkspaces ({
config,
flatOptions,
localPrefix,
npm,
workspaces,
}) {
if (!flatOptions.workspacesUpdate || !workspaces.length) {
return
}
// default behavior is to not save by default in order to avoid
// race condition problems when publishing multiple workspaces
// that have dependencies on one another, it might still be useful
// in some cases, which then need to set --save
const save = config.isDefault('save')
? false
: config.get('save')
// runs a minimalistic reify update, targeting only the workspaces
// that had version updates and skipping fund/audit/save
const opts = {
...flatOptions,
audit: false,
fund: false,
path: localPrefix,
save,
}
const Arborist = require('@npmcli/arborist')
const arb = new Arborist(opts)
await arb.reify({ ...opts, update: workspaces })
await reifyFinish(npm, arb)
}
module.exports = updateWorkspaces

View File

@ -0,0 +1,29 @@
// compares the inventory of package items in the tree
// that is about to be installed (idealTree) with the inventory
// of items stored in the package-lock file (virtualTree)
//
// Returns empty array if no errors found or an array populated
// with an entry for each validation error found.
function validateLockfile (virtualTree, idealTree) {
const errors = []
// loops through the inventory of packages resulted by ideal tree,
// for each package compares the versions with the version stored in the
// package-lock and adds an error to the list in case of mismatches
for (const [key, entry] of idealTree.entries()) {
const lock = virtualTree.get(key)
if (!lock) {
errors.push(`Missing: ${entry.name}@${entry.version} from lock file`)
continue
}
if (entry.version !== lock.version) {
errors.push(`Invalid: lock file's ${lock.name}@${lock.version} does ` +
`not satisfy ${entry.name}@${entry.version}`)
}
}
return errors
}
module.exports = validateLockfile
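// Example (illustrative): if the ideal tree resolves foo@2.0.0 but the lock
// file recorded foo@1.0.0, the result is:
// ["Invalid: lock file's foo@1.0.0 does not satisfy foo@2.0.0"]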

View File

@ -0,0 +1,386 @@
const fetch = require('npm-registry-fetch')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const pMap = require('p-map')
const tufClient = require('@sigstore/tuf')
const { log, output } = require('proc-log')
const sortAlphabetically = (a, b) => localeCompare(a.name, b.name)
class VerifySignatures {
constructor (tree, filterSet, npm, opts) {
this.tree = tree
this.filterSet = filterSet
this.npm = npm
this.opts = opts
this.keys = new Map()
this.invalid = []
this.missing = []
this.checkedPackages = new Set()
this.auditedWithKeysCount = 0
this.verifiedSignatureCount = 0
this.verifiedAttestationCount = 0
this.exitCode = 0
}
async run () {
const start = process.hrtime.bigint()
// Find all deps in tree
const { edges, registries } = this.getEdgesOut(this.tree.inventory.values(), this.filterSet)
if (edges.size === 0) {
throw new Error('found no installed dependencies to audit')
}
const tuf = await tufClient.initTUF({
cachePath: this.opts.tufCache,
retry: this.opts.retry,
timeout: this.opts.timeout,
})
await Promise.all([...registries].map(registry => this.setKeys({ registry, tuf })))
log.verbose('verifying registry signatures')
await pMap(edges, (e) => this.getVerifiedInfo(e), { concurrency: 20, stopOnError: true })
// Didn't find any dependencies that could be verified, e.g. only local
// deps, missing version, not on a registry etc.
if (!this.auditedWithKeysCount) {
throw new Error('found no dependencies to audit that were installed from ' +
'a supported registry')
}
const invalid = this.invalid.sort(sortAlphabetically)
const missing = this.missing.sort(sortAlphabetically)
const hasNoInvalidOrMissing = invalid.length === 0 && missing.length === 0
if (!hasNoInvalidOrMissing) {
process.exitCode = 1
}
if (this.npm.config.get('json')) {
output.buffer({ invalid, missing })
return
}
const end = process.hrtime.bigint()
const elapsed = end - start
const auditedPlural = this.auditedWithKeysCount > 1 ? 's' : ''
const timing = `audited ${this.auditedWithKeysCount} package${auditedPlural} in ` +
`${Math.floor(Number(elapsed) / 1e9)}s`
output.standard(timing)
output.standard('')
const verifiedBold = this.npm.chalk.bold('verified')
if (this.verifiedSignatureCount) {
if (this.verifiedSignatureCount === 1) {
/* eslint-disable-next-line max-len */
output.standard(`${this.verifiedSignatureCount} package has a ${verifiedBold} registry signature`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`${this.verifiedSignatureCount} packages have ${verifiedBold} registry signatures`)
}
output.standard('')
}
if (this.verifiedAttestationCount) {
if (this.verifiedAttestationCount === 1) {
/* eslint-disable-next-line max-len */
output.standard(`${this.verifiedAttestationCount} package has a ${verifiedBold} attestation`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`${this.verifiedAttestationCount} packages have ${verifiedBold} attestations`)
}
output.standard('')
}
if (missing.length) {
const missingClr = this.npm.chalk.redBright('missing')
if (missing.length === 1) {
/* eslint-disable-next-line max-len */
output.standard(`1 package has a ${missingClr} registry signature but the registry is providing signing keys:`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`${missing.length} packages have ${missingClr} registry signatures but the registry is providing signing keys:`)
}
output.standard('')
missing.map(m =>
output.standard(`${this.npm.chalk.red(`${m.name}@${m.version}`)} (${m.registry})`)
)
}
if (invalid.length) {
if (missing.length) {
output.standard('')
}
const invalidClr = this.npm.chalk.redBright('invalid')
// We can have either invalid signatures or invalid provenance
const invalidSignatures = this.invalid.filter(i => i.code === 'EINTEGRITYSIGNATURE')
if (invalidSignatures.length) {
if (invalidSignatures.length === 1) {
output.standard(`1 package has an ${invalidClr} registry signature:`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`${invalidSignatures.length} packages have ${invalidClr} registry signatures:`)
}
output.standard('')
invalidSignatures.map(i =>
output.standard(`${this.npm.chalk.red(`${i.name}@${i.version}`)} (${i.registry})`)
)
output.standard('')
}
const invalidAttestations = this.invalid.filter(i => i.code === 'EATTESTATIONVERIFY')
if (invalidAttestations.length) {
if (invalidAttestations.length === 1) {
output.standard(`1 package has an ${invalidClr} attestation:`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`${invalidAttestations.length} packages have ${invalidClr} attestations:`)
}
output.standard('')
invalidAttestations.map(i =>
output.standard(`${this.npm.chalk.red(`${i.name}@${i.version}`)} (${i.registry})`)
)
output.standard('')
}
if (invalid.length === 1) {
/* eslint-disable-next-line max-len */
output.standard(`Someone might have tampered with this package since it was published on the registry!`)
} else {
/* eslint-disable-next-line max-len */
output.standard(`Someone might have tampered with these packages since they were published on the registry!`)
}
output.standard('')
}
}
getEdgesOut (nodes, filterSet) {
const edges = new Set()
const registries = new Set()
for (const node of nodes) {
for (const edge of node.edgesOut.values()) {
const filteredOut =
edge.from
&& filterSet
&& filterSet.size > 0
&& !filterSet.has(edge.from.target)
if (!filteredOut) {
const spec = this.getEdgeSpec(edge)
if (spec) {
// Prefetch and cache public keys from used registries
registries.add(this.getSpecRegistry(spec))
}
edges.add(edge)
}
}
}
return { edges, registries }
}
async setKeys ({ registry, tuf }) {
const { host, pathname } = new URL(registry)
// Strip any trailing slashes from pathname
const regKey = `${host}${pathname.replace(/\/$/, '')}/keys.json`
let keys = await tuf.getTarget(regKey)
.then((target) => JSON.parse(target))
.then(({ keys: ks }) => ks.map((key) => ({
...key,
keyid: key.keyId,
pemkey: `-----BEGIN PUBLIC KEY-----\n${key.publicKey.rawBytes}\n-----END PUBLIC KEY-----`,
expires: key.publicKey.validFor.end || null,
}))).catch(err => {
if (err.code === 'TUF_FIND_TARGET_ERROR') {
return null
} else {
throw err
}
})
// If keys not found in Sigstore TUF repo, fallback to registry keys API
if (!keys) {
keys = await fetch.json('/-/npm/v1/keys', {
...this.npm.flatOptions,
registry,
}).then(({ keys: ks }) => ks.map((key) => ({
...key,
pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`,
}))).catch(err => {
if (err.code === 'E404' || err.code === 'E400') {
return null
} else {
throw err
}
})
}
if (keys) {
this.keys.set(registry, keys)
}
}
getEdgeType (edge) {
return edge.optional ? 'optionalDependencies'
: edge.peer ? 'peerDependencies'
: edge.dev ? 'devDependencies'
: 'dependencies'
}
getEdgeSpec (edge) {
let name = edge.name
try {
name = npa(edge.spec).subSpec.name
} catch {
// leave it as edge.name
}
try {
return npa(`${name}@${edge.spec}`)
} catch {
// Skip packages with invalid spec
}
}
buildRegistryConfig (registry) {
const keys = this.keys.get(registry) || []
const parsedRegistry = new URL(registry)
const regKey = `//${parsedRegistry.host}${parsedRegistry.pathname}`
return {
[`${regKey}:_keys`]: keys,
}
}
getSpecRegistry (spec) {
return fetch.pickRegistry(spec, this.npm.flatOptions)
}
getValidPackageInfo (edge) {
const type = this.getEdgeType(edge)
// Skip potentially optional packages that are not on disk, as these could
// be omitted during install
if (edge.error === 'MISSING' && type !== 'dependencies') {
return
}
const spec = this.getEdgeSpec(edge)
// Skip invalid version requirements
if (!spec) {
return
}
const node = edge.to || edge
const { version } = node.package || {}
if (node.isWorkspace || // Skip local workspaces packages
!version || // Skip packages that don't have an installed version, e.g. optional dependencies
!spec.registry) { // Skip if not from registry, e.g. git package
return
}
for (const omitType of this.npm.config.get('omit')) {
if (node[omitType]) {
return
}
}
return {
name: spec.name,
version,
type,
location: node.location,
registry: this.getSpecRegistry(spec),
}
}
async verifySignatures (name, version, registry) {
const {
_integrity: integrity,
_signatures,
_attestations,
_resolved: resolved,
} = await pacote.manifest(`${name}@${version}`, {
verifySignatures: true,
verifyAttestations: true,
...this.buildRegistryConfig(registry),
...this.npm.flatOptions,
})
const signatures = _signatures || []
const result = {
integrity,
signatures,
attestations: _attestations,
resolved,
}
return result
}
async getVerifiedInfo (edge) {
const info = this.getValidPackageInfo(edge)
if (!info) {
return
}
const { name, version, location, registry, type } = info
if (this.checkedPackages.has(location)) {
// we already did or are doing this one
return
}
this.checkedPackages.add(location)
// We only "audit" or verify the signature, or the presence of it, on
// packages whose registry returns signing keys
const keys = this.keys.get(registry) || []
if (keys.length) {
this.auditedWithKeysCount += 1
}
try {
const { integrity, signatures, attestations, resolved } = await this.verifySignatures(
name, version, registry
)
// Currently we only care about missing signatures on registries that provide a public key
// We could make this configurable in the future with a strict/paranoid mode
if (signatures.length) {
this.verifiedSignatureCount += 1
} else if (keys.length) {
this.missing.push({
integrity,
location,
name,
registry,
resolved,
version,
})
}
// Track verified attestations separately to registry signatures, as all
// packages on registries with signing keys are expected to have registry
// signatures, but not all packages have provenance and publish attestations.
if (attestations) {
this.verifiedAttestationCount += 1
}
} catch (e) {
if (e.code === 'EINTEGRITYSIGNATURE' || e.code === 'EATTESTATIONVERIFY') {
this.invalid.push({
code: e.code,
message: e.message,
integrity: e.integrity,
keyid: e.keyid,
location,
name,
registry,
resolved: e.resolved,
signature: e.signature,
predicateType: e.predicateType,
type,
version,
})
} else {
throw e
}
}
}
}
module.exports = VerifySignatures
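// Hedged usage sketch (illustrative; the argument values are assumptions
// about how the audit signatures flow drives this class):
//
//   const verify = new VerifySignatures(tree, filterSet, npm, npm.flatOptions)
//   await verify.run() // sets process.exitCode = 1 on invalid/missing sigs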