Initial Functional Structure Scaffold

This commit is contained in:
2025-05-12 06:05:45 -06:00
parent 8548e95be7
commit ea8967c32e
2458 changed files with 345489 additions and 1 deletion

53
NodeJS/node_modules/npm/lib/arborist-cmd.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
const { log } = require('proc-log')
const BaseCommand = require('./base-cmd.js')
// Shared base for every command whose execWorkspaces() simply resolves the
// workspace list and hands it to a new Arborist() so that a filtered
// Arborist.reify() can be run at some point.
class ArboristCmd extends BaseCommand {
  get isArboristCmd () {
    return true
  }

  static params = [
    'workspace',
    'workspaces',
    'include-workspace-root',
    'install-links',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false
  static checkDevEngines = true

  constructor (npm) {
    super(npm)
    const { config } = this.npm

    // Treat the cwd as a "project" location when it was set explicitly, or
    // when location was left at its default, global mode is off, and a
    // package.json exists at the localPrefix.
    const isProjectLocation =
      config.get('location') === 'project' ||
      (config.isDefault('location') &&
        // deliberately not `npm.global`, which itself falls back to checking
        // the location config that is being decided right here
        !config.get('global') &&
        npm.localPackage)

    // Outside of a project context (or in global mode) a still-default
    // `audit` setting is forced off; an explicit --global --audit combo
    // only gets a warning.
    if (config.isDefault('audit') && (this.npm.global || !isProjectLocation)) {
      config.set('audit', false)
    } else if (this.npm.global && config.get('audit')) {
      log.warn('config', 'includes both --global and --audit, which is currently unsupported.')
    }
  }

  // Resolve the workspace set, then run the normal exec() against it.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    return this.exec(args)
  }
}

module.exports = ArboristCmd

215
NodeJS/node_modules/npm/lib/base-cmd.js generated vendored Normal file
View File

@@ -0,0 +1,215 @@
const { log } = require('proc-log')
// Base class for all npm commands: holds static metadata (name, description,
// params, workspace behavior), builds usage text, and provides shared
// helpers for workspaces, expected-result checks, and devEngines validation.
class BaseCommand {
  // these defaults can be overridden by individual commands
  static workspaces = false
  static ignoreImplicitWorkspace = true
  static checkDevEngines = false

  // these should always be overridden by individual commands
  static name = null
  static description = null
  static params = null

  // Full help text for the command. This is a static so that we can read
  // from it without instantiating a command, which would require loading
  // the config.
  static get describeUsage () {
    const { definitions } = require('@npmcli/config/lib/definitions')
    const { aliases: cmdAliases } = require('./utils/cmd-list')
    const seenExclusive = new Set()
    const wrapWidth = 80
    const { description, usage = [''], name, params } = this

    const fullUsage = [
      `${description}`,
      '',
      'Usage:',
      ...usage.map(u => `npm ${name} ${u}`.trim()),
    ]

    if (params) {
      let results = ''
      let line = ''
      for (const param of params) {
        /* istanbul ignore next */
        if (seenExclusive.has(param)) {
          continue
        }
        const { exclusive } = definitions[param]
        let paramUsage = `${definitions[param].usage}`
        if (exclusive) {
          // mutually-exclusive params are rendered once, joined with `|`
          const exclusiveParams = [paramUsage]
          seenExclusive.add(param)
          for (const e of exclusive) {
            seenExclusive.add(e)
            exclusiveParams.push(definitions[e].usage)
          }
          paramUsage = `${exclusiveParams.join('|')}`
        }
        paramUsage = `[${paramUsage}]`
        // soft-wrap the options list at wrapWidth columns
        if (line.length + paramUsage.length > wrapWidth) {
          results = [results, line].filter(Boolean).join('\n')
          line = ''
        }
        line = [line, paramUsage].filter(Boolean).join(' ')
      }
      fullUsage.push('')
      fullUsage.push('Options:')
      fullUsage.push([results, line].filter(Boolean).join('\n'))
    }

    // collect every alias that maps back to this command name
    const aliases = Object.entries(cmdAliases).reduce((p, [k, v]) => {
      return p.concat(v === name ? k : [])
    }, [])

    if (aliases.length) {
      const plural = aliases.length === 1 ? '' : 'es'
      fullUsage.push('')
      fullUsage.push(`alias${plural}: ${aliases.join(', ')}`)
    }

    fullUsage.push('')
    fullUsage.push(`Run "npm help ${name}" for more info`)

    return fullUsage.join('\n')
  }

  constructor (npm) {
    this.npm = npm

    const { config } = this.npm

    if (!this.constructor.skipConfigValidation) {
      config.validate()
    }

    if (config.get('workspaces') === false && config.get('workspace').length) {
      throw new Error('Can not use --no-workspaces and --workspace at the same time')
    }
  }

  get name () {
    return this.constructor.name
  }

  get description () {
    return this.constructor.description
  }

  get params () {
    return this.constructor.params
  }

  get usage () {
    return this.constructor.describeUsage
  }

  // Build an EUSAGE error, optionally prefixed with a specific message.
  usageError (prefix = '') {
    if (prefix) {
      prefix += '\n\n'
    }
    return Object.assign(new Error(`\n${prefix}${this.usage}`), {
      code: 'EUSAGE',
    })
  }

  // Compare the number of entries with what was expected via the
  // --expect-results / --expect-result-count configs; on mismatch, warn
  // and set a failing exit code.
  checkExpected (entries) {
    if (!this.npm.config.isDefault('expect-results')) {
      const expected = this.npm.config.get('expect-results')
      if (!!entries !== !!expected) {
        log.warn(this.name, `Expected ${expected ? '' : 'no '}results, got ${entries}`)
        process.exitCode = 1
      }
    } else if (!this.npm.config.isDefault('expect-result-count')) {
      const expected = this.npm.config.get('expect-result-count')
      if (expected !== entries) {
        /* eslint-disable-next-line max-len */
        log.warn(this.name, `Expected ${expected} result${expected === 1 ? '' : 's'}, got ${entries}`)
        process.exitCode = 1
      }
    }
  }

  // Checks the devEngines entry in the package.json at this.localPrefix.
  // Warnings (and, with --force, errors) are logged; a remaining error
  // throws EBADDEVENGINES.
  async checkDevEngines () {
    const force = this.npm.flatOptions.force

    const { devEngines } = await require('@npmcli/package-json')
      .normalize(this.npm.config.localPrefix)
      .then(p => p.content)
      .catch(() => ({}))

    if (typeof devEngines === 'undefined') {
      return
    }

    const { checkDevEngines, currentEnv } = require('npm-install-checks')
    const current = currentEnv.devEngines({
      nodeVersion: this.npm.nodeVersion,
      npmVersion: this.npm.version,
    })

    const failures = checkDevEngines(devEngines, current)
    const warnings = failures.filter(f => f.isWarn)
    const errors = failures.filter(f => f.isError)

    const genMsg = (failure, i = 0) => {
      return [...new Set([
        // eslint-disable-next-line
        i === 0 ? 'The developer of this package has specified the following through devEngines' : '',
        `${failure.message}`,
        `${failure.errors.map(e => e.message).join('\n')}`,
      ])].filter(v => v).join('\n')
    }

    // BUGFIX: bind this list to a named const. A bare statement starting
    // with `[` directly after the arrow-function expression above would be
    // parsed (no ASI) as a computed member access on that function, leaving
    // genMsg undefined and crashing below.
    const toReport = [...warnings, ...(force ? errors : [])]
    toReport.forEach((failure, i) => {
      const message = genMsg(failure, i)
      log.warn('EBADDEVENGINES', message)
      log.warn('EBADDEVENGINES', {
        current: failure.current,
        required: failure.required,
      })
    })

    // --force downgrades errors to the warnings already logged above
    if (force) {
      return
    }

    if (errors.length) {
      const failure = errors[0]
      const message = genMsg(failure)
      throw Object.assign(new Error(message), {
        engine: failure.engine,
        code: 'EBADDEVENGINES',
        current: failure.current,
        required: failure.required,
      })
    }
  }

  // Resolve the workspace filter configs into this.workspaces (Map),
  // this.workspaceNames, and this.workspacePaths.
  async setWorkspaces () {
    const { relative } = require('node:path')

    // Arborist commands handle workspace-root inclusion themselves
    const includeWorkspaceRoot = this.isArboristCmd
      ? false
      : this.npm.config.get('include-workspace-root')

    // true when the cwd is NOT inside the localPrefix subtree; in that case
    // report workspace paths relative to the prefix instead of the cwd
    const prefixInsideCwd = relative(this.npm.localPrefix, process.cwd()).startsWith('..')
    const relativeFrom = prefixInsideCwd ? this.npm.localPrefix : process.cwd()

    const filters = this.npm.config.get('workspace')
    const getWorkspaces = require('./utils/get-workspaces.js')
    const ws = await getWorkspaces(filters, {
      path: this.npm.localPrefix,
      includeWorkspaceRoot,
      relativeFrom,
    })

    this.workspaces = ws
    this.workspaceNames = [...ws.keys()]
    this.workspacePaths = [...ws.values()]
  }
}

module.exports = BaseCommand

12
NodeJS/node_modules/npm/lib/cli.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// Opportunistically turn on the V8 compile cache where the running node
// exposes it; older nodes simply leave `enableCompileCache` undefined.
try {
  const { enableCompileCache } = require('node:module')
  /* istanbul ignore next */
  if (enableCompileCache) {
    enableCompileCache()
  }
} catch { /* istanbul ignore next */ }

const validateEngines = require('./cli/validate-engines.js')
const { resolve } = require('node:path')
const cliEntry = resolve(__dirname, 'cli/entry.js')

// Entry point: engine-validate first, and only lazily load the real CLI so
// that a SyntaxError in it is caught by the validator's handlers.
module.exports = (process) => validateEngines(process, () => require(cliEntry))

79
NodeJS/node_modules/npm/lib/cli/entry.js generated vendored Normal file
View File

@@ -0,0 +1,79 @@
/* eslint-disable max-len */
// Separated out for easier unit testing
// Top-level CLI driver: sets up process state and the exit handler, loads
// npm, runs the resolved command, and always funnels out through
// exitHandler.exit().
module.exports = async (process, validateEngines) => {
  // set it here so that regardless of what happens later, we don't
  // leak any private CLI configs to other programs
  process.title = 'npm'

  // if npm is called as "npmg" or "npm_g", then run in global mode.
  if (process.argv[1][process.argv[1].length - 1] === 'g') {
    process.argv.splice(1, 1, 'npm', '-g')
  }

  // Patch the global fs module here at the app level
  require('graceful-fs').gracefulify(require('node:fs'))

  const satisfies = require('semver/functions/satisfies')
  const ExitHandler = require('./exit-handler.js')
  const exitHandler = new ExitHandler({ process })
  const Npm = require('../npm.js')
  const npm = new Npm()
  exitHandler.setNpm(npm)

  // only log node and npm paths in argv initially since argv can contain sensitive info. a cleaned version will be logged later
  const { log, output } = require('proc-log')
  log.verbose('cli', process.argv.slice(0, 2).join(' '))
  log.info('using', 'npm@%s', npm.version)
  log.info('using', 'node@%s', process.version)

  // At this point we've required a few files and can be pretty sure we dont contain invalid syntax for this version of node. It's possible a lazy require would, but that's unlikely enough that it's not worth catching anymore and we attach the more important exit handlers.
  validateEngines.off()
  exitHandler.registerUncaughtHandlers()

  // It is now safe to log a warning if they are using a version of node that is not going to fail on syntax errors but is still unsupported and untested and might not work reliably. This is safe to use the logger now which we want since this will show up in the error log too.
  if (!satisfies(validateEngines.node, validateEngines.engines)) {
    log.warn('cli', validateEngines.unsupportedMessage)
  }

  // Now actually fire up npm and run the command.
  // This is how to use npm programmatically:
  try {
    const { exec, command, args } = await npm.load()

    // npm.load() signaled there is nothing to execute (config-only run)
    if (!exec) {
      return exitHandler.exit()
    }

    // no command resolved: show usage and fail
    if (!command) {
      output.standard(npm.usage)
      process.exitCode = 1
      return exitHandler.exit()
    }

    // Options are prefixed by a hyphen-minus (-, \u2d).
    // Other dash-type chars look similar but are invalid.
    const nonDashArgs = npm.argv.filter(a => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(a))
    if (nonDashArgs.length) {
      log.error(
        'arg',
        'Argument starts with non-ascii dash, this is probably invalid:',
        require('@npmcli/redact').redactLog(nonDashArgs.join(', '))
      )
    }

    const execPromise = npm.exec(command, args)

    // this is async but we dont await it, since its ok if it doesnt
    // finish before the command finishes running. it uses command and argv
    // so it must be initiated here, after the command name is set
    const updateNotifier = require('./update-notifier.js')
    // eslint-disable-next-line promise/catch-or-return
    updateNotifier(npm).then((msg) => (npm.updateNotification = msg))

    await execPromise
    return exitHandler.exit()
  } catch (err) {
    // exit handler formats the error and sets the exit code
    return exitHandler.exit(err)
  }
}

174
NodeJS/node_modules/npm/lib/cli/exit-handler.js generated vendored Normal file
View File

@@ -0,0 +1,174 @@
const { log, output, META } = require('proc-log')
const { errorMessage, getExitCodeFromError } = require('../utils/error-message.js')
// Coordinates npm's shutdown: formats any terminal error, guarantees a
// non-zero exit code when the exit handler was never called, and flushes
// stdout/stderr before the real process.exit().
class ExitHandler {
  #npm = null
  #process = null
  // set once exit()/#handleExit ran (i.e. a controlled shutdown happened)
  #exited = false
  // whether npm.exitErrorMessage() should be appended to the log output
  #exitErrorMessage = false
  // guards so the "no npm set" error is only logged once
  #noNpmError = false

  get #hasNpm () {
    return !!this.#npm
  }

  get #loaded () {
    return !!this.#npm?.loaded
  }

  get #showExitErrorMessage () {
    if (!this.#loaded) {
      return false
    }
    if (!this.#exited) {
      return true
    }
    return this.#exitErrorMessage
  }

  get #notLoadedOrExited () {
    return !this.#loaded && !this.#exited
  }

  setNpm (npm) {
    this.#npm = npm
  }

  constructor ({ process }) {
    this.#process = process
    // the 'exit' listener is the last line of defense; it runs whether or
    // not exit() was ever called explicitly
    this.#process.on('exit', this.#handleProcesExitAndReset)
  }

  registerUncaughtHandlers () {
    this.#process.on('uncaughtException', this.#handleExit)
    this.#process.on('unhandledRejection', this.#handleExit)
  }

  exit (err) {
    this.#handleExit(err)
  }

  #handleProcesExitAndReset = (code) => {
    this.#handleProcessExit(code)

    // Reset all the state. This is only relevant for tests since
    // in reality the process fully exits here.
    this.#process.off('exit', this.#handleProcesExitAndReset)
    this.#process.off('uncaughtException', this.#handleExit)
    this.#process.off('unhandledRejection', this.#handleExit)
    if (this.#loaded) {
      this.#npm.unload()
    }
    this.#npm = null
    this.#exited = false
    this.#exitErrorMessage = false
  }

  // Runs inside the process 'exit' event: logging only, no async work.
  #handleProcessExit (code) {
    const numCode = Number(code) || 0
    // Always exit w/ a non-zero code if exit handler was not called
    const exitCode = this.#exited ? numCode : (numCode || 1)
    this.#process.exitCode = exitCode

    if (this.#notLoadedOrExited) {
      // Exit handler was not called and npm was not loaded so we have to log something
      this.#logConsoleError(new Error(`Process exited unexpectedly with code: ${exitCode}`))
      return
    }

    if (this.#logNoNpmError()) {
      return
    }

    const os = require('node:os')
    log.verbose('cwd', this.#process.cwd())
    log.verbose('os', `${os.type()} ${os.release()}`)
    log.verbose('node', this.#process.version)
    log.verbose('npm ', `v${this.#npm.version}`)

    // only show the notification if it finished
    if (typeof this.#npm.updateNotification === 'string') {
      log.notice('', this.#npm.updateNotification, { [META]: true, force: true })
    }

    if (!this.#exited) {
      log.error('', 'Exit handler never called!')
      log.error('', 'This is an error with npm itself. Please report this error at:')
      log.error('', '    <https://github.com/npm/cli/issues>')
      if (this.#npm.silent) {
        output.error('')
      }
    }

    log.verbose('exit', exitCode)
    if (exitCode) {
      log.verbose('code', exitCode)
    } else {
      log.info('ok')
    }

    if (this.#showExitErrorMessage) {
      log.error('', this.#npm.exitErrorMessage())
    }
  }

  #logConsoleError (err) {
    // Run our error message formatters on all errors even if we
    // have no npm or an unloaded npm. This will clean the error
    // and possible return a formatted message about EACCESS or something.
    const { summary, detail } = errorMessage(err, this.#npm)
    const formatted = [...new Set([...summary, ...detail].flat().filter(Boolean))].join('\n')
    // If we didn't get anything from the formatted message then just display the full stack
    // eslint-disable-next-line no-console
    console.error(formatted === err.message ? err.stack : formatted)
  }

  #logNoNpmError (err) {
    if (this.#hasNpm) {
      return false
    }
    // Make sure we only log this error once
    if (!this.#noNpmError) {
      this.#noNpmError = true
      this.#logConsoleError(
        new Error(`Exit prior to setting npm in exit handler`, err ? { cause: err } : {})
      )
    }
    return true
  }

  #handleExit = (err) => {
    this.#exited = true

    // No npm at all
    if (this.#logNoNpmError(err)) {
      return this.#process.exit(this.#process.exitCode || getExitCodeFromError(err) || 1)
    }

    // npm was never loaded but we still might have a config loading error or
    // something similar that we can run through the error message formatter
    // to give the user a clue as to what happened.
    if (!this.#loaded) {
      this.#logConsoleError(new Error('Exit prior to config file resolving', { cause: err }))
      return this.#process.exit(this.#process.exitCode || getExitCodeFromError(err) || 1)
    }

    this.#exitErrorMessage = err?.suppressError === true ? false : !!err

    // Prefer the exit code of the error, then the current process exit code,
    // then set it to 1 if we still have an error. Otherwise we call process.exit
    // with undefined so that it can determine the final exit code
    const exitCode = err?.exitCode ?? this.#process.exitCode ?? (err ? 1 : undefined)

    // explicitly call process.exit now so we don't hang on things like the
    // update notifier, also flush stdout/err beforehand because process.exit doesn't
    // wait for that to happen.
    this.#process.stderr.write('', () => this.#process.stdout.write('', () => {
      this.#process.exit(exitCode)
    }))
  }
}

module.exports = ExitHandler

121
NodeJS/node_modules/npm/lib/cli/update-notifier.js generated vendored Normal file
View File

@@ -0,0 +1,121 @@
// print a banner telling the user to upgrade npm to latest
// but not in CI, and not if we're doing that already.
// Check daily for betas, and weekly otherwise.
const ciInfo = require('ci-info')
const gt = require('semver/functions/gt')
const gte = require('semver/functions/gte')
const parse = require('semver/functions/parse')
const { stat, writeFile } = require('node:fs/promises')
const { resolve } = require('node:path')
// update check frequency
const DAILY = 24 * 60 * 60 * 1000
const WEEKLY = DAILY * 7

// The timestamp file lives beside (not inside) the _cacache folder,
// directly in npm's cache directory.
const lastCheckedFile = (npm) => {
  return resolve(npm.flatOptions.cache, '../_update-notifier-last-checked')
}
// Actual check for updates. This is a separate function so that we only load
// this if we are doing the actual update.
// Returns the banner message string, or null when no update applies.
const updateCheck = async (npm, spec, version, current) => {
  const pacote = require('pacote')

  const mani = await pacote.manifest(`npm@${spec}`, {
    ...npm.flatOptions,
    // BUGFIX: these overrides must come AFTER the flatOptions spread.
    // flatOptions carries a defaultTag derived from --tag, which previously
    // clobbered this value — defeating the stated intent below.
    // always prefer latest, even if doing --tag=whatever on the cmd
    defaultTag: 'latest',
    cache: false,
  }).catch(() => null)

  // if pacote failed, give up
  if (!mani) {
    return null
  }

  const latest = mani.version

  // if the current version is *greater* than latest, we're on a 'next'
  // and should get the updates from that release train.
  // Note that this isn't another http request over the network, because
  // the packument will be cached by pacote from previous request.
  if (gt(version, latest) && spec === 'latest') {
    return updateNotifier(npm, `^${version}`)
  }

  // if we already have something >= the desired spec, then we're done
  if (gte(version, latest)) {
    return null
  }

  const chalk = npm.logChalk

  // ok! notify the user about this update they should get.
  // The message is saved for printing at process exit so it will not get
  // lost in any other messages being printed as part of the command.
  const update = parse(mani.version)
  const type = update.major !== current.major ? 'major'
    : update.minor !== current.minor ? 'minor'
    : update.patch !== current.patch ? 'patch'
    : 'prerelease'
  const typec = type === 'major' ? 'red'
    : type === 'minor' ? 'yellow'
    : 'cyan'
  const cmd = `npm install -g npm@${latest}`
  const message = `\nNew ${chalk[typec](type)} version of npm available! ` +
    `${chalk[typec](current)} -> ${chalk.blue(latest)}\n` +
    `Changelog: ${chalk.blue(`https://github.com/npm/cli/releases/tag/v${latest}`)}\n` +
    `To update run: ${chalk.underline(cmd)}\n`

  return message
}
// Decide whether it is time to check for an update at all, stamp the
// last-checked file, and delegate the real lookup to updateCheck().
const updateNotifier = async (npm, spec = 'latest') => {
  const { version } = npm
  const current = parse(version)

  // a prerelease train moves fast: track the next beta on the same range
  let target = spec
  if (current.prerelease.length) {
    target = `^${version}`
  }

  // beta trains get checked daily, everything else weekly
  const duration = target !== 'latest' ? DAILY : WEEKLY
  const cutoff = new Date(Date.now() - duration)

  // a missing timestamp file yields an mtime strictly older than the cutoff
  const stats = await stat(lastCheckedFile(npm)).catch(() => ({ mtime: cutoff - 1 }))

  // checked recently enough — nothing to do
  if (!(cutoff > stats.mtime)) {
    return null
  }

  // intentional. do not await this. it's a best-effort update. if this
  // fails, it's ok. might be using /dev/null as the cache or something weird
  // like that.
  writeFile(lastCheckedFile(npm), '').catch(() => {})

  return updateCheck(npm, target, version, current)
}
// only update the notification timeout if we actually finished checking
module.exports = npm => {
if (
// opted out
!npm.config.get('update-notifier')
// global npm update
|| (npm.flatOptions.global &&
['install', 'update'].includes(npm.command) &&
npm.argv.some(arg => /^npm(@|$)/.test(arg)))
// CI
|| ciInfo.isCI
) {
return Promise.resolve(null)
}
return updateNotifier(npm)
}

49
NodeJS/node_modules/npm/lib/cli/validate-engines.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
// This is separate to indicate that it should contain code we expect to work in
// all versions of node >= 6. This is a best effort to catch syntax errors to
// give users a good error message if they are using a node version that doesn't
// allow syntax we are using such as private properties, etc. This file is
// linted with ecmaVersion=6 so we don't use invalid syntax, which is set in the
// .eslintrc.local.json file
const { engines: { node: engines }, version } = require('../../package.json')
const npm = `v${version}`
module.exports = (process, getCli) => {
const node = process.version
/* eslint-disable-next-line max-len */
const unsupportedMessage = `npm ${npm} does not support Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
/* eslint-disable-next-line max-len */
const brokenMessage = `ERROR: npm ${npm} is known not to run on Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
// coverage ignored because this is only hit in very unsupported node versions
// and it's a best effort attempt to show something nice in those cases
/* istanbul ignore next */
const syntaxErrorHandler = (err) => {
if (err instanceof SyntaxError) {
// eslint-disable-next-line no-console
console.error(`${brokenMessage}\n\nERROR:`)
// eslint-disable-next-line no-console
console.error(err)
return process.exit(1)
}
throw err
}
process.on('uncaughtException', syntaxErrorHandler)
process.on('unhandledRejection', syntaxErrorHandler)
// require this only after setting up the error handlers
const cli = getCli()
return cli(process, {
node,
npm,
engines,
unsupportedMessage,
off: () => {
process.off('uncaughtException', syntaxErrorHandler)
process.off('unhandledRejection', syntaxErrorHandler)
},
})
}

222
NodeJS/node_modules/npm/lib/commands/access.js generated vendored Normal file
View File

@@ -0,0 +1,222 @@
const libnpmaccess = require('libnpmaccess')
const npa = require('npm-package-arg')
const { output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { otplease } = require('../utils/auth.js')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
// first-positional subcommands accepted by `npm access`
const commands = [
  'get',
  'grant',
  'list',
  'revoke',
  'set',
]

// valid `key=value` pairs for `npm access set`
const setCommands = [
  'status=public',
  'status=private',
  'mfa=none',
  'mfa=publish',
  'mfa=automation',
  '2fa=none',
  '2fa=publish',
  '2fa=automation',
]

// `npm access` — view and change package visibility, collaborator
// permissions, and publish-MFA requirements via libnpmaccess.
class Access extends BaseCommand {
  static description = 'Set access level on published packages'
  static name = 'access'
  static params = [
    'json',
    'otp',
    'registry',
  ]

  static usage = [
    'list packages [<user>|<scope>|<scope:team>] [<package>]',
    'list collaborators [<package> [<user>]]',
    'get status [<package>]',
    'set status=public|private [<package>]',
    'set mfa=none|publish|automation [<package>]',
    'grant <read-only|read-write> <scope:team> [<package>]',
    'revoke <scope:team> [<package>]',
  ]

  // shell tab-completion for the subcommand and its first argument
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return commands
    }

    if (argv.length === 3) {
      switch (argv[2]) {
        case 'grant':
          return ['read-only', 'read-write']
        case 'revoke':
          return []
        case 'list':
        case 'ls':
          return ['packages', 'collaborators']
        case 'get':
          return ['status']
        case 'set':
          return setCommands
        default:
          throw new Error(argv[2] + ' not recognized')
      }
    }
  }

  // Validates cmd/subcmd and dispatches to the private implementation.
  async exec ([cmd, subcmd, ...args]) {
    if (!cmd) {
      throw this.usageError()
    }
    if (!commands.includes(cmd)) {
      throw this.usageError(`${cmd} is not a valid access command`)
    }
    // All commands take at least one more parameter so we can do this check up front
    if (!subcmd) {
      throw this.usageError()
    }

    switch (cmd) {
      case 'grant':
        if (!['read-only', 'read-write'].includes(subcmd)) {
          throw this.usageError('grant must be either `read-only` or `read-write`')
        }
        if (!args[0]) {
          throw this.usageError('`<scope:team>` argument is required')
        }
        return this.#grant(subcmd, args[0], args[1])
      case 'revoke':
        return this.#revoke(subcmd, args[0])
      case 'list':
      case 'ls':
        if (subcmd === 'packages') {
          return this.#listPackages(args[0], args[1])
        }
        if (subcmd === 'collaborators') {
          return this.#listCollaborators(args[0], args[1])
        }
        throw this.usageError(`list ${subcmd} is not a valid access command`)
      case 'get':
        if (subcmd !== 'status') {
          throw this.usageError(`get ${subcmd} is not a valid access command`)
        }
        return this.#getStatus(args[0])
      case 'set':
        if (!setCommands.includes(subcmd)) {
          throw this.usageError(`set ${subcmd} is not a valid access command`)
        }
        return this.#set(subcmd, args[0])
    }
  }

  async #grant (permissions, scope, pkg) {
    await libnpmaccess.setPermissions(scope, pkg, permissions, this.npm.flatOptions)
  }

  async #revoke (scope, pkg) {
    await libnpmaccess.removePermissions(scope, pkg, this.npm.flatOptions)
  }

  // owner defaults to the logged-in identity when not provided
  async #listPackages (owner, pkg) {
    if (!owner) {
      owner = await getIdentity(this.npm, this.npm.flatOptions)
    }
    const pkgs = await libnpmaccess.getPackages(owner, this.npm.flatOptions)
    this.#output(pkgs, pkg)
  }

  async #listCollaborators (pkg, user) {
    const pkgName = await this.#getPackage(pkg, false)
    const collabs = await libnpmaccess.getCollaborators(pkgName, this.npm.flatOptions)
    this.#output(collabs, user)
  }

  async #getStatus (pkg) {
    const pkgName = await this.#getPackage(pkg, false)
    const visibility = await libnpmaccess.getVisibility(pkgName, this.npm.flatOptions)
    this.#output({ [pkgName]: visibility.public ? 'public' : 'private' })
  }

  // routes `set key=value` to the mfa or status handler
  async #set (subcmd, pkg) {
    const [subkey, subval] = subcmd.split('=')
    switch (subkey) {
      case 'mfa':
      case '2fa':
        return this.#setMfa(pkg, subval)
      case 'status':
        return this.#setStatus(pkg, subval)
    }
  }

  async #setMfa (pkg, level) {
    const pkgName = await this.#getPackage(pkg, false)
    // otplease retries the request with a one-time password if required
    await otplease(this.npm, this.npm.flatOptions, (opts) => {
      return libnpmaccess.setMfa(pkgName, level, opts)
    })
  }

  async #setStatus (pkg, status) {
    // only scoped packages can have their access changed
    const pkgName = await this.#getPackage(pkg, true)
    // the registry API calls the non-public status 'restricted'
    if (status === 'private') {
      status = 'restricted'
    }
    await otplease(this.npm, this.npm.flatOptions, (opts) => {
      return libnpmaccess.setAccess(pkgName, status, opts)
    })
    return this.#getStatus(pkgName)
  }

  // Resolve the target package name: the explicit argument, or the name
  // from the package.json in the current prefix. Throws EUSAGE when a
  // scope is required but missing, ENOENT when no name can be found.
  async #getPackage (name, requireScope) {
    if (!name) {
      try {
        const { content } = await pkgJson.normalize(this.npm.prefix)
        name = content.name
      } catch (err) {
        if (err.code === 'ENOENT') {
          throw Object.assign(new Error('no package name given and no package.json found'), {
            code: 'ENOENT',
          })
        } else {
          throw err
        }
      }
    }
    const spec = npa(name)
    if (requireScope && !spec.scope) {
      throw this.usageError('This command is only available for scoped packages.')
    }
    return name
  }

  // Print the result map as JSON or as sorted `key: value` lines,
  // optionally limited to a single key.
  #output (items, limiter) {
    const outputs = {}
    // translate registry permission values into CLI-facing names
    const lookup = {
      __proto__: null,
      read: 'read-only',
      write: 'read-write',
    }
    for (const item in items) {
      const val = items[item]
      outputs[item] = lookup[val] || val
    }
    if (this.npm.config.get('json')) {
      output.buffer(outputs)
    } else {
      for (const item of Object.keys(outputs).sort(localeCompare)) {
        if (!limiter || limiter === item) {
          output.standard(`${item}: ${outputs[item]}`)
        }
      }
    }
  }
}

module.exports = Access

50
NodeJS/node_modules/npm/lib/commands/adduser.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
// `npm adduser` — create/log in to a registry account and persist the
// resulting credentials in the user config.
class AddUser extends BaseCommand {
  static description = 'Add a registry user account'
  static name = 'adduser'
  static params = [
    'registry',
    'scope',
    'auth-type',
  ]

  async exec () {
    const scope = this.npm.config.get('scope')
    let registry = this.npm.config.get('registry')

    // a scope-specific registry wins unless --registry was passed on the CLI
    if (scope) {
      const scopedRegistry = this.npm.config.get(`${scope}:registry`)
      const cliRegistry = this.npm.config.get('registry', 'cli')
      if (scopedRegistry && !cliRegistry) {
        registry = scopedRegistry
      }
    }

    const existingCreds = this.npm.config.getCredentialsByURI(registry)

    log.notice('', `Log in on ${replaceInfo(registry)}`)

    const { message, newCreds } = await auth.adduser(this.npm, {
      ...this.npm.flatOptions,
      creds: existingCreds,
      registry,
    })

    this.npm.config.delete('_token', 'user') // prevent legacy pollution
    this.npm.config.setCredentialsByURI(registry, newCreds)

    // remember the scope -> registry association for future commands
    if (scope) {
      this.npm.config.set(`${scope}:registry`, registry, 'user')
    }

    await this.npm.config.save('user')

    output.standard(message)
  }
}

module.exports = AddUser

121
NodeJS/node_modules/npm/lib/commands/audit.js generated vendored Normal file
View File

@@ -0,0 +1,121 @@
const npmAuditReport = require('npm-audit-report')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const auditError = require('../utils/audit-error.js')
const { log, output } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
const VerifySignatures = require('../utils/verify-signatures.js')
// `npm audit [fix|signatures]` — run a security audit (optionally applying
// fixes via reify), or verify registry signatures of installed packages.
class Audit extends ArboristWorkspaceCmd {
  static description = 'Run a security audit'
  static name = 'audit'
  static params = [
    'audit-level',
    'dry-run',
    'force',
    'json',
    'package-lock-only',
    'package-lock',
    'omit',
    'include',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  static usage = ['[fix|signatures]']

  // shell tab-completion: only `fix` and `signatures` are valid positionals
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['fix', 'signatures']
    }

    switch (argv[2]) {
      case 'fix':
      case 'signatures':
        return []
      default:
        throw Object.assign(new Error(argv[2] + ' not recognized'), {
          code: 'EUSAGE',
        })
    }
  }

  async exec (args) {
    if (args[0] === 'signatures') {
      await this.auditSignatures()
    } else {
      await this.auditAdvisories(args)
    }
  }

  // Run the advisory audit via Arborist; with `fix`, reify the repaired
  // tree, otherwise print the report and propagate its exit code.
  async auditAdvisories (args) {
    const fix = args[0] === 'fix'
    if (this.npm.config.get('package-lock') === false && fix) {
      throw this.usageError('fix can not be used without a package-lock')
    }
    const reporter = this.npm.config.get('json') ? 'json' : 'detail'
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      audit: true,
      path: this.npm.prefix,
      reporter,
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    await arb.audit({ fix })
    if (fix) {
      await reifyFinish(this.npm, arb)
    } else {
      // will throw if there's an error, because this is an audit command
      auditError(this.npm, arb.auditReport)
      const result = npmAuditReport(arb.auditReport, {
        ...opts,
        chalk: this.npm.chalk,
      })
      // keep an already-failing exit code; otherwise use the report's
      process.exitCode = process.exitCode || result.exitCode
      output.standard(result.report)
    }
  }

  // Verify registry signatures for the actual installed tree, honoring
  // workspace filters. Not supported for global installs.
  async auditSignatures () {
    if (this.npm.global) {
      throw Object.assign(
        new Error('`npm audit signatures` does not support global packages'), {
          code: 'EAUDITGLOBAL',
        }
      )
    }

    log.verbose('audit', 'loading installed dependencies')
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    const tree = await arb.loadActual()
    let filterSet = new Set()
    if (opts.workspaces && opts.workspaces.length) {
      // limit verification to the selected workspaces' dependencies
      filterSet =
        arb.workspaceDependencySet(
          tree,
          opts.workspaces,
          this.npm.flatOptions.includeWorkspaceRoot
        )
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      // --no-workspaces: skip everything that belongs to workspaces
      filterSet =
        arb.excludeWorkspacesDependencySet(tree)
    }

    const verify = new VerifySignatures(tree, filterSet, this.npm, { ...opts })
    await verify.run()
  }
}

module.exports = Audit

34
NodeJS/node_modules/npm/lib/commands/bugs.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
const PackageUrlCmd = require('../package-url-cmd.js')
// `npm bugs` — open the bug tracker URL for a package in the browser.
class Bugs extends PackageUrlCmd {
  static description = 'Report bugs for a package in a web browser'
  static name = 'bugs'

  // Pick the best bug-report URL from the manifest, falling back to the
  // hosted repo's tracker and finally to the npmjs.com package page.
  getUrl (spec, mani) {
    const { bugs } = mani
    if (bugs) {
      if (typeof bugs === 'string') {
        return bugs
      }
      if (typeof bugs === 'object') {
        // a url takes precedence over a bare contact email
        if (bugs.url) {
          return bugs.url
        }
        if (bugs.email) {
          return `mailto:${bugs.email}`
        }
      }
    }

    // try to get it from the repo, if possible
    const hosted = this.hostedFromMani(mani)
    const hostedBugs = hosted?.bugs()
    if (hostedBugs) {
      return hostedBugs
    }

    // just send them to the website, hopefully that has some info!
    return `https://www.npmjs.com/package/${mani.name}`
  }
}

module.exports = Bugs

218
NodeJS/node_modules/npm/lib/commands/cache.js generated vendored Normal file
View File

@@ -0,0 +1,218 @@
const cacache = require('cacache')
const pacote = require('pacote')
const fs = require('node:fs/promises')
const { join } = require('node:path')
const semver = require('semver')
const BaseCommand = require('../base-cmd.js')
const npa = require('npm-package-arg')
const jsonParse = require('json-parse-even-better-errors')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { log, output } = require('proc-log')
// Find the cache index keys that belong to the package described by
// `parsed` (an npm-package-arg result): registry tarball entries whose
// version satisfies the requested range, plus packument entries and the
// cached tarballs referenced by their matching versions.
const searchCachePackage = async (path, parsed, cacheKeys) => {
  /* eslint-disable-next-line max-len */
  const searchMFH = new RegExp(`^make-fetch-happen:request-cache:.*(?<!/[@a-zA-Z]+)/${parsed.name}/-/(${parsed.name}[^/]+.tgz)$`)
  const searchPack = new RegExp(`^make-fetch-happen:request-cache:.*/${parsed.escapedName}$`)
  const matches = new Set()
  const keys = new Set(cacheKeys)
  for (const key of keys) {
    // match on the public key registry url format
    const tarballMatch = key.match(searchMFH)
    if (tarballMatch) {
      // extract the version from the tarball filename and check it
      // against the requested range
      const basename = tarballMatch[1].slice(0, -4) // drop ".tgz"
      const namePrefix = `${parsed.name.split('/').pop()}-`
      const version = basename.slice(namePrefix.length)
      if (semver.satisfies(version, parsed.rawSpec)) {
        matches.add(key)
      }
      continue
    }

    // is this key a packument?
    if (!searchPack.test(key)) {
      continue
    }

    matches.add(key)
    let packument
    try {
      const details = await cacache.get(path, key)
      packument = jsonParse(details.data)
    } catch {
      // if we couldn't parse the packument, abort
      continue
    }
    if (!packument.versions || typeof packument.versions !== 'object') {
      continue
    }

    // assuming this is a packument: also include the cached tarball entry
    // for every version that satisfies the requested range
    for (const [version, verData] of Object.entries(packument.versions)) {
      if (!semver.satisfies(version, parsed.rawSpec)) {
        continue
      }
      const tarball = verData.dist && typeof verData.dist === 'object'
        ? verData.dist.tarball
        : undefined
      if (tarball !== undefined) {
        const tarballKey = `make-fetch-happen:request-cache:${tarball}`
        if (keys.has(tarballKey)) {
          matches.add(tarballKey)
        }
      }
    }
  }
  return matches
}
// Implements `npm cache <add|clean|ls|verify>` over the cacache store
// located at <npm-cache>/_cacache.
class Cache extends BaseCommand {
  static description = 'Manipulates packages cache'
  static name = 'cache'
  static params = ['cache']
  static usage = [
    'add <package-spec>',
    'clean [<key>]',
    'ls [<name>@<version>]',
    'verify',
  ]

  // Shell tab-completion: offer the four subcommands when completing the
  // first argument, and nothing for any later position.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['add', 'clean', 'verify', 'ls']
    }

    // TODO - eventually...
    switch (argv[2]) {
      case 'verify':
      case 'clean':
      case 'add':
      case 'ls':
        return []
    }
  }

  // Dispatch to the handler for the requested subcommand; rm/clear are
  // aliases for clean and check is an alias for verify.
  async exec (args) {
    const cmd = args.shift()
    switch (cmd) {
      case 'rm': case 'clear': case 'clean':
        return await this.clean(args)
      case 'add':
        return await this.add(args)
      case 'verify': case 'check':
        return await this.verify()
      case 'ls':
        return await this.ls(args)
      default:
        throw this.usageError()
    }
  }

  // npm cache clean [pkg]*
  // With no arguments: refuse to wipe the cache unless --force is set,
  // then remove the whole _cacache directory. With arguments: delete each
  // named index entry and its content blob.
  async clean (args) {
    const cachePath = join(this.npm.cache, '_cacache')
    if (args.length === 0) {
      if (!this.npm.config.get('force')) {
        throw new Error(`As of npm@5, the npm cache self-heals from corruption issues
by treating integrity mismatches as cache misses. As a result,
data extracted from the cache is guaranteed to be valid. If you
want to make sure everything is consistent, use \`npm cache verify\`
instead. Deleting the cache can only make npm go slower, and is
not likely to correct any problems you may be encountering!
On the other hand, if you're debugging an issue with the installer,
or race conditions that depend on the timing of writing to an empty
cache, you can use \`npm install --cache /tmp/empty-cache\` to use a
temporary cache instead of nuking the actual one.
If you're sure you want to delete the entire cache, rerun this command
with --force.`)
      }
      return fs.rm(cachePath, { recursive: true, force: true })
    }
    for (const key of args) {
      let entry
      try {
        entry = await cacache.get(cachePath, key)
      } catch (err) {
        log.warn('cache', `Not Found: ${key}`)
        // NOTE(review): `break` abandons any remaining keys after the first
        // miss — confirm this is intended rather than `continue`
        break
      }
      output.standard(`Deleted: ${key}`)
      await cacache.rm.entry(cachePath, key)
      // XXX this could leave other entries without content!
      await cacache.rm.content(cachePath, entry.integrity)
    }
  }

  // npm cache add <tarball-url>...
  // npm cache add <pkg> <ver>...
  // npm cache add <tarball>...
  // npm cache add <folder>...
  // Fetch each spec through pacote so the tarball and full metadata land
  // in the cache as a side effect of the request.
  async add (args) {
    log.silly('cache add', 'args', args)
    if (args.length === 0) {
      throw this.usageError('First argument to `add` is required')
    }

    await Promise.all(args.map(async spec => {
      log.silly('cache add', 'spec', spec)
      // we ask pacote for the thing, and then just throw the data
      // away so that it tee-pipes it into the cache like it does
      // for a normal request.
      await pacote.tarball.stream(spec, stream => {
        stream.resume()
        return stream.promise()
      }, { ...this.npm.flatOptions })

      await pacote.manifest(spec, {
        ...this.npm.flatOptions,
        fullMetadata: true,
      })
    }))
  }

  // Run cacache's verification/garbage-collection pass and print a summary,
  // abbreviating the cache path with ~ when it lives under $HOME.
  async verify () {
    const cache = join(this.npm.cache, '_cacache')
    const prefix = cache.indexOf(process.env.HOME) === 0
      ? `~${cache.slice(process.env.HOME.length)}`
      : cache
    const stats = await cacache.verify(cache)
    output.standard(`Cache verified and compressed (${prefix})`)
    output.standard(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
    if (stats.badContentCount) {
      output.standard(`Corrupted content removed: ${stats.badContentCount}`)
    }
    if (stats.reclaimedCount) {
      /* eslint-disable-next-line max-len */
      output.standard(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
    }
    if (stats.missingContent) {
      output.standard(`Missing content: ${stats.missingContent}`)
    }
    output.standard(`Index entries: ${stats.totalEntries}`)
    output.standard(`Finished in ${stats.runTime.total / 1000}s`)
  }

  // npm cache ls [--package <spec> ...]
  // List cache index keys sorted, optionally filtered to those belonging
  // to the given package specs. Tagged specs are rejected since tags are
  // not resolvable from the cache alone.
  async ls (specs) {
    const cachePath = join(this.npm.cache, '_cacache')
    const cacheKeys = Object.keys(await cacache.ls(cachePath))
    if (specs.length > 0) {
      // get results for each package spec specified
      const results = new Set()
      for (const spec of specs) {
        const parsed = npa(spec)
        if (parsed.rawSpec !== '' && parsed.type === 'tag') {
          throw this.usageError('Cannot list cache keys for a tagged package.')
        }
        const keySet = await searchCachePackage(cachePath, parsed, cacheKeys)
        for (const key of keySet) {
          results.add(key)
        }
      }
      [...results].sort(localeCompare).forEach(key => output.standard(key))
      return
    }
    cacheKeys.sort(localeCompare).forEach(key => output.standard(key))
  }
}
module.exports = Cache

129
NodeJS/node_modules/npm/lib/commands/ci.js generated vendored Normal file
View File

@@ -0,0 +1,129 @@
const reifyFinish = require('../utils/reify-finish.js')
const runScript = require('@npmcli/run-script')
const fs = require('node:fs/promises')
const path = require('node:path')
const { log, time } = require('proc-log')
const validateLockfile = require('../utils/validate-lockfile.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const getWorkspaces = require('../utils/get-workspaces.js')
// `npm ci`: a clean, lockfile-exact install. Loads the lockfile, verifies
// it matches package.json, removes node_modules, reifies strictly from the
// lockfile, then runs the standard install lifecycle scripts.
class CI extends ArboristWorkspaceCmd {
  static description = 'Clean install a project'
  static name = 'ci'

  // These are in the order they will show up in when running "-h"
  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'omit',
    'include',
    'strict-peer-deps',
    'foreground-scripts',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  async exec () {
    // clean installs only make sense for a local project
    if (this.npm.global) {
      throw Object.assign(new Error('`npm ci` does not work for global packages'), {
        code: 'ECIGLOBAL',
      })
    }

    const where = this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      packageLock: true, // npm ci should never skip lock files
      path: where,
      save: false, // npm ci should never modify the lockfile or package.json
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    // a missing/old lockfile is a usage error, not a crash
    await arb.loadVirtual().catch(er => {
      log.verbose('loadVirtual', er.stack)
      const msg =
        'The `npm ci` command can only install with an existing package-lock.json or\n' +
        'npm-shrinkwrap.json with lockfileVersion >= 1. Run an install with npm@5 or\n' +
        'later to generate a package-lock.json file, then try again.'
      throw this.usageError(msg)
    })

    // retrieves inventory of packages from loaded virtual tree (lock file)
    const virtualInventory = new Map(arb.virtualTree.inventory)

    // build ideal tree step needs to come right after retrieving the virtual
    // inventory since it's going to erase the previous ref to virtualTree
    await arb.buildIdealTree()

    // verifies that the packages from the ideal tree will match
    // the same versions that are present in the virtual tree (lock file)
    // throws a validation error in case of mismatches
    const errors = validateLockfile(virtualInventory, arb.idealTree.inventory)
    if (errors.length) {
      throw this.usageError(
        '`npm ci` can only install packages when your package.json and ' +
        'package-lock.json or npm-shrinkwrap.json are in sync. Please ' +
        'update your lock file with `npm install` ' +
        'before continuing.\n\n' +
        errors.join('\n')
      )
    }

    const dryRun = this.npm.config.get('dry-run')
    if (!dryRun) {
      // remove node_modules in the project root and every workspace
      const workspacePaths = await getWorkspaces([], {
        path: this.npm.localPrefix,
        includeWorkspaceRoot: true,
      })

      // Only remove node_modules after we've successfully loaded the virtual
      // tree and validated the lockfile
      await time.start('npm-ci:rm', async () => {
        return await Promise.all([...workspacePaths.values()].map(async modulePath => {
          const fullPath = path.join(modulePath, 'node_modules')
          // get the list of entries so we can skip the glob for performance
          const entries = await fs.readdir(fullPath, null).catch(() => [])
          return Promise.all(entries.map(folder => {
            return fs.rm(path.join(fullPath, folder), { force: true, recursive: true })
          }))
        }))
      })
    }

    await arb.reify(opts)

    const ignoreScripts = this.npm.config.get('ignore-scripts')
    // run the same set of scripts that `npm install` runs.
    if (!ignoreScripts) {
      const scripts = [
        'preinstall',
        'install',
        'postinstall',
        'prepublish', // XXX should we remove this finally??
        'preprepare',
        'prepare',
        'postprepare',
      ]
      const scriptShell = this.npm.config.get('script-shell') || undefined
      for (const event of scripts) {
        await runScript({
          path: where,
          args: [],
          scriptShell,
          stdio: 'inherit',
          event,
        })
      }
    }
    await reifyFinish(this.npm, arb)
  }
}
module.exports = CI

283
NodeJS/node_modules/npm/lib/commands/completion.js generated vendored Normal file
View File

@@ -0,0 +1,283 @@
// Each command has a completion function that takes an options object and a cb
// The callback gets called with an error and an array of possible completions.
// The options object is built up based on the environment variables set by
// zsh or bash when calling a function for completion, based on the cursor
// position and the command line thus far. These are:
// COMP_CWORD: the index of the "word" in the command line being completed
// COMP_LINE: the full command line thusfar as a string
// COMP_POINT: the cursor index at the point of triggering completion
//
// We parse the command line with nopt, like npm does, and then create an
// options object containing:
// words: array of words in the command line
// w: the index of the word being completed (ie, COMP_CWORD)
// word: the word being completed
// line: the COMP_LINE
// lineLength
// point: the COMP_POINT, usually equal to line length, but not always, eg if
// the user has pressed the left-arrow to complete an earlier word
// partialLine: the line up to the point
// partialWord: the word being completed (which might be ''), up to the point
// conf: a nopt parse of the command line
//
// When the implementation completion method returns its list of strings,
// and arrays of strings, we filter that by any that start with the
// partialWord, since only those can possibly be valid matches.
//
// Matches are wrapped with ' to escape them, if necessary, and then printed
// one per line for the shell completion method to consume in IFS=$'\n' mode
// as an array.
const fs = require('node:fs/promises')
const nopt = require('nopt')
const { resolve } = require('node:path')
const { output } = require('proc-log')
const Npm = require('../npm.js')
const { definitions, shorthands } = require('@npmcli/config/lib/definitions')
const { commands, aliases, deref } = require('../utils/cmd-list.js')
const { isWindowsShell } = require('../utils/is-windows.js')
const BaseCommand = require('../base-cmd.js')
// true only when `file` exists and is a regular file; any stat failure
// (missing path, permission error, ...) is treated as "does not exist"
const fileExists = async (file) => {
  try {
    const stats = await fs.stat(file)
    return stats.isFile()
  } catch {
    return false
  }
}
const configNames = Object.keys(definitions)
const shorthandNames = Object.keys(shorthands)
const allConfs = configNames.concat(shorthandNames)
// Implements `npm completion`: with no COMP_* env vars present, dump the
// shell completion script; otherwise compute completion candidates for the
// word at the cursor and print them one per line.
class Completion extends BaseCommand {
  static description = 'Tab Completion for npm'
  static name = 'completion'

  // completion for the completion command: offer to append the setup
  // snippet to whichever shell rc files exist (first two words only)
  static async completion (opts) {
    if (opts.w > 2) {
      return
    }

    const [bashExists, zshExists] = await Promise.all([
      fileExists(resolve(process.env.HOME, '.bashrc')),
      fileExists(resolve(process.env.HOME, '.zshrc')),
    ])
    const out = []
    if (zshExists) {
      out.push(['>>', '~/.zshrc'])
    }

    if (bashExists) {
      out.push(['>>', '~/.bashrc'])
    }

    return out
  }

  async exec (args) {
    if (isWindowsShell) {
      const msg = 'npm completion supported only in MINGW / Git bash on Windows'
      throw Object.assign(new Error(msg), {
        code: 'ENOTSUP',
      })
    }

    const { COMP_CWORD, COMP_LINE, COMP_POINT, COMP_FISH } = process.env

    // if the COMP_* isn't in the env, then just dump the script.
    if (COMP_CWORD === undefined || COMP_LINE === undefined || COMP_POINT === undefined) {
      return dumpScript(resolve(this.npm.npmRoot, 'lib', 'utils', 'completion.sh'))
    }

    // ok we're actually looking at the envs and outputting the suggestions
    // get the partial line and partial word,
    // if the point isn't at the end.
    // ie, tabbing at: npm foo b|ar
    const w = +COMP_CWORD
    const words = args.map(unescape)
    const word = words[w]
    const line = COMP_LINE
    const point = +COMP_POINT
    const partialLine = line.slice(0, point)
    const partialWords = words.slice(0, w)

    // figure out where in that last word the point is.
    const partialWordRaw = args[w]
    let i = partialWordRaw.length
    while (partialWordRaw.slice(0, i) !== partialLine.slice(-1 * i) && i > 0) {
      i--
    }

    const partialWord = unescape(partialWordRaw.slice(0, i))
    partialWords.push(partialWord)

    const opts = {
      isFish: COMP_FISH === 'true',
      words,
      w,
      word,
      line,
      lineLength: line.length,
      point,
      partialLine,
      partialWords,
      partialWord,
      raw: args,
    }

    // completing a config flag or its value, unless a `--` terminator has
    // already appeared earlier on the line
    if (partialWords.slice(0, -1).indexOf('--') === -1) {
      if (word.charAt(0) === '-') {
        return this.wrap(opts, configCompl(opts))
      }

      if (words[w - 1] &&
        words[w - 1].charAt(0) === '-' &&
        !isFlag(words[w - 1])) {
        // awaiting a value for a non-bool config.
        // don't even try to do this for now
        return this.wrap(opts, configValueCompl(opts))
      }
    }

    // try to find the npm command.
    // it's the first thing after all the configs.
    // take a little shortcut and use npm's arg parsing logic.
    // don't have to worry about the last arg being implicitly
    // boolean'ed, since the last block will catch that.
    const types = Object.entries(definitions).reduce((acc, [key, def]) => {
      acc[key] = def.type
      return acc
    }, {})
    const parsed = opts.conf =
      nopt(types, shorthands, partialWords.slice(0, -1), 0)
    // check if there's a command already.
    const cmd = parsed.argv.remain[1]
    if (!cmd) {
      return this.wrap(opts, cmdCompl(opts, this.npm))
    }

    Object.keys(parsed).forEach(k => this.npm.config.set(k, parsed[k]))

    // at this point, if words[1] is some kind of npm command,
    // then complete on it.
    // otherwise, do nothing
    try {
      const { completion } = Npm.cmd(cmd)
      if (completion) {
        const comps = await completion(opts, this.npm)
        return this.wrap(opts, comps)
      }
    } catch {
      // it wasnt a valid command, so do nothing
    }
  }

  // The command should respond with an array. Loop over that,
  // wrapping quotes around any that have spaces, and writing
  // them to stdout.
  // If any of the items are arrays, then join them with a space.
  // Ie, returning ['a', 'b c', ['d', 'e']] would allow it to expand
  // to: 'a', 'b c', or 'd' 'e'
  wrap (opts, compls) {
    // TODO this was dead code, leaving it in case we find some command we
    // forgot that requires this. if so *that command should fix its
    // completions*
    // compls = compls.map(w => !/\s+/.test(w) ? w : '\'' + w + '\'')

    if (opts.partialWord) {
      compls = compls.filter(c => c.startsWith(opts.partialWord))
    }

    if (compls.length > 0) {
      output.standard(compls.join('\n'))
    }
  }
}
// Print the completion script at `p` to stdout, minus any shebang line.
// EPIPE on stdout is deliberately swallowed: "source"/"." in bash on OS X
// closes its file argument before reading from it, so the single write
// below always raises EPIPE there. Without ignoring it,
// `. <(npm completion)` could never work on OS X.
const dumpScript = async (p) => {
  const script = (await fs.readFile(p, 'utf8')).replace(/^#!.*?\n/, '')
  return new Promise((res, rej) => {
    let settled = false
    // settle at most once, from whichever of the two callbacks fires first
    const finish = (er) => {
      if (settled) {
        return
      }
      settled = true
      if (er) {
        rej(er)
      } else {
        res()
      }
    }

    process.stdout.on('error', er => {
      // Darwin is a pain sometimes — see the EPIPE note above.
      // TODO Ignoring coverage, see 'non EPIPE errors cause failures' test.
      /* istanbul ignore next */
      finish(er.errno === 'EPIPE' ? null : er)
    })

    process.stdout.write(script, () => finish())
  })
}
// Undo shell-completion escaping: strip surrounding single quotes, or
// un-backslash-escape embedded spaces.
const unescape = w => {
  if (w.charAt(0) === '\'') {
    return w.replace(/^'|'$/g, '')
  }
  return w.replace(/\\ /g, ' ')
}
// the current word has a dash. Return the config names,
// with the same number of dashes as the current word has.
const configCompl = opts => {
  const [, dashes, no] = opts.word.match(/^(-+)((?:no-)*)(.*)$/)
  const flaggable = configNames.filter(isFlag)
  const plain = allConfs.map(conf => dashes + conf)
  // boolean flags also complete as their negated `no-` form
  const negated = flaggable.map(flag => dashes + (no || 'no-') + flag)
  return plain.concat(negated)
}
// expand with the valid values of various config values.
// not yet implemented.
const configValueCompl = () => {
  return []
}
// check if the thing is a flag or not: negated (`no-`) words, Boolean-typed
// configs, and shorthands never take a value argument.
// NOTE(review): an unknown config name makes `definitions[conf]` undefined
// and the destructure below throw — confirm callers only pass known words.
const isFlag = word => {
  // shorthands never take args.
  const [, , no, conf] = word.match(/^(-*)((?:no-)+)?(.*)$/)
  const { type } = definitions[conf]
  return no ||
    type === Boolean ||
    (Array.isArray(type) && type.includes(Boolean)) ||
    shorthands[conf]
}
// complete against the npm commands
// if they all resolve to the same thing, just return the thing it already is
const cmdCompl = (opts) => {
  const allCommands = commands.concat(Object.keys(aliases))
  const matches = allCommands.filter(c => c.startsWith(opts.partialWord))
  if (matches.length === 0) {
    return matches
  }

  const derefs = new Set(matches.map(c => deref(c)))
  return derefs.size === 1 ? [...derefs] : allCommands
}
module.exports = Completion

410
NodeJS/node_modules/npm/lib/commands/config.js generated vendored Normal file
View File

@@ -0,0 +1,410 @@
const { mkdir, readFile, writeFile } = require('node:fs/promises')
const { dirname, resolve } = require('node:path')
const { spawn } = require('node:child_process')
const { EOL } = require('node:os')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const pkgJson = require('@npmcli/package-json')
const { defaults, definitions } = require('@npmcli/config/lib/definitions')
const { log, output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
const { redact } = require('@npmcli/redact')
// These are the configs that we can nerf-dart. Not all of them currently even
// *have* config definitions so we have to explicitly validate them here.
// This is used to validate during "npm config set"
const nerfDarts = [
'_auth',
'_authToken',
'_password',
'certfile',
'email',
'keyfile',
'username',
]
// These are the config values to swap with "protected". It does not catch
// every single sensitive thing a user may put in the npmrc file but it gets
// the common ones. This is distinct from nerfDarts because that is used to
// validate valid configs during "npm config set", and folks may have old
// invalid entries lying around in a config file that we still want to protect
// when running "npm config list"
// This is a more general list of values to consider protected. You can not
// "npm config get" them, and they will not display during "npm config list"
const protected = [
'auth',
'authToken',
'certfile',
'email',
'keyfile',
'password',
'username',
]
// take an array of `[key, value, k2=v2, k3, v3, ...]` and turn into
// { key: value, k2: v2, k3: v3 } — a bare key with no following argument
// gets the empty string; keys and values are trimmed.
const keyValues = args => {
  const result = {}
  for (let i = 0; i < args.length; i++) {
    const [key, ...valueParts] = args[i].split('=')
    let value
    if (valueParts.length) {
      // `k=v` (re-join in case the value itself contains `=`)
      value = valueParts.join('=')
    } else if (i < args.length - 1) {
      // `k v` — consume the next argument as the value
      value = args[++i]
    } else {
      value = ''
    }
    result[key.trim()] = value.trim()
  }
  return result
}
const isProtected = (k) => {
// _password
if (k.startsWith('_')) {
return true
}
if (protected.includes(k)) {
return true
}
// //localhost:8080/:_password
if (k.startsWith('//')) {
if (k.includes(':_')) {
return true
}
// //registry:_authToken or //registry:authToken
for (const p of protected) {
if (k.endsWith(`:${p}`) || k.endsWith(`:_${p}`)) {
return true
}
}
}
return false
}
// Private fields are either protected, or their value contains something
// the redaction pass would rewrite (i.e. embedded sensitive info).
const isPrivate = (k, v) => {
  return isProtected(k) || redact(v) !== v
}
// Render one `key = value` listing line, masking protected keys entirely
// and redacting anything sensitive embedded in the value.
const displayVar = (k, v) => {
  const shown = isProtected(k) ? '(protected)' : JSON.stringify(redact(v))
  return `${k} = ${shown}`
}
// Implements `npm config`: get/set/delete/list/edit/fix operations over the
// layered npm configuration (cli, env, project, user, global, defaults).
class Config extends BaseCommand {
  static description = 'Manage the npm configuration files'
  static name = 'config'
  static usage = [
    'set <key>=<value> [<key>=<value> ...]',
    'get [<key> [<key> ...]]',
    'delete <key> [<key> ...]',
    'list [--json]',
    'edit',
    'fix',
  ]

  static params = [
    'json',
    'global',
    'editor',
    'location',
    'long',
  ]

  static ignoreImplicitWorkspace = false
  // config must still run when the current config is invalid (e.g. `fix`)
  static skipConfigValidation = true

  // Shell tab-completion for the subcommand and, where sensible, the
  // config key names.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv[1] !== 'config') {
      argv.unshift('config')
    }

    if (argv.length === 2) {
      const cmds = ['get', 'set', 'delete', 'ls', 'rm', 'edit', 'fix']
      if (opts.partialWord !== 'l') {
        cmds.push('list')
      }

      return cmds
    }

    const action = argv[2]
    switch (action) {
      case 'set':
        // todo: complete with valid values, if possible.
        if (argv.length > 3) {
          return []
        }

        // fallthrough
        /* eslint no-fallthrough:0 */
      case 'get':
      case 'delete':
      case 'rm':
        return Object.keys(definitions)
      case 'edit':
      case 'list':
      case 'ls':
      case 'fix':
      default:
        return []
    }
  }

  // Dispatch to the handler for the requested action; rm/del alias delete,
  // ls aliases list.
  async exec ([action, ...args]) {
    switch (action) {
      case 'set':
        await this.set(args)
        break
      case 'get':
        await this.get(args)
        break
      case 'delete':
      case 'rm':
      case 'del':
        await this.del(args)
        break
      case 'list':
      case 'ls':
        await (this.npm.flatOptions.json ? this.listJson() : this.list())
        break
      case 'edit':
        await this.edit()
        break
      case 'fix':
        await this.fix()
        break
      default:
        throw this.usageError()
    }
  }

  // Set one or more key=value pairs in the config file for the current
  // location; an empty value deletes the key. Rejects unknown and
  // deprecated options.
  async set (args) {
    if (!args.length) {
      throw this.usageError()
    }

    const where = this.npm.flatOptions.location
    for (const [key, val] of Object.entries(keyValues(args))) {
      log.info('config', 'set %j %j', key, val)
      // strip any registry/nerf-dart prefix (e.g. `//host/:_authToken`)
      const baseKey = key.split(':').pop()
      if (!this.npm.config.definitions[baseKey] && !nerfDarts.includes(baseKey)) {
        throw new Error(`\`${baseKey}\` is not a valid npm option`)
      }
      const deprecated = this.npm.config.definitions[baseKey]?.deprecated
      if (deprecated) {
        throw new Error(
          `The \`${baseKey}\` option is deprecated, and can not be set in this way${deprecated}`
        )
      }

      if (val === '') {
        this.npm.config.delete(key, where)
      } else {
        this.npm.config.set(key, val, where)
      }

      if (!this.npm.config.validate(where)) {
        log.warn('config', 'omitting invalid config values')
      }
    }

    await this.npm.config.save(where)
  }

  // Print the value of each requested key; with no keys, fall back to
  // list. Protected/private values may not be retrieved this way.
  async get (keys) {
    if (!keys.length) {
      return this.list()
    }

    const out = []
    for (const key of keys) {
      const val = this.npm.config.get(key)
      if (isPrivate(key, val)) {
        throw new Error(`The ${key} option is protected, and can not be retrieved in this way`)
      }

      // only prefix with `key=` when printing more than one key
      const pref = keys.length > 1 ? `${key}=` : ''
      out.push(pref + val)
    }
    output.standard(out.join('\n'))
  }

  // Delete each key from the config file for the current location.
  async del (keys) {
    if (!keys.length) {
      throw this.usageError()
    }

    const where = this.npm.flatOptions.location
    for (const key of keys) {
      this.npm.config.delete(key, where)
    }
    await this.npm.config.save(where)
  }

  // Open the config file for the current location in the user's editor,
  // after rewriting it with a commented listing of all default values.
  async edit () {
    const ini = require('ini')
    const e = this.npm.flatOptions.editor
    const where = this.npm.flatOptions.location
    const file = this.npm.config.data.get(where).source

    // save first, just to make sure it's synced up
    // this also removes all the comments from the last time we edited it.
    await this.npm.config.save(where)

    const data = (
      await readFile(file, 'utf8').catch(() => '')
    ).replace(/\r\n/g, '\n')
    const entries = Object.entries(defaults)
    const defData = entries.reduce((str, [key, val]) => {
      const obj = { [key]: val }
      const i = ini.stringify(obj)
        .replace(/\r\n/g, '\n') // normalizes output from ini.stringify
        .replace(/\n$/m, '')
        .replace(/^/g, '; ')
        .replace(/\n/g, '\n; ')
        // NOTE(review): `i` ends up an array here, so the `+` below joins
        // its lines with commas — confirm this output is intended
        .split('\n')

      return str + '\n' + i
    }, '')

    const tmpData = `;;;;
; npm ${where}config file: ${file}
; this is a simple ini-formatted file
; lines that start with semi-colons are comments
; run \`npm help 7 config\` for documentation of the various options
;
; Configs like \`@scope:registry\` map a scope to a given registry url.
;
; Configs like \`//<hostname>/:_authToken\` are auth that is restricted
; to the registry host specified.
${data.split('\n').sort(localeCompare).join('\n').trim()}
;;;;
; all available options shown below with default values
;;;;
${defData}
`.split('\n').join(EOL)
    await mkdir(dirname(file), { recursive: true })
    await writeFile(file, tmpData, 'utf8')
    // spawn the editor (the config value may include arguments) and wait
    // for it to exit; a non-zero exit code is an error
    await new Promise((res, rej) => {
      const [bin, ...args] = e.split(/\s+/)
      const editor = spawn(bin, [...args, file], { stdio: 'inherit' })
      editor.on('exit', (code) => {
        if (code) {
          return rej(new Error(`editor process exited with code: ${code}`))
        }
        return res()
      })
    })
  }

  // Repair the invalid-auth problems reported by validate()
  // (ERR_INVALID_AUTH), limited to the explicitly-selected location when
  // one was given, then save every config file that was modified.
  async fix () {
    let problems

    try {
      this.npm.config.validate()
      return // if validate doesn't throw we have nothing to do
    } catch (err) {
      // coverage skipped because we don't need to test rethrowing errors
      // istanbul ignore next
      if (err.code !== 'ERR_INVALID_AUTH') {
        throw err
      }

      problems = err.problems
    }

    if (!this.npm.config.isDefault('location')) {
      problems = problems.filter((problem) => {
        return problem.where === this.npm.config.get('location')
      })
    }

    this.npm.config.repair(problems)
    const locations = []

    output.standard('The following configuration problems have been repaired:\n')
    const summary = problems.map(({ action, from, to, key, where }) => {
      // coverage disabled for else branch because it is intentionally omitted
      // istanbul ignore else
      if (action === 'rename') {
        // we keep track of which configs were modified here so we know what to save later
        locations.push(where)
        return `~ \`${from}\` renamed to \`${to}\` in ${where} config`
      } else if (action === 'delete') {
        locations.push(where)
        return `- \`${key}\` deleted from ${where} config`
      }
    }).join('\n')
    output.standard(summary)

    return await Promise.all(locations.map((location) => this.npm.config.save(location)))
  }

  // Print an ini-style listing of every non-default config layer (all
  // layers with --long), plus publishConfig from package.json when local.
  async list () {
    const msg = []
    // long does not have a flattener
    const long = this.npm.config.get('long')
    for (const [where, { data, source }] of this.npm.config.data.entries()) {
      if (where === 'default' && !long) {
        continue
      }

      const entries = Object.entries(data).sort(([a], [b]) => localeCompare(a, b))
      if (!entries.length) {
        continue
      }

      msg.push(`; "${where}" config from ${source}`, '')
      for (const [k, v] of entries) {
        const display = displayVar(k, v)
        const src = this.npm.config.find(k)
        // annotate values that are shadowed by a higher-priority layer
        msg.push(src === where ? display : `; ${display} ; overridden by ${src}`)
        // NOTE(review): push() with no arguments is a no-op — confirm a
        // blank line was intended here
        msg.push()
      }
      msg.push('')
    }

    if (!long) {
      msg.push(
        `; node bin location = ${process.execPath}`,
        `; node version = ${process.version}`,
        `; npm local prefix = ${this.npm.localPrefix}`,
        `; npm version = ${this.npm.version}`,
        `; cwd = ${process.cwd()}`,
        `; HOME = ${process.env.HOME}`,
        '; Run `npm config ls -l` to show all defaults.'
      )
      msg.push('')
    }

    if (!this.npm.global) {
      const { content } = await pkgJson.normalize(this.npm.prefix).catch(() => ({ content: {} }))
      if (content.publishConfig) {
        const pkgPath = resolve(this.npm.prefix, 'package.json')
        msg.push(`; "publishConfig" from ${pkgPath}`)
        msg.push('; This set of config values will be used at publish-time.', '')
        const entries = Object.entries(content.publishConfig)
          .sort(([a], [b]) => localeCompare(a, b))
        for (const [k, value] of entries) {
          msg.push(displayVar(k, value))
        }
        msg.push('')
      }
    }

    output.standard(msg.join('\n').trim())
  }

  // Emit every non-private config value as a single JSON object.
  async listJson () {
    const publicConf = {}
    for (const key in this.npm.config.list[0]) {
      const value = this.npm.config.get(key)
      if (isPrivate(key, value)) {
        continue
      }

      publicConf[key] = value
    }

    output.buffer(publicConf)
  }
}
module.exports = Config

51
NodeJS/node_modules/npm/lib/commands/dedupe.js generated vendored Normal file
View File

@@ -0,0 +1,51 @@
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// dedupe duplicated packages, or find them in the tree
class Dedupe extends ArboristWorkspaceCmd {
  static description = 'Reduce duplication in the package tree'
  static name = 'dedupe'

  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'strict-peer-deps',
    'package-lock',
    'omit',
    'include',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  // Run Arborist's dedupe against the local project tree, then finish with
  // the standard post-reify output (audit report, funding message, etc).
  async exec () {
    if (this.npm.global) {
      throw Object.assign(new Error('`npm dedupe` does not work in global mode.'), {
        code: 'EDEDUPEGLOBAL',
      })
    }

    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      dryRun: this.npm.config.get('dry-run'),
      // Saving during dedupe would only update if one of your direct
      // dependencies was also duplicated somewhere in your tree. It would be
      // confusing if running this were to also update your package.json. In
      // order to reduce potential confusion we set this to false.
      save: false,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.dedupe(opts)
    await reifyFinish(this.npm, arb)
  }
}
module.exports = Dedupe

76
NodeJS/node_modules/npm/lib/commands/deprecate.js generated vendored Normal file
View File

@@ -0,0 +1,76 @@
const fetch = require('npm-registry-fetch')
const { otplease } = require('../utils/auth.js')
const npa = require('npm-package-arg')
const { log } = require('proc-log')
const semver = require('semver')
const getIdentity = require('../utils/get-identity.js')
const libaccess = require('libnpmaccess')
const BaseCommand = require('../base-cmd.js')
// Implements `npm deprecate <package-spec> <message>`: marks every
// published version matching the spec's range as deprecated on the
// registry (an empty message un-deprecates).
class Deprecate extends BaseCommand {
  static description = 'Deprecate a version of a package'
  static name = 'deprecate'
  static usage = ['<package-spec> <message>']
  static params = [
    'registry',
    'otp',
  ]

  static ignoreImplicitWorkspace = true

  // Completion: suggest packages the signed-in user has write access to,
  // filtered by the partial name typed so far (first argument only).
  static async completion (opts, npm) {
    if (opts.conf.argv.remain.length > 1) {
      return []
    }

    const username = await getIdentity(npm, npm.flatOptions)
    const packages = await libaccess.getPackages(username, npm.flatOptions)
    return Object.keys(packages)
      .filter((name) =>
        packages[name] === 'write' &&
        (opts.conf.argv.remain.length === 0 ||
          name.startsWith(opts.conf.argv.remain[0])))
  }

  async exec ([pkg, msg]) {
    // msg == null because '' is a valid value, it indicates undeprecate
    if (!pkg || msg == null) {
      throw this.usageError()
    }

    // fetch the data and make sure it exists.
    const p = npa(pkg)
    // `*` matches everything; anything else must be a valid semver range
    const spec = p.rawSpec === '*' ? '*' : p.fetchSpec
    if (semver.validRange(spec, true) === null) {
      throw new Error(`invalid version range: ${spec}`)
    }

    const uri = '/' + p.escapedName
    // `write: true` requests the writable packument, required for the PUT
    const packument = await fetch.json(uri, {
      ...this.npm.flatOptions,
      spec: p,
      query: { write: true },
    })

    const versions = Object.keys(packument.versions)
      .filter(v => semver.satisfies(v, spec, { includePrerelease: true }))

    if (versions.length) {
      for (const v of versions) {
        packument.versions[v].deprecated = msg
      }
      // retry with a one-time password if the registry demands OTP
      return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, {
        ...opts,
        spec: p,
        method: 'PUT',
        body: packument,
        ignoreBody: true,
      }))
    } else {
      log.warn('deprecate', 'No version found for', p.rawSpec)
    }
  }
}
module.exports = Deprecate

291
NodeJS/node_modules/npm/lib/commands/diff.js generated vendored Normal file
View File

@@ -0,0 +1,291 @@
const { resolve } = require('node:path')
const semver = require('semver')
const libnpmdiff = require('libnpmdiff')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const pickManifest = require('npm-pick-manifest')
const { log, output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// `npm diff` — compare two package specs (or the local project against a
// published version) and print a unified diff, delegating to libnpmdiff.
class Diff extends BaseCommand {
  static description = 'The registry diff command'
  static name = 'diff'
  static usage = [
    '[...<paths>]',
  ]

  static params = [
    'diff',
    'diff-name-only',
    'diff-unified',
    'diff-ignore-all-space',
    'diff-no-prefix',
    'diff-src-prefix',
    'diff-dst-prefix',
    'diff-text',
    'global',
    'tag',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // args: optional file paths that limit which files get diffed.
  async exec (args) {
    // drop empty --diff values
    const specs = this.npm.config.get('diff').filter(d => d)
    if (specs.length > 2) {
      throw this.usageError(`Can't use more than two --diff arguments.`)
    }

    // execWorkspaces may have set this already
    if (!this.prefix) {
      this.prefix = this.npm.prefix
    }

    // this is the "top" directory, one up from node_modules
    // in global mode we have to walk one up from globalDir because our
    // node_modules is sometimes under ./lib, and in global mode we're only ever
    // walking through node_modules (because we will have been given a package
    // name already)
    if (this.npm.global) {
      this.top = resolve(this.npm.globalDir, '..')
    } else {
      this.top = this.prefix
    }

    const [a, b] = await this.retrieveSpecs(specs)
    log.info('diff', { src: a, dst: b })

    const res = await libnpmdiff([a, b], {
      ...this.npm.flatOptions,
      diffFiles: args,
      where: this.top,
    })
    return output.standard(res)
  }

  // Run the diff once per configured workspace, rooting each run at the
  // workspace directory.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const workspacePath of this.workspacePaths) {
      this.top = workspacePath
      this.prefix = workspacePath
      await this.exec(args)
    }
  }

  // get the package name from the packument at `path`
  // throws if no packument is present OR if it does not have `name` attribute
  async packageName () {
    let name
    try {
      const { content: pkg } = await pkgJson.normalize(this.prefix)
      name = pkg.name
    } catch (e) {
      log.verbose('diff', 'could not read project dir package.json')
    }

    if (!name) {
      throw this.usageError('Needs multiple arguments to compare or run from a project dir.')
    }

    return name
  }

  // Normalize 0, 1, or 2 --diff arguments into a pair of concrete specs
  // suitable for libnpmdiff.
  async retrieveSpecs ([a, b]) {
    if (a && b) {
      const specs = await this.convertVersionsToSpecs([a, b])
      return this.findVersionsByPackageName(specs)
    }

    // no arguments, defaults to comparing cwd
    // to its latest published registry version
    if (!a) {
      const pkgName = await this.packageName()
      return [
        `${pkgName}@${this.npm.config.get('tag')}`,
        // '#' must be percent-encoded so npa doesn't treat it as a committish
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    }

    // single argument, used to compare wanted versions of an
    // installed dependency or to compare the cwd to a published version
    let noPackageJson
    let pkgName
    try {
      const { content: pkg } = await pkgJson.normalize(this.prefix)
      pkgName = pkg.name
    } catch (e) {
      log.verbose('diff', 'could not read project dir package.json')
      noPackageJson = true
    }

    const missingPackageJson =
      this.usageError('Needs multiple arguments to compare or run from a project dir.')

    // using a valid semver range, that means it should just diff
    // the cwd against a published version to the registry using the
    // same project name and the provided semver range
    if (semver.validRange(a)) {
      if (!pkgName) {
        throw missingPackageJson
      }
      return [
        `${pkgName}@${a}`,
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    }

    // when using a single package name as arg and it's part of the current
    // install tree, then retrieve the current installed version and compare
    // it against the same value `npm outdated` would suggest you to update to
    const spec = npa(a)
    if (spec.registry) {
      let actualTree
      let node
      const Arborist = require('@npmcli/arborist')
      try {
        const opts = {
          ...this.npm.flatOptions,
          path: this.top,
        }
        const arb = new Arborist(opts)
        actualTree = await arb.loadActual(opts)
        node = actualTree &&
          actualTree.inventory.query('name', spec.name)
            .values().next().value
      } catch (e) {
        log.verbose('diff', 'failed to load actual install tree')
      }

      // not installed locally: fall back to registry version vs project dir
      if (!node || !node.name || !node.package || !node.package.version) {
        if (noPackageJson) {
          throw missingPackageJson
        }
        return [
          `${spec.name}@${spec.fetchSpec}`,
          `file:${this.prefix.replace(/#/g, '%23')}`,
        ]
      }

      // the range the root project declares for this dep, if any
      const tryRootNodeSpec = () =>
        (actualTree && actualTree.edgesOut.get(spec.name) || {}).spec

      // otherwise the range declared by any incoming edge
      const tryAnySpec = () => {
        for (const edge of node.edgesIn) {
          return edge.spec
        }
      }

      const aSpec = `file:${node.realpath.replace(/#/g, '%23')}`

      // finds what version of the package to compare against, if an exact
      // version or tag was passed then it should use that, otherwise
      // work from the top of the arborist tree to find the original semver
      // range declared in the package that depends on the package.
      let bSpec
      if (spec.rawSpec !== '*') {
        bSpec = spec.rawSpec
      } else {
        const bTargetVersion =
          tryRootNodeSpec()
          || tryAnySpec()

        // figure out what to compare against,
        // follows same logic to npm outdated "Wanted" results
        const packument = await pacote.packument(spec, {
          ...this.npm.flatOptions,
          preferOnline: true,
        })
        bSpec = pickManifest(
          packument,
          bTargetVersion,
          { ...this.npm.flatOptions }
        ).version
      }

      return [
        `${spec.name}@${aSpec}`,
        `${spec.name}@${bSpec}`,
      ]
    } else if (spec.type === 'directory') {
      return [
        `file:${spec.fetchSpec.replace(/#/g, '%23')}`,
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    } else {
      throw this.usageError(`Spec type ${spec.type} not supported.`)
    }
  }

  // Given two raw args, resolve bare semver ranges into full `name@range`
  // specs, borrowing the name from the other arg or the project dir.
  async convertVersionsToSpecs ([a, b]) {
    const semverA = semver.validRange(a)
    const semverB = semver.validRange(b)

    // both specs are semver versions, assume current project dir name
    if (semverA && semverB) {
      let pkgName
      try {
        const { content: pkg } = await pkgJson.normalize(this.prefix)
        pkgName = pkg.name
      } catch (e) {
        log.verbose('diff', 'could not read project dir package.json')
      }

      if (!pkgName) {
        throw this.usageError('Needs to be run from a project dir in order to diff two versions.')
      }
      return [`${pkgName}@${a}`, `${pkgName}@${b}`]
    }

    // otherwise uses the name from the other arg to
    // figure out the spec.name of what to compare
    if (!semverA && semverB) {
      return [a, `${npa(a).name}@${b}`]
    }
    if (semverA && !semverB) {
      return [`${npa(b).name}@${a}`, b]
    }

    // no valid semver ranges used
    return [a, b]
  }

  // Replace any bare-name specs (rawSpec === '*') with the locally
  // installed copy when present, falling back to the registry fetchSpec.
  async findVersionsByPackageName (specs) {
    let actualTree
    const Arborist = require('@npmcli/arborist')
    try {
      const opts = {
        ...this.npm.flatOptions,
        path: this.top,
      }
      const arb = new Arborist(opts)
      actualTree = await arb.loadActual(opts)
    } catch (e) {
      log.verbose('diff', 'failed to load actual install tree')
    }

    return specs.map(i => {
      const spec = npa(i)
      if (spec.rawSpec !== '*') {
        return i
      }

      const node = actualTree
        && actualTree.inventory.query('name', spec.name)
          .values().next().value

      const res = !node || !node.package || !node.package.version
        ? spec.fetchSpec
        : `file:${node.realpath.replace(/#/g, '%23')}`

      return `${spec.name}@${res}`
    })
  }
}
module.exports = Diff

209
NodeJS/node_modules/npm/lib/commands/dist-tag.js generated vendored Normal file
View File

@@ -0,0 +1,209 @@
const npa = require('npm-package-arg')
const regFetch = require('npm-registry-fetch')
const semver = require('semver')
const { log, output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// Manage a package's distribution tags on the registry: add, remove, and
// list, with workspace-aware listing.
class DistTag extends BaseCommand {
  static description = 'Modify package distribution tags'
  static params = ['workspace', 'workspaces', 'include-workspace-root']
  static name = 'dist-tag'
  static usage = [
    'add <package-spec (with version)> [<tag>]',
    'rm <package-spec> <tag>',
    'ls [<package-spec>]',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Only the subcommand position is completable.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['add', 'rm', 'ls']
    }

    switch (argv[2]) {
      default:
        return []
    }
  }

  // Dispatch on the subcommand (and its short aliases); a bare package
  // name with no subcommand defaults to listing.
  async exec ([cmdName, pkg, tag]) {
    const opts = {
      ...this.npm.flatOptions,
    }

    if (['add', 'a', 'set', 's'].includes(cmdName)) {
      return this.add(pkg, tag, opts)
    }

    if (['rm', 'r', 'del', 'd', 'remove'].includes(cmdName)) {
      return this.remove(pkg, tag, opts)
    }

    if (['ls', 'l', 'sl', 'list'].includes(cmdName)) {
      return this.list(pkg, opts)
    }

    if (!pkg) {
      // when only using the pkg name the default behavior
      // should be listing the existing tags
      return this.list(cmdName, opts)
    } else {
      throw this.usageError()
    }
  }

  async execWorkspaces ([cmdName, pkg, tag]) {
    // cmdName is some form of list
    // pkg is one of:
    // - unset
    // - .
    // - .@version
    if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) {
      return this.listWorkspaces()
    }

    // pkg is unset
    // cmdName is one of:
    // - unset
    // - .
    // - .@version
    if (!pkg && (!cmdName || cmdName === '.' || /^\.@/.test(cmdName))) {
      return this.listWorkspaces()
    }

    // anything else is just a regular dist-tag command
    // so we fallback to the non-workspaces implementation
    log.warn('dist-tag', 'Ignoring workspaces for specified package')
    return this.exec([cmdName, pkg, tag])
  }

  // Point `tag` (or the configured default tag) at the version in `spec`.
  async add (spec, tag, opts) {
    spec = npa(spec || '')
    const version = spec.rawSpec
    const defaultTag = tag || this.npm.config.get('tag')

    log.verbose('dist-tag add', defaultTag, 'to', spec.name + '@' + version)

    // make sure new spec with tag is valid, this will throw if invalid
    npa(`${spec.name}@${defaultTag}`)

    if (!spec.name || !version || !defaultTag) {
      throw this.usageError('must provide a spec with a name and version, and a tag to add')
    }

    const t = defaultTag.trim()

    // a tag that parses as a semver range would be ambiguous with versions
    if (semver.validRange(t)) {
      throw new Error('Tag name must not be a valid SemVer range: ' + t)
    }

    const tags = await this.fetchTags(spec, opts)
    if (tags[t] === version) {
      log.warn('dist-tag add', t, 'is already set to version', version)
      return
    }
    tags[t] = version
    const url =
      `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(t)}`
    const reqOpts = {
      ...opts,
      method: 'PUT',
      body: JSON.stringify(version),
      headers: {
        'content-type': 'application/json',
      },
      spec,
    }
    // retry with an OTP prompt if the registry demands one
    await otplease(this.npm, reqOpts, o => regFetch(url, o))
    output.standard(`+${t}: ${spec.name}@${version}`)
  }

  // Delete `tag` from the package named in `spec`.
  async remove (spec, tag, opts) {
    spec = npa(spec || '')
    log.verbose('dist-tag del', tag, 'from', spec.name)

    if (!spec.name) {
      throw this.usageError()
    }

    const tags = await this.fetchTags(spec, opts)
    if (!tags[tag]) {
      log.info('dist-tag del', tag, 'is not a dist-tag on', spec.name)
      throw new Error(tag + ' is not a dist-tag on ' + spec.name)
    }
    const version = tags[tag]
    delete tags[tag]
    const url =
      `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(tag)}`
    const reqOpts = {
      ...opts,
      method: 'DELETE',
      spec,
    }
    await otplease(this.npm, reqOpts, o => regFetch(url, o))
    output.standard(`-${tag}: ${spec.name}@${version}`)
  }

  // Print `tag: version` lines; with no spec, defaults to the package in
  // the current prefix (not allowed in global mode).
  async list (spec, opts) {
    if (!spec) {
      if (this.npm.global) {
        throw this.usageError()
      }
      const { content: { name } } = await pkgJson.normalize(this.npm.prefix)
      if (!name) {
        throw this.usageError()
      }

      return this.list(name, opts)
    }

    spec = npa(spec)

    try {
      const tags = await this.fetchTags(spec, opts)
      const msg =
        Object.keys(tags).map(k => `${k}: ${tags[k]}`).sort().join('\n')
      output.standard(msg)
      return tags
    } catch (err) {
      log.error('dist-tag ls', "Couldn't get dist-tag data for", spec)
      throw err
    }
  }

  // List dist-tags for every configured workspace, continuing past
  // individual failures.
  async listWorkspaces () {
    await this.setWorkspaces()

    for (const name of this.workspaceNames) {
      try {
        output.standard(`${name}:`)
        await this.list(npa(name), this.npm.flatOptions)
      } catch (err) {
        // set the exitCode directly, but ignore the error
        // since it will have already been logged by this.list()
        process.exitCode = 1
      }
    }
  }

  // GET the package's dist-tags, preferring fresh data over the cache.
  // Throws when the package has no dist-tags at all.
  async fetchTags (spec, opts) {
    const data = await regFetch.json(
      `/-/package/${spec.escapedName}/dist-tags`,
      { ...opts, 'prefer-online': true, spec }
    )
    if (data && typeof data === 'object') {
      delete data._etag
    }
    if (!data || !Object.keys(data).length) {
      throw new Error('No dist-tags found for ' + spec.name)
    }

    return data
  }
}
module.exports = DistTag

21
NodeJS/node_modules/npm/lib/commands/docs.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
const PackageUrlCmd = require('../package-url-cmd.js')
// `npm docs` — open a package's documentation page in a web browser.
class Docs extends PackageUrlCmd {
  static description = 'Open documentation for a package in a web browser'
  static name = 'docs'

  // Pick the documentation URL for a manifest: an explicit `homepage`
  // field wins, then the hosted repository's docs page, and finally the
  // package's page on npmjs.com.
  getUrl (spec, mani) {
    const { homepage } = mani
    if (homepage) {
      return homepage
    }

    const hosted = this.hostedFromMani(mani)
    if (hosted) {
      return hosted.docs()
    }

    return `https://www.npmjs.com/package/${mani.name}`
  }
}
module.exports = Docs

347
NodeJS/node_modules/npm/lib/commands/doctor.js generated vendored Normal file
View File

@@ -0,0 +1,347 @@
const cacache = require('cacache')
const { access, lstat, readdir, constants: { R_OK, W_OK, X_OK } } = require('node:fs/promises')
const fetch = require('make-fetch-happen')
const which = require('which')
const pacote = require('pacote')
const { resolve } = require('node:path')
const semver = require('semver')
const { log, output } = require('proc-log')
const ping = require('../utils/ping.js')
const { defaults } = require('@npmcli/config/lib/definitions')
const BaseCommand = require('../base-cmd.js')
// Render an fs access mask (a bitwise-OR of R_OK/W_OK/X_OK) as a
// human-readable, comma-separated list, e.g. 'readable, writable'.
const maskLabel = mask => {
  const names = [
    [R_OK, 'readable'],
    [W_OK, 'writable'],
    [X_OK, 'executable'],
  ]
  return names
    .filter(([bit]) => mask & bit)
    .map(([, label]) => label)
    .join(', ')
}
// Ordered list of health checks that `npm doctor` can run. `groups` are
// the CLI selector names that enable a check, `title` is printed before
// the check runs, `cmd` names the Doctor method to invoke, and
// `windows: false` marks checks skipped on Windows.
const subcommands = [
  {
    // Ping is left in as a legacy command but is listed as "connection" to
    // make more sense to more people
    groups: ['connection', 'ping', 'registry'],
    title: 'Connecting to the registry',
    cmd: 'checkPing',
  }, {
    groups: ['versions'],
    title: 'Checking npm version',
    cmd: 'getLatestNpmVersion',
  }, {
    groups: ['versions'],
    title: 'Checking node version',
    cmd: 'getLatestNodejsVersion',
  }, {
    groups: ['registry'],
    title: 'Checking configured npm registry',
    cmd: 'checkNpmRegistry',
  }, {
    groups: ['environment'],
    title: 'Checking for git executable in PATH',
    cmd: 'getGitPath',
  }, {
    groups: ['environment'],
    title: 'Checking for global bin folder in PATH',
    cmd: 'getBinPath',
  }, {
    groups: ['permissions', 'cache'],
    title: 'Checking permissions on cached files (this may take awhile)',
    cmd: 'checkCachePermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on local node_modules (this may take awhile)',
    cmd: 'checkLocalModulesPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on global node_modules (this may take awhile)',
    cmd: 'checkGlobalModulesPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on local bin folder',
    cmd: 'checkLocalBinPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on global bin folder',
    cmd: 'checkGlobalBinPermission',
    windows: false,
  }, {
    groups: ['cache'],
    title: 'Verifying cache contents (this may take awhile)',
    cmd: 'verifyCachedFiles',
    windows: false,
  },
  // TODO:
  // group === 'dependencies'?
  //   - ensure arborist.loadActual() runs without errors and no invalid edges
  //   - ensure package-lock.json matches loadActual()
  //   - verify loadActual without hidden lock file matches hidden lockfile
  // group === '???'
  //   - verify all local packages have bins linked
  // What is the fix for these?
]
// `npm doctor` — run the selected health checks from `subcommands` and
// report Ok / Not ok for each one.  Individual checks intentionally throw
// plain strings (not Errors) when the "failure" is really a
// recommendation to be printed.
class Doctor extends BaseCommand {
  static description = 'Check the health of your npm environment'
  static name = 'doctor'
  static params = ['registry']
  static ignoreImplicitWorkspace = false
  // usage lists each distinct check group; 'ping' is omitted because it
  // is only a legacy alias for 'connection'
  static usage = [`[${subcommands.flatMap(s => s.groups)
    .filter((value, index, self) => self.indexOf(value) === index && value !== 'ping')
    .join('] [')}]`]

  static subcommands = subcommands

  // args: check group names to run; empty means run all applicable checks.
  async exec (args) {
    log.info('doctor', 'Running checkup')
    let allOk = true

    const actions = this.actions(args)

    const chalk = this.npm.chalk
    for (const { title, cmd } of actions) {
      this.output(title)
      // TODO when we have an in progress indicator that could go here
      let result
      try {
        result = await this[cmd]()
        this.output(`${chalk.green('Ok')}${result ? `\n${result}` : ''}\n`)
      } catch (err) {
        // checks may throw strings or Errors; both are rendered as-is
        allOk = false
        this.output(`${chalk.red('Not ok')}\n${chalk.cyan(err)}\n`)
      }
    }

    if (!allOk) {
      if (this.npm.silent) {
        /* eslint-disable-next-line max-len */
        throw new Error('Some problems found. Check logs or disable silent mode for recommendations.')
      } else {
        throw new Error('Some problems found. See above for recommendations.')
      }
    }
  }

  // Hit the registry's ping endpoint; throws a descriptive string on failure.
  async checkPing () {
    log.info('doctor', 'Pinging registry')
    try {
      await ping({ ...this.npm.flatOptions, retry: false })
      return ''
    } catch (er) {
      // E### registry errors: surface the numeric code with the message
      if (/^E\d{3}$/.test(er.code || '')) {
        throw er.code.slice(1) + ' ' + er.message
      } else {
        throw er.message
      }
    }
  }

  // Compare the running npm against the latest published `npm` version.
  async getLatestNpmVersion () {
    log.info('doctor', 'Getting npm package information')
    const latest = (await pacote.manifest('npm@latest', this.npm.flatOptions)).version
    if (semver.gte(this.npm.version, latest)) {
      return `current: v${this.npm.version}, latest: v${latest}`
    } else {
      throw `Use npm v${latest}`
    }
  }

  // Compare the running node against the newest LTS (or a newer release
  // in the current line) from nodejs.org's release index.
  async getLatestNodejsVersion () {
    // XXX get the latest in the current major as well
    const current = process.version
    const currentRange = `^${current}`
    const url = 'https://nodejs.org/dist/index.json'
    log.info('doctor', 'Getting Node.js release information')
    const res = await fetch(url, { method: 'GET', ...this.npm.flatOptions })
    const data = await res.json()
    let maxCurrent = '0.0.0'
    let maxLTS = '0.0.0'
    for (const { lts, version } of data) {
      if (lts && semver.gt(version, maxLTS)) {
        maxLTS = version
      }

      if (semver.satisfies(version, currentRange) && semver.gt(version, maxCurrent)) {
        maxCurrent = version
      }
    }
    const recommended = semver.gt(maxCurrent, maxLTS) ? maxCurrent : maxLTS
    if (semver.gte(process.version, recommended)) {
      return `current: ${current}, recommended: ${recommended}`
    } else {
      throw `Use node ${recommended} (current: ${current})`
    }
  }

  // Verify the global bin folder appears on $PATH.
  async getBinPath () {
    log.info('doctor', 'getBinPath', 'Finding npm global bin in your PATH')
    if (!process.env.PATH.includes(this.npm.globalBin)) {
      throw new Error(`Add ${this.npm.globalBin} to your $PATH`)
    }
    return this.npm.globalBin
  }

  // Permission checks: the cache must be readable and owned by the user;
  // local trees need read/write (and may be missing); global bin needs
  // execute permission.
  async checkCachePermission () {
    return this.checkFilesPermission(this.npm.cache, true, R_OK)
  }

  async checkLocalModulesPermission () {
    return this.checkFilesPermission(this.npm.localDir, true, R_OK | W_OK, true)
  }

  async checkGlobalModulesPermission () {
    return this.checkFilesPermission(this.npm.globalDir, false, R_OK)
  }

  async checkLocalBinPermission () {
    return this.checkFilesPermission(this.npm.localBin, false, R_OK | W_OK | X_OK, true)
  }

  async checkGlobalBinPermission () {
    return this.checkFilesPermission(this.npm.globalBin, false, X_OK)
  }

  // Walk everything under `root`, checking ownership (when shouldOwn) and
  // access under `mask`.  `missingOk` tolerates a missing entry.
  async checkFilesPermission (root, shouldOwn, mask, missingOk) {
    let ok = true

    try {
      const uid = process.getuid()
      const gid = process.getgid()
      // Set-based worklist: entries appended while iterating are visited too
      const files = new Set([root])
      for (const f of files) {
        const st = await lstat(f).catch(er => {
          // if it can't be missing, or if it can and the error wasn't that it was missing
          if (!missingOk || er.code !== 'ENOENT') {
            ok = false
            log.warn('doctor', 'checkFilesPermission', 'error getting info for ' + f)
          }
        })

        if (!st) {
          continue
        }

        if (shouldOwn && (uid !== st.uid || gid !== st.gid)) {
          log.warn('doctor', 'checkFilesPermission', 'should be owner of ' + f)
          ok = false
        }

        // only regular files and directories get an access check
        if (!st.isDirectory() && !st.isFile()) {
          continue
        }

        try {
          await access(f, mask)
        } catch (er) {
          ok = false
          const msg = `Missing permissions on ${f} (expect: ${maskLabel(mask)})`
          log.error('doctor', 'checkFilesPermission', msg)
          continue
        }

        if (st.isDirectory()) {
          const entries = await readdir(f).catch(() => {
            ok = false
            log.warn('doctor', 'checkFilesPermission', 'error reading directory ' + f)
            return []
          })
          for (const entry of entries) {
            files.add(resolve(f, entry))
          }
        }
      }
    } finally {
      // NOTE(review): this `finally` always returns or throws, so any
      // exception from the try body (e.g. process.getuid being
      // unavailable) is replaced by the summary below — confirm intended.
      if (!ok) {
        throw (
          `Check the permissions of files in ${root}` +
          (shouldOwn ? ' (should be owned by current user)' : '')
        )
      } else {
        return ''
      }
    }
  }

  // Locate a git executable on $PATH.
  async getGitPath () {
    log.info('doctor', 'Finding git in your PATH')
    return await which('git').catch(er => {
      log.warn('doctor', 'getGitPath', er)
      throw new Error("Install git and ensure it's in your PATH.")
    })
  }

  // Run cacache's verification over the npm cache, logging anything that
  // had to be repaired or garbage-collected.
  async verifyCachedFiles () {
    log.info('doctor', 'verifyCachedFiles', 'Verifying the npm cache')

    const stats = await cacache.verify(this.npm.flatOptions.cache)
    const { badContentCount, reclaimedCount, missingContent, reclaimedSize } = stats
    if (badContentCount || reclaimedCount || missingContent) {
      if (badContentCount) {
        log.warn('doctor', 'verifyCachedFiles', `Corrupted content removed: ${badContentCount}`)
      }

      if (reclaimedCount) {
        log.warn(
          'doctor',
          'verifyCachedFiles',
          `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)`
        )
      }

      if (missingContent) {
        log.warn('doctor', 'verifyCachedFiles', `Missing content: ${missingContent}`)
      }

      log.warn('doctor', 'verifyCachedFiles', 'Cache issues have been fixed')
    }
    log.info(
      'doctor',
      'verifyCachedFiles',
      `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`
    )
    return `verified ${stats.verifiedContent} tarballs`
  }

  // Warn when a non-default registry is configured.
  async checkNpmRegistry () {
    if (this.npm.flatOptions.registry !== defaults.registry) {
      throw `Try \`npm config set registry=${defaults.registry}\``
    } else {
      return `using default registry (${defaults.registry})`
    }
  }

  // Print via the proc-log display layer unless --silent is set.
  output (...args) {
    // TODO display layer should do this
    if (!this.npm.silent) {
      output.standard(...args)
    }
  }

  // Select the checks to run: drop non-Windows checks on win32 and, when
  // group names were given, keep only checks belonging to those groups.
  actions (params) {
    return this.constructor.subcommands.filter(subcmd => {
      if (process.platform === 'win32' && subcmd.windows === false) {
        return false
      }
      if (params.length) {
        return params.some(param => subcmd.groups.includes(param))
      }
      return true
    })
  }
}
module.exports = Doctor

64
NodeJS/node_modules/npm/lib/commands/edit.js generated vendored Normal file
View File

@@ -0,0 +1,64 @@
const { resolve } = require('node:path')
const { lstat } = require('node:fs/promises')
const cp = require('node:child_process')
const completion = require('../utils/installed-shallow.js')
const BaseCommand = require('../base-cmd.js')
// Turn a possibly-nested package path like `@scope/pkg/child` into the
// on-disk layout `@scope/pkg/node_modules/child`, keeping a scope segment
// (`@scope`) glued to the package name that follows it.
const splitPackageNames = (path) => path.split('/')
  // combine scoped parts
  .reduce((parts, part) => {
    if (parts.length === 0) {
      return [part]
    }

    const lastPart = parts[parts.length - 1]
    // check if previous part is the first part of a scoped package
    if (lastPart[0] === '@' && !lastPart.includes('/')) {
      parts[parts.length - 1] += '/' + part
    } else {
      parts.push(part)
    }

    return parts
  }, [])
  .join('/node_modules/')
  // collapse EVERY run of doubled-up node_modules segments (the original
  // regex lacked the /g flag and only normalized the first run, so input
  // like `a/node_modules/b/node_modules/c` kept a doubled second run)
  .replace(/(\/node_modules)+/g, '/node_modules')
// npm edit <pkg>
// open the package folder in the $EDITOR
// `npm edit <pkg>` — open an installed package's folder in $EDITOR, then
// rebuild it once the editor exits cleanly.
class Edit extends BaseCommand {
  static description = 'Edit an installed package'
  static name = 'edit'
  static usage = ['<pkg>[/<subpkg>...]']
  static params = ['editor']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }

  async exec (args) {
    if (args.length !== 1) {
      throw this.usageError()
    }

    const dir = resolve(this.npm.dir, splitPackageNames(args[0]))
    // throws ENOENT when nothing is installed at that location
    await lstat(dir)

    const [bin, ...editorArgs] = this.npm.config.get('editor').split(/\s+/)
    await new Promise((res, rej) => {
      const child = cp.spawn(bin, [...editorArgs, dir], { stdio: 'inherit' })
      child.on('exit', (code) => {
        if (code) {
          rej(new Error(`editor process exited with code: ${code}`))
        } else {
          // rebuild the package after editing, then settle the promise
          this.npm.exec('rebuild', [dir]).then(res).catch(rej)
        }
      })
    })
  }
}
module.exports = Edit

110
NodeJS/node_modules/npm/lib/commands/exec.js generated vendored Normal file
View File

@@ -0,0 +1,110 @@
const { resolve } = require('node:path')
const libexec = require('libnpmexec')
const BaseCommand = require('../base-cmd.js')
// `npm exec` / `npx` — run a command from a local or remote npm package,
// delegating path resolution, installation and spawning to libnpmexec.
class Exec extends BaseCommand {
  static description = 'Run a command from a local or remote npm package'
  static params = [
    'package',
    'call',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static name = 'exec'
  static usage = [
    '-- <pkg>[@<version>] [args...]',
    '--package=<pkg>[@<version>] -- <cmd> [args...]',
    '-c \'<cmd> [args...]\'',
    '--package=foo -c \'<cmd> [args...]\'',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false
  static isShellout = true

  async exec (args) {
    return this.callExec(args)
  }

  // Run once per configured workspace, rooted at each workspace directory.
  async execWorkspaces (args) {
    await this.setWorkspaces()

    for (const [name, path] of this.workspaces) {
      const locationMsg =
        `in workspace ${this.npm.chalk.green(name)} at location:\n${this.npm.chalk.dim(path)}`
      await this.callExec(args, { name, locationMsg, runPath: path })
    }
  }

  // name/locationMsg/runPath are only provided for workspace runs.
  async callExec (args, { name, locationMsg, runPath } = {}) {
    let localBin = this.npm.localBin
    let pkgPath = this.npm.localPrefix
    // This is where libnpmexec will actually run the scripts from
    if (!runPath) {
      runPath = process.cwd()
    } else {
      // We have to consider if the workspace has its own separate versions
      // libnpmexec will walk up to localDir after looking here
      localBin = resolve(this.npm.localDir, name, 'node_modules', '.bin')
      // We also need to look for `bin` entries in the workspace package.json
      // libnpmexec will NOT look in the project root for the bin entry
      pkgPath = runPath
    }

    const call = this.npm.config.get('call')
    let globalPath
    const {
      flatOptions,
      globalBin,
      globalDir,
      chalk,
    } = this.npm
    const scriptShell = this.npm.config.get('script-shell') || undefined
    const packages = this.npm.config.get('package')
    const yes = this.npm.config.get('yes')
    // --prefix sets both of these to the same thing, meaning the global prefix
    // is invalid (i.e. no lib/node_modules). This is not a trivial thing to
    // untangle and fix so we work around it here.
    if (this.npm.localPrefix !== this.npm.globalPrefix) {
      globalPath = resolve(globalDir, '..')
    }

    // --call and positional args are mutually exclusive
    if (call && args.length) {
      throw this.usageError()
    }

    return libexec({
      ...flatOptions,
      // we explicitly set packageLockOnly to false because if it's true
      // when we try to install a missing package, we won't actually install it
      packageLockOnly: false,
      // what the user asked to run args[0] is run by default
      args: [...args], // copy args so they dont get mutated
      // specify a custom command to be run instead of args[0]
      call,
      chalk,
      // where to look for bins globally, if a file matches call or args[0] it is called
      globalBin,
      // where to look for packages globally, if a package matches call or args[0] it is called
      globalPath,
      // where to look for bins locally, if a file matches call or args[0] it is called
      localBin,
      locationMsg,
      // packages that need to be installed
      packages,
      // path where node_modules is
      path: this.npm.localPrefix,
      // where to look for package.json#bin entries first
      pkgPath,
      // cwd to run from
      runPath,
      scriptShell,
      yes,
    })
  }
}
module.exports = Exec

130
NodeJS/node_modules/npm/lib/commands/explain.js generated vendored Normal file
View File

@@ -0,0 +1,130 @@
const { explainNode } = require('../utils/explain-dep.js')
const npa = require('npm-package-arg')
const semver = require('semver')
const { relative, resolve } = require('node:path')
const validName = require('validate-npm-package-name')
const { output } = require('proc-log')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm explain` — show why each matching installed package is present by
// rendering its chain of incoming dependency edges.
class Explain extends ArboristWorkspaceCmd {
  static description = 'Explain installed packages'
  static name = 'explain'
  static usage = ['<package-spec>']
  static params = [
    'json',
    'workspace',
  ]

  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }

    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({ path: this.npm.prefix, ...this.npm.flatOptions })
    const tree = await arb.loadActual()

    // limit results to the selected workspaces, or exclude all
    // workspace-only deps when workspaces are disabled entirely
    if (this.npm.flatOptions.workspacesEnabled
      && this.workspaceNames
      && this.workspaceNames.length
    ) {
      this.filterSet = arb.workspaceDependencySet(tree, this.workspaceNames)
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      this.filterSet =
        arb.excludeWorkspacesDependencySet(tree)
    }

    const nodes = new Set()
    for (const arg of args) {
      for (const node of this.getNodes(tree, arg)) {
        const filteredOut = this.filterSet
          && this.filterSet.size > 0
          && !this.filterSet.has(node)
        if (!filteredOut) {
          nodes.add(node)
        }
      }
    }

    if (nodes.size === 0) {
      throw new Error(`No dependencies found matching ${args.join(', ')}`)
    }

    // collect explanations, tagging each with its dependency type flags
    const expls = []
    for (const node of nodes) {
      const { extraneous, dev, optional, devOptional, peer, inBundle, overridden } = node
      const expl = node.explain()
      if (extraneous) {
        expl.extraneous = true
      } else {
        expl.dev = dev
        expl.optional = optional
        expl.devOptional = devOptional
        expl.peer = peer
        expl.bundled = inBundle
        expl.overridden = overridden
      }
      expls.push(expl)
    }

    if (this.npm.flatOptions.json) {
      output.buffer(expls)
    } else {
      output.standard(expls.map(expl => {
        return explainNode(expl, Infinity, this.npm.chalk)
      }).join('\n\n'))
    }
  }

  // Resolve an argument to tree nodes by trying, in order: package name,
  // inventory location, node_modules-relative path, then name@version.
  getNodes (tree, arg) {
    // if it's just a name, return packages by that name
    const { validForOldPackages: valid } = validName(arg)
    if (valid) {
      return tree.inventory.query('packageName', arg)
    }

    // if it's a location, get that node
    const maybeLoc = arg.replace(/\\/g, '/').replace(/\/+$/, '')
    const nodeByLoc = tree.inventory.get(maybeLoc)
    if (nodeByLoc) {
      return [nodeByLoc]
    }

    // maybe a path to a node_modules folder
    const maybePath = relative(this.npm.prefix, resolve(maybeLoc))
      .replace(/\\/g, '/').replace(/\/+$/, '')
    const nodeByPath = tree.inventory.get(maybePath)
    if (nodeByPath) {
      return [nodeByPath]
    }

    // otherwise, try to select all matching nodes
    try {
      return this.getNodesByVersion(tree, arg)
    } catch (er) {
      return []
    }
  }

  // All nodes whose name and version satisfy a `name@version` or
  // `name@range` spec; empty for any other spec type.
  getNodesByVersion (tree, arg) {
    const spec = npa(arg, this.npm.prefix)
    if (spec.type !== 'version' && spec.type !== 'range') {
      return []
    }

    return tree.inventory.filter(node => {
      return node.package.name === spec.name &&
        semver.satisfies(node.package.version, spec.rawSpec)
    })
  }
}
module.exports = Explain

74
NodeJS/node_modules/npm/lib/commands/explore.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
const pkgJson = require('@npmcli/package-json')
const runScript = require('@npmcli/run-script')
const { join, relative } = require('node:path')
const { log, output } = require('proc-log')
const completion = require('../utils/installed-shallow.js')
const BaseCommand = require('../base-cmd.js')
// npm explore <pkg>[@<version>]
// open a subshell to the package folder.
// Open a subshell (or run a one-off command) inside an installed
// package's folder, via a synthetic `_explore` script.
class Explore extends BaseCommand {
  static description = 'Browse an installed package'
  static name = 'explore'
  static usage = ['<pkg> [ -- <command>]']
  static params = ['shell']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }

  async exec (args) {
    if (args.length < 1 || !args[0]) {
      throw this.usageError()
    }

    const pkgname = args.shift()

    // detect and prevent any .. shenanigans
    const path = join(this.npm.dir, join('/', pkgname))
    if (relative(path, this.npm.dir) === '') {
      throw this.usageError()
    }

    // run as if running a script named '_explore', which we set to either
    // the set of arguments, or the shell config, and let @npmcli/run-script
    // handle all the escaping and PATH setup stuff.

    const { content: pkg } = await pkgJson.normalize(path).catch(er => {
      log.error('explore', `It doesn't look like ${pkgname} is installed.`)
      throw er
    })

    const { shell } = this.npm.flatOptions
    pkg.scripts = {
      ...(pkg.scripts || {}),
      _explore: args.join(' ').trim() || shell,
    }

    if (!args.length) {
      output.standard(`\nExploring ${path}\nType 'exit' or ^D when finished\n`)
    }

    return runScript({
      ...this.npm.flatOptions,
      pkg,
      path,
      event: '_explore',
      stdio: 'inherit',
    }).catch(er => {
      // mirror the subshell's exit code when it failed with one
      process.exitCode = typeof er.code === 'number' && er.code !== 0 ? er.code
        : 1
      // if it's not an exit error, or non-interactive, throw it
      const isProcExit = er.message === 'command failed' &&
        (typeof er.code === 'number' || /^SIG/.test(er.signal || ''))
      if (args.length || !isProcExit) {
        throw er
      }
    })
  }
}
module.exports = Explore

28
NodeJS/node_modules/npm/lib/commands/find-dupes.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// dedupe duplicated packages, or find them in the tree
class FindDupes extends ArboristWorkspaceCmd {
  static description = 'Find duplication in the package tree'
  static name = 'find-dupes'
  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'strict-peer-deps',
    'package-lock',
    'omit',
    'include',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    ...super.params,
  ]

  // `find-dupes` is just `dedupe --dry-run`: report what deduplication
  // would change without modifying node_modules.
  async exec () {
    this.npm.config.set('dry-run', true)
    return this.npm.exec('dedupe', [])
  }
}
module.exports = FindDupes

221
NodeJS/node_modules/npm/lib/commands/fund.js generated vendored Normal file
View File

@@ -0,0 +1,221 @@
const archy = require('archy')
const pacote = require('pacote')
const semver = require('semver')
const { output } = require('proc-log')
const npa = require('npm-package-arg')
const { depth } = require('treeverse')
const { readTree: getFundingInfo, normalizeFunding, isValidFunding } = require('libnpmfund')
const { openUrl } = require('../utils/open-url.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Render a package reference as "name@version", or just "name" when the
// version is unknown.
const getPrintableName = ({ name, version }) =>
  version ? `${name}@${version}` : name
const errCode = (msg, code) => Object.assign(new Error(msg), { code })
// Retrieve and display funding information for the current project's
// dependency tree, or open a specific package's funding URL.
class Fund extends ArboristWorkspaceCmd {
  static description = 'Retrieve funding information'
  static name = 'fund'
  static params = ['json', 'browser', 'unicode', 'workspace', 'which']
  static usage = ['[<package-spec>]']

  // XXX: maybe worth making this generic for all commands?
  // Renders a usage hint such as "`npm fund [<package-spec>] --which=1`"
  // from the given flag/value pairs, for error and hint messages.
  usageMessage (paramsObj = {}) {
    let msg = `\`npm ${this.constructor.name}`
    const params = Object.entries(paramsObj)
    if (params.length) {
      msg += ` ${this.constructor.usage}`
    }
    for (const [key, value] of params) {
      msg += ` --${key}=${value}`
    }
    return `${msg}\``
  }

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  // With no spec: print the funding tree (human-readable or --json).
  // With a spec: open that package's funding URL in the browser.
  async exec (args) {
    const spec = args[0]

    // --which selects one of several funding URLs; validate it early
    let fundingSourceNumber = this.npm.config.get('which')
    if (fundingSourceNumber != null) {
      fundingSourceNumber = parseInt(fundingSourceNumber, 10)
      if (isNaN(fundingSourceNumber) || fundingSourceNumber < 1) {
        throw errCode(
          `${this.usageMessage({ which: 'fundingSourceNumber' })} must be given a positive integer`,
          'EFUNDNUMBER'
        )
      }
    }

    if (this.npm.global) {
      throw errCode(
        `${this.usageMessage()} does not support global packages`,
        'EFUNDGLOBAL'
      )
    }

    const where = this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({ ...this.npm.flatOptions, path: where })
    const tree = await arb.loadActual()

    if (spec) {
      await this.openFundingUrl({
        path: where,
        tree,
        spec,
        fundingSourceNumber,
      })
      return
    }

    // TODO: add !workspacesEnabled option handling to libnpmfund
    const fundingInfo = getFundingInfo(tree, {
      // FIX: was `this.flatOptions`, which is undefined on command
      // instances -- options live on the npm object (cf. the Arborist
      // construction above), so no options ever reached libnpmfund
      ...this.npm.flatOptions,
      Arborist,
      workspaces: this.workspaceNames,
    })

    if (this.npm.config.get('json')) {
      output.buffer(fundingInfo)
    } else {
      output.standard(this.printHuman(fundingInfo))
    }
  }

  // Turn the libnpmfund result tree into an archy diagram, grouping
  // packages that share the same funding URL under a single node.
  printHuman (fundingInfo) {
    const unicode = this.npm.config.get('unicode')
    const seenUrls = new Map()

    const tree = obj => archy(obj, '', { unicode })

    const result = depth({
      tree: fundingInfo,

      // composes human readable package name
      // and creates a new archy item for readable output
      visit: ({ name, version, funding }) => {
        const [fundingSource] = [].concat(normalizeFunding(funding)).filter(isValidFunding)
        const { url } = fundingSource || {}
        const pkgRef = getPrintableName({ name, version })

        if (!url) {
          return { label: pkgRef }
        }

        let item
        if (seenUrls.has(url)) {
          // URL already printed once: append this package to the
          // existing node's label and emit nothing new
          item = seenUrls.get(url)
          item.label += `${this.npm.chalk.dim(',')} ${pkgRef}`
          return null
        }

        item = {
          label: tree({
            label: this.npm.chalk.blue(url),
            nodes: [pkgRef],
          }).trim(),
        }

        // stacks all packages together under the same item
        seenUrls.set(url, item)
        return item
      },

      // puts child nodes back into returned archy
      // output while also filtering out missing items
      leave: (item, children) => {
        if (item) {
          item.nodes = children.filter(Boolean)
        }
        return item
      },

      // turns tree-like object return by libnpmfund
      // into children to be properly read by treeverse
      getChildren: node =>
        Object.keys(node.dependencies || {}).map(key => ({
          name: key,
          ...node.dependencies[key],
        })),
    })

    const res = tree(result)
    return res
  }

  // Resolve `spec` to a package (from the local tree when possible,
  // otherwise via a registry manifest fetch) and open its funding URL,
  // or list the choices when the package declares several.
  async openFundingUrl ({ path, tree, spec, fundingSourceNumber }) {
    const arg = npa(spec, path)

    const retrievePackageMetadata = () => {
      if (arg.type === 'directory') {
        if (tree.path === arg.fetchSpec) {
          // matches cwd, e.g: npm fund .
          return tree.package
        } else {
          // matches any file path within current arborist inventory
          for (const item of tree.inventory.values()) {
            if (item.path === arg.fetchSpec) {
              return item.package
            }
          }
        }
      } else {
        // tries to retrieve a package from arborist inventory
        // by matching resulted package name from the provided spec,
        // preferring the highest valid semver version installed
        const [item] = [...tree.inventory.query('name', arg.name)]
          .filter(i => semver.valid(i.package.version))
          .sort((a, b) => semver.rcompare(a.package.version, b.package.version))
        if (item) {
          return item.package
        }
      }
    }

    const { funding } =
      retrievePackageMetadata() ||
      (await pacote.manifest(arg, this.npm.flatOptions).catch(() => ({})))

    const validSources = [].concat(normalizeFunding(funding)).filter(isValidFunding)

    if (!validSources.length) {
      throw errCode(`No valid funding method available for: ${spec}`, 'ENOFUND')
    }

    // a single source (or an explicit --which index) opens directly;
    // otherwise fall through to list the alternatives
    const fundSource = fundingSourceNumber
      ? validSources[fundingSourceNumber - 1]
      : validSources.length === 1 ? validSources[0]
      : null

    if (fundSource) {
      return openUrl(this.npm, ...this.urlMessage(fundSource))
    }

    const ambiguousUrlMsg = [
      ...validSources.map((s, i) => `${i + 1}: ${this.urlMessage(s).reverse().join(': ')}`),
      `Run ${this.usageMessage({ which: '1' })}` +
        ', for example, to open the first funding URL listed in that package',
    ]
    if (fundingSourceNumber) {
      ambiguousUrlMsg.unshift(`--which=${fundingSourceNumber} is not a valid index`)
    }
    output.standard(ambiguousUrlMsg.join('\n'))
  }

  // Returns the [url, message] pair consumed by openUrl and by the
  // ambiguous-source listing above.
  urlMessage (source) {
    const { type, url } = source
    const typePrefix = type ? `${type} funding` : 'Funding'
    const message = `${typePrefix} available at the following URL`
    return [url, message]
  }
}
module.exports = Fund

23
NodeJS/node_modules/npm/lib/commands/get.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
const Npm = require('../npm.js')
const BaseCommand = require('../base-cmd.js')
// `npm get` is sugar for `npm config get`.
class Get extends BaseCommand {
  static description = 'Get a value from the npm configuration'
  static name = 'get'
  static usage = ['[<key> ...] (See `npm config`)']
  static params = ['long']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts) {
    // tab-completion is identical to `npm config`'s
    return Npm.cmd('config').completion(opts)
  }

  // Delegate straight to `npm config get <keys...>`.
  async exec (args) {
    return this.npm.exec('config', ['get', ...args])
  }
}
module.exports = Get

195
NodeJS/node_modules/npm/lib/commands/help-search.js generated vendored Normal file
View File

@@ -0,0 +1,195 @@
const { readFile } = require('node:fs/promises')
const path = require('node:path')
const { glob } = require('glob')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
// Searches the bundled markdown docs for the given terms and prints the
// top ten matching help topics, with optional context lines (--long).
class HelpSearch extends BaseCommand {
static description = 'Search npm help documentation'
static name = 'help-search'
static usage = ['<text>']
static params = ['long']
// Entry point: glob all doc pages, score them against the search
// terms, and print the formatted results (or a "no matches" note).
async exec (args) {
if (!args.length) {
throw this.usageError()
}
const docPath = path.resolve(this.npm.npmRoot, 'docs/content')
let files = await glob(`${globify(docPath)}/*/*.md`)
// preserve glob@8 behavior
files = files.sort((a, b) => a.localeCompare(b, 'en'))
const data = await this.readFiles(files)
const results = await this.searchFiles(args, data)
const formatted = this.formatResults(args, results)
if (!formatted.trim()) {
output.standard(`No matches in help for: ${args.join(' ')}\n`)
} else {
output.standard(formatted)
}
}
// Read every doc file into { [filename]: contents }, stripping the
// leading `---` frontmatter block and surrounding whitespace.
async readFiles (files) {
const res = {}
await Promise.all(files.map(async file => {
res[file] = (await readFile(file, 'utf8'))
.replace(/^---\n(.*\n)*?---\n/, '').trim()
}))
return res
}
// Score each file against the search terms (case-insensitive) and
// return the ten best matches, each carrying the context lines kept
// around the hits plus per-term and total hit counts.
async searchFiles (args, data) {
const results = []
for (const [file, content] of Object.entries(data)) {
const lowerCase = content.toLowerCase()
// skip if no matches at all
if (!args.some(a => lowerCase.includes(a.toLowerCase()))) {
continue
}
const lines = content.split(/\n+/)
// if a line has a search term, then skip it and the next line.
// if the next line has a search term, then skip all 3
// otherwise, set the line to null. then remove the nulls.
for (let i = 0; i < lines.length; i++) {
const line = lines[i]
const nextLine = lines[i + 1]
let match = false
if (nextLine) {
match = args.some(a =>
nextLine.toLowerCase().includes(a.toLowerCase()))
if (match) {
// skip over the next line, and the line after it.
i += 2
continue
}
}
match = args.some(a => line.toLowerCase().includes(a.toLowerCase()))
if (match) {
// skip over the next line
i++
continue
}
lines[i] = null
}
// now squish any string of nulls into a single null
const pruned = lines.reduce((l, r) => {
if (!(r === null && l[l.length - 1] === null)) {
l.push(r)
}
return l
}, [])
// drop a leading/trailing null so the context reads cleanly
if (pruned[pruned.length - 1] === null) {
pruned.pop()
}
if (pruned[0] === null) {
pruned.shift()
}
// now count how many args were found
const found = {}
let totalHits = 0
for (const line of pruned) {
for (const arg of args) {
// occurrences of `arg` in this line, via split-count
const hit = (line || '').toLowerCase()
.split(arg.toLowerCase()).length - 1
if (hit > 0) {
found[arg] = (found[arg] || 0) + hit
totalHits += hit
}
}
}
const cmd = 'npm help ' +
path.basename(file, '.md').replace(/^npm-/, '')
results.push({
file,
cmd,
lines: pruned,
found: Object.keys(found),
hits: found,
totalHits,
})
}
// sort results by number of results found, then by number of hits
// then by number of matching lines
// coverage is ignored here because the contents of results are
// nondeterministic due to either glob or readFiles or Object.entries
return results.sort(/* istanbul ignore next */ (a, b) =>
a.found.length > b.found.length ? -1
: a.found.length < b.found.length ? 1
: a.totalHits > b.totalHits ? -1
: a.totalHits < b.totalHits ? 1
: a.lines.length > b.lines.length ? -1
: a.lines.length < b.lines.length ? 1
: 0).slice(0, 10)
}
// Render the results for the terminal.  Without --long it is a compact
// "command ... term:count" table; with --long each entry also shows up
// to four context lines with the search terms highlighted.
formatResults (args, results) {
// clamp to 80 columns so the right-aligned hit counts line up
const cols = Math.min(process.stdout.columns || Infinity, 80) + 1
const formattedOutput = results.map(res => {
const out = [res.cmd]
const r = Object.keys(res.hits)
.map(k => `${k}:${res.hits[k]}`)
.sort((a, b) => a > b ? 1 : -1)
.join(' ')
out.push(' '.repeat((Math.max(1, cols - out.join(' ').length - r.length - 1))))
out.push(r)
if (!this.npm.config.get('long')) {
return out.join('')
}
out.unshift('\n\n')
out.push('\n')
out.push('-'.repeat(cols - 1) + '\n')
res.lines.forEach((line, i) => {
if (line === null || i > 3) {
return
}
// rebuild the line with each matched term colorized
const hilitLine = []
for (const arg of args) {
const finder = line.toLowerCase().split(arg.toLowerCase())
let p = 0
for (const f of finder) {
hilitLine.push(line.slice(p, p + f.length))
const word = line.slice(p + f.length, p + f.length + arg.length)
hilitLine.push(this.npm.chalk.blue(word))
p += f.length + arg.length
}
}
out.push(hilitLine.join('') + '\n')
})
return out.join('')
}).join('\n')
const finalOut = results.length && !this.npm.config.get('long')
? 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' +
'—'.repeat(cols - 1) + '\n' +
formattedOutput + '\n' +
'—'.repeat(cols - 1) + '\n' +
'(run with -l or --long to see more context)'
: formattedOutput
return finalOut.trim()
}
}
module.exports = HelpSearch

117
NodeJS/node_modules/npm/lib/commands/help.js generated vendored Normal file
View File

@@ -0,0 +1,117 @@
const spawn = require('@npmcli/promise-spawn')
const path = require('node:path')
const { openUrl } = require('../utils/open-url.js')
const { glob } = require('glob')
const { output, input } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { deref } = require('../utils/cmd-list.js')
const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
// Strips out the number from foo.7 or foo.7. or foo.7.tgz
// We don't currently compress our man pages but if we ever did this would
// seamlessly continue supporting it
// Matches a trailing man-section suffix, e.g. ".7" in "npm-ls.7" or
// "npm-ls.7.tgz"; capture group 1 is the section number.
const manNumberRegex = /\.(\d+)(\.[^/\\]*)?$/
// hardcoded names for mansections
// XXX: these are used in the docs workspace and should be exported
// from npm so section names can be changed more easily
const manSectionNames = {
1: 'commands',
5: 'configuring-npm',
7: 'using-npm',
}
// `npm help <term>`: find the matching shipped man page and display it
// with the configured viewer, falling back to `npm help-search`.
class Help extends BaseCommand {
static description = 'Get help on npm'
static name = 'help'
static usage = ['<term> [<terms..>]']
static params = ['viewer']
// Tab-completion: topic names derived from the shipped man pages,
// minus the "npm-" prefix and the section-number suffix.
static async completion (opts, npm) {
if (opts.conf.argv.remain.length > 2) {
return []
}
const g = path.resolve(npm.npmRoot, 'man/man[0-9]/*.[0-9]')
let files = await glob(globify(g))
// preserve glob@8 behavior
files = files.sort((a, b) => a.localeCompare(b, 'en'))
return Object.keys(files.reduce(function (acc, file) {
file = path.basename(file).replace(/\.[0-9]+$/, '')
file = file.replace(/^npm-/, '')
acc[file] = true
return acc
}, { help: true }))
}
async exec (args) {
// By default we search all of our man subdirectories, but if the user has
// asked for a specific one we limit the search to just there
const manSearch = /^\d+$/.test(args[0]) ? `man${args.shift()}` : 'man*'
if (!args.length) {
return output.standard(this.npm.usage)
}
// npm help foo bar baz: search topics
if (args.length > 1) {
return this.helpSearch(args)
}
// `npm help package.json`
const arg = (deref(args[0]) || args[0]).replace('.json', '-json')
// find either section.n or npm-section.n
const f = globify(path.resolve(this.npm.npmRoot, `man/${manSearch}/?(npm-)${arg}.[0-9]*`))
// prefer the lowest-numbered man section when several match
const [man] = await glob(f).then(r => r.sort((a, b) => {
// Because the glob is (subtly) different from manNumberRegex,
// we can't rely on it passing.
const aManNumberMatch = a.match(manNumberRegex)?.[1] || 999
const bManNumberMatch = b.match(manNumberRegex)?.[1] || 999
if (aManNumberMatch !== bManNumberMatch) {
return aManNumberMatch - bManNumberMatch
}
return localeCompare(a, b)
}))
return man ? this.viewMan(man) : this.helpSearch(args)
}
// Delegate to `npm help-search`.
helpSearch (args) {
return this.npm.exec('help-search', args)
}
// Display a man page with the configured viewer: 'browser' (the HTML
// build), 'woman' (emacs), or the system `man` command by default.
async viewMan (man) {
const viewer = this.npm.config.get('viewer')
if (viewer === 'browser') {
return openUrl(this.npm, this.htmlMan(man), 'help available at the following URL', true)
}
let args = ['man', [man]]
if (viewer === 'woman') {
args = ['emacsclient', ['-e', `(woman-find-file '${man}')`]]
}
try {
// input.start lets the interactive pager own the terminal while
// it runs
await input.start(() => spawn(...args, { stdio: 'inherit' }))
} catch (err) {
if (err.code) {
throw new Error(`help process exited with code: ${err.code}`)
} else {
throw err
}
}
}
// Returns the path to the html version of the man page
htmlMan (man) {
const sect = manSectionNames[man.match(manNumberRegex)[1]]
const f = path.basename(man).replace(manNumberRegex, '')
return 'file:///' + path.resolve(this.npm.npmRoot, `docs/output/${sect}/${f}.html`)
}
}
module.exports = Help

109
NodeJS/node_modules/npm/lib/commands/hook.js generated vendored Normal file
View File

@@ -0,0 +1,109 @@
const hookApi = require('libnpmhook')
const { otplease } = require('../utils/auth.js')
const relativeDate = require('tiny-relative-date')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// Manage registry webhooks: `npm hook add|ls|rm|update`.
class Hook extends BaseCommand {
  static description = 'Manage registry hooks'
  static name = 'hook'
  static params = [
    'registry',
    'otp',
  ]

  static usage = [
    'add <pkg> <url> <secret> [--type=<type>]',
    'ls [pkg]',
    'rm <id>',
    'update <id> <url> <secret>',
  ]

  // Dispatch on the subcommand; otplease() retries the registry call
  // with a one-time password if the registry demands one.
  async exec (args) {
    return otplease(this.npm, { ...this.npm.flatOptions }, (opts) => {
      switch (args[0]) {
        case 'add':
          return this.add(args[1], args[2], args[3], opts)
        case 'ls':
          return this.ls(args[1], opts)
        case 'rm':
          return this.rm(args[1], opts)
        case 'update':
        case 'up':
          return this.update(args[1], args[2], args[3], opts)
        default:
          throw this.usageError()
      }
    })
  }

  // Register a new hook and report it (json / parseable / human).
  async add (pkg, uri, secret, opts) {
    const hook = await hookApi.add(pkg, uri, secret, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      // tab-separated header row followed by the value row
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
    }
  }

  // List configured hooks, optionally scoped to one package.
  async ls (pkg, opts) {
    const hooks = await hookApi.ls({ ...opts, package: pkg })
    if (opts.json) {
      output.buffer(hooks)
    } else if (opts.parseable) {
      // FIX: guard the header row -- previously this read
      // Object.keys(hooks[0]) unconditionally and threw a TypeError
      // when no hooks were configured; empty parseable output now
      // prints nothing, keeping it machine-readable
      if (hooks.length) {
        output.standard(Object.keys(hooks[0]).join('\t'))
        hooks.forEach(hook => {
          output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
        })
      }
    } else if (!hooks.length) {
      output.standard("You don't have any hooks configured yet.")
    } else if (!this.npm.silent) {
      output.standard(`You have ${hooks.length} hook${hooks.length !== 1 ? 's' : ''} configured.`)
      for (const hook of hooks) {
        output.standard(`Hook ${hook.id}: ${this.hookName(hook)}`)
        output.standard(`Endpoint: ${hook.endpoint}`)
        if (hook.last_delivery) {
          /* eslint-disable-next-line max-len */
          output.standard(`Triggered ${relativeDate(hook.last_delivery)}, response code was "${hook.response_code}"\n`)
        } else {
          output.standard('Never triggered\n')
        }
      }
    }
  }

  // Delete a hook by id and report what was removed.
  async rm (id, opts) {
    const hook = await hookApi.rm(id, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`- ${this.hookName(hook)} ${opts.unicode ? ' ✘ ' : ' X '} ${hook.endpoint}`)
    }
  }

  // Point an existing hook at a new URL/secret.
  async update (id, uri, secret, opts) {
    const hook = await hookApi.update(id, uri, secret, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
    }
  }

  // "~name" for owner-level hooks, plain name otherwise.
  hookName (hook) {
    return `${hook.type === 'owner' ? '~' : ''}${hook.name}`
  }
}
module.exports = Hook

238
NodeJS/node_modules/npm/lib/commands/init.js generated vendored Normal file
View File

@@ -0,0 +1,238 @@
const { statSync } = require('node:fs')
const { relative, resolve } = require('node:path')
const { mkdir } = require('node:fs/promises')
const initJson = require('init-package-json')
const npa = require('npm-package-arg')
const libexec = require('libnpmexec')
const mapWorkspaces = require('@npmcli/map-workspaces')
const PackageJson = require('@npmcli/package-json')
const { log, output, input } = require('proc-log')
const updateWorkspaces = require('../utils/update-workspaces.js')
const BaseCommand = require('../base-cmd.js')
const posixPath = p => p.split('\\').join('/')
// `npm init`: create a package.json, either via the classic
// init-package-json questionnaire or (with args) by running a
// create-* initializer through libnpmexec, npx-style.
class Init extends BaseCommand {
static description = 'Create a package.json file'
static params = [
'init-author-name',
'init-author-url',
'init-license',
'init-module',
'init-version',
'yes',
'force',
'scope',
'workspace',
'workspaces',
'workspaces-update',
'include-workspace-root',
]
static name = 'init'
static usage = [
'<package-spec> (same as `npx create-<package-spec>`)',
'<@scope> (same as `npx <@scope>/create`)',
]
static workspaces = true
static ignoreImplicitWorkspace = false
async exec (args) {
// npm exec style
if (args.length) {
return await this.execCreate(args)
}
// no args, uses classic init-package-json boilerplate
await this.template()
}
// Initialize each --workspace path: create its folder, run the
// initializer (or questionnaire), and register it in the root
// package.json's "workspaces" field.
async execWorkspaces (args) {
// if the root package is uninitiated, take care of it first
if (this.npm.flatOptions.includeWorkspaceRoot) {
await this.exec(args)
}
// reads package.json for the top-level folder first, by doing this we
// ensure the command throw if no package.json is found before trying
// to create a workspace package.json file or its folders
const { content: pkg } = await PackageJson.normalize(this.npm.localPrefix).catch(err => {
if (err.code === 'ENOENT') {
log.warn('init', 'Missing package.json. Try with `--include-workspace-root`.')
}
throw err
})
// these are workspaces that are being created, so we cant use
// this.setWorkspaces()
const filters = this.npm.config.get('workspace')
const wPath = filterArg => resolve(this.npm.localPrefix, filterArg)
const workspacesPaths = []
// npm-exec style, runs in the context of each workspace filter
if (args.length) {
for (const filterArg of filters) {
const path = wPath(filterArg)
await mkdir(path, { recursive: true })
workspacesPaths.push(path)
await this.execCreate(args, path)
await this.setWorkspace(pkg, path)
}
return
}
// no args, uses classic init-package-json boilerplate
for (const filterArg of filters) {
const path = wPath(filterArg)
await mkdir(path, { recursive: true })
workspacesPaths.push(path)
await this.template(path)
await this.setWorkspace(pkg, path)
}
// reify packages once all workspaces have been initialized
await this.update(workspacesPaths)
}
// Translate the initializer spec into its create-* package name and
// run it with libnpmexec (e.g. "react-app" => "create-react-app",
// "@foo" => "@foo/create", git specs get "create-" on the project).
async execCreate (args, runPath = process.cwd()) {
const [initerName, ...otherArgs] = args
let packageName = initerName
// Only a scope, possibly with a version
if (/^@[^/]+$/.test(initerName)) {
const [, scope, version] = initerName.split('@')
packageName = `@${scope}/create`
if (version) {
packageName = `${packageName}@${version}`
}
} else {
const req = npa(initerName)
if (req.type === 'git' && req.hosted) {
const { user, project } = req.hosted
packageName = initerName.replace(`${user}/${project}`, `${user}/create-${project}`)
} else if (req.registry) {
packageName = `${req.name.replace(/^(@[^/]+\/)?/, '$1create-')}@${req.rawSpec}`
} else {
throw Object.assign(new Error(
'Unrecognized initializer: ' + initerName +
'\nFor more package binary executing power check out `npx`:' +
'\nhttps://docs.npmjs.com/cli/commands/npx'
), { code: 'EUNSUPPORTED' })
}
}
const newArgs = [packageName, ...otherArgs]
const {
flatOptions,
localBin,
globalBin,
chalk,
} = this.npm
const scriptShell = this.npm.config.get('script-shell') || undefined
const yes = this.npm.config.get('yes')
await libexec({
...flatOptions,
args: newArgs,
localBin,
globalBin,
output,
chalk,
path: this.npm.localPrefix,
runPath,
scriptShell,
yes,
})
}
// Run the classic init-package-json questionnaire in `path`, printing
// the intro banner unless --yes/--force suppressed prompting.
async template (path = process.cwd()) {
const initFile = this.npm.config.get('init-module')
if (!this.npm.config.get('yes') && !this.npm.config.get('force')) {
output.standard([
'This utility will walk you through creating a package.json file.',
'It only covers the most common items, and tries to guess sensible defaults.',
'',
'See `npm help init` for definitive documentation on these fields',
'and exactly what they do.',
'',
'Use `npm install <pkg>` afterwards to install a package and',
'save it as a dependency in the package.json file.',
'',
'Press ^C at any time to quit.',
].join('\n'))
}
try {
const data = await input.read(() => initJson(path, initFile, this.npm.config))
log.silly('package data', data)
return data
} catch (er) {
// ^C during the questionnaire is a graceful cancel, not an error
if (er.message === 'canceled') {
log.warn('init', 'canceled')
} else {
throw er
}
}
}
// Add `workspacePath` to the root package.json "workspaces" array,
// unless an existing glob already covers it.
async setWorkspace (pkg, workspacePath) {
const workspaces = await mapWorkspaces({ cwd: this.npm.localPrefix, pkg })
// skip setting workspace if current package.json glob already satisfies it
for (const wPath of workspaces.values()) {
if (wPath === workspacePath) {
return
}
}
// if a create-pkg didn't generate a package.json at the workspace
// folder level, it might not be recognized as a workspace by
// mapWorkspaces, so we're just going to avoid touching the
// top-level package.json
try {
statSync(resolve(workspacePath, 'package.json'))
} catch (err) {
return
}
const pkgJson = await PackageJson.load(this.npm.localPrefix)
pkgJson.update({
workspaces: [
...(pkgJson.content.workspaces || []),
posixPath(relative(this.npm.localPrefix, workspacePath)),
],
})
await pkgJson.save()
}
// Reify (install/link) the freshly created workspaces into the tree.
async update (workspacesPaths) {
// translate workspaces paths into an array containing workspaces names
const workspaces = []
for (const path of workspacesPaths) {
const { content: { name } } = await PackageJson.normalize(path).catch(() => ({ content: {} }))
if (name) {
workspaces.push(name)
}
}
const {
config,
flatOptions,
localPrefix,
} = this.npm
await updateWorkspaces({
config,
flatOptions,
localPrefix,
npm: this.npm,
workspaces,
})
}
}
module.exports = Init

View File

@@ -0,0 +1,15 @@
const CI = require('./ci.js')
// npm install-ci-test
// Runs `npm ci` and then runs `npm test`
// `npm install-ci-test`: clean-install via `npm ci`, then (only if
// that succeeds) run the project's test script.
class InstallCITest extends CI {
static description = 'Install a project with a clean slate and run tests'
static name = 'install-ci-test'
async exec (args) {
await this.npm.exec('ci', args)
return this.npm.exec('test', [])
}
}
module.exports = InstallCITest

15
NodeJS/node_modules/npm/lib/commands/install-test.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
const Install = require('./install.js')
// npm install-test
// Runs `npm install` and then runs `npm test`
// `npm install-test`: run `npm install`, then (only if that succeeds)
// run the project's test script.
class InstallTest extends Install {
static description = 'Install package(s) and run tests'
static name = 'install-test'
async exec (args) {
await this.npm.exec('install', args)
return this.npm.exec('test', [])
}
}
module.exports = InstallTest

176
NodeJS/node_modules/npm/lib/commands/install.js generated vendored Normal file
View File

@@ -0,0 +1,176 @@
const { readdir } = require('node:fs/promises')
const { resolve, join } = require('node:path')
const { log } = require('proc-log')
const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const checks = require('npm-install-checks')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm install`: reify the dependency tree (optionally adding new
// specs), with special handling for global installs of npm itself and
// for running lifecycle scripts on bare local installs.
class Install extends ArboristWorkspaceCmd {
static description = 'Install a package'
static name = 'install'
// These are in the order they will show up in when running "-h"
// If adding to this list, consider adding also to ci.js
static params = [
'save',
'save-exact',
'global',
'install-strategy',
'legacy-bundling',
'global-style',
'omit',
'include',
'strict-peer-deps',
'prefer-dedupe',
'package-lock',
'package-lock-only',
'foreground-scripts',
'ignore-scripts',
'audit',
'bin-links',
'fund',
'dry-run',
'cpu',
'os',
'libc',
...super.params,
]
static usage = ['[<package-spec> ...]']
// Tab-completion: completes a local folder only when it is an
// unambiguous match containing a package.json; URLs and registry
// names are deliberately not completed.
static async completion (opts) {
const { partialWord } = opts
// install can complete to a folder with a package.json, or any package.
// if it has a slash, then it's gotta be a folder
// if it starts with https?://, then just give up, because it's a url
if (/^https?:\/\//.test(partialWord)) {
// do not complete to URLs
return []
}
if (/\//.test(partialWord)) {
// Complete fully to folder if there is exactly one match and it
// is a folder containing a package.json file. If that is not the
// case we return 0 matches, which will trigger the default bash
// complete.
const lastSlashIdx = partialWord.lastIndexOf('/')
const partialName = partialWord.slice(lastSlashIdx + 1)
const partialPath = partialWord.slice(0, lastSlashIdx) || '/'
// a sibling dir matches if its name extends the partial word and
// it contains a package.json
const isDirMatch = async sibling => {
if (sibling.slice(0, partialName.length) !== partialName) {
return false
}
try {
const contents = await readdir(join(partialPath, sibling))
const result = (contents.indexOf('package.json') !== -1)
return result
} catch (er) {
return false
}
}
try {
const siblings = await readdir(partialPath)
const matches = []
for (const sibling of siblings) {
if (await isDirMatch(sibling)) {
matches.push(sibling)
}
}
if (matches.length === 1) {
return [join(partialPath, matches[0])]
}
// no matches
return []
} catch (er) {
return [] // invalid dir: no matching
}
}
// Note: there used to be registry completion here,
// but it stopped making sense somewhere around
// 50,000 packages on the registry
}
async exec (args) {
// the /path/to/node_modules/..
const globalTop = resolve(this.npm.globalDir, '..')
const ignoreScripts = this.npm.config.get('ignore-scripts')
const isGlobalInstall = this.npm.global
const where = isGlobalInstall ? globalTop : this.npm.prefix
const forced = this.npm.config.get('force')
const scriptShell = this.npm.config.get('script-shell') || undefined
// be very strict about engines when trying to update npm itself
const npmInstall = args.find(arg => arg.startsWith('npm@') || arg === 'npm')
if (isGlobalInstall && npmInstall) {
const npmOptions = this.npm.flatOptions
const npmManifest = await pacote.manifest(npmInstall, npmOptions)
try {
checks.checkEngine(npmManifest, npmManifest.version, process.version)
} catch (e) {
// --force downgrades the engines mismatch to a warning
if (forced) {
log.warn(
'install',
/* eslint-disable-next-line max-len */
`Forcing global npm install with incompatible version ${npmManifest.version} into node ${process.version}`
)
} else {
throw e
}
}
}
// don't try to install the prefix into itself
args = args.filter(a => resolve(a) !== this.npm.prefix)
// `npm i -g` => "install this package globally"
if (where === globalTop && !args.length) {
args = ['.']
}
// throw usage error if trying to install empty package
// name to global space, e.g: `npm i -g ""`
if (where === globalTop && !args.every(Boolean)) {
throw this.usageError()
}
const Arborist = require('@npmcli/arborist')
const opts = {
...this.npm.flatOptions,
auditLevel: null,
path: where,
add: args,
workspaces: this.workspaceNames,
}
const arb = new Arborist(opts)
await arb.reify(opts)
// bare local `npm install` runs the project's own lifecycle scripts
// (unless --ignore-scripts)
if (!args.length && !isGlobalInstall && !ignoreScripts) {
const scripts = [
'preinstall',
'install',
'postinstall',
'prepublish', // XXX(npm9) should we remove this finally??
'preprepare',
'prepare',
'postprepare',
]
for (const event of scripts) {
await runScript({
path: where,
args: [],
scriptShell,
stdio: 'inherit',
event,
})
}
}
await reifyFinish(this.npm, arb)
}
}
module.exports = Install

189
NodeJS/node_modules/npm/lib/commands/link.js generated vendored Normal file
View File

@@ -0,0 +1,189 @@
const { readdir } = require('node:fs/promises')
const { resolve } = require('node:path')
const npa = require('npm-package-arg')
const pkgJson = require('@npmcli/package-json')
const semver = require('semver')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
class Link extends ArboristWorkspaceCmd {
static description = 'Symlink a package folder'
static name = 'link'
static usage = [
'[<package-spec>]',
]
static params = [
'save',
'save-exact',
'global',
'install-strategy',
'legacy-bundling',
'global-style',
'strict-peer-deps',
'package-lock',
'omit',
'include',
'ignore-scripts',
'audit',
'bin-links',
'fund',
'dry-run',
...super.params,
]
static async completion (opts, npm) {
const dir = npm.globalDir
const files = await readdir(dir)
return files.filter(f => !/^[._-]/.test(f))
}
async exec (args) {
if (this.npm.global) {
throw Object.assign(
new Error(
'link should never be --global.\n' +
'Please re-run this command with --local'
),
{ code: 'ELINKGLOBAL' }
)
}
// install-links is implicitly false when running `npm link`
this.npm.config.set('install-links', false)
// link with no args: symlink the folder to the global location
// link with package arg: symlink the global to the local
args = args.filter(a => resolve(a) !== this.npm.prefix)
return args.length
? this.linkInstall(args)
: this.linkPkg()
}
// Symlink one or more globally-installed packages into the local prefix,
// installing any of them into the global space first when missing there.
async linkInstall (args) {
  // load current packages from the global space,
  // and then add symlinks installs locally
  const globalTop = resolve(this.npm.globalDir, '..')
  const Arborist = require('@npmcli/arborist')
  const globalOpts = {
    ...this.npm.flatOptions,
    Arborist,
    path: globalTop,
    global: true,
    prune: false,
  }
  const globalArb = new Arborist(globalOpts)

  // get only current top-level packages from the global space
  const globals = await globalArb.loadActual({
    filter: (node, kid) =>
      !node.isRoot || args.some(a => npa(a).name === kid),
  })

  // any extra arg that is missing from the current
  // global space should be reified there first
  const missing = this.missingArgsFromTree(globals, args)
  if (missing.length) {
    await globalArb.reify({
      ...globalOpts,
      add: missing,
    })
  }

  // get a list of module names that should be linked in the local prefix
  const names = []
  for (const a of args) {
    const arg = npa(a)
    if (arg.type === 'directory') {
      // a directory spec may be published under a different name, so read
      // the real package name from its package.json
      const { content } = await pkgJson.normalize(arg.fetchSpec)
      names.push(content.name)
    } else {
      names.push(arg.name)
    }
  }

  // npm link should not save=true by default unless you're
  // using any of --save-dev or other types
  const save =
    Boolean(
      (this.npm.config.find('save') !== 'default' &&
        this.npm.config.get('save')) ||
      this.npm.config.get('save-optional') ||
      this.npm.config.get('save-peer') ||
      this.npm.config.get('save-dev') ||
      this.npm.config.get('save-prod')
    )

  // create a new arborist instance for the local prefix and
  // reify all the pending names as symlinks there
  const localArb = new Arborist({
    ...this.npm.flatOptions,
    prune: false,
    path: this.npm.prefix,
    save,
  })
  await localArb.reify({
    ...this.npm.flatOptions,
    prune: false,
    path: this.npm.prefix,
    // `#` would be parsed as a URL fragment in the file: spec, so it is
    // percent-encoded here
    add: names.map(l => `file:${resolve(globalTop, 'node_modules', l).replace(/#/g, '%23')}`),
    save,
    workspaces: this.workspaceNames,
  })
  await reifyFinish(this.npm, localArb)
}
async linkPkg () {
const wsp = this.workspacePaths
const paths = wsp && wsp.length ? wsp : [this.npm.prefix]
const add = paths.map(path => `file:${path.replace(/#/g, '%23')}`)
const globalTop = resolve(this.npm.globalDir, '..')
const Arborist = require('@npmcli/arborist')
const arb = new Arborist({
...this.npm.flatOptions,
Arborist,
path: globalTop,
global: true,
})
await arb.reify({
add,
})
await reifyFinish(this.npm, arb)
}
// Returns a list of items that can't be fulfilled by
// things found in the current arborist inventory
missingArgsFromTree (tree, args) {
  // link nodes delegate to the tree they point at
  if (tree.isLink) {
    return this.missingArgsFromTree(tree.target, args)
  }

  const foundNodes = []
  const missing = args.filter(a => {
    const arg = npa(a)
    const nodes = tree.children.values()
    // `argFound` is true when every child was rejected (no child matched
    // the arg by name + satisfying version), i.e. the arg is missing and
    // must be installed first. Rejected children are collected in
    // foundNodes. NOTE(review): a matching child makes the callback
    // return undefined, so `every` bails out with false — the variable
    // naming reads inverted; confirm against Arborist semantics before
    // refactoring.
    const argFound = [...nodes].every(node => {
      // TODO: write tests for unmatching version specs, this is hard to test
      // atm but should be simple once we have a mocked registry again
      if (arg.name !== node.name /* istanbul ignore next */ || (
        arg.version &&
        /* istanbul ignore next */
        !semver.satisfies(node.version, arg.version)
      )) {
        foundNodes.push(node)
        return true
      }
    })
    return argFound
  })

  // remove nodes from the loaded tree in order
  // to avoid dropping them later when reifying
  for (const node of foundNodes) {
    node.parent = null
  }

  return missing
}
}
module.exports = Link

13
NodeJS/node_modules/npm/lib/commands/ll.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const LS = require('./ls.js')
// `npm ll` is a thin alias for `npm ls --long`.
class LL extends LS {
  static name = 'll'
  static usage = ['[[<@scope>/]<pkg> ...]']

  // Force the `long` config on, then delegate to the parent ls exec.
  async exec (args) {
    this.npm.config.set('long', true)
    return super.exec(args)
  }
}
module.exports = LL

50
NodeJS/node_modules/npm/lib/commands/login.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
// `npm login` — authenticate against a registry and persist the resulting
// credentials into the user-level config.
class Login extends BaseCommand {
  static description = 'Login to a registry user account'
  static name = 'login'
  static params = [
    'registry',
    'scope',
    'auth-type',
  ]

  async exec () {
    const scope = this.npm.config.get('scope')
    let registry = this.npm.config.get('registry')

    // A registry configured for the scope takes precedence over the
    // default registry, unless --registry was given on the command line.
    if (scope) {
      const registryForScope = this.npm.config.get(`${scope}:registry`)
      const explicitCliRegistry = this.npm.config.get('registry', 'cli')
      if (registryForScope && !explicitCliRegistry) {
        registry = registryForScope
      }
    }

    const creds = this.npm.config.getCredentialsByURI(registry)

    log.notice('', `Log in on ${replaceInfo(registry)}`)

    const { message, newCreds } = await auth.login(this.npm, {
      ...this.npm.flatOptions,
      creds,
      registry,
    })

    this.npm.config.delete('_token', 'user') // prevent legacy pollution
    this.npm.config.setCredentialsByURI(registry, newCreds)

    // remember the scope -> registry association in the user config
    if (scope) {
      this.npm.config.set(`${scope}:registry`, registry, 'user')
    }
    await this.npm.config.save('user')

    output.standard(message)
  }
}
module.exports = Login

50
NodeJS/node_modules/npm/lib/commands/logout.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const npmFetch = require('npm-registry-fetch')
const { getAuth } = npmFetch
const { log } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// `npm logout` — revoke the registry token (when present) and clear the
// stored credentials from whichever config level holds them.
class Logout extends BaseCommand {
  static description = 'Log out of the registry'
  static name = 'logout'
  static params = [
    'registry',
    'scope',
  ]

  async exec () {
    const registry = this.npm.config.get('registry')
    const scope = this.npm.config.get('scope')
    // a scoped logout targets the scope's registry when one is configured
    const regRef = scope ? `${scope}:registry` : 'registry'
    const reg = this.npm.config.get(regRef) || registry

    const auth = getAuth(reg, this.npm.flatOptions)

    // find the config level and only delete from there
    const level = this.npm.config.find(`${auth.regKey}:${auth.authKey}`)

    if (auth.token) {
      // revoke the token on the registry before clearing it locally
      log.verbose('logout', `clearing token for ${reg}`)
      await npmFetch(`/-/user/token/${encodeURIComponent(auth.token)}`, {
        ...this.npm.flatOptions,
        registry: reg,
        method: 'DELETE',
        ignoreBody: true,
      })
    } else if (auth.isBasicAuth) {
      // basic auth has nothing to revoke remotely; just clear it locally
      log.verbose('logout', `clearing user credentials for ${reg}`)
    } else {
      const msg = `not logged in to ${reg}, so can't log out!`
      throw Object.assign(new Error(msg), { code: 'ENEEDAUTH' })
    }

    // drop the scope -> registry mapping along with the credentials
    if (scope) {
      this.npm.config.delete(regRef, level)
    }

    this.npm.config.clearCredentialsByURI(reg, level)
    await this.npm.config.save(level)
  }
}
module.exports = Logout

579
NodeJS/node_modules/npm/lib/commands/ls.js generated vendored Normal file
View File

@@ -0,0 +1,579 @@
const { resolve, relative, sep } = require('node:path')
const archy = require('archy')
const { breadth } = require('treeverse')
const npa = require('npm-package-arg')
const { output } = require('proc-log')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const localeCompare = require('@isaacs/string-locale-compare')('en')
// prefix used when printing the relative path of symlink targets
const relativePrefix = `.${sep}`

// Symbols used to attach private ls metadata to tree nodes and output
// items without colliding with real package properties.
const _depth = Symbol('depth') // levels below the root (for --depth)
const _dedupe = Symbol('dedupe') // node was already seen elsewhere in the tree
const _filteredBy = Symbol('filteredBy') // node matched a positional arg filter
const _include = Symbol('include') // item should appear in the output
const _invalid = Symbol('invalid') // description of unsatisfied spec(s), if any
const _name = Symbol('name') // package name (key in json `dependencies` output)
const _missing = Symbol('missing') // pkgid of the dependent missing this dep
const _parent = Symbol('parent') // output item of the parent node
const _problems = Symbol('problems') // Set of problem strings for the node
const _required = Symbol('required') // spec required by the incoming edge
const _type = Symbol('type') // incoming edge type (prod/dev/optional/…)
// `npm ls` — load the dependency tree and print it as a human tree
// (archy), json, or parseable lines, optionally filtered by positional
// package specs, --depth, --omit, --link and workspace selection.
class LS extends ArboristWorkspaceCmd {
  static description = 'List installed packages'
  static name = 'ls'
  static usage = ['<package-spec>']
  static params = [
    'all',
    'json',
    'long',
    'parseable',
    'global',
    'depth',
    'omit',
    'include',
    'link',
    'package-lock-only',
    'unicode',
    ...super.params,
  ]

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  async exec (args) {
    // gather all relevant config values up front
    const all = this.npm.config.get('all')
    const chalk = this.npm.chalk
    const depth = this.npm.config.get('depth')
    const global = this.npm.global
    const json = this.npm.config.get('json')
    const link = this.npm.config.get('link')
    const long = this.npm.config.get('long')
    const omit = this.npm.flatOptions.omit
    const parseable = this.npm.config.get('parseable')
    const unicode = this.npm.config.get('unicode')
    const packageLockOnly = this.npm.config.get('package-lock-only')
    const workspacesEnabled = this.npm.flatOptions.workspacesEnabled

    // in global mode the tree root is the parent of the global node_modules
    const path = global ? resolve(this.npm.globalDir, '..') : this.npm.prefix

    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      global,
      ...this.npm.flatOptions,
      legacyPeerDeps: false,
      path,
    })
    const tree = await this.initTree({ arb, args, packageLockOnly })

    // filters by workspaces nodes when using -w <workspace-name>
    // We only have to filter the first layer of edges, so we don't
    // explore anything that isn't part of the selected workspace set.
    let wsNodes
    if (this.workspaceNames && this.workspaceNames.length) {
      wsNodes = arb.workspaceNodes(tree, this.workspaceNames)
    }
    const filterBySelectedWorkspaces = edge => {
      // workspaces disabled entirely: hide root -> workspace edges
      if (!workspacesEnabled
        && edge.from.isProjectRoot
        && edge.to.isWorkspace
      ) {
        return false
      }

      // no workspace selection: keep everything
      if (!wsNodes || !wsNodes.length) {
        return true
      }

      if (this.npm.flatOptions.includeWorkspaceRoot
        && edge.to && !edge.to.isWorkspace) {
        return true
      }

      // from the root, keep only edges into the selected workspaces
      if (edge.from.isProjectRoot) {
        return (edge.to
          && edge.to.isWorkspace
          && wsNodes.includes(edge.to.target))
      }

      return true
    }

    const seenItems = new Set()
    const seenNodes = new Map()
    const problems = new Set()

    // defines special handling of printed depth when filtering with args
    const filterDefaultDepth = depth === null ? Infinity : depth
    const depthToPrint = (all || args.length)
      ? filterDefaultDepth
      : (depth || 0)

    // add root node of tree to list of seenNodes
    seenNodes.set(tree.path, tree)

    // tree traversal happens here, using treeverse.breadth
    const result = await breadth({
      tree,
      // recursive method, `node` is going to be the current elem (starting from
      // the `tree` obj) that was just visited in the `visit` method below
      // `nodeResult` is going to be the returned `item` from `visit`
      getChildren (node, nodeResult) {
        const seenPaths = new Set()
        const workspace = node.isWorkspace
        const currentDepth = workspace ? 0 : node[_depth]
        const shouldSkipChildren =
          !(node instanceof Arborist.Node) || (currentDepth > depthToPrint)
        return (shouldSkipChildren)
          ? []
          : [...(node.target).edgesOut.values()]
            .filter(filterBySelectedWorkspaces)
            .filter(currentDepth === 0 ? filterByEdgesTypes({
              link,
              omit,
            }) : () => true)
            .map(mapEdgesToNodes({ seenPaths }))
            .concat(appendExtraneousChildren({ node, seenPaths }))
            .sort(sortAlphabetically)
            .map(augmentNodesWithMetadata({
              args,
              currentDepth,
              nodeResult,
              seenNodes,
            }))
      },
      // visit each `node` of the `tree`, returning an `item` - these are
      // the elements that will be used to build the final output
      visit (node) {
        node[_problems] = getProblems(node, { global })

        const item = json
          ? getJsonOutputItem(node, { global, long })
          : parseable
            ? null
            : getHumanOutputItem(node, { args, chalk, global, long })

        // loop through list of node problems to add them to global list
        if (node[_include]) {
          for (const problem of node[_problems]) {
            problems.add(problem)
          }
        }

        seenItems.add(item)

        // return a promise so we don't blow the stack
        return Promise.resolve(item)
      },
    })

    // handle the special case of a broken package.json in the root folder
    const [rootError] = tree.errors.filter(e =>
      e.code === 'EJSONPARSE' && e.path === resolve(path, 'package.json'))

    if (json) {
      output.buffer(jsonOutput({ path, problems, result, rootError, seenItems }))
    } else {
      output.standard(parseable
        ? parseableOutput({ seenNodes, global, long })
        : humanOutput({ chalk, result, seenItems, unicode })
      )
    }

    // if filtering items, should exit with error code on no results
    if (result && !result[_include] && args.length) {
      process.exitCode = 1
    }

    if (rootError) {
      throw Object.assign(
        new Error('Failed to parse root package.json'),
        { code: 'EJSONPARSE' }
      )
    }

    // only throw when at least one problem is not merely "extraneous:"
    const shouldThrow = problems.size &&
      ![...problems].every(problem => problem.startsWith('extraneous:'))
    if (shouldThrow) {
      throw Object.assign(
        new Error([...problems].join('\n')),
        { code: 'ELSPROBLEMS' }
      )
    }
  }

  // Load the tree: from the lockfile only when package-lock-only is set,
  // otherwise from what is actually on disk.
  async initTree ({ arb, args, packageLockOnly }) {
    const tree = await (
      packageLockOnly
        ? arb.loadVirtual()
        : arb.loadActual()
    )

    // the root is always included unless positional args filter the output
    tree[_include] = args.length === 0
    tree[_depth] = 0

    return tree
  }
}
module.exports = LS
// True when the node resolved to a git or hosted-git spec; undefined when
// it has no resolved value, false when the resolved value can't be parsed.
const isGitNode = (node) => {
  if (!node.resolved) {
    return
  }

  try {
    const parsed = npa(node.resolved)
    return parsed.type === 'git' || parsed.type === 'hosted'
  } catch {
    return false
  }
}
// A dep is optional if it arrived via an optional or peerOptional edge.
const isOptional = (node) =>
  ['optional', 'peerOptional'].includes(node[_type])
// extraneous packages are only reported for local installs; in global
// mode the check is skipped
const isExtraneous = (node, { global }) =>
  node.extraneous && !global
// Collect the problem strings (missing / invalid / extraneous) for a
// single node. Absent optional deps are not counted as missing.
const getProblems = (node, { global }) => {
  const found = []

  if (node[_missing] && !isOptional(node)) {
    found.push(`missing: ${node.pkgid}, required by ${node[_missing]}`)
  }

  if (node[_invalid]) {
    found.push(`invalid: ${node.pkgid} ${node.path}`)
  }

  if (isExtraneous(node, { global })) {
    found.push(`extraneous: ${node.pkgid} ${node.path}`)
  }

  return new Set(found)
}
// Copy the filtering metadata (_parent/_include) from the tree node onto
// the printable item; when the node is included, walk up and flag every
// ancestor as included too, so the chain leading to it gets displayed.
const augmentItemWithIncludeMetadata = (node, item) => {
  item[_parent] = node[_parent]
  item[_include] = node[_include]

  if (node[_include]) {
    for (let ancestor = node[_parent]; ancestor; ancestor = ancestor[_parent]) {
      ancestor[_include] = true
    }
  }

  return item
}
// Build the archy-ready output item ({ label, nodes }) for the default
// human-readable tree output.
const getHumanOutputItem = (node, { args, chalk, global, long }) => {
  const { pkgid, path } = node
  const workspacePkgId = chalk.blueBright(pkgid)
  let printable = node.isWorkspace ? workspacePkgId : pkgid

  // special formatting for top-level package name
  if (node.isRoot) {
    const hasNoPackageJson = !Object.keys(node.package).length
    if (hasNoPackageJson || global) {
      printable = path
    } else {
      printable += `${long ? '\n' : ' '}${path}`
    }
  }

  // TODO there is a LOT of overlap with lib/utils/explain-dep.js here
  // missing optional deps are warnings (yellow), others are errors (red)
  const highlightDepName = args.length && node[_filteredBy]
  const missingColor = isOptional(node)
    ? chalk.yellow
    : chalk.red
  const missingMsg = `UNMET ${isOptional(node) ? 'OPTIONAL ' : ''}DEPENDENCY`
  const targetLocation = node.root
    ? relative(node.root.realpath, node.realpath)
    : node.targetLocation
  const invalid = node[_invalid]
    ? `invalid: ${node[_invalid]}`
    : ''
  // label shape: [UNMET …] name [deduped] [invalid…] [extraneous]
  // [overridden] [(git url)] [-> link target] [\n description]
  const label =
    (
      node[_missing]
        ? missingColor(missingMsg) + ' '
        : ''
    ) +
    `${highlightDepName ? chalk.yellow(printable) : printable}` +
    (
      node[_dedupe]
        ? ' ' + chalk.dim('deduped')
        : ''
    ) +
    (
      invalid
        ? ' ' + chalk.red(invalid)
        : ''
    ) +
    (
      isExtraneous(node, { global })
        ? ' ' + chalk.red('extraneous')
        : ''
    ) +
    (
      node.overridden
        ? ' ' + chalk.dim('overridden')
        : ''
    ) +
    (isGitNode(node) ? ` (${node.resolved})` : '') +
    (node.isLink ? ` -> ${relativePrefix}${targetLocation}` : '') +
    (long ? `\n${node.package.description || ''}` : '')

  return augmentItemWithIncludeMetadata(node, { label, nodes: [] })
}
// Build the plain-object item used for `npm ls --json` output.
const getJsonOutputItem = (node, { global, long }) => {
  const item = {}

  if (node.version) {
    item.version = node.version
  }

  if (node.resolved) {
    item.resolved = node.resolved
  }

  // if the node is the project root, do not add the overridden flag. the project root can't be
  // overridden anyway, and if we add the flag it causes undesirable behavior when `npm ls --json`
  // is ran in an empty directory since we end up printing an object with only an overridden prop
  if (!node.isProjectRoot) {
    item.overridden = node.overridden
  }

  // keep the real name on a symbol so it can't clash with package fields
  item[_name] = node.name

  // special formatting for top-level package name
  const hasPackageJson =
    node && node.package && Object.keys(node.package).length
  if (node.isRoot && hasPackageJson) {
    item.name = node.package.name || node.name
  }

  // --long merges most of the package.json contents into the item
  if (long && !node[_missing]) {
    item.name = item[_name]
    const { dependencies, ...packageInfo } = node.package
    Object.assign(item, packageInfo)
    item.extraneous = false
    item.path = node.path
    item._dependencies = {
      ...node.package.dependencies,
      ...node.package.optionalDependencies,
    }
    item.devDependencies = node.package.devDependencies || {}
    item.peerDependencies = node.package.peerDependencies || {}
  }

  // augment json output items with extra metadata
  if (isExtraneous(node, { global })) {
    item.extraneous = true
  }

  if (node[_invalid]) {
    item.invalid = node[_invalid]
  }

  if (node[_missing] && !isOptional(node)) {
    item.required = node[_required]
    item.missing = true
  }
  if (node[_include] && node[_problems] && node[_problems].size) {
    item.problems = [...node[_problems]]
  }

  return augmentItemWithIncludeMetadata(node, item)
}
// Returns a predicate that drops edges matching any --omit type and,
// when --link is set, keeps only edges that resolve to symlinks.
const filterByEdgesTypes = ({ link, omit }) => (edge) => {
  if (omit.some(omitType => edge[omitType])) {
    return false
  }

  return link ? edge.to && edge.to.isLink : true
}
// Extraneous children never appear in edgesOut, so they are collected
// straight from node.children instead (skipping already-seen paths).
const appendExtraneousChildren = ({ node, seenPaths }) =>
  [...node.children.values()]
    .filter(child => !seenPaths.has(child.path) && child.extraneous)
// Map a dependency edge to the node that should represent it in the
// output, synthesizing a placeholder object for missing deps.
const mapEdgesToNodes = ({ seenPaths }) => (edge) => {
  let node = edge.to

  // if the edge is linking to a missing node, we go ahead
  // and create a new obj that will represent the missing node
  if (edge.missing || (edge.optional && !node)) {
    const { name, spec } = edge
    const pkgid = `${name}@${spec}`
    node = { name, pkgid, [_missing]: edge.from.pkgid }
  }

  // keeps track of a set of seen paths to avoid the edge case in which a tree
  // item would appear twice given that it's a children of an extraneous item,
  // so it's marked extraneous but it will ALSO show up in edgesOuts of
  // its parent so it ends up as two diff nodes if we don't track it
  if (node.path) {
    seenPaths.add(node.path)
  }

  node[_required] = edge.spec || '*'
  node[_type] = edge.type

  // accumulate a readable description of every unsatisfied spec, e.g.
  // `"^1.0.0" from the root project`
  if (edge.invalid) {
    const spec = JSON.stringify(node[_required])
    const from = edge.from.location || 'the root project'
    node[_invalid] = (node[_invalid] ? node[_invalid] + ', ' : '') +
      (`${spec} from ${from}`)
  }

  return node
}
// With positional args, keep only nodes satisfying at least one of them;
// with no args everything matches.
const filterByPositionalArgs = (args, { node }) => {
  if (!args.length) {
    return true
  }
  return args.some((spec) => node.satisfies && node.satisfies(spec))
}
// Attach traversal metadata (_parent/_include/_filteredBy/_depth) used
// for filtering and printing; already-seen nodes get a fresh shallow
// stand-in object so treeverse will visit them again as "deduped".
const augmentNodesWithMetadata = ({
  args,
  currentDepth,
  nodeResult,
  seenNodes,
}) => (node) => {
  // if the original edge was a deduped dep, treeverse will fail to
  // revisit that node in tree traversal logic, so we make it so that
  // we have a diff obj for deduped nodes:
  if (seenNodes.has(node.path)) {
    const { realpath, root } = node
    const targetLocation = root ? relative(root.realpath, realpath)
      : node.targetLocation
    node = {
      name: node.name,
      version: node.version,
      pkgid: node.pkgid,
      package: node.package,
      path: node.path,
      isLink: node.isLink,
      realpath: node.realpath,
      targetLocation,
      [_type]: node[_type],
      [_invalid]: node[_invalid],
      [_missing]: node[_missing],
      // if it's missing, it's not deduped, it's just missing
      [_dedupe]: !node[_missing],
    }
  } else {
    // keeps track of already seen nodes in order to check for dedupes
    seenNodes.set(node.path, node)
  }

  // _parent is going to be a ref to a treeverse-visited node (returned from
  // getHumanOutputItem, getJsonOutputItem, etc) so that we have an easy
  // shortcut to place new nodes in their right place during tree traversal
  node[_parent] = nodeResult
  // _include is the property that allow us to filter based on position args
  // e.g: `npm ls foo`, `npm ls simple-output@2`
  // _filteredBy is used to apply extra color info to the item that
  // was used in args in order to filter
  node[_filteredBy] = node[_include] =
    filterByPositionalArgs(args, { node: seenNodes.get(node.path) })
  // _depth keeps track of how many levels deep tree traversal currently is
  // so that we can `npm ls --depth=1`
  node[_depth] = currentDepth + 1

  return node
}
// comparator: sibling nodes are listed sorted by package id (name@version)
const sortAlphabetically = (a, b) => localeCompare(a.pkgid, b.pkgid)
// Render the archy tree output. The full item set must be traversed
// first, because a deeply nested included dep marks all of its ancestors
// as included, so attachment can only happen after traversal finished.
const humanOutput = ({ chalk, result, seenItems, unicode }) => {
  // attach every included item to its parent's `nodes` list for archy
  for (const item of seenItems) {
    if (item[_include] && item[_parent]) {
      item[_parent].nodes.push(item)
    }
  }

  if (result.nodes.length === 0) {
    result.nodes = ['(empty)']
  }

  return chalk.reset(archy(result, '', { unicode }))
}
// Assemble the json output object: problems list, root parse error (if
// any), and the nested `dependencies` structure mirroring the tree.
const jsonOutput = ({ path, problems, result, rootError, seenItems }) => {
  if (problems.size) {
    result.problems = [...problems]
  }

  if (rootError) {
    result.problems = [
      ...(result.problems || []),
      `error in ${path}: Failed to parse root package.json`,
    ]
    result.invalid = true
  }

  // The whole tree has to be visited before attaching, since a nested
  // included dep makes all of its ancestors included as well. Each
  // included item is placed under its parent's `dependencies` object.
  for (const item of seenItems) {
    if (item[_include] && item[_parent]) {
      if (!item[_parent].dependencies) {
        item[_parent].dependencies = {}
      }
      item[_parent].dependencies[item[_name]] = item
    }
  }

  return result
}
// Render the --parseable output: one path per line, with --long adding
// `:pkgid[:realpath][:EXTRANEOUS][:INVALID][:OVERRIDDEN]` suffixes.
const parseableOutput = ({ global, long, seenNodes }) => {
  const lines = []
  for (const node of seenNodes.values()) {
    if (!node.path || !node[_include]) {
      continue
    }

    let line = node.path
    if (long) {
      line += `:${node.pkgid}`
      if (node.path !== node.realpath) {
        line += `:${node.realpath}`
      }
      if (isExtraneous(node, { global })) {
        line += ':EXTRANEOUS'
      }
      if (node[_invalid]) {
        line += ':INVALID'
      }
      if (node.overridden) {
        line += ':OVERRIDDEN'
      }
    }
    lines.push(line)
  }
  return lines.join('\n').trim()
}

151
NodeJS/node_modules/npm/lib/commands/org.js generated vendored Normal file
View File

@@ -0,0 +1,151 @@
const liborg = require('libnpmorg')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
const { output } = require('proc-log')
// `npm org` — manage org membership via libnpmorg (set/add, rm, ls).
class Org extends BaseCommand {
  static description = 'Manage orgs'
  static name = 'org'
  static usage = [
    'set orgname username [developer | admin | owner]',
    'rm orgname username',
    'ls orgname [<username>]',
  ]

  static params = ['registry', 'otp', 'json', 'parseable']

  // shell completion: offer the subcommands, nothing for their arguments
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['set', 'rm', 'ls']
    }

    switch (argv[2]) {
      case 'ls':
      case 'add':
      case 'rm':
      case 'set':
        return []
      default:
        throw new Error(argv[2] + ' not recognized')
    }
  }

  // dispatch to set/rm/ls, retrying with a one-time password via otplease
  // when the registry demands one
  async exec ([cmd, orgname, username, role]) {
    return otplease(this.npm, {
      ...this.npm.flatOptions,
    }, opts => {
      switch (cmd) {
        case 'add':
        case 'set':
          return this.set(orgname, username, role, opts)
        case 'rm':
          return this.rm(orgname, username, opts)
        case 'ls':
          return this.ls(orgname, username, opts)
        default:
          throw this.usageError()
      }
    })
  }

  // Add `user` to `org` with the given role (defaults to developer).
  async set (org, user, role, opts) {
    role = role || 'developer'
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }

    if (!user) {
      throw new Error('Second argument `username` is required.')
    }

    if (!['owner', 'admin', 'developer'].find(x => x === role)) {
      throw new Error(
        /* eslint-disable-next-line max-len */
        'Third argument `role` must be one of `owner`, `admin`, or `developer`, with `developer` being the default value if omitted.'
      )
    }

    const memDeets = await liborg.set(org, user, role, opts)
    if (opts.json) {
      output.standard(JSON.stringify(memDeets, null, 2))
    } else if (opts.parseable) {
      // tab-separated header + row
      output.standard(['org', 'orgsize', 'user', 'role'].join('\t'))
      output.standard(
        [memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t')
      )
    } else if (!this.npm.silent) {
      output.standard(
        `Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${
          memDeets.org.size
        } member${memDeets.org.size === 1 ? '' : 's'} in this org.`
      )
    }

    return memDeets
  }

  // Remove `user` from `org` and report the remaining member count.
  async rm (org, user, opts) {
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }

    if (!user) {
      throw new Error('Second argument `username` is required.')
    }

    await liborg.rm(org, user, opts)
    const roster = await liborg.ls(org, opts)
    // strip any leading ~ or @ before printing
    user = user.replace(/^[~@]?/, '')
    org = org.replace(/^[~@]?/, '')
    const userCount = Object.keys(roster).length
    if (opts.json) {
      output.buffer({
        user,
        org,
        userCount,
        deleted: true,
      })
    } else if (opts.parseable) {
      output.standard(['user', 'org', 'userCount', 'deleted'].join('\t'))
      output.standard([user, org, userCount, true].join('\t'))
    } else if (!this.npm.silent) {
      output.standard(
        `Successfully removed ${user} from ${org}. You now have ${userCount} member${
          userCount === 1 ? '' : 's'
        } in this org.`
      )
    }
  }

  // List the members of `org`, optionally narrowed to a single user.
  async ls (org, user, opts) {
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }
    let roster = await liborg.ls(org, opts)
    if (user) {
      const newRoster = {}
      if (roster[user]) {
        newRoster[user] = roster[user]
      }
      roster = newRoster
    }
    if (opts.json) {
      output.buffer(roster)
    } else if (opts.parseable) {
      output.standard(['user', 'role'].join('\t'))
      Object.keys(roster).forEach(u => {
        output.standard([u, roster[u]].join('\t'))
      })
    } else if (!this.npm.silent) {
      const chalk = this.npm.chalk
      for (const u of Object.keys(roster).sort()) {
        output.standard(`${u} - ${chalk.cyan(roster[u])}`)
      }
    }
  }
}
module.exports = Org

282
NodeJS/node_modules/npm/lib/commands/outdated.js generated vendored Normal file
View File

@@ -0,0 +1,282 @@
const { resolve } = require('node:path')
const { stripVTControlCharacters } = require('node:util')
const pacote = require('pacote')
const table = require('text-table')
const npa = require('npm-package-arg')
const pickManifest = require('npm-pick-manifest')
const { output } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Parse a spec with npm-package-arg, returning null instead of throwing
// on invalid input.
const safeNpa = (spec) => {
  try {
    return npa(spec)
  } catch {
    return null
  }
}

// This string is load bearing and is shared with Arborist
const MISSING = 'MISSING'
// `npm outdated` — compare installed deps (the actual tree) against the
// registry, reporting current/wanted/latest for anything out of date.
class Outdated extends ArboristWorkspaceCmd {
  static description = 'Check for outdated packages'
  static name = 'outdated'
  static usage = ['[<package-spec> ...]']
  static params = [
    'all',
    'json',
    'long',
    'parseable',
    'global',
    'workspace',
  ]

  // the actual tree loaded from the current prefix
  #tree
  // accumulated outdated records, one per outdated dependency edge
  #list = []
  // dependency edges selected for inspection
  #edges = new Set()
  // when set, only edges originating from nodes in this set are considered
  #filterSet

  async exec (args) {
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: this.npm.global ? resolve(this.npm.globalDir, '..') : this.npm.prefix,
    })
    this.#tree = await arb.loadActual()

    // restrict to the selected workspaces, or exclude all workspace deps
    // when workspaces are disabled entirely
    if (this.workspaceNames?.length) {
      this.#filterSet = arb.workspaceDependencySet(
        this.#tree,
        this.workspaceNames,
        this.npm.flatOptions.includeWorkspaceRoot
      )
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      this.#filterSet = arb.excludeWorkspacesDependencySet(this.#tree)
    }

    if (args.length) {
      for (const arg of args) {
        // specific deps
        this.#getEdges(this.#tree.inventory.query('name', arg), 'edgesIn')
      }
    } else {
      if (this.npm.config.get('all')) {
        // all deps in tree
        this.#getEdges(this.#tree.inventory.values(), 'edgesOut')
      }
      // top-level deps
      this.#getEdges()
    }

    await Promise.all([...this.#edges].map((e) => this.#getOutdatedInfo(e)))

    // sorts list alphabetically by name and then dependent
    const outdated = this.#list
      .sort((a, b) => localeCompare(a.name, b.name) || localeCompare(a.dependent, b.dependent))

    // any outdated dep at all is also signalled through the exit code
    if (outdated.length) {
      process.exitCode = 1
    }

    if (this.npm.config.get('json')) {
      output.buffer(this.#json(outdated))
      return
    }
    const res = this.npm.config.get('parseable')
      ? this.#parseable(outdated)
      : this.#pretty(outdated)
    if (res) {
      output.standard(res)
    }
  }

  #getEdges (nodes, type) {
    // when no nodes are provided then it should only read direct deps
    // from the root node and its workspaces direct dependencies
    if (!nodes) {
      this.#getEdgesOut(this.#tree)
      this.#getWorkspacesEdges()
      return
    }

    for (const node of nodes) {
      if (type === 'edgesOut') {
        this.#getEdgesOut(node)
      } else {
        this.#getEdgesIn(node)
      }
    }
  }

  #getEdgesIn (node) {
    for (const edge of node.edgesIn) {
      this.#trackEdge(edge)
    }
  }

  #getEdgesOut (node) {
    // TODO: normalize usage of edges and avoid looping through nodes here
    const edges = this.npm.global ? node.children.values() : node.edgesOut.values()
    for (const edge of edges) {
      this.#trackEdge(edge)
    }
  }

  #trackEdge (edge) {
    // skip edges whose origin was filtered out by the workspace filter
    if (edge.from && this.#filterSet?.size > 0 && !this.#filterSet.has(edge.from.target)) {
      return
    }
    this.#edges.add(edge)
  }

  #getWorkspacesEdges () {
    if (this.npm.global) {
      return
    }

    for (const edge of this.#tree.edgesOut.values()) {
      if (edge?.to?.target?.isWorkspace) {
        this.#getEdgesOut(edge.to.target)
      }
    }
  }

  async #getPackument (spec) {
    return pacote.packument(spec, {
      ...this.npm.flatOptions,
      // --long output needs full metadata (e.g. homepage)
      fullMetadata: this.npm.config.get('long'),
      preferOnline: true,
    })
  }

  // Resolve the wanted/latest versions for one edge and, when the dep is
  // outdated or missing, push a record onto #list. Registry lookup errors
  // for unavailable packages are ignored; everything else rethrows.
  async #getOutdatedInfo (edge) {
    const alias = safeNpa(edge.spec)?.subSpec
    const spec = npa(alias ? alias.name : edge.name)
    const node = edge.to || edge
    const { path, location, package: { version: current } = {} } = node

    const type = edge.optional ? 'optionalDependencies'
      : edge.peer ? 'peerDependencies'
      : edge.dev ? 'devDependencies'
      : 'dependencies'

    for (const omitType of this.npm.flatOptions.omit) {
      if (node[omitType]) {
        return
      }
    }

    // deps different from prod not currently
    // on disk are not included in the output
    if (edge.error === MISSING && type !== 'dependencies') {
      return
    }

    // if it's not a range, version, or tag, skip it
    if (!safeNpa(`${edge.name}@${edge.spec}`)?.registry) {
      return null
    }

    try {
      const packument = await this.#getPackument(spec)
      const expected = alias ? alias.fetchSpec : edge.spec
      const wanted = pickManifest(packument, expected, this.npm.flatOptions)
      const latest = pickManifest(packument, '*', this.npm.flatOptions)

      if (!current || current !== wanted.version || wanted.version !== latest.version) {
        this.#list.push({
          // aliased deps are reported as `<name>:<target>@<spec>`
          name: alias ? edge.spec.replace('npm', edge.name) : edge.name,
          path,
          type,
          current,
          location,
          wanted: wanted.version,
          latest: latest.version,
          workspaceDependent: edge.from?.isWorkspace ? edge.from.pkgid : null,
          dependent: edge.from?.name ?? 'global',
          homepage: packument.homepage,
        })
      }
    } catch (err) {
      // silently catch and ignore ETARGET, E403 &
      // E404 errors, deps are just skipped
      // (fixed: the list previously repeated 'E404' and never matched
      // 'E403', so forbidden packages incorrectly aborted the command)
      if (!['ETARGET', 'E403', 'E404'].includes(err.code)) {
        throw err
      }
    }
  }

  // formatting functions

  // Render the default table output (undefined when nothing is outdated).
  #pretty (list) {
    if (!list.length) {
      return
    }

    const long = this.npm.config.get('long')
    const { bold, yellow, red, cyan, blue } = this.npm.chalk
    return table([
      [
        'Package',
        'Current',
        'Wanted',
        'Latest',
        'Location',
        'Depended by',
        ...long ? ['Package Type', 'Homepage'] : [],
      ].map(h => bold.underline(h)),
      ...list.map((d) => [
        // yellow when current already equals wanted (only latest is
        // newer), red when an update within the wanted range exists
        d.current === d.wanted ? yellow(d.name) : red(d.name),
        d.current ?? 'MISSING',
        cyan(d.wanted),
        blue(d.latest),
        d.location ?? '-',
        d.workspaceDependent ? blue(d.workspaceDependent) : d.dependent,
        ...long ? [d.type, blue(d.homepage ?? '')] : [],
      ]),
    ], {
      align: ['l', 'r', 'r', 'r', 'l'],
      // chalk color codes must not count toward column widths
      stringLength: s => stripVTControlCharacters(s).length,
    })
  }

  // --parseable creates output like this:
  // <fullpath>:<name@wanted>:<name@installed>:<name@latest>:<dependedby>
  #parseable (list) {
    return list.map(d => [
      d.path,
      `${d.name}@${d.wanted}`,
      d.current ? `${d.name}@${d.current}` : 'MISSING',
      `${d.name}@${d.latest}`,
      d.dependent,
      ...this.npm.config.get('long') ? [d.type, d.homepage] : [],
    ].join(':')).join('\n')
  }

  #json (list) {
    // TODO(BREAKING_CHANGE): this should just return an array. It's a list and
    // turning it into an object with keys is lossy since multiple items in the
    // list could have the same key. For now we hack that by only changing
    // top level values into arrays if they have multiple outdated items
    return list.reduce((acc, d) => {
      const dep = {
        current: d.current,
        wanted: d.wanted,
        latest: d.latest,
        dependent: d.dependent,
        location: d.path,
        ...this.npm.config.get('long') ? { type: d.type, homepage: d.homepage } : {},
      }
      acc[d.name] = acc[d.name]
        // If this item already has an outdated dep then we turn it into an array
        ? (Array.isArray(acc[d.name]) ? acc[d.name] : [acc[d.name]]).concat(dep)
        : dep
      return acc
    }, {})
  }
}
module.exports = Outdated

233
NodeJS/node_modules/npm/lib/commands/owner.js generated vendored Normal file
View File

@@ -0,0 +1,233 @@
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const pacote = require('pacote')
const { log, output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const { redact } = require('@npmcli/redact')
// Read and normalize the package.json at `path`; any failure (missing or
// unparseable file) resolves to an empty object instead of throwing.
const readJson = async (path) => {
  try {
    const normalized = await pkgJson.normalize(path)
    return normalized.content
  } catch {
    return {}
  }
}
class Owner extends BaseCommand {
static description = 'Manage package owners'
static name = 'owner'
static params = [
'registry',
'otp',
'workspace',
'workspaces',
]
static usage = [
'add <user> <package-spec>',
'rm <user> <package-spec>',
'ls <package-spec>',
]
static workspaces = true
static ignoreImplicitWorkspace = false
// Completion for `npm owner <cmd> …`: offers the subcommands, and for
// `owner rm` reaches the registry to offer current maintainer names.
static async completion (opts, npm) {
  const argv = opts.conf.argv.remain
  if (argv.length > 3) {
    return []
  }

  // normalize so argv[2] is always the subcommand position
  if (argv[1] !== 'owner') {
    argv.unshift('owner')
  }

  if (argv.length === 2) {
    return ['add', 'rm', 'ls']
  }

  // reaches registry in order to autocomplete rm
  if (argv[2] === 'rm') {
    // no local package.json to derive a name from in global mode
    if (npm.global) {
      return []
    }
    const { name } = await readJson(npm.prefix)
    if (!name) {
      return []
    }

    const spec = npa(name)
    const data = await pacote.packument(spec, {
      ...npm.flatOptions,
      fullMetadata: true,
    })
    if (data && data.maintainers && data.maintainers.length) {
      return data.maintainers.map(m => m.name)
    }
  }
  return []
}
async exec ([action, ...args]) {
if (action === 'ls' || action === 'list') {
await this.ls(args[0])
} else if (action === 'add') {
await this.changeOwners(args[0], args[1], 'add')
} else if (action === 'rm' || action === 'remove') {
await this.changeOwners(args[0], args[1], 'rm')
} else {
throw this.usageError()
}
}
async execWorkspaces ([action, ...args]) {
await this.setWorkspaces()
// ls pkg or owner add/rm package
if ((action === 'ls' && args.length > 0) || args.length > 1) {
const implicitWorkspaces = this.npm.config.get('workspace', 'default')
if (implicitWorkspaces.length === 0) {
log.warn(`Ignoring specified workspace(s)`)
}
return this.exec([action, ...args])
}
for (const [name] of this.workspaces) {
if (action === 'ls' || action === 'list') {
await this.ls(name)
} else if (action === 'add') {
await this.changeOwners(args[0], name, 'add')
} else if (action === 'rm' || action === 'remove') {
await this.changeOwners(args[0], name, 'rm')
} else {
throw this.usageError()
}
}
}
async ls (pkg) {
pkg = await this.getPkg(this.npm.prefix, pkg)
const spec = npa(pkg)
try {
const packumentOpts = { ...this.npm.flatOptions, fullMetadata: true, preferOnline: true }
const { maintainers } = await pacote.packument(spec, packumentOpts)
if (!maintainers || !maintainers.length) {
output.standard('no admin found')
} else {
output.standard(maintainers.map(m => `${m.name} <${m.email}>`).join('\n'))
}
} catch (err) {
log.error('owner ls', "Couldn't get owner data", redact(pkg))
throw err
}
}
async getPkg (prefix, pkg) {
if (!pkg) {
if (this.npm.global) {
throw this.usageError()
}
const { name } = await readJson(prefix)
if (!name) {
throw this.usageError()
}
return name
}
return pkg
}
async changeOwners (user, pkg, addOrRm) {
if (!user) {
throw this.usageError()
}
pkg = await this.getPkg(this.npm.prefix, pkg)
log.verbose(`owner ${addOrRm}`, '%s to %s', user, pkg)
const spec = npa(pkg)
const uri = `/-/user/org.couchdb.user:${encodeURIComponent(user)}`
let u
try {
u = await npmFetch.json(uri, this.npm.flatOptions)
} catch (err) {
log.error('owner mutate', `Error getting user data for ${user}`)
throw err
}
// normalize user data
u = { name: u.name, email: u.email }
const data = await pacote.packument(spec, {
...this.npm.flatOptions,
fullMetadata: true,
preferOnline: true,
})
const owners = data.maintainers || []
let maintainers
if (addOrRm === 'add') {
const existing = owners.find(o => o.name === u.name)
if (existing) {
log.info(
'owner add',
`Already a package owner: ${existing.name} <${existing.email}>`
)
return
}
maintainers = [
...owners,
u,
]
} else {
maintainers = owners.filter(o => o.name !== u.name)
if (maintainers.length === owners.length) {
log.info('owner rm', 'Not a package owner: ' + u.name)
return false
}
if (!maintainers.length) {
throw Object.assign(
new Error(
'Cannot remove all owners of a package. Add someone else first.'
),
{ code: 'EOWNERRM' }
)
}
}
const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}`
try {
const res = await otplease(this.npm, this.npm.flatOptions, opts => {
return npmFetch.json(dataPath, {
...opts,
method: 'PUT',
body: {
_id: data._id,
_rev: data._rev,
maintainers,
},
spec,
})
})
if (addOrRm === 'add') {
output.standard(`+ ${user} (${spec.name})`)
} else {
output.standard(`- ${user} (${spec.name})`)
}
return res
} catch (err) {
throw Object.assign(
new Error('Failed to update package: ' + JSON.stringify(err.message)),
{ code: 'EOWNERMUTATE' }
)
}
}
}
module.exports = Owner

85
NodeJS/node_modules/npm/lib/commands/pack.js generated vendored Normal file
View File

@@ -0,0 +1,85 @@
const pacote = require('pacote')
const libpack = require('libnpmpack')
const npa = require('npm-package-arg')
const { log, output } = require('proc-log')
const { getContents, logTar } = require('../utils/tar.js')
const BaseCommand = require('../base-cmd.js')
// Implements `npm pack`: build tarballs for the requested specs
// (defaulting to the current directory) and print their filenames.
class Pack extends BaseCommand {
  static description = 'Create a tarball from a package'
  static name = 'pack'
  static params = [
    'dry-run',
    'json',
    'pack-destination',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static usage = ['<package-spec>']
  static workspaces = true
  static ignoreImplicitWorkspace = false

  async exec (args) {
    const specs = args.length === 0 ? ['.'] : args
    const unicode = this.npm.config.get('unicode')
    const json = this.npm.config.get('json')

    // Resolve every manifest up front so a bad spec aborts the run
    // before any tarball is produced.
    const resolved = []
    for (const arg of specs) {
      const manifest = await pacote.manifest(npa(arg), this.npm.flatOptions)
      if (!manifest._id) {
        throw new Error('Invalid package, must have name and version')
      }
      resolved.push({ arg, manifest })
    }

    // Pack everything first, then print afterwards, so packing noise does
    // not interleave with the tarball listings.
    const tarballs = []
    for (const { arg, manifest } of resolved) {
      const tarballData = await libpack(arg, {
        ...this.npm.flatOptions,
        foregroundScripts: this.npm.config.isDefault('foreground-scripts')
          ? true
          : this.npm.config.get('foreground-scripts'),
        prefix: this.npm.localPrefix,
        workspaces: this.workspacePaths,
      })
      tarballs.push(await getContents(manifest, tarballData))
    }

    for (let i = 0; i < tarballs.length; i++) {
      const tar = tarballs[i]
      // XXX(BREAKING_CHANGE): publish outputs a json object with package
      // names as keys. Pack should do the same here instead of an array
      logTar(tar, { unicode, json, key: String(i) })
      if (!json) {
        output.standard(tar.filename.replace(/^@/, '').replace(/\//, '-'))
      }
    }
  }

  async execWorkspaces (args) {
    // If they either ask for nothing, or explicitly include '.' in the args,
    // we effectively translate that into each workspace requested
    const packAll = args.length === 0 || args.includes('.')
    if (!packAll) {
      log.warn('Ignoring workspaces for specified package(s)')
      return this.exec(args)
    }
    await this.setWorkspaces()
    return this.exec([...this.workspacePaths, ...args.filter(a => a !== '.')])
  }
}
module.exports = Pack

30
NodeJS/node_modules/npm/lib/commands/ping.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
const { redact } = require('@npmcli/redact')
const { log, output } = require('proc-log')
const pingUtil = require('../utils/ping.js')
const BaseCommand = require('../base-cmd.js')
// Implements `npm ping`: round-trip a request to the registry and report
// how long it took (plus any extra details the registry returned).
class Ping extends BaseCommand {
  static description = 'Ping npm registry'
  static params = ['registry']
  static name = 'ping'

  async exec () {
    const registry = redact(this.npm.config.get('registry'))
    log.notice('PING', registry)

    const started = Date.now()
    const details = await pingUtil({ ...this.npm.flatOptions })
    const elapsed = Date.now() - started

    log.notice('PONG', `${elapsed}ms`)
    if (this.npm.config.get('json')) {
      output.buffer({
        registry,
        time: elapsed,
        details,
      })
    } else if (Object.keys(details).length > 0) {
      log.notice('PONG', JSON.stringify(details, null, 2))
    }
  }
}
module.exports = Ping

129
NodeJS/node_modules/npm/lib/commands/pkg.js generated vendored Normal file
View File

@@ -0,0 +1,129 @@
const { output } = require('proc-log')
const PackageJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const Queryable = require('../utils/queryable.js')
// Implements `npm pkg`: read and edit fields of a package.json using the
// Queryable dotted/bracket key syntax, with workspace support.
class Pkg extends BaseCommand {
  static description = 'Manages your package.json'
  static name = 'pkg'
  static usage = [
    'set <key>=<value> [<key>=<value> ...]',
    'get [<key> [<key> ...]]',
    'delete <key> [<key> ...]',
    'set [<array>[<index>].<key>=<value> ...]',
    'set [<array>[].<key>=<value> ...]',
    'fix',
  ]

  static params = [
    'force',
    'json',
    'workspace',
    'workspaces',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Dispatch get/set/delete/fix against the package.json at `path`
  // (defaults to the local prefix). Refuses to run in global mode.
  async exec (args, { path = this.npm.localPrefix, workspace } = {}) {
    if (this.npm.global) {
      throw Object.assign(
        new Error(`There's no package.json file to manage on global mode`),
        { code: 'EPKGGLOBAL' }
      )
    }

    const [cmd, ..._args] = args
    switch (cmd) {
      case 'get':
        return this.get(_args, { path, workspace })
      case 'set':
        return this.set(_args, { path, workspace }).then(p => p.save())
      case 'delete':
        return this.delete(_args, { path, workspace }).then(p => p.save())
      case 'fix':
        return PackageJson.fix(path).then(p => p.save())
      default:
        throw this.usageError()
    }
  }

  // Run the same subcommand once per configured workspace.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const [workspace, path] of this.workspaces.entries()) {
      await this.exec(args, { path, workspace })
    }
  }

  // Print the queried keys (or the whole package.json) as JSON.
  async get (args, { path, workspace }) {
    // output for `pkg get` is always JSON
    this.npm.config.set('json', true)
    const pkgJson = await PackageJson.load(path)

    let result = pkgJson.content

    if (args.length) {
      result = new Queryable(result).query(args)
      // in case there's only a single result from the query
      // just prints that one element to stdout
      // TODO(BREAKING_CHANGE): much like other places where we unwrap single
      // item arrays this should go away. it makes the behavior unknown for users
      // who don't already know the shape of the data.
      if (Object.keys(result).length === 1) {
        // NOTE(review): `args` is an array used as a property key; it
        // coerces to its comma-joined string. With multiple query args but
        // a single match this lookup may yield undefined — verify upstream.
        result = result[args]
      }
    }

    // The display layer is responsible for calling JSON.stringify on the result
    // TODO: https://github.com/npm/cli/issues/5508 a raw mode has been requested similar
    // to jq -r. If that was added then this method should no longer set `json:true` all the time
    output.buffer(workspace ? { [workspace]: result } : result)
  }

  // Apply one or more <key>=<value> assignments; with --json the value is
  // parsed as JSON first. Returns the updated (unsaved) PackageJson.
  async set (args, { path }) {
    const setError = () =>
      this.usageError('npm pkg set expects a key=value pair of args.')

    if (!args.length) {
      throw setError()
    }

    const force = this.npm.config.get('force')
    const json = this.npm.config.get('json')
    const pkgJson = await PackageJson.load(path)
    const q = new Queryable(pkgJson.content)
    for (const arg of args) {
      // split on the first '=' only — values may themselves contain '='
      const [key, ...rest] = arg.split('=')
      const value = rest.join('=')
      if (!key || !value) {
        throw setError()
      }

      q.set(key, json ? JSON.parse(value) : value, { force })
    }

    return pkgJson.update(q.toJSON())
  }

  // Remove one or more keys. Returns the updated (unsaved) PackageJson.
  async delete (args, { path }) {
    const setError = () =>
      this.usageError('npm pkg delete expects key args.')

    if (!args.length) {
      throw setError()
    }

    const pkgJson = await PackageJson.load(path)
    const q = new Queryable(pkgJson.content)
    for (const key of args) {
      if (!key) {
        throw setError()
      }

      q.delete(key)
    }

    return pkgJson.update(q.toJSON())
  }
}
module.exports = Pkg

15
NodeJS/node_modules/npm/lib/commands/prefix.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// Implements `npm prefix`: print the effective prefix directory
// (global or local, depending on configuration).
class Prefix extends BaseCommand {
  static description = 'Display prefix'
  static name = 'prefix'
  static params = ['global']
  static usage = ['[-g]']

  async exec () {
    const { prefix } = this.npm
    return output.standard(prefix)
  }
}
module.exports = Prefix

390
NodeJS/node_modules/npm/lib/commands/profile.js generated vendored Normal file
View File

@@ -0,0 +1,390 @@
const { inspect } = require('node:util')
const { URL } = require('node:url')
const { log, output } = require('proc-log')
const { get, set, createToken } = require('npm-profile')
const qrcodeTerminal = require('qrcode-terminal')
const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
const BaseCommand = require('../base-cmd.js')
const qrcode = url =>
new Promise((resolve) => qrcodeTerminal.generate(url, resolve))
const knownProfileKeys = [
'name',
'email',
'two-factor auth',
'fullname',
'homepage',
'freenode',
'twitter',
'github',
'created',
'updated',
]
const writableProfileKeys = [
'email',
'password',
'fullname',
'homepage',
'freenode',
'twitter',
'github',
]
// Implements `npm profile`: view and update the logged-in user's registry
// profile, and enable/disable two-factor authentication.
class Profile extends BaseCommand {
  static description = 'Change settings on your registry profile'
  static name = 'profile'
  static usage = [
    'enable-2fa [auth-only|auth-and-writes]',
    'disable-2fa',
    'get [<key>]',
    'set <key> <value>',
  ]

  static params = [
    'registry',
    'json',
    'parseable',
    'otp',
  ]

  // Shell completion for the profile subcommands and 2fa modes.
  static async completion (opts) {
    var argv = opts.conf.argv.remain

    if (!argv[2]) {
      return ['enable-2fa', 'disable-2fa', 'get', 'set']
    }

    switch (argv[2]) {
      case 'enable-2fa':
      case 'enable-tfa':
        return ['auth-and-writes', 'auth-only']

      case 'disable-2fa':
      case 'disable-tfa':
      case 'get':
      case 'set':
        return []

      default:
        throw new Error(argv[2] + ' not recognized')
    }
  }

  // Dispatch to the subcommand handlers, accepting several aliases for
  // the 2fa toggles.
  async exec (args) {
    if (args.length === 0) {
      throw this.usageError()
    }

    const [subcmd, ...opts] = args

    switch (subcmd) {
      case 'enable-2fa':
      case 'enable-tfa':
      case 'enable2fa':
      case 'enabletfa':
        return this.enable2fa(opts)

      case 'disable-2fa':
      case 'disable-tfa':
      case 'disable2fa':
      case 'disabletfa':
        return this.disable2fa()

      case 'get':
        return this.get(opts)

      case 'set':
        return this.set(opts)

      default:
        throw new Error('Unknown profile command: ' + subcmd)
    }
  }

  // Fetch and print the profile, either whole (formatted / parseable /
  // json) or just the requested keys (tab-separated).
  async get (args) {
    const tfa = 'two-factor auth'
    const info = await get({ ...this.npm.flatOptions })

    if (!info.cidr_whitelist) {
      delete info.cidr_whitelist
    }

    if (this.npm.config.get('json')) {
      output.buffer(info)
      return
    }

    // clean up and format key/values for output
    const cleaned = {}
    for (const key of knownProfileKeys) {
      cleaned[key] = info[key] || ''
    }

    const unknownProfileKeys = Object.keys(info).filter((k) => !(k in cleaned))
    for (const key of unknownProfileKeys) {
      cleaned[key] = info[key] || ''
    }

    // raw tfa object and the verified flag are folded into friendlier keys
    delete cleaned.tfa
    delete cleaned.email_verified
    // NOTE(review): '(unverified)' lacks the leading space that
    // ' (verified)' has, so output reads "user@host(unverified)" — looks
    // like a cosmetic typo; confirm against upstream before changing.
    cleaned.email += info.email_verified ? ' (verified)' : '(unverified)'

    if (info.tfa && !info.tfa.pending) {
      cleaned[tfa] = info.tfa.mode
    } else {
      cleaned[tfa] = 'disabled'
    }

    if (args.length) {
      const values = args // comma or space separated
        .join(',')
        .split(/,/)
        .filter((arg) => arg.trim() !== '')
        .map((arg) => cleaned[arg])
        .join('\t')
      output.standard(values)
    } else {
      if (this.npm.config.get('parseable')) {
        for (const key of Object.keys(info)) {
          if (key === 'tfa') {
            output.standard(`${key}\t${cleaned[tfa]}`)
          } else {
            output.standard(`${key}\t${info[key]}`)
          }
        }
      } else {
        for (const [key, value] of Object.entries(cleaned)) {
          output.standard(`${key}: ${value}`)
        }
      }
    }
  }

  // Update a single writable profile property. Passwords are prompted for
  // interactively (never taken from argv) and confirmed by re-entry.
  async set (args) {
    const conf = { ...this.npm.flatOptions }
    const prop = (args[0] || '').toLowerCase().trim()

    let value = args.length > 1 ? args.slice(1).join(' ') : null

    // prompt for new password twice; recurse until both entries match
    const readPasswords = async () => {
      const newpassword = await readUserInfo.password('New password: ')
      const confirmedpassword = await readUserInfo.password('       Again: ')

      if (newpassword !== confirmedpassword) {
        log.warn('profile', 'Passwords do not match, please try again.')
        return readPasswords()
      }

      return newpassword
    }

    if (prop !== 'password' && value === null) {
      throw new Error('npm profile set <prop> <value>')
    }

    if (prop === 'password' && value !== null) {
      throw new Error(
        'npm profile set password\n' +
        'Do not include your current or new passwords on the command line.')
    }

    if (writableProfileKeys.indexOf(prop) === -1) {
      throw new Error(`"${prop}" is not a property we can set. ` +
        `Valid properties are: ` + writableProfileKeys.join(', '))
    }

    if (prop === 'password') {
      const current = await readUserInfo.password('Current password: ')
      const newpassword = await readPasswords()

      value = { old: current, new: newpassword }
    }

    // FIXME: Work around to not clear everything other than what we're setting
    const user = await get(conf)
    const newUser = {}

    for (const key of writableProfileKeys) {
      newUser[key] = user[key]
    }

    newUser[prop] = value

    const result = await otplease(this.npm, conf, c => set(newUser, c))

    if (this.npm.config.get('json')) {
      output.buffer({ [prop]: result[prop] })
    } else if (this.npm.config.get('parseable')) {
      output.standard(prop + '\t' + result[prop])
    } else if (result[prop] != null) {
      output.standard('Set', prop, 'to', result[prop])
    } else {
      output.standard('Set', prop)
    }
  }

  // Interactive flow to enable 2fa in 'auth-only' or 'auth-and-writes'
  // mode: upgrades legacy auth to a bearer token if needed, prompts for
  // password/OTP, displays the otpauth QR code, and prints recovery codes.
  async enable2fa (args) {
    if (args.length > 1) {
      throw new Error('npm profile enable-2fa [auth-and-writes|auth-only]')
    }

    const mode = args[0] || 'auth-and-writes'
    if (mode !== 'auth-only' && mode !== 'auth-and-writes') {
      throw new Error(
        `Invalid two-factor authentication mode "${mode}".\n` +
        'Valid modes are:\n' +
        '  auth-only - Require two-factor authentication only when logging in\n' +
        '  auth-and-writes - Require two-factor authentication when logging in ' +
        'AND when publishing'
      )
    }

    // the flow prompts the user, so non-interactive output modes can't work
    if (this.npm.config.get('json') || this.npm.config.get('parseable')) {
      throw new Error(
        'Enabling two-factor authentication is an interactive operation and ' +
        (this.npm.config.get('json') ? 'JSON' : 'parseable') + ' output mode is not available'
      )
    }

    const info = {
      tfa: {
        mode: mode,
      },
    }

    // if they're using legacy auth currently then we have to
    // update them to a bearer token before continuing.
    const creds = this.npm.config.getCredentialsByURI(this.npm.config.get('registry'))
    const auth = {}

    if (creds.token) {
      auth.token = creds.token
    } else if (creds.username) {
      auth.basic = { username: creds.username, password: creds.password }
    } else if (creds.auth) {
      // base64 "user:pass" from legacy _auth
      const basic = Buffer.from(creds.auth, 'base64').toString().split(':', 2)
      auth.basic = { username: basic[0], password: basic[1] }
    }

    if (!auth.basic && !auth.token) {
      throw new Error(
        'You need to be logged in to registry ' +
        `${this.npm.config.get('registry')} in order to enable 2fa`
      )
    }

    if (auth.basic) {
      log.info('profile', 'Updating authentication to bearer token')
      const result = await createToken(
        auth.basic.password, false, [], { ...this.npm.flatOptions }
      )

      if (!result.token) {
        throw new Error(
          `Your registry ${this.npm.config.get('registry')} does not seem to ` +
          'support bearer tokens. Bearer tokens are required for ' +
          'two-factor authentication'
        )
      }

      this.npm.config.setCredentialsByURI(
        this.npm.config.get('registry'),
        { token: result.token }
      )
      await this.npm.config.save('user')
    }

    log.notice('profile', 'Enabling two factor authentication for ' + mode)
    const password = await readUserInfo.password()
    info.tfa.password = password

    log.info('profile', 'Determine if tfa is pending')
    const userInfo = await get({ ...this.npm.flatOptions })

    const conf = { ...this.npm.flatOptions }
    if (userInfo && userInfo.tfa && userInfo.tfa.pending) {
      // a half-finished prior enrollment is reset before starting over
      log.info('profile', 'Resetting two-factor authentication')
      await set({ tfa: { password, mode: 'disable' } }, conf)
    } else if (userInfo && userInfo.tfa) {
      if (!conf.otp) {
        conf.otp = await readUserInfo.otp(
          'Enter one-time password: '
        )
      }
    }

    log.info('profile', 'Setting two-factor authentication to ' + mode)
    const challenge = await set(info, conf)

    if (challenge.tfa === null) {
      // null challenge means the mode changed without requiring enrollment
      output.standard('Two factor authentication mode changed to: ' + mode)
      return
    }

    const badResponse = typeof challenge.tfa !== 'string'
      || !/^otpauth:[/][/]/.test(challenge.tfa)
    if (badResponse) {
      throw new Error(
        'Unknown error enabling two-factor authentication. Expected otpauth URL' +
        ', got: ' + inspect(challenge.tfa)
      )
    }

    const otpauth = new URL(challenge.tfa)
    const secret = otpauth.searchParams.get('secret')
    const code = await qrcode(challenge.tfa)

    output.standard(
      'Scan into your authenticator app:\n' + code + '\n Or enter code:', secret
    )

    const interactiveOTP =
      await readUserInfo.otp('And an OTP code from your authenticator: ')

    log.info('profile', 'Finalizing two-factor authentication')

    const result = await set({ tfa: [interactiveOTP] }, conf)

    output.standard(
      '2FA successfully enabled. Below are your recovery codes, ' +
      'please print these out.'
    )
    output.standard(
      'You will need these to recover access to your account ' +
      'if you lose your authentication device.'
    )

    for (const tfaCode of result.tfa) {
      output.standard('\t' + tfaCode)
    }
  }

  // Disable 2fa for the account: prompts for password (and OTP when not
  // already supplied via config), then confirms per the output mode.
  async disable2fa () {
    const conf = { ...this.npm.flatOptions }
    const info = await get(conf)

    if (!info.tfa || info.tfa.pending) {
      output.standard('Two factor authentication not enabled.')
      return
    }

    const password = await readUserInfo.password()

    if (!conf.otp) {
      const msg = 'Enter one-time password: '
      conf.otp = await readUserInfo.otp(msg)
    }

    log.info('profile', 'disabling tfa')

    await set({ tfa: { password: password, mode: 'disable' } }, conf)

    if (this.npm.config.get('json')) {
      output.buffer({ tfa: false })
    } else if (this.npm.config.get('parseable')) {
      output.standard('tfa\tfalse')
    } else {
      output.standard('Two factor authentication disabled.')
    }
  }
}
module.exports = Profile

34
NodeJS/node_modules/npm/lib/commands/prune.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// prune extraneous packages
// Implements `npm prune`: reify the tree with extraneous packages removed.
class Prune extends ArboristWorkspaceCmd {
  static description = 'Remove extraneous packages'
  static name = 'prune'
  static params = [
    'omit',
    'include',
    'dry-run',
    'json',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  static usage = ['[[<@scope>/]<pkg>...]']

  async exec () {
    const Arborist = require('@npmcli/arborist')
    const options = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      workspaces: this.workspaceNames,
    }
    const arborist = new Arborist(options)
    await arborist.prune(options)
    await reifyFinish(this.npm, arborist)
  }
}
module.exports = Prune

227
NodeJS/node_modules/npm/lib/commands/publish.js generated vendored Normal file
View File

@@ -0,0 +1,227 @@
const { log, output } = require('proc-log')
const semver = require('semver')
const pack = require('libnpmpack')
const libpub = require('libnpmpublish').publish
const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const { otplease } = require('../utils/auth.js')
const { getContents, logTar } = require('../utils/tar.js')
// for historical reasons, publishConfig in package.json can contain ANY config
// keys that npm supports in .npmrc files and elsewhere. We *may* want to
// revisit this at some point, and have a minimal set that's a SemVer-major
// change that ought to get a RFC written on it.
const { flatten } = require('@npmcli/config/lib/definitions')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// Implements `npm publish`: pack the given spec (default '.') in memory and
// upload the tarball to the configured registry, honoring publishConfig,
// lifecycle scripts, dist-tag validation, and workspace fan-out.
class Publish extends BaseCommand {
  static description = 'Publish a package'
  static name = 'publish'
  static params = [
    'tag',
    'access',
    'dry-run',
    'otp',
    'workspace',
    'workspaces',
    'include-workspace-root',
    'provenance',
  ]

  static usage = ['<package-spec>']
  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Publish exactly one spec; defaults to the current directory.
  async exec (args) {
    if (args.length === 0) {
      args = ['.']
    }
    if (args.length !== 1) {
      throw this.usageError()
    }

    await this.#publish(args)
  }

  // Publish each configured workspace, skipping (with a warning) any
  // workspace whose manifest is marked private (EPRIVATE).
  async execWorkspaces (args) {
    const useWorkspaces = args.length === 0 || args.includes('.')
    if (!useWorkspaces) {
      log.warn('Ignoring workspaces for specified package(s)')
      return this.exec(args)
    }
    await this.setWorkspaces()

    for (const [name, workspace] of this.workspaces.entries()) {
      try {
        await this.#publish([workspace], { workspace: name })
      } catch (err) {
        if (err.code !== 'EPRIVATE') {
          throw err
        }
        // eslint-disable-next-line max-len
        log.warn('publish', `Skipping workspace ${this.npm.chalk.cyan(name)}, marked as ${this.npm.chalk.bold('private')}`)
      }
    }
  }

  // Core publish flow: read manifest, run prepublishOnly, pack in-memory,
  // re-read the manifest, log the tarball, then upload (unless dry-run)
  // and run the publish/postpublish scripts.
  async #publish (args, { workspace } = {}) {
    log.verbose('publish', replaceInfo(args))

    const unicode = this.npm.config.get('unicode')
    const dryRun = this.npm.config.get('dry-run')
    const json = this.npm.config.get('json')
    const defaultTag = this.npm.config.get('tag')
    const ignoreScripts = this.npm.config.get('ignore-scripts')
    const { silent } = this.npm

    // a dist-tag that parses as a SemVer range (e.g. '1.x') would be
    // ambiguous with version specs, so it is rejected outright
    if (semver.validRange(defaultTag)) {
      throw new Error('Tag name must not be a valid SemVer range: ' + defaultTag.trim())
    }

    const opts = { ...this.npm.flatOptions, progress: false }

    // you can publish name@version, ./foo.tgz, etc.
    // even though the default is the 'file:.' cwd.
    const spec = npa(args[0])
    let manifest = await this.#getManifest(spec, opts)

    // only run scripts for directory type publishes
    if (spec.type === 'directory' && !ignoreScripts) {
      await runScript({
        event: 'prepublishOnly',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })
    }

    // we pass dryRun: true to libnpmpack so it doesn't write the file to disk
    const tarballData = await pack(spec, {
      ...opts,
      foregroundScripts: this.npm.config.isDefault('foreground-scripts')
        ? true
        : this.npm.config.get('foreground-scripts'),
      dryRun: true,
      prefix: this.npm.localPrefix,
      workspaces: this.workspacePaths,
    })
    const pkgContents = await getContents(manifest, tarballData)
    const logPkg = () => logTar(pkgContents, { unicode, json, key: workspace })

    // The purpose of re-reading the manifest is in case it changed,
    // so that we send the latest and greatest thing to the registry
    // note that publishConfig might have changed as well!
    manifest = await this.#getManifest(spec, opts, true)

    // If we are not in JSON mode then we show the user the contents of the tarball
    // before it is published so they can see it while their otp is pending
    if (!json) {
      logPkg()
    }

    const resolved = npa.resolve(manifest.name, manifest.version)

    // make sure tag is valid, this will throw if invalid
    npa(`${manifest.name}@${defaultTag}`)

    const registry = npmFetch.pickRegistry(resolved, opts)
    const creds = this.npm.config.getCredentialsByURI(registry)
    const noCreds = !(creds.token || creds.username || creds.certfile && creds.keyfile)
    const outputRegistry = replaceInfo(registry)

    // if a workspace package is marked private then we skip it
    if (workspace && manifest.private) {
      throw Object.assign(
        new Error(`This package has been marked as private
Remove the 'private' field from the package.json to publish it.`),
        { code: 'EPRIVATE' }
      )
    }

    if (noCreds) {
      const msg = `This command requires you to be logged in to ${outputRegistry}`
      if (dryRun) {
        // in dry-run mode missing credentials are only a warning
        log.warn('', `${msg} (dry-run)`)
      } else {
        throw Object.assign(new Error(msg), { code: 'ENEEDAUTH' })
      }
    }

    const access = opts.access === null ? 'default' : opts.access
    let msg = `Publishing to ${outputRegistry} with tag ${defaultTag} and ${access} access`
    if (dryRun) {
      msg = `${msg} (dry-run)`
    }

    log.notice('', msg)

    if (!dryRun) {
      await otplease(this.npm, opts, o => libpub(manifest, tarballData, o))
    }

    // In json mode we dont log until the publish has completed as this will
    // add it to the output only if completes successfully
    if (json) {
      logPkg()
    }

    if (spec.type === 'directory' && !ignoreScripts) {
      await runScript({
        event: 'publish',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })

      await runScript({
        event: 'postpublish',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })
    }

    if (!json && !silent) {
      output.standard(`+ ${pkgContents.id}`)
    }
  }

  // if it's a directory, read it from the file system
  // otherwise, get the full metadata from whatever it is
  // XXX can't pacote read the manifest from a directory?
  async #getManifest (spec, opts, logWarnings = false) {
    let manifest
    if (spec.type === 'directory') {
      const changes = []
      const pkg = await pkgJson.fix(spec.fetchSpec, { changes })
      if (changes.length && logWarnings) {
        /* eslint-disable-next-line max-len */
        log.warn('publish', 'npm auto-corrected some errors in your package.json when publishing. Please run "npm pkg fix" to address these errors.')
        log.warn('publish', `errors corrected:\n${changes.join('\n')}`)
      }
      // Prepare is the special function for publishing, different than normalize
      const { content } = await pkg.prepare()
      manifest = content
    } else {
      // NOTE(review): elsewhere in this codebase this option is spelled
      // `fullMetadata` (camelCase); the lowercase `fullmetadata` here may
      // be ignored by pacote — verify against pacote's documented options.
      manifest = await pacote.manifest(spec, {
        ...opts,
        fullmetadata: true,
        fullReadJson: true,
      })
    }

    if (manifest.publishConfig) {
      const cliFlags = this.npm.config.data.get('cli').raw
      // Filter out properties set in CLI flags to prioritize them over
      // corresponding `publishConfig` settings
      const filteredPublishConfig = Object.fromEntries(
        Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
      flatten(filteredPublishConfig, opts)
    }

    return manifest
  }
}
module.exports = Publish

126
NodeJS/node_modules/npm/lib/commands/query.js generated vendored Normal file
View File

@@ -0,0 +1,126 @@
const { resolve } = require('node:path')
const BaseCommand = require('../base-cmd.js')
const { log, output } = require('proc-log')
// Flattened, JSON-serializable view of an arborist query result node:
// the target's package metadata plus tree-position info (locations of
// dependents/dependencies, dev/bundle flags, dedupe state).
class QuerySelectorItem {
  constructor (node) {
    // all enumerable properties from the target
    Object.assign(this, node.target.package)
    // append extra info
    this.pkgid = node.target.pkgid
    this.location = node.target.location
    this.path = node.target.path
    this.realpath = node.target.realpath
    this.resolved = node.target.resolved
    this.from = []
    this.to = []
    this.dev = node.target.dev
    this.inBundle = node.target.inBundle
    this.deduped = false // placeholder; real value computed below
    this.overridden = node.overridden
    this.queryContext = node.queryContext
    for (const edge of node.target.edgesIn) {
      this.from.push(edge.from.location)
    }
    for (const [, edge] of node.target.edgesOut) {
      if (edge.to) {
        this.to.push(edge.to.location)
      }
    }
    // BUG FIX: `deduped` must be evaluated after `from` is populated from
    // edgesIn; it was previously computed while `from` was still empty,
    // which made it always false.
    this.deduped = this.from.length > 1
  }
}
// Implements `npm query`: run a dependency-selector query against the
// installed tree (or the lockfile's virtual tree) and emit matches as JSON.
class Query extends BaseCommand {
  #response = [] // response is the query response
  #seen = new Set() // paths we've seen so we can keep response deduped

  static description = 'Retrieve a filtered list of packages'
  static name = 'query'
  static usage = ['<selector>']

  static workspaces = true
  static ignoreImplicitWorkspace = false

  static params = [
    'global',
    'workspace',
    'workspaces',
    'include-workspace-root',
    'package-lock-only',
    'expect-results',
  ]

  constructor (...args) {
    super(...args)
    // query results are always emitted as JSON
    this.npm.config.set('json', true)
  }

  // Load the tree (virtual from the lockfile in package-lock-only mode,
  // actual from node_modules otherwise), run the selector, print results.
  async exec (args) {
    const packageLock = this.npm.config.get('package-lock-only')
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      // one dir up from wherever node_modules lives
      path: resolve(this.npm.dir, '..'),
      forceActual: !packageLock,
    })
    let tree
    if (packageLock) {
      try {
        tree = await arb.loadVirtual()
      } catch (err) {
        log.verbose('loadVirtual', err.stack)
        throw this.usageError(
          'A package lock or shrinkwrap file is required in package-lock-only mode'
        )
      }
    } else {
      tree = await arb.loadActual()
    }
    await this.#queryTree(tree, args[0])
    this.#output()
  }

  // Workspace mode: run the selector once per workspace subtree (or the
  // whole tree for the workspace root).
  async execWorkspaces (args) {
    await this.setWorkspaces()
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: this.npm.prefix,
    })
    // FIXME: Workspace support in query does not work as expected so this does not
    // do the same package-lock-only check as this.exec().
    // https://github.com/npm/cli/pull/6732#issuecomment-1708804921
    const tree = await arb.loadActual()
    for (const path of this.workspacePaths) {
      const wsTree = path === tree.root.path
        ? tree // --includes-workspace-root
        : await tree.querySelectorAll(`.workspace:path(${path})`).then(r => r[0].target)
      await this.#queryTree(wsTree, args[0])
    }
    this.#output()
  }

  // Validate the result count against --expect-results, then emit.
  #output () {
    this.checkExpected(this.#response.length)
    output.buffer(this.#response)
  }

  // builds a normalized inventory
  // Runs the selector against `tree`, appending each previously-unseen
  // match (deduped by node location) to the response.
  async #queryTree (tree, arg) {
    const items = await tree.querySelectorAll(arg, this.npm.flatOptions)
    for (const node of items) {
      const { location } = node.target
      if (!location || !this.#seen.has(location)) {
        const item = new QuerySelectorItem(node)
        this.#response.push(item)
        if (location) {
          this.#seen.add(item.location)
        }
      }
    }
  }
}
module.exports = Query

84
NodeJS/node_modules/npm/lib/commands/rebuild.js generated vendored Normal file
View File

@@ -0,0 +1,84 @@
const { resolve } = require('node:path')
const { output } = require('proc-log')
const npa = require('npm-package-arg')
const semver = require('semver')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Implements `npm rebuild`: re-run build steps (install scripts, bin
// links) for installed packages, optionally filtered by spec arguments.
class Rebuild extends ArboristWorkspaceCmd {
  static description = 'Rebuild a package'
  static name = 'rebuild'
  static params = [
    'global',
    'bin-links',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  // BUG FIX: was '[<package-spec>] ...]' — stray unbalanced ']' at the end
  static usage = ['[<package-spec>] ...']

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  // Rebuild either the whole tree (no args) or only the nodes matching the
  // given specs. Only '*', SemVer version/range, and directory specs are
  // accepted as filters.
  async exec (args) {
    const globalTop = resolve(this.npm.globalDir, '..')
    const where = this.npm.global ? globalTop : this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: where,
      // TODO when extending ReifyCmd
      // workspaces: this.workspaceNames,
    })

    if (args.length) {
      // get the set of nodes matching the name that we want rebuilt
      const tree = await arb.loadActual()
      const specs = args.map(arg => {
        const spec = npa(arg)
        if (spec.rawSpec === '*') {
          return spec
        }

        if (spec.type !== 'range' && spec.type !== 'version' && spec.type !== 'directory') {
          throw new Error('`npm rebuild` only supports SemVer version/range specifiers')
        }

        return spec
      })

      const nodes = tree.inventory.filter(node => this.isNode(specs, node))

      await arb.rebuild({ nodes })
    } else {
      await arb.rebuild()
    }

    output.standard('rebuilt dependencies successfully')
  }

  // True when `node` matches at least one parsed spec: directory specs
  // match by path; others match by name plus SemVer range, where an empty
  // or '*' range matches any version.
  isNode (specs, node) {
    return specs.some(spec => {
      if (spec.type === 'directory') {
        return node.path === spec.fetchSpec
      }

      if (spec.name !== node.name) {
        return false
      }

      if (spec.rawSpec === '' || spec.rawSpec === '*') {
        return true
      }

      const { version } = node.package
      // TODO: add tests for a package with missing version
      return semver.satisfies(version, spec.fetchSpec)
    })
  }
}
module.exports = Rebuild

55
NodeJS/node_modules/npm/lib/commands/repo.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
const { URL } = require('node:url')
const PackageUrlCmd = require('../package-url-cmd.js')
class Repo extends PackageUrlCmd {
  static description = 'Open package repository page in the browser'
  static name = 'repo'

  // Resolve a browsable repository URL from the manifest's repository field,
  // which may be a shorthand string or an object with a url property.
  getUrl (spec, mani) {
    const repo = mani.repository
    let rurl = null
    if (typeof repo === 'string') {
      rurl = repo
    } else if (repo && typeof repo === 'object' && typeof repo.url === 'string') {
      rurl = repo.url
    }

    if (!rurl) {
      throw Object.assign(new Error('no repository'), {
        pkgid: spec,
      })
    }

    // known hosts (via hostedFromMani) get a proper browse URL; anything
    // else is normalized by unknownHostedUrl
    const info = this.hostedFromMani(mani)
    const url = info
      ? info.browse(mani.repository.directory)
      : unknownHostedUrl(rurl)

    if (!url) {
      throw Object.assign(new Error('no repository: could not get url'), {
        pkgid: spec,
      })
    }

    return url
  }
}
module.exports = Repo
// Fallback for repository hosts we don't recognize: normalize a raw
// repository URL into a plain browsable http(s) URL, or null when the
// value can't be parsed as a URL at all.
const unknownHostedUrl = url => {
  try {
    const parsed = new URL(url)

    /* istanbul ignore next - URL ctor should prevent this */
    if (!parsed.protocol || !parsed.hostname) {
      return null
    }

    // git+http stays plain http; every other scheme is upgraded to https
    const proto = /(git\+)http:$/.test(parsed.protocol) ? 'http:' : 'https:'
    const path = parsed.pathname.replace(/\.git$/, '')
    return `${proto}//${parsed.hostname}${path}`
  } catch (e) {
    return null
  }
}

13
NodeJS/node_modules/npm/lib/commands/restart.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['restart', ...args])
// Thin command shell: execution is handled by LifecycleCmd, which ends up
// calling run-script(['restart', ...args]).
class Restart extends LifecycleCmd {
  static description = 'Restart a package'
  static name = 'restart'

  // flags surfaced in help; they affect the underlying run-script call
  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Restart

14
NodeJS/node_modules/npm/lib/commands/root.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
class Root extends BaseCommand {
  static description = 'Display npm root'
  static name = 'root'
  static params = ['global']

  // Print the effective node_modules directory (global or local, per config).
  async exec () {
    const { dir } = this.npm
    output.standard(dir)
  }
}
module.exports = Root

218
NodeJS/node_modules/npm/lib/commands/run-script.js generated vendored Normal file
View File

@@ -0,0 +1,218 @@
const { output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const { getError } = require('../utils/error-message.js')
const { outputError } = require('../utils/output-error.js')
// Runs a named script from package.json (`npm run <script>`), including its
// pre/post hooks, or lists the available scripts when called with no args.
class RunScript extends BaseCommand {
  static description = 'Run arbitrary package scripts'
  static params = [
    'workspace',
    'workspaces',
    'include-workspace-root',
    'if-present',
    'ignore-scripts',
    'foreground-scripts',
    'script-shell',
  ]

  static name = 'run-script'
  static usage = ['<command> [-- <args>]']
  static workspaces = true
  static ignoreImplicitWorkspace = false
  static isShellout = true
  static checkDevEngines = true

  // Tab-complete script names from the local package.json.
  static async completion (opts, npm) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      const { content: { scripts = {} } } = await pkgJson.normalize(npm.localPrefix)
        .catch(() => ({ content: {} }))
      if (opts.isFish) {
        // fish completions can carry a short description after a tab
        return Object.keys(scripts).map(s => `${s}\t${scripts[s].slice(0, 30)}`)
      }
      return Object.keys(scripts)
    }
  }

  async exec (args) {
    if (args.length) {
      await this.#run(args, { path: this.npm.localPrefix })
    } else {
      await this.#list(this.npm.localPrefix)
    }
  }

  // Run (or list) in each selected workspace. A failure in one workspace is
  // reported and recorded in process.exitCode but does not stop the rest.
  async execWorkspaces (args) {
    await this.setWorkspaces()

    const ws = [...this.workspaces.entries()]
    for (const [workspace, path] of ws) {
      // last workspace gets no trailing blank-line separator
      const last = path === ws.at(-1)[1]
      if (!args.length) {
        const newline = await this.#list(path, { workspace })
        if (newline && !last) {
          output.standard('')
        }
        continue
      }
      const pkg = await pkgJson.normalize(path).then(p => p.content)
      try {
        await this.#run(args, { path, pkg, workspace })
      } catch (e) {
        // annotate the error with which workspace/location it came from
        const err = getError(e, { npm: this.npm, command: null })
        outputError({
          ...err,
          error: [
            ['', `Lifecycle script \`${args[0]}\` failed with error:`],
            ...err.error,
            ['workspace', pkg._id || pkg.name],
            ['location', path],
          ],
        })
        process.exitCode = err.exitCode
        if (!last) {
          output.error('')
        }
      }
    }
  }

  // Run one script event (plus its pre/post hooks) at the given path.
  async #run ([event, ...args], { path, pkg, workspace }) {
    const runScript = require('@npmcli/run-script')
    pkg ??= await pkgJson.normalize(path).then(p => p.content)
    const { scripts = {} } = pkg

    // built-in fallbacks for `restart` and `env` when not defined in pkg
    if (event === 'restart' && !scripts.restart) {
      scripts.restart = 'npm stop --if-present && npm start'
    } else if (event === 'env' && !scripts.env) {
      const { isWindowsShell } = require('../utils/is-windows.js')
      scripts.env = isWindowsShell ? 'SET' : 'env'
    }

    pkg.scripts = scripts

    if (
      !Object.prototype.hasOwnProperty.call(scripts, event) &&
      !(event === 'start' && (await runScript.isServerPackage(path)))
    ) {
      // --if-present turns a missing script into a silent no-op
      if (this.npm.config.get('if-present')) {
        return
      }

      const suggestions = require('../utils/did-you-mean.js')(pkg, event)
      const wsArg = workspace && path !== this.npm.localPrefix
        ? ` --workspace=${pkg._id || pkg.name}`
        : ''
      throw new Error([
        `Missing script: "${event}"${suggestions}\n`,
        'To see a list of scripts, run:',
        ` npm run${wsArg}`,
      ].join('\n'))
    }

    // positional args only added to the main event, not pre/post
    const events = [[event, args]]
    if (!this.npm.config.get('ignore-scripts')) {
      if (scripts[`pre${event}`]) {
        events.unshift([`pre${event}`, []])
      }
      if (scripts[`post${event}`]) {
        events.push([`post${event}`, []])
      }
    }

    for (const [ev, evArgs] of events) {
      await runScript({
        path,
        // this || undefined is because runScript will be unhappy with the
        // default null value
        scriptShell: this.npm.config.get('script-shell') || undefined,
        stdio: 'inherit',
        pkg,
        event: ev,
        args: evArgs,
      })
    }
  }

  // Print the scripts available at path, honoring --json/--parseable/silent.
  async #list (path, { workspace } = {}) {
    const { scripts = {}, name, _id } = await pkgJson.normalize(path).then(p => p.content)
    const scriptEntries = Object.entries(scripts)

    if (this.npm.silent) {
      return
    }

    if (this.npm.config.get('json')) {
      output.buffer(workspace ? { [workspace]: scripts } : scripts)
      return
    }

    if (!scriptEntries.length) {
      return
    }

    if (this.npm.config.get('parseable')) {
      output.standard(scriptEntries
        .map((s) => (workspace ? [workspace, ...s] : s).join(':'))
        .join('\n')
        .trim())
      return
    }

    // known lifecycle scripts get listed separately from plain run-scripts
    const cmdList = [
      'prepare', 'prepublishOnly',
      'prepack', 'postpack',
      'dependencies',
      'preinstall', 'install', 'postinstall',
      'prepublish', 'publish', 'postpublish',
      'prerestart', 'restart', 'postrestart',
      'prestart', 'start', 'poststart',
      'prestop', 'stop', 'poststop',
      'pretest', 'test', 'posttest',
      'preuninstall', 'uninstall', 'postuninstall',
      'preversion', 'version', 'postversion',
    ]
    // partition entries into [lifecycle cmds, plain run-scripts]
    const [cmds, runScripts] = scriptEntries.reduce((acc, s) => {
      acc[cmdList.includes(s[0]) ? 0 : 1].push(s)
      return acc
    }, [[], []])
    const { reset, bold, cyan, dim, blue } = this.npm.chalk
    const pkgId = `in ${cyan(_id || name)}`
    const title = (t) => reset(bold(t))

    if (cmds.length) {
      output.standard(`${title('Lifecycle scripts')} included ${pkgId}:`)
      for (const [k, v] of cmds) {
        output.standard(` ${k}`)
        output.standard(` ${dim(v)}`)
      }
    }

    if (runScripts.length) {
      const via = `via \`${blue('npm run-script')}\`:`
      if (!cmds.length) {
        output.standard(`${title('Scripts')} available ${pkgId} ${via}`)
      } else {
        output.standard(`available ${via}`)
      }
      for (const [k, v] of runScripts) {
        output.standard(` ${k}`)
        output.standard(` ${dim(v)}`)
      }
    }

    // Return true to indicate that something was output for this path
    // that should be separated from others
    return true
  }
}
module.exports = RunScript

134
NodeJS/node_modules/npm/lib/commands/sbom.js generated vendored Normal file
View File

@@ -0,0 +1,134 @@
const localeCompare = require('@isaacs/string-locale-compare')('en')
const BaseCommand = require('../base-cmd.js')
const { log, output } = require('proc-log')
const { cyclonedxOutput } = require('../utils/sbom-cyclonedx.js')
const { spdxOutput } = require('../utils/sbom-spdx.js')
const SBOM_FORMATS = ['cyclonedx', 'spdx']
// Generates a Software Bill of Materials (CycloneDX or SPDX) for the current
// project tree, optionally restricted to workspaces and filtered by --omit.
class SBOM extends BaseCommand {
  #response = {} // response is the sbom response

  static description = 'Generate a Software Bill of Materials (SBOM)'
  static name = 'sbom'
  static workspaces = true

  static params = [
    'omit',
    'package-lock-only',
    'sbom-format',
    'sbom-type',
    'workspace',
    'workspaces',
  ]

  async exec () {
    const sbomFormat = this.npm.config.get('sbom-format')
    const packageLockOnly = this.npm.config.get('package-lock-only')

    if (!sbomFormat) {
      /* eslint-disable-next-line max-len */
      throw this.usageError(`Must specify --sbom-format flag with one of: ${SBOM_FORMATS.join(', ')}.`)
    }

    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      forceActual: true,
    }
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist(opts)

    // package-lock-only reads only the lockfile; otherwise read node_modules
    const tree = packageLockOnly ? await arb.loadVirtual(opts).catch(() => {
      /* eslint-disable-next-line max-len */
      throw this.usageError('A package lock or shrinkwrap file is required in package-lock-only mode')
    }) : await arb.loadActual(opts)

    // Collect the list of selected workspaces in the project
    const wsNodes = this.workspaceNames?.length
      ? arb.workspaceNodes(tree, this.workspaceNames)
      : null

    // Build the selector and query the tree for the list of nodes
    const selector = this.#buildSelector({ wsNodes })
    log.info('sbom', `Using dependency selector: ${selector}`)
    const items = await tree.querySelectorAll(selector)

    // refuse to emit an SBOM for a broken tree (missing/invalid deps)
    const errors = items.flatMap(node => detectErrors(node))
    if (errors.length) {
      throw Object.assign(new Error([...new Set(errors)].join('\n')), {
        code: 'ESBOMPROBLEMS',
      })
    }

    // Populate the response with the list of unique nodes (sorted by location)
    this.#buildResponse(items.sort((a, b) => localeCompare(a.location, b.location)))

    // TODO(BREAKING_CHANGE): all sbom output is in json mode but setting it before
    // any of the errors will cause those to be thrown in json mode.
    this.npm.config.set('json', true)
    output.buffer(this.#response)
  }

  async execWorkspaces (args) {
    await this.setWorkspaces()
    return this.exec(args)
  }

  // Build the selector from all of the specified filter options
  #buildSelector ({ wsNodes }) {
    let selector
    const omit = this.npm.flatOptions.omit
    const workspacesEnabled = this.npm.flatOptions.workspacesEnabled

    // If omit is specified, omit all nodes and their children which match the
    // specified selectors
    const omits = omit.reduce((acc, o) => `${acc}:not(.${o})`, '')

    if (!workspacesEnabled) {
      // If workspaces are disabled, omit all workspace nodes and their children
      selector = `:root > :not(.workspace)${omits},:root > :not(.workspace) *${omits},:extraneous`
    } else if (wsNodes && wsNodes.length > 0) {
      // If one or more workspaces are selected, select only those workspaces and their children
      selector = wsNodes.map(ws => `#${ws.name},#${ws.name} *${omits}`).join(',')
    } else {
      selector = `:root *${omits},:extraneous`
    }

    // Always include the root node
    return `:root,${selector}`
  }

  // builds a normalized inventory in the requested SBOM format
  #buildResponse (items) {
    const sbomFormat = this.npm.config.get('sbom-format')
    const packageType = this.npm.config.get('sbom-type')
    const packageLockOnly = this.npm.config.get('package-lock-only')

    this.#response = sbomFormat === 'cyclonedx'
      ? cyclonedxOutput({ npm: this.npm, nodes: items, packageType, packageLockOnly })
      : spdxOutput({ npm: this.npm, nodes: items, packageType })
  }
}
// Scan one node's outgoing edges for problems that should block SBOM
// generation: non-optional missing deps and invalid (unsatisfied) deps.
// Returns an array of human-readable error strings (empty when clean).
const detectErrors = (node) => {
  const errors = []

  for (const edge of node.edgesOut.values()) {
    const isOptional = edge.type === 'optional' || edge.type === 'peerOptional'

    // Look for missing dependencies (that are NOT optional), or invalid dependencies
    if (edge.missing && !isOptional) {
      errors.push(`missing: ${edge.name}@${edge.spec}, required by ${edge.from.pkgid}`)
    }

    if (edge.invalid) {
      /* istanbul ignore next */
      const spec = edge.spec || '*'
      errors.push(`invalid: ${edge.to.pkgid}, ${spec} required by ${edge.from.pkgid}`)
    }
  }

  return errors
}
module.exports = SBOM

70
NodeJS/node_modules/npm/lib/commands/search.js generated vendored Normal file
View File

@@ -0,0 +1,70 @@
const Pipeline = require('minipass-pipeline')
const libSearch = require('libnpmsearch')
const { log, output } = require('proc-log')
const formatSearchStream = require('../utils/format-search-stream.js')
const BaseCommand = require('../base-cmd.js')
// Searches the registry for packages matching the given terms and streams
// the formatted results to standard output.
class Search extends BaseCommand {
  static description = 'Search for packages'
  static name = 'search'

  static params = [
    'json',
    'color',
    'parseable',
    'description',
    'searchlimit',
    'searchopts',
    'searchexclude',
    'registry',
    'prefer-online',
    'prefer-offline',
    'offline',
  ]

  static usage = ['<search term> [<search term> ...]']

  async exec (args) {
    // search terms are lowercased; excludes come from --searchexclude config
    const opts = {
      ...this.npm.flatOptions,
      ...this.npm.flatOptions.search,
      include: args.map(s => s.toLowerCase()).filter(Boolean),
      exclude: this.npm.flatOptions.search.exclude.split(/\s+/),
    }

    if (opts.include.length === 0) {
      throw new Error('search must be called with arguments')
    }

    // Used later to figure out whether we had any packages go out
    let anyOutput = false

    // Grab a configured output stream that will spit out packages in the desired format.
    const outputStream = formatSearchStream({
      args, // --searchinclude options are not highlighted
      ...opts,
      npm: this.npm,
    })

    log.silly('search', 'searching packages')
    // pipe the raw registry search results through the formatter
    const p = new Pipeline(
      libSearch.stream(opts.include, opts),
      outputStream
    )

    p.on('data', chunk => {
      if (!anyOutput) {
        anyOutput = true
      }
      output.standard(chunk.toString('utf8'))
    })

    await p.promise()
    // human-readable mode gets an explicit "no matches" message
    if (!anyOutput && !this.npm.config.get('json') && !this.npm.config.get('parseable')) {
      output.standard('No matches found for ' + (args.map(JSON.stringify).join(' ')))
    }

    log.silly('search', 'search completed')
  }
}
module.exports = Search

26
NodeJS/node_modules/npm/lib/commands/set.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
const Npm = require('../npm.js')
const BaseCommand = require('../base-cmd.js')
// `npm set` is sugar for `npm config set`.
class Set extends BaseCommand {
  static description = 'Set a value in the npm configuration'
  static name = 'set'
  static usage = ['<key>=<value> [<key>=<value> ...] (See `npm config`)']
  static params = ['global', 'location']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts) {
    // delegate to `npm config`'s completion, which knows the key names
    return Npm.cmd('config').completion(opts)
  }

  async exec (args) {
    if (args.length === 0) {
      throw this.usageError()
    }
    return this.npm.exec('config', ['set', ...args])
  }
}
module.exports = Set

73
NodeJS/node_modules/npm/lib/commands/shrinkwrap.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
const { resolve, basename } = require('node:path')
const { unlink } = require('node:fs/promises')
const { log } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// Converts the project's lockfile into npm-shrinkwrap.json, creating one
// from the actual tree when no lockfile exists on disk.
class Shrinkwrap extends BaseCommand {
  static description = 'Lock down dependency versions for publication'
  static name = 'shrinkwrap'
  static ignoreImplicitWorkspace = false

  async exec () {
    // if has a npm-shrinkwrap.json, nothing to do
    // if has a package-lock.json, rename to npm-shrinkwrap.json
    // if has neither, load the actual tree and save that as npm-shrinkwrap.json
    //
    // loadVirtual, fall back to loadActual
    // rename shrinkwrap file type, and tree.meta.save()
    if (this.npm.global) {
      const er = new Error('`npm shrinkwrap` does not work for global packages')
      er.code = 'ESHRINKWRAPGLOBAL'
      throw er
    }

    const Arborist = require('@npmcli/arborist')
    const path = this.npm.prefix
    const sw = resolve(path, 'npm-shrinkwrap.json')
    const arb = new Arborist({ ...this.npm.flatOptions, path })
    const tree = await arb.loadVirtual().catch(() => arb.loadActual())
    const { meta } = tree

    // a hidden lockfile or a tree not loaded from disk means we will be
    // creating a brand-new shrinkwrap file
    const newFile = meta.hiddenLockfile || !meta.loadedFromDisk
    const oldFilename = meta.filename
    const notSW = !newFile && basename(oldFilename) !== 'npm-shrinkwrap.json'

    // The computed lockfile version of a hidden lockfile is always 3
    // even if the actual value of the property is a different.
    // When shrinkwrap is run with only a hidden lockfile we want to
    // set the shrinkwrap lockfile version as whatever was explicitly
    // requested with a fallback to the actual value from the hidden
    // lockfile.
    if (meta.hiddenLockfile) {
      meta.lockfileVersion = arb.options.lockfileVersion ||
        meta.originalLockfileVersion
    }
    meta.hiddenLockfile = false
    meta.filename = sw
    await meta.save()

    const updatedVersion = meta.originalLockfileVersion !== meta.lockfileVersion
      ? meta.lockfileVersion
      : null

    // report what happened: created, renamed, version-updated, or no-op
    if (newFile) {
      let message = 'created a lockfile as npm-shrinkwrap.json'
      if (updatedVersion) {
        message += ` with version ${updatedVersion}`
      }
      log.notice('', message)
    } else if (notSW) {
      await unlink(oldFilename)
      let message = 'package-lock.json has been renamed to npm-shrinkwrap.json'
      if (updatedVersion) {
        message += ` and updated to version ${updatedVersion}`
      }
      log.notice('', message)
    } else if (updatedVersion) {
      log.notice('', `npm-shrinkwrap.json updated to version ${updatedVersion}`)
    } else {
      log.notice('', 'npm-shrinkwrap.json up to date')
    }
  }
}
module.exports = Shrinkwrap

72
NodeJS/node_modules/npm/lib/commands/star.js generated vendored Normal file
View File

@@ -0,0 +1,72 @@
const fetch = require('npm-registry-fetch')
const npa = require('npm-package-arg')
const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity')
const BaseCommand = require('../base-cmd.js')
// Stars (or, when invoked as `npm unstar`, unstars) each named package by
// updating the `users` map on its registry document.
class Star extends BaseCommand {
  static description = 'Mark your favorite packages'
  static name = 'star'
  static usage = ['[<package-spec>...]']
  static params = [
    'registry',
    'unicode',
    'otp',
  ]

  static ignoreImplicitWorkspace = false

  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }

    // if we're unstarring, then show an empty star image
    // otherwise, show the full star image
    const unicode = this.npm.config.get('unicode')
    const full = unicode ? '\u2605 ' : '(*)'
    const empty = unicode ? '\u2606 ' : '( )'
    const show = this.name === 'star' ? full : empty

    const pkgs = args.map(npa)
    const username = await getIdentity(this.npm, this.npm.flatOptions)

    let data
    for (const pkg of pkgs) {
      // fetch the full registry document so we can update its users map
      const fullData = await fetch.json(pkg.escapedName, {
        ...this.npm.flatOptions,
        spec: pkg,
        query: { write: true },
        preferOnline: true,
      })

      const body = {
        _id: fullData._id,
        _rev: fullData._rev,
        users: fullData.users || {},
      }

      if (this.name === 'star') {
        log.info('star', 'starring', body._id)
        body.users[username] = true
        log.verbose('star', 'starring', body)
      } else {
        delete body.users[username]
        log.info('unstar', 'unstarring', body._id)
        log.verbose('unstar', 'unstarring', body)
      }

      data = await fetch.json(pkg.escapedName, {
        ...this.npm.flatOptions,
        spec: pkg,
        method: 'PUT',
        body,
      })
      output.standard(show + ' ' + pkg.name)
      log.verbose('star', data)
      // BUG FIX: this loop previously ended with `return data`, so only the
      // first package spec was ever (un)starred despite the
      // `[<package-spec>...]` usage; now all packages are processed and the
      // last PUT result is returned.
    }
    return data
  }
}
module.exports = Star

39
NodeJS/node_modules/npm/lib/commands/stars.js generated vendored Normal file
View File

@@ -0,0 +1,39 @@
const fetch = require('npm-registry-fetch')
const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
// Lists the packages starred by a user (default: the logged-in identity).
class Stars extends BaseCommand {
  static description = 'View packages marked as favorites'
  static name = 'stars'
  static usage = ['[<user>]']
  static params = ['registry']
  static ignoreImplicitWorkspace = false

  async exec ([user]) {
    try {
      const who = user || await getIdentity(this.npm, this.npm.flatOptions)
      const { rows } = await fetch.json('/-/_view/starredByUser', {
        ...this.npm.flatOptions,
        query: { key: `"${who}"` },
      })

      if (!rows.length) {
        log.warn('stars', 'user has not starred any packages')
      }

      for (const { value } of rows) {
        output.standard(value)
      }
    } catch (err) {
      if (err.code === 'ENEEDAUTH') {
        log.warn('stars', 'auth is required to look up your username')
      }
      throw err
    }
  }
}
module.exports = Stars

13
NodeJS/node_modules/npm/lib/commands/start.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['start', ...args])
// Thin command shell: execution is handled by LifecycleCmd, which ends up
// calling run-script(['start', ...args]).
class Start extends LifecycleCmd {
  static description = 'Start a package'
  static name = 'start'

  // flags surfaced in help; they affect the underlying run-script call
  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Start

13
NodeJS/node_modules/npm/lib/commands/stop.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['stop', ...args])
// Thin command shell: execution is handled by LifecycleCmd, which ends up
// calling run-script(['stop', ...args]).
class Stop extends LifecycleCmd {
  static description = 'Stop a package'
  static name = 'stop'

  // flags surfaced in help; they affect the underlying run-script call
  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Stop

155
NodeJS/node_modules/npm/lib/commands/team.js generated vendored Normal file
View File

@@ -0,0 +1,155 @@
const columns = require('cli-columns')
const libteam = require('libnpmteam')
const { output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
// Manages org teams and team membership via libnpmteam. Each subcommand
// has three output modes: --json, --parseable, and human-readable.
class Team extends BaseCommand {
  static description = 'Manage organization teams and team memberships'
  static name = 'team'
  static usage = [
    'create <scope:team> [--otp <otpcode>]',
    'destroy <scope:team> [--otp <otpcode>]',
    'add <scope:team> <user> [--otp <otpcode>]',
    'rm <scope:team> <user> [--otp <otpcode>]',
    'ls <scope>|<scope:team>',
  ]

  static params = [
    'registry',
    'otp',
    'parseable',
    'json',
  ]

  static ignoreImplicitWorkspace = false

  // Complete the subcommand name only; team/user names are not completed.
  static async completion (opts) {
    const { conf: { argv: { remain: argv } } } = opts
    const subcommands = ['create', 'destroy', 'add', 'rm', 'ls']

    if (argv.length === 2) {
      return subcommands
    }

    if (subcommands.includes(argv[2])) {
      return []
    }

    throw new Error(argv[2] + ' not recognized')
  }

  async exec ([cmd, entity = '', user = '']) {
    // Entities are in the format <scope>:<team>
    // XXX: "description" option to libnpmteam is used as a description of the
    // team, but in npm's options, this is a boolean meaning "show the
    // description in npm search output". Hence its being set to null here.
    await otplease(this.npm, { ...this.npm.flatOptions }, opts => {
      entity = entity.replace(/^@/, '')
      switch (cmd) {
        case 'create': return this.create(entity, opts)
        case 'destroy': return this.destroy(entity, opts)
        case 'add': return this.add(entity, user, opts)
        case 'rm': return this.rm(entity, user, opts)
        case 'ls': {
          // a ':' after the scope means a specific team was named
          const match = entity.match(/[^:]+:.+/)
          if (match) {
            return this.listUsers(entity, opts)
          } else {
            return this.listTeams(entity, opts)
          }
        }
        default:
          throw this.usageError()
      }
    })
  }

  // Create a new team in the org.
  async create (entity, opts) {
    await libteam.create(entity, opts)
    if (opts.json) {
      output.buffer({
        created: true,
        team: entity,
      })
    } else if (opts.parseable) {
      output.standard(`${entity}\tcreated`)
    } else if (!this.npm.silent) {
      output.standard(`+@${entity}`)
    }
  }

  // Delete a team from the org.
  async destroy (entity, opts) {
    await libteam.destroy(entity, opts)
    if (opts.json) {
      output.buffer({
        deleted: true,
        team: entity,
      })
    } else if (opts.parseable) {
      output.standard(`${entity}\tdeleted`)
    } else if (!this.npm.silent) {
      output.standard(`-@${entity}`)
    }
  }

  // Add a user to a team.
  async add (entity, user, opts) {
    await libteam.add(user, entity, opts)
    if (opts.json) {
      output.buffer({
        added: true,
        team: entity,
        user,
      })
    } else if (opts.parseable) {
      output.standard(`${user}\t${entity}\tadded`)
    } else if (!this.npm.silent) {
      output.standard(`${user} added to @${entity}`)
    }
  }

  // Remove a user from a team.
  async rm (entity, user, opts) {
    await libteam.rm(user, entity, opts)
    if (opts.json) {
      output.buffer({
        removed: true,
        team: entity,
        user,
      })
    } else if (opts.parseable) {
      output.standard(`${user}\t${entity}\tremoved`)
    } else if (!this.npm.silent) {
      output.standard(`${user} removed from @${entity}`)
    }
  }

  // List the members of a specific <scope>:<team>.
  async listUsers (entity, opts) {
    const users = (await libteam.lsUsers(entity, opts)).sort()
    if (opts.json) {
      output.buffer(users)
    } else if (opts.parseable) {
      output.standard(users.join('\n'))
    } else if (!this.npm.silent) {
      const plural = users.length === 1 ? '' : 's'
      const more = users.length === 0 ? '' : ':\n'
      output.standard(`\n@${entity} has ${users.length} user${plural}${more}`)
      output.standard(columns(users, { padding: 1 }))
    }
  }

  // List all teams in a scope.
  async listTeams (entity, opts) {
    const teams = (await libteam.lsTeams(entity, opts)).sort()
    if (opts.json) {
      output.buffer(teams)
    } else if (opts.parseable) {
      output.standard(teams.join('\n'))
    } else if (!this.npm.silent) {
      const plural = teams.length === 1 ? '' : 's'
      const more = teams.length === 0 ? '' : ':\n'
      output.standard(`\n@${entity} has ${teams.length} team${plural}${more}`)
      output.standard(columns(teams.map(t => `@${t}`), { padding: 1 }))
    }
  }
}
module.exports = Team

13
NodeJS/node_modules/npm/lib/commands/test.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['test', ...args])
// Thin command shell: execution is handled by LifecycleCmd, which ends up
// calling run-script(['test', ...args]).
class Test extends LifecycleCmd {
  static description = 'Test a package'
  static name = 'test'

  // flags surfaced in help; they affect the underlying run-script call
  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Test

197
NodeJS/node_modules/npm/lib/commands/token.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
const { log, output } = require('proc-log')
const { listTokens, createToken, removeToken } = require('npm-profile')
const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
const BaseCommand = require('../base-cmd.js')
// Manages registry authentication tokens: list, revoke, and create.
class Token extends BaseCommand {
  static description = 'Manage your authentication tokens'
  static name = 'token'
  static usage = ['list', 'revoke <id|token>', 'create [--read-only] [--cidr=list]']
  static params = ['read-only', 'cidr', 'registry', 'otp']

  static async completion (opts) {
    const argv = opts.conf.argv.remain
    const subcommands = ['list', 'revoke', 'create']
    if (argv.length === 2) {
      return subcommands
    }

    if (subcommands.includes(argv[2])) {
      return []
    }

    throw new Error(argv[2] + ' not recognized')
  }

  async exec (args) {
    if (args.length === 0) {
      return this.list()
    }
    switch (args[0]) {
      case 'list':
      case 'ls':
        return this.list()
      case 'rm':
      case 'delete':
      case 'revoke':
      case 'remove':
        return this.rm(args.slice(1))
      case 'create':
        return this.create(args.slice(1))
      default:
        throw this.usageError(`${args[0]} is not a recognized subcommand.`)
    }
  }

  // Display the user's tokens, honoring --json/--parseable.
  async list () {
    const json = this.npm.config.get('json')
    const parseable = this.npm.config.get('parseable')
    log.info('token', 'getting list')
    const tokens = await listTokens(this.npm.flatOptions)

    if (json) {
      output.buffer(tokens)
      return
    }

    if (parseable) {
      output.standard(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'))
      tokens.forEach(token => {
        output.standard(
          [
            token.key,
            token.token,
            token.created,
            token.readonly ? 'true' : 'false',
            token.cidr_whitelist ? token.cidr_whitelist.join(',') : '',
          ].join('\t')
        )
      })
      return
    }

    // assign each token a short display id before printing
    this.generateTokenIds(tokens, 6)
    const chalk = this.npm.chalk
    for (const token of tokens) {
      const level = token.readonly ? 'Read only token' : 'Publish token'
      const created = String(token.created).slice(0, 10)
      /* eslint-disable-next-line max-len */
      output.standard(`${chalk.blue(level)} ${token.token}… with id ${chalk.cyan(token.id)} created ${created}`)
      if (token.cidr_whitelist) {
        output.standard(`with IP whitelist: ${chalk.green(token.cidr_whitelist.join(','))}`)
      }
      output.standard()
    }
  }

  // Revoke one or more tokens by key prefix (id) or full token value.
  async rm (args) {
    if (args.length === 0) {
      throw this.usageError('`<tokenKey>` argument is required.')
    }

    const json = this.npm.config.get('json')
    const parseable = this.npm.config.get('parseable')
    const toRemove = []
    const opts = { ...this.npm.flatOptions }
    const tokens = await listTokens(opts)
    args.forEach(id => {
      const matches = tokens.filter(token => token.key.indexOf(id) === 0)
      if (matches.length === 1) {
        toRemove.push(matches[0].key)
      } else if (matches.length > 1) {
        throw new Error(
          /* eslint-disable-next-line max-len */
          `Token ID "${id}" was ambiguous, a new token may have been created since you last ran \`npm token list\`.`
        )
      } else {
        // not a key prefix: accept a full token value as-is
        const tokenMatches = tokens.some(t => id.indexOf(t.token) === 0)
        if (!tokenMatches) {
          throw new Error(`Unknown token id or value "${id}".`)
        }

        toRemove.push(id)
      }
    })
    // BUG FIX: this was previously logged before toRemove was populated,
    // so the count was always 0
    log.info('token', `removing ${toRemove.length} tokens`)
    await Promise.all(
      toRemove.map(key => {
        return otplease(this.npm, opts, c => removeToken(key, c))
      })
    )
    if (json) {
      output.buffer(toRemove)
    } else if (parseable) {
      output.standard(toRemove.join('\t'))
    } else {
      output.standard('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : ''))
    }
  }

  // Create a new token, prompting for the account password and validating
  // any --cidr restrictions first.
  async create () {
    const json = this.npm.config.get('json')
    const parseable = this.npm.config.get('parseable')
    const cidr = this.npm.config.get('cidr')
    const readonly = this.npm.config.get('read-only')

    const validCIDR = await this.validateCIDRList(cidr)
    const password = await readUserInfo.password()
    log.info('token', 'creating')
    const result = await otplease(
      this.npm,
      { ...this.npm.flatOptions },
      c => createToken(password, readonly, validCIDR, c)
    )
    // strip fields the user doesn't need to see
    delete result.key
    delete result.updated
    if (json) {
      output.buffer(result)
    } else if (parseable) {
      Object.keys(result).forEach(k => output.standard(k + '\t' + result[k]))
    } else {
      const chalk = this.npm.chalk
      // Identical to list
      const level = result.readonly ? 'read only' : 'publish'
      output.standard(`Created ${chalk.blue(level)} token ${result.token}`)
      if (result.cidr_whitelist?.length) {
        output.standard(`with IP whitelist: ${chalk.green(result.cidr_whitelist.join(','))}`)
      }
    }
  }

  invalidCIDRError (msg) {
    return Object.assign(new Error(msg), { code: 'EINVALIDCIDR' })
  }

  // Give each token the shortest unique prefix of its key (at least
  // minLength characters) to use as a display id.
  generateTokenIds (tokens, minLength) {
    for (const token of tokens) {
      token.id = token.key
      for (let ii = minLength; ii < token.key.length; ++ii) {
        const match = tokens.some(
          ot => ot !== token && ot.key.slice(0, ii) === token.key.slice(0, ii)
        )
        if (!match) {
          token.id = token.key.slice(0, ii)
          break
        }
      }
    }
  }

  // Accept either an array of CIDRs or a single comma-separated string;
  // reject IPv6 and malformed entries.
  async validateCIDRList (cidrs) {
    const { v4: isCidrV4, v6: isCidrV6 } = await import('is-cidr')
    const maybeList = [].concat(cidrs).filter(Boolean)
    const list = maybeList.length === 1 ? maybeList[0].split(/,\s*/) : maybeList
    for (const cidr of list) {
      if (isCidrV6(cidr)) {
        // BUG FIX: message previously read "...addresses<cidr> is IPv6"
        // with no separator
        throw this.invalidCIDRError(
          `CIDR whitelist can only contain IPv4 addresses, ${cidr} is IPv6`
        )
      }

      if (!isCidrV4(cidr)) {
        throw this.invalidCIDRError(`CIDR whitelist contains invalid CIDR entry: ${cidr}`)
      }
    }
    return list
  }
}
module.exports = Token

56
NodeJS/node_modules/npm/lib/commands/uninstall.js generated vendored Normal file
View File

@@ -0,0 +1,56 @@
const { resolve } = require('node:path')
const pkgJson = require('@npmcli/package-json')
const reifyFinish = require('../utils/reify-finish.js')
const completion = require('../utils/installed-shallow.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Removes packages from the tree (and from package.json, per --save config)
// by reifying with the `rm` option.
class Uninstall extends ArboristWorkspaceCmd {
  static description = 'Remove a package'
  static name = 'uninstall'
  static params = ['save', 'global', ...super.params]
  static usage = ['[<@scope>/]<pkg>...']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }

  async exec (args) {
    if (!args.length) {
      // a local uninstall requires explicit names; a global uninstall with
      // no args falls back to the name from the local package.json
      if (!this.npm.global) {
        throw new Error('Must provide a package name to remove')
      }
      try {
        const { content: pkg } = await pkgJson.normalize(this.npm.localPrefix)
        args.push(pkg.name)
      } catch (er) {
        if (er.code === 'ENOENT' || er.code === 'ENOTDIR') {
          throw this.usageError()
        }
        throw er
      }
    }

    // the /path/to/node_modules/..
    const path = this.npm.global
      ? resolve(this.npm.globalDir, '..')
      : this.npm.localPrefix

    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path,
      rm: args,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.reify(opts)
    await reifyFinish(this.npm, arb)
  }
}
module.exports = Uninstall

176
NodeJS/node_modules/npm/lib/commands/unpublish.js generated vendored Normal file
View File

@@ -0,0 +1,176 @@
const libaccess = require('libnpmaccess')
const libunpub = require('libnpmpublish').unpublish
const npa = require('npm-package-arg')
const pacote = require('pacote')
const { output, log } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const { flatten } = require('@npmcli/config/lib/definitions')
const getIdentity = require('../utils/get-identity.js')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
const LAST_REMAINING_VERSION_ERROR = 'Refusing to delete the last version of the package. ' +
  'It will block from republishing a new version for 24 hours.\n' +
  'Run with --force to do this.'

// "npm unpublish" — remove a single version (or, with --force, an entire
// package) from the registry.
class Unpublish extends BaseCommand {
  static description = 'Remove a package from the registry'
  static name = 'unpublish'
  static params = ['dry-run', 'force', 'workspace', 'workspaces']
  static usage = ['[<package-spec>]']
  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Fetch the published version numbers for a package.
  // `query: { write: true }` asks for the authoritative (write-through)
  // packument rather than a cached read view.
  static async getKeysOfVersions (name, opts) {
    const packument = await pacote.packument(name, {
      ...opts,
      spec: name,
      query: { write: true },
    })
    return Object.keys(packument.versions)
  }

  // Shell completion: offer the current user's packages, and once a single
  // package is narrowed down, its individual versions.
  static async completion (args, npm) {
    const { partialWord, conf } = args

    // only complete the first positional argument
    if (conf.argv.remain.length >= 3) {
      return []
    }

    const opts = { ...npm.flatOptions }
    const username = await getIdentity(npm, { ...opts }).catch(() => null)
    if (!username) {
      return []
    }

    const access = await libaccess.getPackages(username, opts)
    // do a bit of filtering at this point, so that we don't need
    // to fetch versions for more than one thing, but also don't
    // accidentally unpublish a whole project
    let pkgs = Object.keys(access)
    if (!partialWord || !pkgs.length) {
      return pkgs
    }

    // keep only packages whose name starts with the partial word
    const pp = npa(partialWord).name
    pkgs = pkgs.filter(p => !p.indexOf(pp))
    if (pkgs.length > 1) {
      return pkgs
    }

    const versions = await Unpublish.getKeysOfVersions(pkgs[0], opts)
    if (!versions.length) {
      return pkgs
    } else {
      return versions.map(v => `${pkgs[0]}@${v}`)
    }
  }

  // `localPrefix` is overridden when running per-workspace (see
  // execWorkspaces below); otherwise the npm-level prefix is used.
  async exec (args, { localPrefix } = {}) {
    if (args.length > 1) {
      throw this.usageError()
    }

    // workspace mode
    if (!localPrefix) {
      localPrefix = this.npm.localPrefix
    }

    const force = this.npm.config.get('force')
    const { silent } = this.npm
    const dryRun = this.npm.config.get('dry-run')

    let spec
    if (args.length) {
      spec = npa(args[0])
      // only an exact version or the whole package ('*') may be unpublished
      if (spec.type !== 'version' && spec.rawSpec !== '*') {
        throw this.usageError(
          'Can only unpublish a single version, or the entire project.\n' +
          'Tags and ranges are not supported.'
        )
      }
    }

    log.silly('unpublish', 'args[0]', args[0])
    log.silly('unpublish', 'spec', spec)

    if (spec?.rawSpec === '*' && !force) {
      throw this.usageError(
        'Refusing to delete entire project.\n' +
        'Run with --force to do this.'
      )
    }

    const opts = { ...this.npm.flatOptions }

    let manifest
    try {
      const { content } = await pkgJson.prepare(localPrefix)
      manifest = content
    } catch (err) {
      if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
        if (!spec) {
          // We needed a local package.json to figure out what package to
          // unpublish
          throw this.usageError()
        }
      } else {
        // folks should know if ANY local package.json had a parsing error.
        // They may be relying on `publishConfig` to be loading and we don't
        // want to ignore errors in that case.
        throw err
      }
    }

    let pkgVersion // for cli output
    if (spec) {
      pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
    } else {
      // no spec given: unpublish the local package at its current version
      spec = npa.resolve(manifest.name, manifest.version)
      log.verbose('unpublish', manifest)
      pkgVersion = manifest.version ? `@${manifest.version}` : ''
      if (!manifest.version && !force) {
        throw this.usageError(
          'Refusing to delete entire project.\n' +
          'Run with --force to do this.'
        )
      }
    }

    // If localPrefix has a package.json with a name that matches the package
    // being unpublished, load up the publishConfig
    if (manifest?.name === spec.name && manifest.publishConfig) {
      const cliFlags = this.npm.config.data.get('cli').raw
      // Filter out properties set in CLI flags to prioritize them over
      // corresponding `publishConfig` settings
      const filteredPublishConfig = Object.fromEntries(
        Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
      flatten(filteredPublishConfig, opts)
    }

    const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
    // deleting the only remaining version deletes the whole package, which
    // the registry blocks from republish for 24h — require --force
    if (versions.length === 1 && spec.rawSpec === versions[0] && !force) {
      throw this.usageError(LAST_REMAINING_VERSION_ERROR)
    }
    if (versions.length === 1) {
      pkgVersion = ''
    }

    if (!dryRun) {
      // otplease retries the request with a one-time password if needed
      await otplease(this.npm, opts, o => libunpub(spec, o))
    }
    if (!silent) {
      output.standard(`- ${spec.name}${pkgVersion}`)
    }
  }

  // Run the unpublish once per selected workspace directory.
  async execWorkspaces (args) {
    await this.setWorkspaces()

    for (const path of this.workspacePaths) {
      await this.exec(args, { localPrefix: path })
    }
  }
}

module.exports = Unpublish

8
NodeJS/node_modules/npm/lib/commands/unstar.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
const Star = require('./star.js')

// "npm unstar" reuses the Star command implementation wholesale, overriding
// only the user-facing metadata.
// NOTE(review): Star presumably keys star-vs-unstar behavior off the invoked
// command name — confirm in star.js.
class Unstar extends Star {
  static description = 'Remove an item from your favorite packages'
  static name = 'unstar'
}

module.exports = Unstar

68
NodeJS/node_modules/npm/lib/commands/update.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
const path = require('node:path')
const { log } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// "npm update" — refresh installed packages to the newest versions allowed
// by package.json, via an Arborist reify with the `update` option.
class Update extends ArboristWorkspaceCmd {
  static description = 'Update packages'
  static name = 'update'
  static params = [
    'save',
    'global',
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'omit',
    'include',
    'strict-peer-deps',
    'package-lock',
    'foreground-scripts',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  static usage = ['[<pkg>...]']

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  async exec (args) {
    // `true` means "update everything"; otherwise only the named packages
    const update = args.length ? args : true
    const globalTop = path.resolve(this.npm.globalDir, '..')
    const where = this.npm.global ? globalTop : this.npm.prefix

    // In the context of `npm update` the save
    // config value should default to `false`
    const save = this.npm.config.isDefault('save')
      ? false
      : this.npm.config.get('save')

    if (this.npm.config.get('depth')) {
      log.warn('update', 'The --depth option no longer has any effect. See RFC0019.\n' +
        'https://github.com/npm/rfcs/blob/latest/implemented/0019-remove-update-depth-option.md')
    }

    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: where,
      save,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.reify({ ...opts, update })
    await reifyFinish(this.npm, arb)
  }
}

module.exports = Update

151
NodeJS/node_modules/npm/lib/commands/version.js generated vendored Normal file
View File

@@ -0,0 +1,151 @@
const { resolve } = require('node:path')
const { readFile } = require('node:fs/promises')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// "npm version" — with no args, print the versions of the current package,
// npm itself, and the node runtime components; with one arg, bump the
// package version via libnpmversion.
class Version extends BaseCommand {
  static description = 'Bump a package version'
  static name = 'version'
  static params = [
    'allow-same-version',
    'commit-hooks',
    'git-tag-version',
    'json',
    'preid',
    'sign-git-tag',
    'workspace',
    'workspaces',
    'workspaces-update',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  /* eslint-disable-next-line max-len */
  static usage = ['[<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]']

  // Shell completion: offer the release keywords for the first positional
  // argument only.
  static async completion (opts) {
    const {
      conf: {
        argv: { remain },
      },
    } = opts
    if (remain.length > 2) {
      return []
    }
    return [
      'major',
      'minor',
      'patch',
      'premajor',
      'preminor',
      'prepatch',
      'prerelease',
      'from-git',
    ]
  }

  async exec (args) {
    switch (args.length) {
      case 0:
        return this.list()
      case 1:
        return this.change(args)
      default:
        throw this.usageError()
    }
  }

  async execWorkspaces (args) {
    switch (args.length) {
      case 0:
        return this.listWorkspaces()
      case 1:
        return this.changeWorkspaces(args)
      default:
        throw this.usageError()
    }
  }

  // Bump the current project's version and print the new tag
  // (prefix + version, e.g. "v1.2.3").
  async change (args) {
    const libnpmversion = require('libnpmversion')
    const prefix = this.npm.config.get('tag-version-prefix')
    const version = await libnpmversion(args[0], {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
    })
    return output.standard(`${prefix}${version}`)
  }

  // Bump every selected workspace, then let updateWorkspaces refresh the
  // root project's view of them.
  async changeWorkspaces (args) {
    const updateWorkspaces = require('../utils/update-workspaces.js')
    const libnpmversion = require('libnpmversion')
    const prefix = this.npm.config.get('tag-version-prefix')
    const {
      config,
      flatOptions,
      localPrefix,
    } = this.npm
    await this.setWorkspaces()
    const updatedWorkspaces = []
    for (const [name, path] of this.workspaces) {
      output.standard(name)
      const version = await libnpmversion(args[0], {
        ...flatOptions,
        // per-workspace git tags would collide/clutter; tagging is disabled
        'git-tag-version': false,
        path,
      })
      updatedWorkspaces.push(name)
      output.standard(`${prefix}${version}`)
    }
    return updateWorkspaces({
      config,
      flatOptions,
      localPrefix,
      npm: this.npm,
      workspaces: updatedWorkspaces,
    })
  }

  // Print a map of name -> version: the local package (if it has both name
  // and version), npm, and everything in process.versions.
  async list (results = {}) {
    const pj = resolve(this.npm.prefix, 'package.json')
    const pkg = await readFile(pj, 'utf8')
      .then(data => JSON.parse(data))
      .catch(() => ({}))
    if (pkg.name && pkg.version) {
      results[pkg.name] = pkg.version
    }
    results.npm = this.npm.version
    for (const [key, version] of Object.entries(process.versions)) {
      results[key] = version
    }
    if (this.npm.config.get('json')) {
      output.buffer(results)
    } else {
      output.standard(results)
    }
  }

  // Gather name -> version for each workspace, then append the usual list.
  async listWorkspaces () {
    const results = {}
    await this.setWorkspaces()
    for (const path of this.workspacePaths) {
      const pj = resolve(path, 'package.json')
      // setWorkspaces has already parsed package.json so we know it won't error
      const pkg = await readFile(pj, 'utf8').then(data => JSON.parse(data))
      if (pkg.name && pkg.version) {
        results[pkg.name] = pkg.version
      }
    }
    return this.list(results)
  }
}

module.exports = Version

472
NodeJS/node_modules/npm/lib/commands/view.js generated vendored Normal file
View File

@@ -0,0 +1,472 @@
const columns = require('cli-columns')
const { readFile } = require('node:fs/promises')
const jsonParse = require('json-parse-even-better-errors')
const { log, output, META } = require('proc-log')
const npa = require('npm-package-arg')
const { resolve } = require('node:path')
const formatBytes = require('../utils/format-bytes.js')
const relativeDate = require('tiny-relative-date')
const semver = require('semver')
const { inspect } = require('node:util')
const { packument } = require('pacote')
const Queryable = require('../utils/queryable.js')
const BaseCommand = require('../base-cmd.js')
const { getError } = require('../utils/error-message.js')
const { jsonError, outputError } = require('../utils/output-error.js')
// Read a file and parse it as JSON, with the richer error reporting of
// json-parse-even-better-errors.
const readJson = async (file) => {
  const contents = await readFile(file, 'utf8')
  return jsonParse(contents)
}
// "npm view" — show registry metadata for a package (or the local project),
// either as a pretty whole-packument view or as specific queried fields.
class View extends BaseCommand {
  static description = 'View registry info'
  static name = 'view'
  static params = [
    'json',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false
  static usage = ['[<package-spec>] [<field>[.subfield]...]']

  // Shell completion: once a package is named, offer its field paths.
  static async completion (opts, npm) {
    if (opts.conf.argv.remain.length <= 2) {
      // There used to be registry completion here, but it stopped
      // making sense somewhere around 50,000 packages on the registry
      return
    }
    // have the package, get the fields
    const config = {
      ...npm.flatOptions,
      fullMetadata: true,
      preferOnline: true,
    }
    const spec = npa(opts.conf.argv.remain[2])
    const pckmnt = await packument(spec, config)
    const defaultTag = npm.config.get('tag')
    const dv = pckmnt.versions[pckmnt['dist-tags'][defaultTag]]
    pckmnt.versions = Object.keys(pckmnt.versions).sort(semver.compareLoose)
    // offer both packument-level and default-version-level fields
    return getCompletionFields(pckmnt).concat(getCompletionFields(dv))
  }

  async exec (args) {
    let { pkg, local, rest } = parseArgs(args)
    if (local) {
      if (this.npm.global) {
        throw new Error('Cannot use view command in global mode.')
      }
      // resolve "." to the name of the package at the current prefix
      const dir = this.npm.prefix
      const manifest = await readJson(resolve(dir, 'package.json'))
      if (!manifest.name) {
        throw new Error('Invalid package.json, no "name" field')
      }
      // put the version back if it existed
      pkg = `${manifest.name}${pkg.slice(1)}`
    }

    await this.#viewPackage(pkg, rest)
  }

  // View each selected workspace; E404s are reported per-workspace rather
  // than aborting the whole run.
  async execWorkspaces (args) {
    const { pkg, local, rest } = parseArgs(args)

    if (!local) {
      log.warn('Ignoring workspaces for specified package(s)')
      return this.exec([pkg, ...rest])
    }

    const json = this.npm.config.get('json')
    await this.setWorkspaces()
    for (const name of this.workspaceNames) {
      try {
        await this.#viewPackage(`${name}${pkg.slice(1)}`, rest, { workspace: true })
      } catch (e) {
        const err = getError(e, { npm: this.npm, command: this })
        if (err.code !== 'E404') {
          throw e
        }
        if (json) {
          output.buffer({ [META]: true, jsonError: { [name]: jsonError(err, this.npm) } })
        } else {
          outputError(err)
        }
        process.exitCode = err.exitCode
      }
    }
  }

  // Fetch and print one package, either the whole packument (pretty view)
  // or specific fields. `workspace` only affects output labeling.
  async #viewPackage (name, args, { workspace } = {}) {
    const wholePackument = !args.length
    const json = this.npm.config.get('json')

    // If we are viewing many packages and outputting individual fields then
    // output the name before doing any async activity
    if (!json && !wholePackument && workspace) {
      output.standard(`${name}:`)
    }

    const [pckmnt, data] = await this.#getData(name, args, wholePackument)

    if (!json && wholePackument) {
      // pretty view (entire packument)
      for (const v of data) {
        output.standard(this.#prettyView(pckmnt, Object.values(v)[0][Queryable.ALL]))
      }
      return
    }

    const res = this.#packageOutput(cleanData(data, wholePackument), pckmnt._id)
    if (res) {
      if (json) {
        output.buffer(workspace ? { [name]: res } : res)
      } else {
        output.standard(res)
      }
    }
  }

  // Fetch the packument and produce [packument, per-version field data]
  // for all versions satisfying the requested spec.
  async #getData (pkg, args) {
    const spec = npa(pkg)

    const pckmnt = await packument(spec, {
      ...this.npm.flatOptions,
      preferOnline: true,
      fullMetadata: true,
    })

    // get the data about this package
    let version = this.npm.config.get('tag')
    // rawSpec is the git url if this is from git
    if (spec.type !== 'git' && spec.type !== 'directory' && spec.rawSpec !== '*') {
      version = spec.rawSpec
    }

    // resolve a dist-tag (e.g. "latest") to its concrete version
    if (pckmnt['dist-tags']?.[version]) {
      version = pckmnt['dist-tags'][version]
    }

    if (pckmnt.time?.unpublished) {
      const u = pckmnt.time.unpublished
      throw Object.assign(new Error(`Unpublished on ${u.time}`), {
        statusCode: 404,
        code: 'E404',
        pkgid: pckmnt._id,
      })
    }

    const versions = pckmnt.versions || {}
    pckmnt.versions = Object.keys(versions).filter(v => {
      if (semver.valid(v)) {
        return true
      }
      log.info('view', `Ignoring invalid version: ${v}`)
      return false
    }).sort(semver.compareLoose)

    // remove readme unless we asked for it
    if (args.indexOf('readme') === -1) {
      delete pckmnt.readme
    }

    const data = Object.entries(versions)
      .filter(([v]) => semver.satisfies(v, version, true))
      .flatMap(([, v]) => {
        // NOTE(review): when 'readme' IS requested, the version-level readme
        // is dropped here — presumably so the packument-level readme (kept
        // above) wins in showFields; confirm before changing.
        if (args.indexOf('readme') !== -1) {
          delete v.readme
        }
        return showFields({
          data: pckmnt,
          version: v,
          fields: args,
          json: this.npm.config.get('json'),
        })
      })

    // No data has been pushed because no data is matching the specified version
    if (!data.length && version !== 'latest') {
      throw Object.assign(new Error(`No match found for version ${version}`), {
        statusCode: 404,
        code: 'E404',
        pkgid: `${pckmnt._id}@${version}`,
      })
    }

    return [pckmnt, data]
  }

  // Format the queried field data, as json objects or printable strings,
  // prefixing entries with "name@version" / "field = " when ambiguous.
  #packageOutput (data, name) {
    const json = this.npm.config.get('json')
    const versions = Object.keys(data)
    const includeVersions = versions.length > 1
    let includeFields

    const res = versions.flatMap((v) => {
      const fields = Object.entries(data[v])
      includeFields ||= (fields.length > 1)

      const msg = json ? {} : []

      for (let [f, d] of fields) {
        d = cleanup(d)

        if (json) {
          msg[f] = d
          continue
        }

        // non-strings (and any multi-version/multi-field output) are shown
        // via util.inspect for readable nested structures
        if (includeVersions || includeFields || typeof d !== 'string') {
          d = inspect(d, {
            showHidden: false,
            depth: 5,
            colors: this.npm.color,
            maxArrayLength: null,
          })
        }

        if (f && includeFields) {
          f += ' = '
        }

        msg.push(`${includeVersions ? `${name}@${v} ` : ''}${includeFields ? f : ''}${d}`)
      }

      return msg
    })

    if (json) {
      // TODO(BREAKING_CHANGE): all unwrapping should be removed. Users should know
      // based on their arguments if they can expect an array or an object. And this
      // unwrapping can break that assumption. Eg `npm view abbrev@^2` should always
      // return an array, but currently since there is only one version matching `^2`
      // this will return a single object instead.
      const first = Object.keys(res[0] || {})
      const jsonRes = first.length === 1 ? res.map(m => m[first[0]]) : res
      if (jsonRes.length === 0) {
        return
      }
      if (jsonRes.length === 1) {
        return jsonRes[0]
      }
      return jsonRes
    }

    return res.join('\n').trim()
  }

  // Render the human-readable whole-packument summary for one manifest.
  #prettyView (packu, manifest) {
    // More modern, pretty printing of default view
    const unicode = this.npm.config.get('unicode')
    const chalk = this.npm.chalk

    const deps = Object.entries(manifest.dependencies || {}).map(([k, dep]) =>
      `${chalk.blue(k)}: ${dep}`
    )
    const site = manifest.homepage?.url || manifest.homepage
    const bins = Object.keys(manifest.bin || {})
    const licenseField = manifest.license || 'Proprietary'
    const license = typeof licenseField === 'string'
      ? licenseField
      : (licenseField.type || 'Proprietary')

    const res = []

    res.push('')
    res.push([
      chalk.underline.cyan(`${manifest.name}@${manifest.version}`),
      license.toLowerCase().trim() === 'proprietary'
        ? chalk.red(license)
        : chalk.green(license),
      `deps: ${deps.length ? chalk.cyan(deps.length) : chalk.cyan('none')}`,
      `versions: ${chalk.cyan(packu.versions.length + '')}`,
    ].join(' | '))
    manifest.description && res.push(manifest.description)
    if (site) {
      res.push(chalk.blue(site))
    }

    manifest.deprecated && res.push(
      `\n${chalk.redBright('DEPRECATED')}${unicode ? ' ⚠️ ' : '!!'} - ${manifest.deprecated}`
    )

    if (packu.keywords?.length) {
      res.push(`\nkeywords: ${
        packu.keywords.map(k => chalk.cyan(k)).join(', ')
      }`)
    }

    if (bins.length) {
      res.push(`\nbin: ${chalk.cyan(bins.join(', '))}`)
    }

    res.push('\ndist')
    res.push(`.tarball: ${chalk.blue(manifest.dist.tarball)}`)
    res.push(`.shasum: ${chalk.green(manifest.dist.shasum)}`)
    if (manifest.dist.integrity) {
      res.push(`.integrity: ${chalk.green(manifest.dist.integrity)}`)
    }
    if (manifest.dist.unpackedSize) {
      res.push(`.unpackedSize: ${chalk.blue(formatBytes(manifest.dist.unpackedSize, true))}`)
    }

    if (deps.length) {
      // cap the listing so enormous dependency lists stay readable
      const maxDeps = 24
      res.push('\ndependencies:')
      res.push(columns(deps.slice(0, maxDeps), { padding: 1 }))
      if (deps.length > maxDeps) {
        res.push(chalk.dim(`(...and ${deps.length - maxDeps} more.)`))
      }
    }

    if (packu.maintainers?.length) {
      res.push('\nmaintainers:')
      packu.maintainers.forEach(u =>
        res.push(`- ${unparsePerson({
          name: chalk.blue(u.name),
          email: chalk.dim(u.email) })}`)
      )
    }

    res.push('\ndist-tags:')
    res.push(columns(Object.entries(packu['dist-tags']).map(([k, t]) =>
      `${chalk.blue(k)}: ${t}`
    )))

    const publisher = manifest._npmUser && unparsePerson({
      name: chalk.blue(manifest._npmUser.name),
      email: chalk.dim(manifest._npmUser.email),
    })
    if (publisher || packu.time) {
      let publishInfo = 'published'
      if (packu.time) {
        publishInfo += ` ${chalk.cyan(relativeDate(packu.time[manifest.version]))}`
      }
      if (publisher) {
        publishInfo += ` by ${publisher}`
      }
      res.push('')
      res.push(publishInfo)
    }

    return res.join('\n')
  }
}

module.exports = View
// Split the cli args into the package spec being viewed and the fields to
// show. `local` is true when the spec refers to the current project: "."
// or a "."-with-version form like ".@1.2.3".
// Unlike the previous implementation (which shift()ed), this does not
// mutate the caller's `args` array.
function parseArgs (args) {
  // default to viewing the local package when no spec was given
  const [pkg, ...rest] = args.length ? args : ['.']
  return {
    pkg,
    local: /^\.@/.test(pkg) || pkg === '.',
    rest,
  }
}
// Merge the per-version query results into one object, keyed the same way
// the entries were keyed; when viewing the whole packument, collapse each
// entry to its full-packument (Queryable.ALL) value.
function cleanData (obj, wholePackument) {
  // JSON formatted output (JSON or specific attributes from packument)
  const data = {}
  for (const entry of obj) {
    if (!entry) {
      continue
    }
    for (const [entryKey, fields] of Object.entries(entry)) {
      data[entryKey] ||= {}
      for (const fieldKey of Object.keys(fields)) {
        data[entryKey][fieldKey] = fields[fieldKey]
      }
    }
  }

  if (wholePackument) {
    const cleaned = {}
    for (const [entryKey, fields] of Object.entries(data)) {
      cleaned[entryKey] = fields[Queryable.ALL]
    }
    log.silly('view', cleaned)
    return cleaned
  }

  return data
}
// return whatever was printed
// Overlay the version manifest onto the packument-level data, then query
// either everything (no fields requested) or each requested field path.
function showFields ({ data, version, fields, json }) {
  const merged = {}
  for (const source of [data, version]) {
    for (const [key, value] of Object.entries(source)) {
      merged[key] = value
    }
  }
  const queryable = new Queryable(merged)

  if (fields.length === 0) {
    return { [version.version]: queryable.query(Queryable.ALL) }
  }

  return fields.map((field) => {
    const result = queryable.query(field, { unwrapSingleItemArrays: !json })
    if (result) {
      return { [version.version]: result }
    }
  })
}
// Recursively tidy values for display: objects that look like a person
// record ({ name [, email] [, url] } and nothing else) are collapsed into
// a "name <email> (url)" string; everything else passes through unchanged.
function cleanup (data) {
  if (Array.isArray(data)) {
    return data.map(cleanup)
  }

  if (!data || typeof data !== 'object') {
    return data
  }

  const keyCount = Object.keys(data).length
  const isPerson = Boolean(data.name) && (
    keyCount === 1 ||
    (keyCount === 2 && Boolean(data.email || data.url)) ||
    (keyCount === 3 && Boolean(data.email) && Boolean(data.url))
  )

  return isPerson ? unparsePerson(data) : data
}

// Render a person record as "name <email> (url)", omitting missing parts.
const unparsePerson = (d) => {
  let person = d.name
  if (d.email) {
    person += ` <${d.email}>`
  }
  if (d.url) {
    person += ` (${d.url})`
  }
  return person
}
// Collect dotted field paths ("repository.url", "dist-tags.latest", ...)
// from a packument/manifest object for shell completion. Keys beginning
// with "_" and keys containing "." are skipped; array elements get an
// indexed path ("maintainers[0]") unless they are objects, which recurse.
// Fix: guard the recursion against null values — `typeof null === 'object'`
// previously passed the check and Object.entries(null) would throw.
function getCompletionFields (d, f = [], pref = []) {
  Object.entries(d).forEach(([k, v]) => {
    if (k.charAt(0) === '_' || k.indexOf('.') !== -1) {
      return
    }
    const p = pref.concat(k).join('.')
    f.push(p)
    if (Array.isArray(v)) {
      v.forEach((val, i) => {
        const pi = p + '[' + i + ']'
        if (val && typeof val === 'object') {
          getCompletionFields(val, f, [p])
        } else {
          f.push(pi)
        }
      })
      return
    }
    // recurse into plain objects only; null would throw in Object.entries
    if (v && typeof v === 'object') {
      getCompletionFields(v, f, [p])
    }
  })
  return f
}

20
NodeJS/node_modules/npm/lib/commands/whoami.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
const { output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
// "npm whoami" — resolve and print the username behind the configured
// registry credentials.
class Whoami extends BaseCommand {
  static description = 'Display npm username'
  static name = 'whoami'
  static params = ['registry']

  async exec () {
    const username = await getIdentity(this.npm, { ...this.npm.flatOptions })
    // --json routes through the buffered json output channel
    const asJson = this.npm.config.get('json')
    if (asJson) {
      output.buffer(username)
    } else {
      output.standard(username)
    }
  }
}

module.exports = Whoami

20
NodeJS/node_modules/npm/lib/lifecycle-cmd.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
const BaseCommand = require('./base-cmd.js')

// The implementation of commands that are just "run a script"
// restart, start, stop, test
class LifecycleCmd extends BaseCommand {
  static usage = ['[-- <args>]']
  static isShellout = true
  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Delegate to `npm run-script <command-name> ...args`; the command name
  // comes from the subclass's static `name`.
  async exec (args) {
    return this.npm.exec('run-script', [this.constructor.name, ...args])
  }

  // Same delegation: run-script handles workspace selection itself.
  async execWorkspaces (args) {
    return this.npm.exec('run-script', [this.constructor.name, ...args])
  }
}

module.exports = LifecycleCmd

475
NodeJS/node_modules/npm/lib/npm.js generated vendored Normal file
View File

@@ -0,0 +1,475 @@
const { resolve, dirname, join } = require('node:path')
const Config = require('@npmcli/config')
const which = require('which')
const fs = require('node:fs/promises')
const { definitions, flatten, shorthands } = require('@npmcli/config/lib/definitions')
const usage = require('./utils/npm-usage.js')
const LogFile = require('./utils/log-file.js')
const Timers = require('./utils/timers.js')
const Display = require('./utils/display.js')
const { log, time, output, META } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const pkg = require('../package.json')
const { deref } = require('./utils/cmd-list.js')
const { jsonError, outputError } = require('./utils/output-error.js')
class Npm {
static get version () {
return pkg.version
}
static cmd (c) {
const command = deref(c)
if (!command) {
throw Object.assign(new Error(`Unknown command ${c}`), {
code: 'EUNKNOWNCOMMAND',
command: c,
})
}
return require(`./commands/${command}.js`)
}
unrefPromises = []
updateNotification = null
argv = []
#command = null
#runId = new Date().toISOString().replace(/[.:]/g, '_')
#title = 'npm'
#argvClean = []
#npmRoot = null
#display = null
#logFile = new LogFile()
#timers = new Timers()
// all these options are only used by tests in order to make testing more
// closely resemble real world usage. for now, npm has no programmatic API so
// it is ok to add stuff here, but we should not rely on it more than
// necessary. XXX: make these options not necessary by refactoring @npmcli/config
// - npmRoot: this is where npm looks for docs files and the builtin config
// - argv: this allows tests to extend argv in the same way the argv would
// be passed in via a CLI arg.
// - excludeNpmCwd: this is a hack to get @npmcli/config to stop walking up
// dirs to set a local prefix when it encounters the `npmRoot`. this
// allows tests created by tap inside this repo to not set the local
// prefix to `npmRoot` since that is the first dir it would encounter when
// doing implicit detection
constructor ({
stdout = process.stdout,
stderr = process.stderr,
npmRoot = dirname(__dirname),
argv = [],
excludeNpmCwd = false,
} = {}) {
this.#display = new Display({ stdout, stderr })
this.#npmRoot = npmRoot
this.config = new Config({
npmPath: this.#npmRoot,
definitions,
flatten,
shorthands,
argv: [...process.argv, ...argv],
excludeNpmCwd,
})
}
async load () {
let err
try {
return await time.start('npm:load', () => this.#load())
} catch (e) {
err = e
}
return this.#handleError(err)
}
async #load () {
await time.start('npm:load:whichnode', async () => {
// TODO should we throw here?
const node = await which(process.argv[0]).catch(() => {})
if (node && node.toUpperCase() !== process.execPath.toUpperCase()) {
log.verbose('node symlink', node)
process.execPath = node
this.config.execPath = node
}
})
await time.start('npm:load:configload', () => this.config.load())
// npm --versions
if (this.config.get('versions', 'cli')) {
this.argv = ['version']
this.config.set('usage', false, 'cli')
} else {
this.argv = [...this.config.parsedArgv.remain]
}
// Remove first argv since that is our command as typed
// Note that this might not be the actual name of the command
// due to aliases, etc. But we use the raw form of it later
// in user output so it must be preserved as is.
const commandArg = this.argv.shift()
// This is the actual name of the command that will be run or
// undefined if deref could not find a match
const command = deref(commandArg)
await this.#display.load({
command,
loglevel: this.config.get('loglevel'),
stdoutColor: this.color,
stderrColor: this.logColor,
timing: this.config.get('timing'),
unicode: this.config.get('unicode'),
progress: this.flatOptions.progress,
json: this.config.get('json'),
heading: this.config.get('heading'),
})
process.env.COLOR = this.color ? '1' : '0'
// npm -v
// return from here early so we dont create any caches/logfiles/timers etc
if (this.config.get('version', 'cli')) {
output.standard(this.version)
return { exec: false }
}
// mkdir this separately since the logs dir can be set to
// a different location. if this fails, then we don't have
// a cache dir, but we don't want to fail immediately since
// the command might not need a cache dir (like `npm --version`)
await time.start('npm:load:mkdirpcache', () =>
fs.mkdir(this.cache, { recursive: true })
.catch((e) => log.verbose('cache', `could not create cache: ${e}`)))
// it's ok if this fails. user might have specified an invalid dir
// which we will tell them about at the end
if (this.config.get('logs-max') > 0) {
await time.start('npm:load:mkdirplogs', () =>
fs.mkdir(this.#logsDir, { recursive: true })
.catch((e) => log.verbose('logfile', `could not create logs-dir: ${e}`)))
}
// note: this MUST be shorter than the actual argv length, because it
// uses the same memory, so node will truncate it if it's too long.
// We time this because setting process.title is slow sometimes but we
// have to do it for security reasons. But still helpful to know how slow it is.
time.start('npm:load:setTitle', () => {
const { parsedArgv: { cooked, remain } } = this.config
// Secrets are mostly in configs, so title is set using only the positional args
// to keep those from being leaked. We still do a best effort replaceInfo.
this.#title = ['npm'].concat(replaceInfo(remain)).join(' ').trim()
process.title = this.#title
// The cooked argv is also logged separately for debugging purposes. It is
// cleaned as a best effort by replacing known secrets like basic auth
// password and strings that look like npm tokens. XXX: for this to be
// safer the config should create a sanitized version of the argv as it
// has the full context of what each option contains.
this.#argvClean = replaceInfo(cooked)
log.verbose('title', this.title)
log.verbose('argv', this.#argvClean.map(JSON.stringify).join(' '))
})
// logFile.load returns a promise that resolves when old logs are done being cleaned.
// We save this promise to an array so that we can await it in tests to ensure more
// deterministic logging behavior. The process will also hang open if this were to
// take a long time to resolve, but that is why process.exit is called explicitly
// in the exit-handler.
this.unrefPromises.push(this.#logFile.load({
command,
path: this.logPath,
logsMax: this.config.get('logs-max'),
timing: this.config.get('timing'),
}))
this.#timers.load({
path: this.logPath,
timing: this.config.get('timing'),
})
const configScope = this.config.get('scope')
if (configScope && !/^@/.test(configScope)) {
this.config.set('scope', `@${configScope}`, this.config.find('scope'))
}
if (this.config.get('force')) {
log.warn('using --force', 'Recommended protections disabled.')
}
return { exec: true, command: commandArg, args: this.argv }
}
async exec (cmd, args = this.argv) {
if (!this.#command) {
let err
try {
await this.#exec(cmd, args)
} catch (e) {
err = e
}
return this.#handleError(err)
} else {
return this.#exec(cmd, args)
}
}
// Call an npm command
async #exec (cmd, args) {
const Command = this.constructor.cmd(cmd)
const command = new Command(this)
// since 'test', 'start', 'stop', etc. commands re-enter this function
// to call the run-script command, we need to only set it one time.
if (!this.#command) {
this.#command = command
process.env.npm_command = this.command
}
if (this.config.get('usage')) {
return output.standard(command.usage)
}
let execWorkspaces = false
const hasWsConfig = this.config.get('workspaces') || this.config.get('workspace').length
// if cwd is a workspace, the default is set to [that workspace]
const implicitWs = this.config.get('workspace', 'default').length
// (-ws || -w foo) && (cwd is not a workspace || command is not ignoring implicit workspaces)
if (hasWsConfig && (!implicitWs || !Command.ignoreImplicitWorkspace)) {
if (this.global) {
throw new Error('Workspaces not supported for global packages')
}
if (!Command.workspaces) {
throw Object.assign(new Error('This command does not support workspaces.'), {
code: 'ENOWORKSPACES',
})
}
execWorkspaces = true
}
if (command.checkDevEngines && !this.global) {
await command.checkDevEngines()
}
return time.start(`command:${cmd}`, () =>
execWorkspaces ? command.execWorkspaces(args) : command.exec(args))
}
// This gets called at the end of the exit handler and
// during any tests to cleanup all of our listeners
// Everything in here should be synchronous
unload () {
this.#timers.off()
this.#display.off()
this.#logFile.off()
}
finish (err) {
// Finish all our timer work, this will write the file if requested, end timers, etc
this.#timers.finish({
id: this.#runId,
command: this.#argvClean,
logfiles: this.logFiles,
version: this.version,
})
output.flush({
[META]: true,
// json can be set during a command so we send the
// final value of it to the display layer here
json: this.loaded && this.config.get('json'),
jsonError: jsonError(err, this),
})
}
exitErrorMessage () {
if (this.logFiles.length) {
return `A complete log of this run can be found in: ${this.logFiles}`
}
const logsMax = this.config.get('logs-max')
if (logsMax <= 0) {
// user specified no log file
return `Log files were not written due to the config logs-max=${logsMax}`
}
// could be an error writing to the directory
return `Log files were not written due to an error writing to the directory: ${this.#logsDir}` +
'\nYou can rerun the command with `--loglevel=verbose` to see the logs in your terminal'
}
async #handleError (err) {
if (err) {
// Get the local package if it exists for a more helpful error message
const localPkg = await require('@npmcli/package-json')
.normalize(this.localPrefix)
.then(p => p.content)
.catch(() => null)
Object.assign(err, this.#getError(err, { pkg: localPkg }))
}
this.finish(err)
if (err) {
throw err
}
}
#getError (rawErr, opts) {
const { files = [], ...error } = require('./utils/error-message.js').getError(rawErr, {
npm: this,
command: this.#command,
...opts,
})
const { writeFileSync } = require('node:fs')
for (const [file, content] of files) {
const filePath = `${this.logPath}${file}`
const fileContent = `'Log files:\n${this.logFiles.join('\n')}\n\n${content.trim()}\n`
try {
writeFileSync(filePath, fileContent)
error.detail.push(['', `\n\nFor a full report see:\n${filePath}`])
} catch (fileErr) {
log.warn('', `Could not write error message to ${file} due to ${fileErr}`)
}
}
outputError(error)
return error
}
// The process title set for this run
get title () {
  return this.#title
}

// Whether the config has finished loading
get loaded () {
  return this.config.loaded
}

// npm's own version, from its package.json
get version () {
  return this.constructor.version
}

// Name of the currently running command, if one has been loaded
get command () {
  return this.#command?.name
}

// The flattened config options handed to libraries (pacote, arborist, ...),
// augmented with the node/npm versions and the current command name.
get flatOptions () {
  const { flat } = this.config
  flat.nodeVersion = process.version
  flat.npmVersion = pkg.version
  if (this.command) {
    flat.npmCommand = this.command
  }
  return flat
}
// color and logColor are a special derived values that takes into
// consideration not only the config, but whether or not we are operating
// in a tty with the associated output (stdout/stderr)
get color () {
  return this.flatOptions.color
}

get logColor () {
  return this.flatOptions.logColor
}

// Chalk instance that never emits color codes
get noColorChalk () {
  return this.#display.chalk.noColor
}

// Chalk instance configured for stdout (command output)
get chalk () {
  return this.#display.chalk.stdout
}

// Chalk instance configured for stderr (logging)
get logChalk () {
  return this.#display.chalk.stderr
}

// True in global mode, via --global or --location=global
get global () {
  return this.config.get('global') || this.config.get('location') === 'global'
}

get silent () {
  return this.flatOptions.silent
}

// The package-lock lockfileVersion npm writes
get lockfileVersion () {
  return 2
}

// When this run started (tracked by the timers module)
get started () {
  return this.#timers.started
}

// Paths of the debug log files written for this run
get logFiles () {
  return this.#logFile.files
}

// Directory logs go to: the logs-dir config, or <cache>/_logs by default
get #logsDir () {
  return this.config.get('logs-dir') || join(this.cache, '_logs')
}

// Path prefix (including the run id) for this run's log/report files
get logPath () {
  return resolve(this.#logsDir, `${this.#runId}-`)
}
// Root directory of the npm installation itself
get npmRoot () {
  return this.#npmRoot
}

get cache () {
  return this.config.get('cache')
}

get globalPrefix () {
  return this.config.globalPrefix
}

get localPrefix () {
  return this.config.localPrefix
}

// Whether a package.json exists at the localPrefix
get localPackage () {
  return this.config.localPackage
}

// Global node_modules dir; nested under lib/ everywhere except Windows
get globalDir () {
  return process.platform !== 'win32'
    ? resolve(this.globalPrefix, 'lib', 'node_modules')
    : resolve(this.globalPrefix, 'node_modules')
}

get localDir () {
  return resolve(this.localPrefix, 'node_modules')
}

// node_modules dir for the current mode (global vs local)
get dir () {
  return this.global ? this.globalDir : this.localDir
}

// Global bin dir: the prefix itself on Windows, <prefix>/bin elsewhere
get globalBin () {
  const b = this.globalPrefix
  return process.platform !== 'win32' ? resolve(b, 'bin') : b
}

get localBin () {
  return resolve(this.dir, '.bin')
}

// bin dir for the current mode
get bin () {
  return this.global ? this.globalBin : this.localBin
}

// prefix for the current mode
get prefix () {
  return this.global ? this.globalPrefix : this.localPrefix
}

// Top-level usage/help text for npm itself
get usage () {
  return usage(this)
}
}
module.exports = Npm

64
NodeJS/node_modules/npm/lib/package-url-cmd.js generated vendored Normal file
View File

@@ -0,0 +1,64 @@
const pacote = require('pacote')
const { openUrl } = require('./utils/open-url.js')
const { log } = require('proc-log')
const BaseCommand = require('./base-cmd.js')
// Base command for opening urls from a package manifest (bugs, docs, repo)
class PackageUrlCommand extends BaseCommand {
  static params = [
    'browser',
    'registry',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false
  static usage = ['[<pkgname> [<pkgname> ...]]']

  // For each arg (defaulting to '.', the package in the cwd), fetch the
  // full manifest, derive the url via the subclass's getUrl(), and open it
  // in the browser.
  async exec (args) {
    if (!args || !args.length) {
      args = ['.']
    }

    for (const arg of args) {
      // XXX It is very odd that `where` is how pacote knows to look anywhere
      // other than the cwd.
      const opts = {
        ...this.npm.flatOptions,
        where: this.npm.localPrefix,
        fullMetadata: true,
      }
      const mani = await pacote.manifest(arg, opts)
      const url = this.getUrl(arg, mani)
      log.silly(this.name, 'url', url)
      await openUrl(this.npm, url, `${mani.name} ${this.name} available at the following URL`)
    }
  }

  // With explicit args this behaves exactly like exec; with none, it runs
  // exec once over all configured workspace paths.
  async execWorkspaces (args) {
    if (args && args.length) {
      return this.exec(args)
    }
    await this.setWorkspaces()
    return this.exec(this.workspacePaths)
  }

  // given a manifest, try to get the hosted git info from it based on
  // repository (if a string) or repository.url (if an object) returns null
  // if it's not a valid repo, or not a known hosted repo
  hostedFromMani (mani) {
    const hostedGitInfo = require('hosted-git-info')
    const r = mani.repository
    const rurl = !r ? null
      : typeof r === 'string' ? r
      : typeof r === 'object' && typeof r.url === 'string' ? r.url
      : null

    // hgi returns undefined sometimes, but let's always return null here
    return (rurl && hostedGitInfo.fromUrl(rurl.replace(/^git\+/, ''))) || null
  }
}

module.exports = PackageUrlCommand

40
NodeJS/node_modules/npm/lib/utils/audit-error.js generated vendored Normal file
View File

@@ -0,0 +1,40 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
// print an error or just nothing if the audit report has an error
// this is called by the audit command, and by the reify-output util
// prints a JSON version of the error if it's --json
// returns 'true' if there was an error, false otherwise
// throws when running the `audit` command itself so the process exits non-zero
const auditError = (npm, report) => {
  if (!report || !report.error) {
    return false
  }

  // not the audit command: just signal that the report had an error and let
  // the caller decide what (if anything) to do about it
  if (npm.command !== 'audit') {
    return true
  }

  const { error } = report

  // ok, we care about it, then
  log.warn('audit', error.message)

  const { body: errBody } = error
  const body = Buffer.isBuffer(errBody) ? errBody.toString() : errBody
  if (npm.flatOptions.json) {
    output.buffer({
      message: error.message,
      method: error.method,
      uri: replaceInfo(error.uri),
      headers: error.headers,
      statusCode: error.statusCode,
      body,
    })
  } else {
    output.standard(body)
  }

  // BUG FIX: was `throw 'audit endpoint returned an error'` — throwing a
  // bare string loses the stack trace and fails `instanceof Error` checks.
  throw new Error('audit endpoint returned an error')
}
module.exports = auditError

109
NodeJS/node_modules/npm/lib/utils/auth.js generated vendored Normal file
View File

@@ -0,0 +1,109 @@
const { webAuthOpener, adduserWeb, loginWeb, loginCouch, adduserCouch } = require('npm-profile')
const { log } = require('proc-log')
const { createOpener } = require('../utils/open-url.js')
const read = require('../utils/read-user-info.js')
// Run `fn(opts)` and, if it fails with a one-time-password challenge,
// prompt the user (via web auth or the terminal) and retry once with the
// otp attached. Any other error — or any error in a non-interactive
// session — is rethrown unchanged.
const otplease = async (npm, opts, fn) => {
  try {
    return await fn(opts)
  } catch (err) {
    // non-interactive sessions cannot be prompted, so just propagate
    const interactive = process.stdin.isTTY && process.stdout.isTTY
    if (!interactive) {
      throw err
    }

    // web otp
    const { authUrl, doneUrl } = err.body ?? {}
    if (err.code === 'EOTP' && authUrl && doneUrl) {
      const { token: otp } = await webAuthOpener(
        createOpener(npm, 'Authenticate your account at'),
        authUrl,
        doneUrl,
        opts
      )
      return await fn({ ...opts, otp })
    }

    // classic otp
    const classicOtp = err.code === 'EOTP' ||
      (err.code === 'E401' && /one-time pass/.test(err.body))
    if (classicOtp) {
      const otp = await read.otp('This operation requires a one-time password.\nEnter OTP:')
      return await fn({ ...opts, otp })
    }

    throw err
  }
}
// Create (or effectively log in to) a registry account. When auth-type is
// "web" the browser-based flow is tried first, falling back to the classic
// couch prompts when the registry has not implemented it.
const adduser = async (npm, { creds, ...opts }) => {
  const authType = npm.config.get('auth-type')
  let res
  if (authType === 'web') {
    try {
      res = await adduserWeb(createOpener(npm, 'Create your account at'), opts)
    } catch (err) {
      // ENYI = the registry has not yet implemented web adduser
      if (err.code !== 'ENYI') {
        throw err
      }
      log.verbose('web add user not supported, trying couch')
    }
  }

  // auth type !== web or ENYI error w/ web adduser
  if (!res) {
    const username = await read.username('Username:', creds.username)
    const password = await read.password('Password:', creds.password)
    const email = await read.email('Email: (this IS public) ', creds.email)
    // npm registry quirk: If you "add" an existing user with their current
    // password, it's effectively a login, and if that account has otp you'll
    // be prompted for it.
    res = await otplease(npm, opts, (reqOpts) => adduserCouch(username, email, password, reqOpts))
  }

  // We don't know the username if it was a web login, all we can reliably log is scope and registry
  const message = `Logged in${opts.scope ? ` to scope ${opts.scope}` : ''} on ${opts.registry}.`
  log.info('adduser', message)

  return {
    message,
    newCreds: { token: res.token },
  }
}
const login = async (npm, { creds, ...opts }) => {
const authType = npm.config.get('auth-type')
let res
if (authType === 'web') {
try {
res = await loginWeb(createOpener(npm, 'Login at'), opts)
} catch (err) {
if (err.code === 'ENYI') {
log.verbose('web login not supported, trying couch')
} else {
throw err
}
}
}
// auth type !== web or ENYI error w/ web login
if (!res) {
const username = await read.username('Username:', creds.username)
const password = await read.password('Password:', creds.password)
res = await otplease(npm, opts, (reqOpts) => loginCouch(username, password, reqOpts))
}
// We don't know the username if it was a web login, all we can reliably log is scope and registry
const message = `Logged in${opts.scope ? ` to scope ${opts.scope}` : ''} on ${opts.registry}.`
log.info('login', message)
return {
message,
newCreds: { token: res.token },
}
}
module.exports = {
adduser,
login,
otplease,
}

178
NodeJS/node_modules/npm/lib/utils/cmd-list.js generated vendored Normal file
View File

@@ -0,0 +1,178 @@
const abbrev = require('abbrev')
// These correspond to filenames in lib/commands
// Please keep this list sorted alphabetically
// (deref() below relies on exact matches against this list before trying
// aliases or abbreviations)
const commands = [
  'access',
  'adduser',
  'audit',
  'bugs',
  'cache',
  'ci',
  'completion',
  'config',
  'dedupe',
  'deprecate',
  'diff',
  'dist-tag',
  'docs',
  'doctor',
  'edit',
  'exec',
  'explain',
  'explore',
  'find-dupes',
  'fund',
  'get',
  'help',
  'help-search',
  'hook',
  'init',
  'install',
  'install-ci-test',
  'install-test',
  'link',
  'll',
  'login',
  'logout',
  'ls',
  'org',
  'outdated',
  'owner',
  'pack',
  'ping',
  'pkg',
  'prefix',
  'profile',
  'prune',
  'publish',
  'query',
  'rebuild',
  'repo',
  'restart',
  'root',
  'run-script',
  'sbom',
  'search',
  'set',
  'shrinkwrap',
  'star',
  'stars',
  'start',
  'stop',
  'team',
  'test',
  'token',
  'uninstall',
  'unpublish',
  'unstar',
  'update',
  'version',
  'view',
  'whoami',
]
// These must resolve to an entry in commands
const aliases = {
  // aliases
  author: 'owner',
  home: 'docs',
  issues: 'bugs',
  info: 'view',
  show: 'view',
  find: 'search',
  add: 'install',
  unlink: 'uninstall',
  remove: 'uninstall',
  rm: 'uninstall',
  r: 'uninstall',

  // short names for common things
  un: 'uninstall',
  rb: 'rebuild',
  list: 'ls',
  ln: 'link',
  create: 'init',
  i: 'install',
  it: 'install-test',
  cit: 'install-ci-test',
  up: 'update',
  c: 'config',
  s: 'search',
  se: 'search',
  tst: 'test',
  t: 'test',
  ddp: 'dedupe',
  v: 'view',
  run: 'run-script',
  'clean-install': 'ci',
  'clean-install-test': 'install-ci-test',
  x: 'exec',
  why: 'explain',
  la: 'll',
  verison: 'version', // common misspelling, kept for compat
  ic: 'ci',

  // typos
  innit: 'init',

  // manually abbrev so that install-test doesn't make insta stop working
  in: 'install',
  ins: 'install',
  inst: 'install',
  insta: 'install',
  instal: 'install',
  isnt: 'install',
  isnta: 'install',
  isntal: 'install',
  isntall: 'install',
  'install-clean': 'ci',
  'isntall-clean': 'ci',
  hlep: 'help',
  'dist-tags': 'dist-tag',
  upgrade: 'update',
  udpate: 'update',
  rum: 'run-script',
  sit: 'install-ci-test',
  urn: 'run-script',
  ogr: 'org',
  'add-user': 'adduser',
}
const deref = (c) => {
if (!c) {
return
}
// Translate camelCase to snake-case (i.e. installTest to install-test)
if (c.match(/[A-Z]/)) {
c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
}
// if they asked for something exactly we are done
if (commands.includes(c)) {
return c
}
// if they asked for a direct alias
if (aliases[c]) {
return aliases[c]
}
const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
// first deref the abbrev, if there is one
// then resolve any aliases
// so `npm install-cl` will resolve to `install-clean` then to `ci`
let a = abbrevs[c]
while (aliases[a]) {
a = aliases[a]
}
return a
}
module.exports = {
aliases,
commands,
deref,
}

40
NodeJS/node_modules/npm/lib/utils/completion.fish generated vendored Normal file
View File

@@ -0,0 +1,40 @@
# npm completions for Fish shell
# This script is a work in progress and does not fall under the normal semver contract as the rest of npm.

# __fish_npm_needs_command taken from:
# https://stackoverflow.com/questions/16657803/creating-autocomplete-script-with-sub-commands
# Returns success (0) only when the commandline has just the `npm` token,
# i.e. we are completing the subcommand itself.
function __fish_npm_needs_command
  set -l cmd (commandline -opc)

  if test (count $cmd) -eq 1
    return 0
  end

  return 1
end

# Taken from https://github.com/fish-shell/fish-shell/blob/HEAD/share/completions/npm.fish
function __fish_complete_npm -d "Complete the commandline using npm's 'completion' tool"
  # tell npm we are fish shell
  set -lx COMP_FISH true
  if command -sq npm
    # npm completion is bash-centric, so we need to translate fish's "commandline" stuff to bash's $COMP_* stuff
    # COMP_LINE is an array with the words in the commandline
    set -lx COMP_LINE (commandline -opc)
    # COMP_CWORD is the index of the current word in COMP_LINE
    # bash starts arrays with 0, so subtract 1
    set -lx COMP_CWORD (math (count $COMP_LINE) - 1)
    # COMP_POINT is the index of point/cursor when the commandline is viewed as a string
    set -lx COMP_POINT (commandline -C)
    # If the cursor is after the last word, the empty token will disappear in the expansion
    # Readd it
    if test (commandline -ct) = ""
      set COMP_CWORD (math $COMP_CWORD + 1)
      set COMP_LINE $COMP_LINE ""
    end
    # errors are suppressed; an empty reply simply produces no completions
    command npm completion -- $COMP_LINE 2>/dev/null
  end
end

# flush out what ships with fish
complete -e npm

70
NodeJS/node_modules/npm/lib/utils/completion.sh generated vendored Normal file
View File

@@ -0,0 +1,70 @@
#!/bin/bash
###-begin-npm-completion-###
#
# npm command completion script
#
# Installation: npm completion >> ~/.bashrc  (or ~/.zshrc)
# Or, maybe: npm completion > /usr/local/etc/bash_completion.d/npm
#

# Three mutually-exclusive integration paths are tried in order:
# bash `complete`, zsh `compdef` (new-style), then zsh `compctl` (old-style).
if type complete &>/dev/null; then
  # bash
  _npm_completion () {
    local words cword
    # prefer the bash-completion helper which handles =, @ and : correctly
    if type _get_comp_words_by_ref &>/dev/null; then
      _get_comp_words_by_ref -n = -n @ -n : -w words -i cword
    else
      cword="$COMP_CWORD"
      words=("${COMP_WORDS[@]}")
    fi

    local si="$IFS"
    # delegate to `npm completion`, splitting its reply on newlines only
    if ! IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \
                           COMP_LINE="$COMP_LINE" \
                           COMP_POINT="$COMP_POINT" \
                           npm completion -- "${words[@]}" \
                           2>/dev/null)); then
      local ret=$?
      IFS="$si"
      return $ret
    fi
    IFS="$si"
    # strip the already-typed colon-prefix from candidates (bash treats : as a word break)
    if type __ltrim_colon_completions &>/dev/null; then
      __ltrim_colon_completions "${words[cword]}"
    fi
  }
  complete -o default -F _npm_completion npm
elif type compdef &>/dev/null; then
  # zsh with compsys
  _npm_completion() {
    local si=$IFS
    compadd -- $(COMP_CWORD=$((CURRENT-1)) \
                 COMP_LINE=$BUFFER \
                 COMP_POINT=0 \
                 npm completion -- "${words[@]}" \
                 2>/dev/null)
    IFS=$si
  }
  compdef _npm_completion npm
elif type compctl &>/dev/null; then
  # legacy zsh
  _npm_completion () {
    local cword line point words si
    # read the current commandline state from the zsh completion context
    read -Ac words
    read -cn cword
    let cword-=1
    read -l line
    read -ln point
    si="$IFS"
    if ! IFS=$'\n' reply=($(COMP_CWORD="$cword" \
                       COMP_LINE="$line" \
                       COMP_POINT="$point" \
                       npm completion -- "${words[@]}" \
                       2>/dev/null)); then

      local ret=$?
      IFS="$si"
      return $ret
    fi
    IFS="$si"
  }
  compctl -K _npm_completion npm
fi
###-end-npm-completion-###

34
NodeJS/node_modules/npm/lib/utils/did-you-mean.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
const Npm = require('../npm')
const { distance } = require('fastest-levenshtein')
const { commands } = require('./cmd-list.js')
// Lifecycle scripts npm already runs directly (`npm test` etc), so we skip
// suggesting them again as `npm run <x>`
const runScripts = ['stop', 'start', 'test', 'restart']

// "close enough" fuzzy match: edit distance under 40% of the input's length
const isClose = (scmd, cmd) => distance(scmd, cmd) < scmd.length * 0.4
// Build a "Did you mean ...?" suffix for an unknown command `scmd`,
// suggesting close npm commands, package scripts from `pkg`, and local
// bin entries. Returns '' when nothing is close enough.
const didYouMean = (pkg, scmd) => {
  const { scripts = {}, bin = {} } = pkg || {}

  const commandMatches = commands
    .filter(cmd => isClose(scmd, cmd) && scmd !== cmd)
    .map(str => [str, Npm.cmd(str).description])

  const scriptMatches = Object.keys(scripts)
    // We would already be suggesting this in `npm x` so omit them here
    .filter(cmd => isClose(scmd, cmd) && !runScripts.includes(cmd))
    .map(str => [`run ${str}`, `run the "${str}" package script`])

  const binMatches = Object.keys(bin)
    .filter(cmd => isClose(scmd, cmd))
    /* eslint-disable-next-line max-len */
    .map(str => [`exec ${str}`, `run the "${str}" command from either this or a remote npm package`])

  const best = [...commandMatches, ...scriptMatches, ...binMatches]

  if (best.length === 0) {
    return ''
  }

  const suggestions = best
    .slice(0, 3)
    .map(([msg, comment]) => ` npm ${msg} # ${comment}`)
    .join('\n')

  return `\n\nDid you mean ${best.length === 1 ? 'this' : 'one of these'}?\n` + suggestions
}

module.exports = didYouMean

539
NodeJS/node_modules/npm/lib/utils/display.js generated vendored Normal file
View File

@@ -0,0 +1,539 @@
const { log, output, input, META } = require('proc-log')
const { explain } = require('./explain-eresolve.js')
const { formatWithOptions } = require('./format')
// This is the general approach to color:
// Eventually this will be exposed somewhere we can refer to these by name.
// Foreground colors only. Never set the background color.
/*
* Black # (Don't use)
* Red # Danger
* Green # Success
* Yellow # Warning
* Blue # Accent
* Magenta # Done
* Cyan # Emphasis
* White # (Don't use)
*/
// Translates log levels to chalk colors
// (takes the stderr chalk instance so colors track stderr's TTY support)
const COLOR_PALETTE = ({ chalk: c }) => ({
  heading: c.bold,
  title: c.blueBright,
  timing: c.magentaBright,
  // loglevels
  error: c.red,
  warn: c.yellow,
  notice: c.cyanBright,
  http: c.green,
  info: c.cyan,
  verbose: c.blue,
  silly: c.blue.dim,
})
// Ordered log levels: a message is displayed when its level index is at or
// below the configured loglevel's index; silent (0) displays nothing.
const LEVEL_OPTIONS = {
  silent: {
    index: 0,
  },
  error: {
    index: 1,
  },
  warn: {
    index: 2,
  },
  notice: {
    index: 3,
  },
  http: {
    index: 4,
  },
  info: {
    index: 5,
  },
  verbose: {
    index: 6,
  },
  silly: {
    index: 7,
  },
}

// All log methods. `timing` is not a level: it is shown whenever the
// --timing flag is set, unless we are silent.
const LEVEL_METHODS = {
  ...LEVEL_OPTIONS,
  [log.KEYS.timing]: {
    show: ({ timing, index }) => !!timing && index !== 0,
  },
}
// Put a TTY stream into blocking mode so a fast process.exit cannot
// truncate pending writes. Returns the same stream for chaining.
const setBlocking = (stream) => {
  // Copied from https://github.com/yargs/set-blocking
  // https://raw.githubusercontent.com/yargs/set-blocking/master/LICENSE.txt
  /* istanbul ignore next - we trust that this works */
  const handle = stream._handle
  if (handle && stream.isTTY && typeof handle.setBlocking === 'function') {
    handle.setBlocking(true)
  }
  return stream
}
// These are important
// This is the key that is returned to the user for errors
const ERROR_KEY = 'error'
// This is the key producers use to indicate that there
// is a json error that should be merged into the finished output
const JSON_ERROR_KEY = 'jsonError'

// Truthy only for object literals: not arrays, not null, not primitives.
// (Returns the falsy input itself, not `false`, for falsy values.)
const isPlainObject = (v) => v && typeof v === 'object' && !Array.isArray(v)
// Merge a list of buffered json items into a single value:
// - if any item is not a plain object, that first item wins as-is
// - if every item looks like an array-in-object (keys 0,1,2,...), merge
//   them into a real array
// - otherwise merge all plain objects into one object (later keys win)
const getArrayOrObject = (items) => {
  if (items.length) {
    const nonObject = items.find((item) => !isPlainObject(item))
    // Non-objects and arrays cant be merged, so just return the first item
    if (nonObject) {
      return nonObject
    }
    // We use objects with 0,1,2,etc keys to merge array
    if (items.every((item, i) => Object.hasOwn(item, i))) {
      return Object.assign([], ...items)
    }
  }
  // Otherwise its an object with all object items merged together
  return Object.assign({}, ...items.filter((item) => isPlainObject(item)))
}
// Collapse all buffered json output (plus any jsonError attached to the
// flush meta or to individual buffered entries) into one value ready for
// JSON.stringify, or null when there is nothing to emit.
const getJsonBuffer = ({ [JSON_ERROR_KEY]: metaError }, buffer) => {
  const items = []
  // meta also contains the meta object passed to flush
  const errors = metaError ? [metaError] : []
  // index 1 is the meta, 2 is the logged argument
  for (const [, { [JSON_ERROR_KEY]: error }, obj] of buffer) {
    if (obj) {
      items.push(obj)
    }
    if (error) {
      errors.push(error)
    }
  }

  if (!items.length && !errors.length) {
    return null
  }

  const res = getArrayOrObject(items)

  // This skips any error checking since we can only set an error property
  // on an object that can be stringified
  // XXX(BREAKING_CHANGE): remove this in favor of always returning an object with result and error keys
  if (isPlainObject(res) && errors.length) {
    // This is not ideal. JSON output has always been keyed at the root with an `error`
    // key, so we cant change that without it being a breaking change. At the same time
    // some commands output arbitrary keys at the top level of the output, such as package
    // names. So the output could already have the same key. The choice here is to overwrite
    // it with our error since that is (probably?) more important.
    // XXX(BREAKING_CHANGE): all json output should be keyed under well known keys, eg `result` and `error`
    if (res[ERROR_KEY]) {
      log.warn('', `overwriting existing ${ERROR_KEY} on json output`)
    }
    res[ERROR_KEY] = getArrayOrObject(errors)
  }

  return res
}
// Wrap an event handler so that a trailing argument carrying the proc-log
// META symbol is split off and passed as a separate `meta` parameter
// (an empty object when no meta argument was sent).
const withMeta = (handler) => (level, ...args) => {
  const last = args.at(-1)
  const hasMeta = Boolean(last) && typeof last === 'object' && Object.hasOwn(last, META)
  const meta = hasMeta ? args.pop() : {}
  return handler(level, meta, ...args)
}
class Display {
  // log events are buffered here until load() calls log.resume()
  #logState = {
    buffering: true,
    buffer: [],
  }

  // output events are buffered here until output.flush() is emitted
  #outputState = {
    buffering: true,
    buffer: [],
  }

  // colors
  #noColorChalk
  #stdoutChalk
  #stdoutColor
  #stderrChalk
  #stderrColor
  #logColors

  // progress
  #progress

  // options
  #command
  #levelIndex
  #timing
  #json
  #heading
  #silent

  // display streams
  #stdout
  #stderr

  constructor ({ stdout, stderr }) {
    this.#stdout = setBlocking(stdout)
    this.#stderr = setBlocking(stderr)

    // Handlers are set immediately so they can buffer all events
    process.on('log', this.#logHandler)
    process.on('output', this.#outputHandler)
    process.on('input', this.#inputHandler)
    this.#progress = new Progress({ stream: stderr })
  }

  // Detach all process listeners, drop buffered events, and stop the
  // progress spinner. Must stay synchronous (called from the exit handler).
  off () {
    process.off('log', this.#logHandler)
    this.#logState.buffer.length = 0
    process.off('output', this.#outputHandler)
    this.#outputState.buffer.length = 0
    process.off('input', this.#inputHandler)
    this.#progress.off()
  }

  // The three chalk instances consumers pick from: no-color, stdout, stderr
  get chalk () {
    return {
      noColor: this.#noColorChalk,
      stdout: this.#stdoutChalk,
      stderr: this.#stderrChalk,
    }
  }

  // Apply config once it is known, set up chalk/colors, then release the
  // buffered log and output events.
  async load ({
    command,
    heading,
    json,
    loglevel,
    progress,
    stderrColor,
    stdoutColor,
    timing,
    unicode,
  }) {
    // get createSupportsColor from chalk directly if this lands
    // https://github.com/chalk/chalk/pull/600
    const [{ Chalk }, { createSupportsColor }] = await Promise.all([
      import('chalk'),
      import('supports-color'),
    ])
    // we get the chalk level based on a null stream meaning chalk will only use
    // what it knows about the environment to get color support since we already
    // determined in our definitions that we want to show colors.
    const level = Math.max(createSupportsColor(null).level, 1)
    this.#noColorChalk = new Chalk({ level: 0 })
    this.#stdoutColor = stdoutColor
    this.#stdoutChalk = stdoutColor ? new Chalk({ level }) : this.#noColorChalk
    this.#stderrColor = stderrColor
    this.#stderrChalk = stderrColor ? new Chalk({ level }) : this.#noColorChalk
    this.#logColors = COLOR_PALETTE({ chalk: this.#stderrChalk })

    this.#command = command
    this.#levelIndex = LEVEL_OPTIONS[loglevel].index
    this.#timing = timing
    this.#json = json
    this.#heading = heading
    this.#silent = this.#levelIndex <= 0

    // Emit resume event on the logs which will flush output
    log.resume()
    output.flush()
    this.#progress.load({
      unicode,
      enabled: !!progress && !this.#silent,
    })
  }

  // STREAM WRITES

  // Write formatted and (non-)colorized output to streams
  // (temporarily clearing the spinner around the write)
  #write (stream, options, ...args) {
    const colors = stream === this.#stdout ? this.#stdoutColor : this.#stderrColor
    const value = formatWithOptions({ colors, ...options }, ...args)
    this.#progress.write(() => stream.write(value))
  }

  // HANDLERS

  // Arrow function assigned to a private class field so it can be passed
  // directly as a listener and still reference "this"
  #logHandler = withMeta((level, meta, ...args) => {
    switch (level) {
      case log.KEYS.resume:
        // replay everything buffered so far, in order
        this.#logState.buffering = false
        this.#logState.buffer.forEach((item) => this.#tryWriteLog(...item))
        this.#logState.buffer.length = 0
        break

      case log.KEYS.pause:
        this.#logState.buffering = true
        break

      default:
        if (this.#logState.buffering) {
          this.#logState.buffer.push([level, meta, ...args])
        } else {
          this.#tryWriteLog(level, meta, ...args)
        }
        break
    }
  })

  // Arrow function assigned to a private class field so it can be passed
  // directly as a listener and still reference "this"
  #outputHandler = withMeta((level, meta, ...args) => {
    // a flush can override the json mode for the remainder of the run
    this.#json = typeof meta.json === 'boolean' ? meta.json : this.#json

    switch (level) {
      case output.KEYS.flush: {
        this.#outputState.buffering = false

        if (this.#json) {
          // in json mode all buffered output is merged into a single document
          const json = getJsonBuffer(meta, this.#outputState.buffer)
          if (json) {
            this.#writeOutput(output.KEYS.standard, meta, JSON.stringify(json, null, 2))
          }
        } else {
          this.#outputState.buffer.forEach((item) => this.#writeOutput(...item))
        }
        this.#outputState.buffer.length = 0
        break
      }

      case output.KEYS.buffer:
        this.#outputState.buffer.push([output.KEYS.standard, meta, ...args])
        break

      default:
        if (this.#outputState.buffering) {
          this.#outputState.buffer.push([level, meta, ...args])
        } else {
          // HACK: Check if the argument looks like a run-script banner. This can be
          // replaced with proc-log.META in @npmcli/run-script
          if (typeof args[0] === 'string' && args[0].startsWith('\n> ') && args[0].endsWith('\n')) {
            if (this.#silent || ['exec', 'explore'].includes(this.#command)) {
              // Silent mode and some specific commands always hide run script banners
              break
            } else if (this.#json) {
              // In json mode, change output to stderr since we dont want to break json
              // parsing on stdout if the user is piping to jq or something.
              // XXX: in a future (breaking?) change it might make sense for run-script to
              // always output these banners with proc-log.output.error if we think they
              // align closer with "logging" instead of "output"
              level = output.KEYS.error
            }
          }
          this.#writeOutput(level, meta, ...args)
        }
        break
    }
  })

  // Pauses logging and hides the spinner while a prompt is active, then
  // resumes both when input ends.
  #inputHandler = withMeta((level, meta, ...args) => {
    switch (level) {
      case input.KEYS.start:
        log.pause()
        this.#outputState.buffering = true
        this.#progress.off()
        break

      case input.KEYS.end:
        log.resume()
        output.flush()
        this.#progress.resume()
        break

      case input.KEYS.read: {
        // The convention when calling input.read is to pass in a single fn that returns
        // the promise to await. resolve and reject are provided by proc-log
        const [res, rej, p] = args
        return input.start(() => p()
          .then(res)
          .catch(rej)
          // Any call to procLog.input.read will render a prompt to the user, so we always
          // add a single newline of output to stdout to move the cursor to the next line
          .finally(() => output.standard('')))
      }
    }
  })

  // OUTPUT

  // Route output events to the matching stream
  #writeOutput (level, meta, ...args) {
    switch (level) {
      case output.KEYS.standard:
        this.#write(this.#stdout, {}, ...args)
        break

      case output.KEYS.error:
        this.#write(this.#stderr, {}, ...args)
        break
    }
  }

  // LOGS

  // Wrapper around #writeLog that must never throw: a crashing log call
  // cannot be allowed to take the process down.
  #tryWriteLog (level, meta, ...args) {
    try {
      // Also (and this is a really inexcusable kludge), we patch the
      // log.warn() method so that when we see a peerDep override
      // explanation from Arborist, we can replace the object with a
      // highly abbreviated explanation of what's being overridden.
      // TODO: this could probably be moved to arborist now that display is refactored
      const [heading, message, expl] = args
      if (level === log.KEYS.warn && heading === 'ERESOLVE' && expl && typeof expl === 'object') {
        this.#writeLog(level, meta, heading, message)
        this.#writeLog(level, meta, '', explain(expl, this.#stderrChalk, 2))
        return
      }
      this.#writeLog(level, meta, ...args)
    } catch (ex) {
      try {
        // if it crashed once, it might again!
        this.#writeLog(log.KEYS.verbose, meta, '', `attempt to log crashed`, ...args, ex)
      } catch (ex2) {
        // This happens if the object has an inspect method that crashes so just console.error
        // with the errors but don't do anything else that might error again.
        // eslint-disable-next-line no-console
        console.error(`attempt to log crashed`, ex, ex2)
      }
    }
  }

  // Write a single log line to stderr if the current loglevel (or a
  // `force` meta flag) says it should be shown.
  #writeLog (level, meta, ...args) {
    const levelOpts = LEVEL_METHODS[level]
    const show = levelOpts.show ?? (({ index }) => levelOpts.index <= index)
    const force = meta.force && !this.#silent

    if (force || show({ index: this.#levelIndex, timing: this.#timing })) {
      // this mutates the array so we can pass args directly to format later
      const title = args.shift()
      const prefix = [
        this.#logColors.heading(this.#heading),
        this.#logColors[level](level),
        title ? this.#logColors.title(title) : null,
      ]
      this.#write(this.#stderr, { prefix }, ...args)
    }
  }
}
// A minimal stderr spinner. Rendering is delayed on load so short commands
// never show it, and writes from Display clear/redraw it around themselves.
class Progress {
  // Taken from https://github.com/sindresorhus/cli-spinners
  // MIT License
  // Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
  static dots = { duration: 80, frames: ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'] }
  static lines = { duration: 130, frames: ['-', '\\', '|', '/'] }

  #stream
  #spinner
  #enabled = false

  #frameIndex = 0
  #lastUpdate = 0
  #interval
  #timeout

  // We are rendering is enabled option is set and we are not waiting for the render timeout
  get #rendering () {
    return this.#enabled && !this.#timeout
  }

  // We are spinning if enabled option is set and the render interval has been set
  get #spinning () {
    return this.#enabled && this.#interval
  }

  constructor ({ stream }) {
    this.#stream = stream
  }

  load ({ enabled, unicode }) {
    this.#enabled = enabled
    this.#spinner = unicode ? Progress.dots : Progress.lines
    // Dont render the spinner for short durations
    this.#render(200)
  }

  // Stop all timers, reset state, and erase the spinner from the stream
  off () {
    if (!this.#enabled) {
      return
    }
    clearTimeout(this.#timeout)
    this.#timeout = null
    clearInterval(this.#interval)
    this.#interval = null
    this.#frameIndex = 0
    this.#lastUpdate = 0
    this.#clearSpinner()
  }

  resume () {
    this.#render()
  }

  // If we are currenting rendering the spinner we clear it
  // before writing our line and then re-render the spinner after.
  // If not then all we need to do is write the line
  write (write) {
    if (this.#spinning) {
      this.#clearSpinner()
    }
    write()
    if (this.#spinning) {
      this.#render()
    }
  }

  // Render immediately, or after `ms` when an initial delay is requested
  #render (ms) {
    if (ms) {
      this.#timeout = setTimeout(() => {
        this.#timeout = null
        this.#renderSpinner()
      }, ms)
      // Make sure this timeout does not keep the process open
      this.#timeout.unref()
    } else {
      this.#renderSpinner()
    }
  }

  #renderSpinner () {
    if (!this.#rendering) {
      return
    }
    // We always attempt to render immediately but we only request to move to the next
    // frame if it has been longer than our spinner frame duration since our last update
    this.#renderFrame(Date.now() - this.#lastUpdate >= this.#spinner.duration)
    clearInterval(this.#interval)
    this.#interval = setInterval(() => this.#renderFrame(true), this.#spinner.duration)
  }

  // Draw the current frame, optionally advancing (and wrapping) the index
  #renderFrame (next) {
    if (next) {
      this.#lastUpdate = Date.now()
      this.#frameIndex++
      if (this.#frameIndex >= this.#spinner.frames.length) {
        this.#frameIndex = 0
      }
    }
    this.#clearSpinner()
    this.#stream.write(this.#spinner.frames[this.#frameIndex])
  }

  #clearSpinner () {
    // Move to the start of the line and clear the rest of the line
    this.#stream.cursorTo(0)
    this.#stream.clearLine(1)
  }
}

module.exports = Display

447
NodeJS/node_modules/npm/lib/utils/error-message.js generated vendored Normal file
View File

@@ -0,0 +1,447 @@
const { format } = require('node:util')
const { resolve } = require('node:path')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const { log } = require('proc-log')
// Translate a thrown error into display-ready pieces for the exit handler:
//   summary: [prefix, message] pairs always shown to the user
//   detail:  [prefix, message] pairs with extra guidance
//   files:   [filename, contents] pairs written to the cache dir (e.g. the
//            full ERESOLVE report)
// NOTE: mutates `er` — its message and stack are redacted in place.
const errorMessage = (er, npm) => {
  const summary = []
  const detail = []
  const files = []
  // redact registry tokens/passwords before anything is displayed or logged
  er.message &&= replaceInfo(er.message)
  er.stack &&= replaceInfo(er.stack)
  switch (er.code) {
    case 'ERESOLVE': {
      // dependency resolution conflict: show the rendered explanation and
      // attach the full plain-text report as a file for the cache
      const { report } = require('./explain-eresolve.js')
      summary.push(['ERESOLVE', er.message])
      detail.push(['', ''])
      // XXX(display): error messages are logged so we use the logColor since that is based
      // on stderr. This should be handled solely by the display layer so it could also be
      // printed to stdout if necessary.
      const { explanation, file } = report(er, npm.logChalk, npm.noColorChalk)
      detail.push(['', explanation])
      files.push(['eresolve-report.txt', file])
      break
    }
    case 'ENOLOCK': {
      // command was run with a config that requires a lockfile, but none found
      const cmd = npm.command || ''
      summary.push([cmd, 'This command requires an existing lockfile.'])
      detail.push([cmd, 'Try creating one first with: npm i --package-lock-only'])
      detail.push([cmd, `Original error: ${er.message}`])
      break
    }
    case 'ENOAUDIT':
      summary.push(['audit', er.message])
      break
    case 'ECONNREFUSED':
      summary.push(['', er])
      detail.push(['', [
        '',
        'If you are behind a proxy, please make sure that the',
        "'proxy' config is set properly. See: 'npm help config'",
      ].join('\n')])
      break
    case 'EACCES':
    case 'EPERM': {
      // Distinguish the well-known "root-owned cache" situation from a
      // generic filesystem permission failure.
      const isCachePath =
        typeof er.path === 'string' && npm.loaded && er.path.startsWith(npm.config.get('cache'))
      const isCacheDest =
        typeof er.dest === 'string' && npm.loaded && er.dest.startsWith(npm.config.get('cache'))
      if (process.platform !== 'win32' && (isCachePath || isCacheDest)) {
        // user probably doesn't need this, but still add it to the debug log
        log.verbose(er.stack)
        summary.push(['', [
          '',
          'Your cache folder contains root-owned files, due to a bug in',
          'previous versions of npm which has since been addressed.',
          '',
          'To permanently fix this problem, please run:',
          ` sudo chown -R ${process.getuid()}:${process.getgid()} "${npm.config.get('cache')}"`,
        ].join('\n')])
      } else {
        summary.push(['', er])
        detail.push(['', [
          '',
          'The operation was rejected by your operating system.',
          ...process.platform === 'win32' ? [
            "It's possible that the file was already in use (by a text editor or antivirus),",
            'or that you lack permissions to access it.',
          ] : [
            'It is likely you do not have the permissions to access this file as the current user',
          ],
          '',
          'If you believe this might be a permissions issue, please double-check the',
          'permissions of the file and its containing directories, or try running',
          'the command again as root/Administrator.',
        ].join('\n')])
      }
      break
    }
    case 'ENOGIT':
      summary.push(['', er.message])
      detail.push(['', [
        '',
        'Failed using git.',
        'Please check if you have git installed and in your PATH.',
      ].join('\n')])
      break
    case 'EJSONPARSE':
      // Check whether we ran into a conflict in our own package.json
      if (er.path === resolve(npm.prefix, 'package.json')) {
        const { isDiff } = require('parse-conflict-json')
        const txt = require('node:fs').readFileSync(er.path, 'utf8').replace(/\r\n/g, '\n')
        if (isDiff(txt)) {
          detail.push(['', [
            'Merge conflict detected in your package.json.',
            '',
            'Please resolve the package.json conflict and retry.',
          ].join('\n')])
          break
        }
      }
      summary.push(['JSON.parse', er.message])
      detail.push(['JSON.parse', [
        'Failed to parse JSON data.',
        'Note: package.json must be actual JSON, not just JavaScript.',
      ].join('\n')])
      break
    case 'EOTP':
    case 'E401':
      // E401 is for places where we accidentally neglect OTP stuff
      if (er.code === 'EOTP' || /one-time pass/.test(er.message)) {
        summary.push(['', 'This operation requires a one-time password from your authenticator.'])
        detail.push(['', [
          'You can provide a one-time password by passing --otp=<code> to the command you ran.',
          'If you already provided a one-time password then it is likely that you either typoed',
          'it, or it timed out. Please try again.',
        ].join('\n')])
      } else {
        // npm ERR! code E401
        // npm ERR! Unable to authenticate, need: Basic
        // inspect the www-authenticate header to tell token auth apart from
        // username/password auth
        const auth = !er.headers || !er.headers['www-authenticate']
          ? []
          : er.headers['www-authenticate'].map(au => au.split(/[,\s]+/))[0]
        if (auth.includes('Bearer')) {
          summary.push(['',
            'Unable to authenticate, your authentication token seems to be invalid.',
          ])
          detail.push(['', [
            'To correct this please try logging in again with:',
            ' npm login',
          ].join('\n')])
        } else if (auth.includes('Basic')) {
          summary.push(['', 'Incorrect or missing password.'])
          detail.push(['', [
            'If you were trying to login, change your password, create an',
            'authentication token or enable two-factor authentication then',
            'that means you likely typed your password in incorrectly.',
            'Please try again, or recover your password at:',
            ' https://www.npmjs.com/forgot',
            '',
            'If you were doing some other operation then your saved credentials are',
            'probably out of date. To correct this please try logging in again with:',
            ' npm login',
          ].join('\n')])
        } else {
          summary.push(['', er.message || er])
        }
      }
      break
    case 'E404':
      // There's no need to have 404 in the message as well.
      summary.push(['404', er.message.replace(/^404\s+/, '')])
      if (er.pkgid && er.pkgid !== '-') {
        // strip any trailing @version/@tag so we validate only the name
        const pkg = er.pkgid.replace(/(?!^)@.*$/, '')
        detail.push(['404', ''])
        detail.push(['404', '', `'${replaceInfo(er.pkgid)}' is not in this registry.`])
        const nameValidator = require('validate-npm-package-name')
        const valResult = nameValidator(pkg)
        if (!valResult.validForNewPackages) {
          detail.push(['404', 'This package name is not valid, because', ''])
          const errorsArray = [...(valResult.errors || []), ...(valResult.warnings || [])]
          errorsArray.forEach((item, idx) => detail.push(['404', ' ' + (idx + 1) + '. ' + item]))
        }
        detail.push(['404', ''])
        detail.push(['404', 'Note that you can also install from a'])
        detail.push(['404', 'tarball, folder, http url, or git url.'])
      }
      break
    case 'EPUBLISHCONFLICT':
      summary.push(['publish fail', 'Cannot publish over existing version.'])
      detail.push(['publish fail', "Update the 'version' field in package.json and try again."])
      detail.push(['publish fail', ''])
      detail.push(['publish fail', 'To automatically increment version numbers, see:'])
      detail.push(['publish fail', ' npm help version'])
      break
    case 'EISGIT':
      summary.push(['git', er.message])
      summary.push(['git', ` ${er.path}`])
      detail.push(['git', [
        'Refusing to remove it. Update manually,',
        'or move it out of the way first.',
      ].join('\n')])
      break
    case 'EBADDEVENGINES': {
      // devEngines check failed; er carries structured current/required info
      const { current, required } = er
      summary.push(['EBADDEVENGINES', er.message])
      detail.push(['EBADDEVENGINES', { current, required }])
      break
    }
    case 'EBADPLATFORM': {
      // Build an aligned "Valid X: / Actual X:" table for every platform
      // field that was actually checked (os, cpu, libc, ...).
      const actual = er.current
      const expected = { ...er.required }
      const checkedKeys = []
      for (const key in expected) {
        if (Array.isArray(expected[key]) && expected[key].length > 0) {
          expected[key] = expected[key].join(',')
          checkedKeys.push(key)
        } else if (expected[key] === undefined ||
          Array.isArray(expected[key]) && expected[key].length === 0) {
          // no constraint on this field: drop it from the report entirely
          delete expected[key]
          delete actual[key]
        } else {
          checkedKeys.push(key)
        }
      }
      const longestKey = Math.max(...checkedKeys.map((key) => key.length))
      const detailEntry = []
      for (const key of checkedKeys) {
        const padding = key.length === longestKey
          ? 1
          : 1 + (longestKey - key.length)
        // padding + 1 because 'actual' is longer than 'valid'
        detailEntry.push(`Valid ${key}:${' '.repeat(padding + 1)}${expected[key]}`)
        detailEntry.push(`Actual ${key}:${' '.repeat(padding)}${actual[key]}`)
      }
      summary.push(['notsup', format(
        'Unsupported platform for %s: wanted %j (current: %j)',
        er.pkgid,
        expected,
        actual
      )])
      detail.push(['notsup', detailEntry.join('\n')])
      break
    }
    case 'EEXIST':
      summary.push(['', er.message])
      summary.push(['', 'File exists: ' + (er.dest || er.path)])
      detail.push(['', 'Remove the existing file and try again, or run npm'])
      detail.push(['', 'with --force to overwrite files recklessly.'])
      break
    case 'ENEEDAUTH':
      summary.push(['need auth', er.message])
      detail.push(['need auth', 'You need to authorize this machine using `npm adduser`'])
      break
    case 'ECONNRESET':
    case 'ENOTFOUND':
    case 'ETIMEDOUT':
    case 'ERR_SOCKET_TIMEOUT':
    case 'EAI_FAIL':
      // all network-level failures get the same generic connectivity advice
      summary.push(['network', er.message])
      detail.push(['network', [
        'This is a problem related to network connectivity.',
        'In most cases you are behind a proxy or have bad network settings.',
        '',
        'If you are behind a proxy, please make sure that the',
        "'proxy' config is set properly. See: 'npm help config'",
      ].join('\n')])
      break
    case 'ETARGET':
      summary.push(['notarget', er.message])
      detail.push(['notarget', [
        'In most cases you or one of your dependencies are requesting',
        "a package version that doesn't exist.",
      ].join('\n')])
      break
    case 'E403':
      summary.push(['403', er.message])
      detail.push(['403', [
        'In most cases, you or one of your dependencies are requesting',
        'a package version that is forbidden by your security policy, or',
        'on a server you do not have access to.',
      ].join('\n')])
      break
    case 'EBADENGINE':
      summary.push(['engine', er.message])
      summary.push(['engine', 'Not compatible with your version of node/npm: ' + er.pkgid])
      detail.push(['notsup', [
        'Not compatible with your version of node/npm: ' + er.pkgid,
        'Required: ' + JSON.stringify(er.required),
        'Actual:   ' +
          JSON.stringify({ npm: npm.version, node: process.version }),
      ].join('\n')])
      break
    case 'ENOSPC':
      summary.push(['nospc', er.message])
      detail.push(['nospc', [
        'There appears to be insufficient space on your system to finish.',
        'Clear up some disk space and try again.',
      ].join('\n')])
      break
    case 'EROFS':
      summary.push(['rofs', er.message])
      detail.push(['rofs', [
        'Often virtualized file systems, or other file systems',
        "that don't support symlinks, give this error.",
      ].join('\n')])
      break
    case 'ENOENT':
      summary.push(['enoent', er.message])
      detail.push(['enoent', [
        'This is related to npm not being able to find a file.',
        er.file ? `\nCheck if the file '${er.file}' is present.` : '',
      ].join('\n')])
      break
    case 'EMISSINGARG':
    case 'EUNKNOWNTYPE':
    case 'EINVALIDTYPE':
    case 'ETOOMANYARGS':
      // these codes indicate an npm internal bug, so point at the issue tracker
      summary.push(['typeerror', er.stack])
      detail.push(['typeerror', [
        'This is an error with npm itself. Please report this error at:',
        ' https://github.com/npm/cli/issues',
      ].join('\n')])
      break
    default:
      // unknown code: show the message plus whatever structured context the
      // error carries (cause, signal, spawned command, captured output)
      summary.push(['', er.message || er])
      if (er.cause) {
        detail.push(['cause', er.cause.message])
      }
      if (er.signal) {
        detail.push(['signal', er.signal])
      }
      if (er.cmd && Array.isArray(er.args)) {
        detail.push(['command', ...[er.cmd, ...er.args.map(replaceInfo)]])
      }
      if (er.stdout) {
        detail.push(['', er.stdout.trim()])
      }
      if (er.stderr) {
        detail.push(['', er.stderr.trim()])
      }
      break
  }
  return {
    summary,
    detail,
    files,
  }
}
// Map an error to a numeric exit code: prefer a numeric `errno`, then a
// numeric `code`. Returns undefined when neither is a number, in which
// case the caller falls back to a default exit code.
const getExitCodeFromError = (err) => {
  for (const key of ['errno', 'code']) {
    const value = err?.[key]
    if (typeof value === 'number') {
      return value
    }
  }
}
// Normalize anything thrown during command execution into a plain object
// telling the exit handler how to report it: exit code, whether to suppress
// the standard error display, and pre-formatted message sections.
// NOTE: may mutate `err` (backfills err.code; errorMessage redacts in place).
const getError = (err, { npm, command, pkg }) => {
  // if we got a command that just shells out to something else, then it
  // will presumably print its own errors and exit with a proper status
  // code if there's a problem. If we got an error with a code=0, then...
  // something else went wrong along the way, so maybe an npm problem?
  if (command?.constructor?.isShellout && typeof err.code === 'number' && err.code) {
    return {
      exitCode: err.code,
      suppressError: true,
    }
  }
  // XXX: we should stop throwing strings
  if (typeof err === 'string') {
    return {
      exitCode: 1,
      suppressError: true,
      summary: [['', err]],
    }
  }
  // XXX: we should stop throwing other non-errors
  if (!(err instanceof Error)) {
    return {
      exitCode: 1,
      suppressError: true,
      summary: [['weird error', err]],
    }
  }
  if (err.code === 'EUNKNOWNCOMMAND') {
    // typo'd command: offer suggestions instead of an error dump
    const suggestions = require('./did-you-mean.js')(pkg, err.command)
    return {
      exitCode: 1,
      suppressError: true,
      standard: [
        `Unknown command: "${err.command}"`,
        suggestions,
        'To see a list of supported npm commands, run:',
        ' npm help',
      ],
    }
  }
  // Anything after this is not suppressed and gets more logged information.
  // Backfill a code parsed out of the message when one is missing, then
  // render the code-specific sections (errorMessage redacts err in place).
  err.code ??= err.message.match(/^(?:Error: )?(E[A-Z]+)/)?.[1]
  const { summary, detail, files } = errorMessage(err, npm)
  // collect [key, value] pairs for the properties err actually carries
  const presentProps = (keys, transform) =>
    keys.filter(k => err[k]).map(k => [k, transform(err[k])])
  return {
    err,
    code: err.code,
    exitCode: getExitCodeFromError(err) || 1,
    suppressError: false,
    summary,
    detail,
    files,
    verbose: presentProps(['type', 'stack', 'statusCode', 'pkgid'], replaceInfo),
    error: presentProps(['code', 'syscall', 'file', 'path', 'dest', 'errno'], v => v),
  }
}
// Public API: getError normalizes thrown values for the exit handler,
// errorMessage renders code-specific summaries/details, and
// getExitCodeFromError extracts a numeric exit status when present.
module.exports = {
  getExitCodeFromError,
  errorMessage,
  getError,
}

103
NodeJS/node_modules/npm/lib/utils/explain-dep.js generated vendored Normal file
View File

@@ -0,0 +1,103 @@
const { relative } = require('node:path')
// Render a node plus who depends on it and any links pointing at it,
// descending at most `depth` levels into the explanations.
const explainNode = (node, depth, chalk) => {
  const sections = [
    printNode(node, chalk),
    explainDependents(node, depth, chalk),
    explainLinksIn(node, depth, chalk),
  ]
  return sections.join('')
}
// Color a dependency-type label with its fixed per-type style so output is
// consistent everywhere. Unknown types pass through unstyled.
const colorType = (type, chalk) => {
  const styles = {
    extraneous: chalk.red,
    dev: chalk.blue,
    optional: chalk.magenta,
    peer: chalk.magentaBright,
    bundled: chalk.underline.cyan,
    workspace: chalk.blueBright,
    overridden: chalk.dim,
  }
  /* istanbul ignore next - unknown types fall back to identity */
  const style = styles[type] || (s => s)
  return style(type)
}
// One-line rendering of a single node: "name@version" plus any applicable
// type flags, followed by a dimmed location on its own line when known.
const printNode = (node, chalk) => {
  const flags = ['extraneous', 'dev', 'optional', 'peer', 'bundled', 'overridden']
    .filter(meta => node[meta])
    .map(meta => ` ${colorType(meta, chalk)}`)
    .join('')
  let pkgid = `${node.name}@${node.version}`
  if (node.isWorkspace) {
    pkgid = chalk.blueBright(pkgid)
  }
  const location = node.location ? chalk.dim(`\n${node.location}`) : ''
  return `${pkgid}${flags}${location}`
}
// Indent-nested rendering of the symlinks that point into this node.
// Returns '' when there are none or the depth budget is exhausted.
const explainLinksIn = ({ linksIn }, depth, chalk) => {
  if (!linksIn?.length || depth <= 0) {
    return ''
  }
  const rendered = linksIn
    .map(link => explainNode(link, depth - 1, chalk))
    .join('\n')
  // indent every line one extra space under the parent node
  return `\n${rendered}`.split('\n').join('\n ')
}
// Indent-nested rendering of the edges depending on this node. At most
// ceil(depth/2) edges are rendered in full; the rest are summarized as
// "<n> more (name, name, ...)" with the name list capped near 50 chars.
// Returns '' when there are no dependents or the depth budget is exhausted.
const explainDependents = ({ dependents }, depth, chalk) => {
  if (!dependents?.length || depth <= 0) {
    return ''
  }
  const max = Math.ceil(depth / 2)
  const messages = dependents.slice(0, max)
    .map(edge => explainEdge(edge, depth, chalk))
  // show just the names of the first 5 deps that overflowed the list
  if (dependents.length > max) {
    const maxLen = 50
    const showNames = []
    let len = 0
    for (let i = max; i < dependents.length; i++) {
      const { from: { name: depName = 'the root project' } } = dependents[i]
      len += depName.length
      if (len >= maxLen && i < dependents.length - 1) {
        showNames.push('...')
        break
      }
      showNames.push(depName)
    }
    messages.push(`${dependents.length - max} more (${showNames.join(', ')})`)
  }
  return `\n${messages.join('\n')}`.split('\n').join('\n ')
}
// Render one dependency edge: '[type] [bundled] name@"spec" from <parent>'.
// Workspace edges show the relative path to the workspace instead of a
// spec; overridden edges also show the original (pre-override) spec.
const explainEdge = ({ name, type, bundled, from, spec, rawSpec, overridden }, depth, chalk) => {
  let dep
  if (type === 'workspace') {
    dep = chalk.bold(relative(from.location, spec.slice('file:'.length)))
  } else {
    dep = `${name}@"${spec}"`
  }
  if (overridden) {
    dep = `${colorType('overridden', chalk)} ${dep} (was "${rawSpec}")`
  }
  const parts = []
  if (type !== 'prod') {
    parts.push(`${colorType(type, chalk)} `)
  }
  if (bundled) {
    parts.push(`${colorType('bundled', chalk)} `)
  }
  parts.push(dep, ` from ${explainFrom(from, depth, chalk)}`)
  return parts.join('')
}
// Describe where an edge comes from: the anonymous root project, or a fully
// rendered dependent node with its own (one level shallower) explanation.
const explainFrom = (from, depth, chalk) => {
  const isRootProject = !from.name && !from.version
  if (isRootProject) {
    return 'the root project'
  }
  const sections = [
    printNode(from, chalk),
    explainDependents(from, depth - 1, chalk),
    explainLinksIn(from, depth - 1, chalk),
  ]
  return sections.join('')
}
module.exports = { explainNode, printNode, explainEdge }

71
NodeJS/node_modules/npm/lib/utils/explain-eresolve.js generated vendored Normal file
View File

@@ -0,0 +1,71 @@
// this is called when an ERESOLVE error is caught in the exit-handler,
// or when there's a log.warn('eresolve', msg, explanation), to turn it
// into a human-intelligible explanation of what's wrong and how to fix.
const { explainEdge, explainNode, printNode } = require('./explain-dep.js')
// expl is an explanation object that comes from Arborist. It looks like:
// Depth is how far we want to want to descend into the object making a report.
// The full report (ie, depth=Infinity) is always written to the cache folder
// at ${cache}/eresolve-report.txt along with full json.
// Turn an Arborist ERESOLVE explanation object into human-readable text,
// descending `depth` levels into the nested dependency explanations.
const explain = (expl, chalk, depth) => {
  const { edge, dep, current, peerConflict, currentEdge } = expl
  const out = []
  const whileInstalling = (dep && dep.whileInstalling) ||
    (current && current.whileInstalling) ||
    (edge && edge.from && edge.from.whileInstalling)
  if (whileInstalling) {
    out.push('While resolving: ' + printNode(whileInstalling, chalk))
  }
  // it "should" be impossible for an ERESOLVE explanation to lack both
  // current and currentEdge, but better to have a less helpful error
  // than a crashing failure.
  if (current) {
    out.push('Found: ' + explainNode(current, depth, chalk))
  } else if (peerConflict && peerConflict.current) {
    out.push('Found: ' + explainNode(peerConflict.current, depth, chalk))
  } else if (currentEdge) {
    out.push('Found: ' + explainEdge(currentEdge, depth, chalk))
  } else /* istanbul ignore else - should always have one */ if (edge) {
    out.push('Found: ' + explainEdge(edge, depth, chalk))
  }
  out.push('\nCould not resolve dependency:\n' +
    explainEdge(edge, depth, chalk))
  if (peerConflict) {
    out.push('\nConflicting peer dependency: ' +
      explainNode(peerConflict.peer, depth, chalk))
  }
  return out.join('\n')
}
// generate a full verbose report and tell the user how to fix it
const report = (expl, chalk, noColorChalk) => {
const flags = [
expl.strictPeerDeps ? '--no-strict-peer-deps' : '',
'--force',
'--legacy-peer-deps',
].filter(Boolean)
const or = (arr) => arr.length <= 2
? arr.join(' or ') :
arr.map((v, i, l) => i + 1 === l.length ? `or ${v}` : v).join(', ')
const fix = `Fix the upstream dependency conflict, or retry
this command with ${or(flags)}
to accept an incorrect (and potentially broken) dependency resolution.`
return {
explanation: `${explain(expl, chalk, 4)}\n\n${fix}`,
file: `# npm resolution error report\n\n${explain(expl, noColorChalk, Infinity)}\n\n${fix}`,
}
}
module.exports = {
explain,
report,
}

30
NodeJS/node_modules/npm/lib/utils/format-bytes.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
// Convert bytes to printable output, for file reporting in tarballs
// Only supports up to GB because that's way larger than anything the registry
// supports anyways.
// Convert a byte count into a printable string using decimal (SI) units,
// for file reporting in tarballs. Only supports up to GB because that's way
// larger than anything the registry supports anyways. `space` controls
// whether a space separates the number from the unit.
const formatBytes = (bytes, space = true) => {
  const spacer = space ? ' ' : ''
  if (bytes < 1000) {
    // plain bytes, no fractional digit
    return `${bytes}${spacer}B`
  }
  const [divisor, unit] = bytes < 1000000 ? [1000, 'kB']
    : bytes < 1000000000 ? [1000000, 'MB']
    : [1000000000, 'GB']
  return `${(bytes / divisor).toFixed(1)}${spacer}${unit}`
}
module.exports = formatBytes

View File

@@ -0,0 +1,174 @@
/* eslint-disable max-len */
const { stripVTControlCharacters: strip } = require('node:util')
const { Minipass } = require('minipass')
// This module consumes package data in the following format:
//
// {
// name: String,
// description: String,
// maintainers: [{ username: String, email: String }],
// keywords: String | [String],
// version: String,
// date: Date // can be null,
// }
//
// The returned stream will format this package data
// into a byte stream of formatted, displayable output.
// Decide whether a search result should be shown: returns false when any
// `exclude` pattern matches the package name, maintainer usernames, or
// keywords (compared lowercase). Patterns wrapped in slashes (/foo or
// /foo/) are treated as regular expressions; anything else is a substring.
function filter (data, exclude) {
  const haystack = [data.name]
    .concat(data.maintainers.map(m => m.username))
    .concat(data.keywords || [])
    .map(f => f?.trim?.())
    .filter(Boolean)
    .join(' ')
    .toLowerCase()
  const matches = (pattern) => {
    // Treats both /foo and /foo/ as regex searches
    if (pattern.startsWith('/')) {
      const body = pattern.endsWith('/')
        ? pattern.slice(1, -1)
        : pattern.slice(1)
      return haystack.match(new RegExp(body))
    }
    return haystack.includes(pattern)
  }
  return !exclude.some(matches)
}
module.exports = (opts) => {
return opts.json ? new JSONOutputStream(opts) : new TextOutputStream(opts)
}
// Streams search results as one JSON array: emits '[' before the first
// accepted object, ',' separators between objects, and closes the array
// (or emits an empty one) on end. Excluded packages are dropped entirely.
class JSONOutputStream extends Minipass {
  #wroteAny = false
  #exclude
  constructor (opts) {
    super()
    this.#exclude = opts.exclude
  }
  write (obj) {
    if (!filter(obj, this.#exclude)) {
      return
    }
    // open the array on the first object; separate subsequent ones
    const lead = this.#wroteAny ? '\n,\n' : '[\n'
    this.#wroteAny = true
    super.write(lead)
    return super.write(JSON.stringify(obj))
  }
  end () {
    super.write(this.#wroteAny ? ']\n' : '\n[]\n')
    super.end()
  }
}
// Streams search results as human-readable text, or as tab-separated lines
// in --parseable mode. Excluded packages are dropped; search terms found in
// the name, description, and keywords are highlighted unless parseable.
class TextOutputStream extends Minipass {
  #args
  #chalk
  #exclude
  #parseable
  constructor (opts) {
    super()
    // lowercase the search terms once so all highlight comparisons agree
    this.#args = opts.args.map(s => s.toLowerCase()).filter(Boolean)
    this.#chalk = opts.npm.chalk
    this.#exclude = opts.exclude
    this.#parseable = opts.parseable
  }
  write (data) {
    if (!filter(data, this.#exclude)) {
      return
    }
    // Normalize: strip ANSI escapes from registry-supplied fields and give
    // every column a defined value.
    const pkg = {
      authors: data.maintainers.map((m) => `${strip(m.username)}`).join(' '),
      publisher: strip(data.publisher?.username || ''),
      date: data.date ? data.date.toISOString().slice(0, 10) : 'prehistoric',
      description: strip(data.description ?? ''),
      keywords: [],
      name: strip(data.name),
      version: data.version,
    }
    if (Array.isArray(data.keywords)) {
      pkg.keywords = data.keywords.map(strip)
    } else if (typeof data.keywords === 'string') {
      // keywords may arrive as a single comma/space separated string;
      // normalize every separator run (note the /g — a non-global replace
      // here only fixed the first run and left "a b,c" style results)
      pkg.keywords = strip(data.keywords.replace(/[,\s]+/g, ' ')).split(' ')
    }
    let output
    if (this.#parseable) {
      // fix: this column list previously read `pkg.author`, which does not
      // exist on the normalized object, so the authors column was silently
      // dropped from parseable output by the .filter(Boolean) below
      output = [pkg.name, pkg.description, pkg.authors, pkg.date, pkg.version, pkg.keywords]
        .filter(Boolean)
        .map(col => ('' + col).replace(/\t/g, ' ')).join('\t')
      return super.write(output)
    }
    // highlight any keyword that exactly matches a search term
    const keywords = pkg.keywords.map(k => {
      if (this.#args.includes(k)) {
        return this.#chalk.cyan(k)
      } else {
        return k
      }
    }).join(' ')
    // highlight each case-insensitive occurrence of each search term in the
    // description, preserving the original casing of the matched text
    let description = []
    for (const arg of this.#args) {
      const finder = pkg.description.toLowerCase().split(arg.toLowerCase())
      let p = 0
      for (const f of finder) {
        description.push(pkg.description.slice(p, p + f.length))
        const word = pkg.description.slice(p + f.length, p + f.length + arg.length)
        description.push(this.#chalk.cyan(word))
        p += f.length + arg.length
      }
    }
    description = description.filter(Boolean)
    // same treatment for the name: full-match highlight, else per-term
    let name = pkg.name
    if (this.#args.includes(pkg.name)) {
      name = this.#chalk.cyan(pkg.name)
    } else {
      name = []
      for (const arg of this.#args) {
        const finder = pkg.name.toLowerCase().split(arg.toLowerCase())
        let p = 0
        for (const f of finder) {
          name.push(pkg.name.slice(p, p + f.length))
          const word = pkg.name.slice(p + f.length, p + f.length + arg.length)
          name.push(this.#chalk.cyan(word))
          p += f.length + arg.length
        }
      }
      name = this.#chalk.blue(name.join(''))
    }
    if (description.length) {
      output = `${name}\n${description.join('')}\n`
    } else {
      output = `${name}\n`
    }
    if (pkg.publisher) {
      output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.blue(pkg.publisher)}\n`
    } else {
      output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.yellow('???')}\n`
    }
    output += `Maintainers: ${pkg.authors}\n`
    if (keywords) {
      output += `Keywords: ${keywords}\n`
    }
    output += `${this.#chalk.blue(`https://npm.im/${pkg.name}`)}\n`
    return super.write(output)
  }
}

50
NodeJS/node_modules/npm/lib/utils/format.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const { formatWithOptions: baseFormatWithOptions } = require('node:util')
// These are most assuredly not a mistake
// https://eslint.org/docs/latest/rules/no-control-regex
// \x00 through \x1f, \x7f through \x9f, not including \x09 \x0a \x0b \x0d
/* eslint-disable-next-line no-control-regex */
const HAS_C01 = /[\x00-\x08\x0c\x0e-\x1f\x7f-\x9f]/
// Allows everything up to '[38;5;255m' in 8 bit notation
const ALLOWED_SGR = /^\[[0-9;]{0,8}m/
// '[38;5;255m'.length
const SGR_MAX_LEN = 10
// Strips all ANSI C0 and C1 control characters (except for SGR up to 8 bit).
// Disallowed control characters are rewritten caret-style (^G, ^?, etc) so
// log output stays printable; ESC is kept only when it introduces an
// allowed SGR color sequence.
function STRIP_C01 (str) {
  if (!HAS_C01.test(str)) {
    // most strings are clean: skip the per-character scan entirely
    return str
  }
  const out = []
  for (let i = 0; i < str.length; i++) {
    const char = str[i]
    const code = char.charCodeAt(0)
    if (!HAS_C01.test(char)) {
      // Most characters are in this set so continue early if we can
      out.push(char)
    } else if (code === 27 && ALLOWED_SGR.test(str.slice(i + 1, i + SGR_MAX_LEN + 1))) {
      // \x1b with allowed SGR
      out.push('\x1b')
    } else if (code <= 31) {
      // escape all other C0 control characters besides \x7f
      out.push(`^${String.fromCharCode(code + 64)}`)
    } else {
      // hasC01 ensures this is now a C1 control character or \x7f
      out.push(`^${String.fromCharCode(code - 64)}`)
    }
  }
  return out.join('')
}
// util.formatWithOptions wrapper: prefixes every output line with the
// joined `prefix` parts, strips disallowed control characters, and
// terminates each line with `eol`.
const formatWithOptions = ({ prefix: prefixes = [], eol = '\n', ...options }, ...args) => {
  const prefix = prefixes.filter(p => p != null).join(' ')
  const formatted = STRIP_C01(baseFormatWithOptions(options, ...args))
  // Splitting could be changed to only `\n` once we are sure we only emit unix newlines.
  // The eol param to this function will put the correct newlines in place for the returned string.
  const sep = prefix ? ' ' : ''
  return formatted
    .split(/\r?\n/)
    .map(line => `${prefix}${line ? sep : ''}${line}${eol}`)
    .join('')
}
}
module.exports = { formatWithOptions }

26
NodeJS/node_modules/npm/lib/utils/get-identity.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
const npmFetch = require('npm-registry-fetch')
module.exports = async (npm, opts) => {
const { registry } = opts
// First, check if we have a user/pass-based auth
const creds = npm.config.getCredentialsByURI(registry)
if (creds.username) {
return creds.username
}
// No username, but we have other credentials; fetch the username from registry
if (creds.token || creds.certfile && creds.keyfile) {
const registryData = await npmFetch.json('/-/whoami', { ...opts })
if (typeof registryData?.username === 'string') {
return registryData.username
}
}
// At this point, even if they have a credentials object, it doesn't have a
// valid token.
throw Object.assign(
new Error('This command requires you to be logged in.'),
{ code: 'ENEEDAUTH' }
)
}

54
NodeJS/node_modules/npm/lib/utils/get-workspaces.js generated vendored Normal file
View File

@@ -0,0 +1,54 @@
const { resolve, relative } = require('node:path')
const mapWorkspaces = require('@npmcli/map-workspaces')
const { minimatch } = require('minimatch')
const pkgJson = require('@npmcli/package-json')
// minimatch wants forward slashes only for glob patterns
const globify = (pattern) => pattern.replace(/\\/g, '/')
// Returns a Map of workspace paths indexed by workspace name:
//   { foo => '/path/to/foo' }
// A filter may be a workspace name, a path to a workspace, or a glob-ish
// parent directory; with no filters, everything is selected. Throws when
// nothing matches.
const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom }) => {
  // TODO we need a better error to be bubbled up here if this call fails
  const { content: pkg } = await pkgJson.normalize(path)
  const workspaces = await mapWorkspaces({ cwd: path, pkg })
  const res = new Map()
  if (includeWorkspaceRoot) {
    res.set(pkg.name, path)
  }
  if (!filters.length) {
    // no filters: take every workspace (later entries win over the root)
    for (const [wsName, wsPath] of workspaces) {
      res.set(wsName, wsPath)
    }
  }
  // true when filterArg selects the workspace by name, exact path, or as a
  // glob-style parent directory (both cwd-relative and root-relative forms)
  const matchesWorkspace = (filterArg, wsName, wsPath) => {
    let relativePath = relative(relativeFrom, wsPath)
    if (filterArg.startsWith('./')) {
      relativePath = `./${relativePath}`
    }
    const relativeFilter = relative(path, filterArg)
    return filterArg === wsName ||
      resolve(relativeFrom, filterArg) === wsPath ||
      minimatch(relativePath, `${globify(relativeFilter)}/*`) ||
      minimatch(relativePath, `${globify(filterArg)}/*`)
  }
  for (const filterArg of filters) {
    for (const [wsName, wsPath] of workspaces.entries()) {
      if (matchesWorkspace(filterArg, wsName, wsPath)) {
        res.set(wsName, wsPath)
      }
    }
  }
  if (!res.size) {
    const msg = filters.length
      ? `:\n ${filters.map(filterArg => ` --workspace=${filterArg}`).join('')}`
      : '!'
    throw new Error(`No workspaces found${msg}`)
  }
  return res
}
module.exports = getWorkspaces

45
NodeJS/node_modules/npm/lib/utils/installed-deep.js generated vendored Normal file
View File

@@ -0,0 +1,45 @@
const { resolve } = require('node:path')
const localeCompare = require('@isaacs/string-locale-compare')('en')
// Collect installed package names for shell completion. Globally-installed
// packages are tagged with '-g' (as a [name, '-g'] pair) unless running in
// global mode; local packages are added plain unless in global mode.
// Results are bounded by the `depth` config.
const installedDeep = async (npm) => {
  const Arborist = require('@npmcli/arborist')
  const {
    depth,
    global,
    prefix,
    workspacesEnabled,
  } = npm.flatOptions
  // Flatten an Arborist inventory into non-root nodes within the depth
  // limit, sorted shallowest-first then alphabetically.
  // (A no-op identity `.map(i => { return i })` was removed from this chain.)
  const getValues = (tree) =>
    [...tree.inventory.values()]
      .filter(i => i.location !== '' && !i.isRoot)
      .filter(i => (i.depth - 1) <= depth)
      .sort((a, b) => (a.depth - b.depth) || localeCompare(a.name, b.name))
  const res = new Set()
  const gArb = new Arborist({
    global: true,
    path: resolve(npm.globalDir, '..'),
    workspacesEnabled,
  })
  const gTree = await gArb.loadActual({ global: true })
  for (const node of getValues(gTree)) {
    res.add(global ? node.name : [node.name, '-g'])
  }
  if (!global) {
    const arb = new Arborist({ global: false, path: prefix, workspacesEnabled })
    const tree = await arb.loadActual()
    for (const node of getValues(tree)) {
      res.add(node.name)
    }
  }
  return [...res]
}
module.exports = installedDeep

19
NodeJS/node_modules/npm/lib/utils/installed-shallow.js generated vendored Normal file
View File

@@ -0,0 +1,19 @@
const { readdirScoped } = require('@npmcli/fs')
// Completion helper: list top-level installed package names. Local names
// are plain; global names get a ' -g' suffix unless running in global mode.
// Returns null once more than three argv words remain (nothing to offer).
const installedShallow = async (npm, opts) => {
  const { conf: { argv: { remain } } } = opts
  if (remain.length > 3) {
    return null
  }
  // read the package directories (including scoped ones) under a prefix,
  // normalizing path separators to forward slashes
  const names = async (useGlobal) => {
    const dir = useGlobal ? npm.globalDir : npm.localDir
    const paths = await readdirScoped(dir)
    return paths.map(p => p.replace(/\\/g, '/'))
  }
  const { global } = npm.flatOptions
  const locals = global ? [] : await names(false)
  const globals = (await names(true)).map(n => global ? n : `${n} -g`)
  return [...locals, ...globals]
}
module.exports = installedShallow

4
NodeJS/node_modules/npm/lib/utils/is-windows.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
// True when running under a native Windows shell (cmd/powershell): win32
// platform, not a cygwin terminal, and not an MSYS/MinGW environment.
const isWindowsShell = process.platform === 'win32' &&
  process.env.TERM !== 'cygwin' &&
  !/^MINGW(32|64)$/.test(process.env.MSYSTEM)
exports.isWindowsShell = isWindowsShell

261
NodeJS/node_modules/npm/lib/utils/log-file.js generated vendored Normal file
View File

@@ -0,0 +1,261 @@
const os = require('node:os')
const { join, dirname, basename } = require('node:path')
const fsMiniPass = require('fs-minipass')
const fs = require('node:fs/promises')
const { log } = require('proc-log')
const { formatWithOptions } = require('./format')
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
class LogFiles {
// Default to an array so we can buffer
// initial writes before we know the cache location
#logStream = []
// We cap log files at a certain number of log events per file.
// Note that each log event can write more than one line to the
// file. Then we rotate log files once this number of events is reached
#MAX_LOGS_PER_FILE = null
// Now that we write logs continuously we need to have a backstop
// here for infinite loops that still log. This is also partially handled
// by the config.get('max-files') option, but this is a failsafe to
// prevent runaway log file creation
#MAX_FILES_PER_PROCESS = null
#fileLogCount = 0
#totalLogCount = 0
#path = null
#logsMax = null
#files = []
#timing = false
constructor ({
maxLogsPerFile = 50_000,
maxFilesPerProcess = 5,
} = {}) {
this.#MAX_LOGS_PER_FILE = maxLogsPerFile
this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
this.on()
}
on () {
process.on('log', this.#logHandler)
}
off () {
process.off('log', this.#logHandler)
this.#endStream()
}
load ({ command, path, logsMax = Infinity, timing } = {}) {
if (['completion'].includes(command)) {
return
}
// dir is user configurable and is required to exist so
// this can error if the dir is missing or not configured correctly
this.#path = path
this.#logsMax = logsMax
this.#timing = timing
// Log stream has already ended
if (!this.#logStream) {
return
}
log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)
// Write the contents of our array buffer to our new file stream and
// set that as the new log logstream for future writes
// if logs max is 0 then the user does not want a log file
if (this.#logsMax > 0) {
const initialFile = this.#openLogFile()
if (initialFile) {
for (const item of this.#logStream) {
const formatted = this.#formatLogItem(...item)
if (formatted !== null) {
initialFile.write(formatted)
}
}
this.#logStream = initialFile
}
}
log.verbose('logfile', this.files[0] || 'no logfile created')
// Kickoff cleaning process, even if we aren't writing a logfile.
// This is async but it will always ignore the current logfile
// Return the result so it can be awaited in tests
return this.#cleanLogs()
}
get files () {
return this.#files
}
get #isBuffered () {
return Array.isArray(this.#logStream)
}
#endStream (output) {
if (this.#logStream && !this.#isBuffered) {
this.#logStream.end(output)
this.#logStream = null
}
}
#logHandler = (level, ...args) => {
// Ignore pause and resume events since we
// write everything to the log file
if (level === 'pause' || level === 'resume') {
return
}
// If the stream is ended then do nothing
if (!this.#logStream) {
return
}
if (this.#isBuffered) {
// Cant do anything but buffer the output if we dont
// have a file stream yet
this.#logStream.push([level, ...args])
return
}
const logOutput = this.#formatLogItem(level, ...args)
if (logOutput === null) {
return
}
// Open a new log file if we've written too many logs to this one
if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
// Write last chunk to the file and close it
this.#endStream(logOutput)
if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
// but if its way too many then we just stop listening
this.off()
} else {
// otherwise we are ready for a new file for the next event
this.#logStream = this.#openLogFile()
}
} else {
this.#logStream.write(logOutput)
}
}
#formatLogItem (level, title, ...args) {
// Only right timing logs to logfile if explicitly requests
if (level === log.KEYS.timing && !this.#timing) {
return null
}
this.#fileLogCount += 1
const prefix = [this.#totalLogCount++, level, title || null]
return formatWithOptions({ prefix, eol: os.EOL, colors: false }, ...args)
}
  // Build the path for the debug log file with the given counter suffix;
  // with no argument this yields the "debug-.log" base used as a pattern.
  #getLogFilePath (count = '') {
    return `${this.#path}debug-${count}.log`
  }
#openLogFile () {
// Count in filename will be 0 indexed
const count = this.#files.length
try {
// Pad with zeros so that our log files are always sorted properly
// We never want to write files ending in `-9.log` and `-10.log` because
// log file cleaning is done by deleting the oldest so in this example
// `-10.log` would be deleted next
const f = this.#getLogFilePath(padZero(count, this.#MAX_FILES_PER_PROCESS))
// Some effort was made to make the async, but we need to write logs
// during process.on('exit') which has to be synchronous. So in order
// to never drop log messages, it is easiest to make it sync all the time
// and this was measured to be about 1.5% slower for 40k lines of output
const logStream = new fsMiniPass.WriteStreamSync(f, { flags: 'a' })
if (count > 0) {
// Reset file log count if we are opening
// after our first file
this.#fileLogCount = 0
}
this.#files.push(logStream.path)
return logStream
} catch (e) {
// If the user has a readonly logdir then we don't want to
// warn this on every command so it should be verbose
log.verbose('logfile', `could not be created: ${e}`)
}
}
async #cleanLogs () {
// module to clean out the old log files
// this is a best-effort attempt. if a rm fails, we just
// log a message about it and move on. We do return a
// Promise that succeeds when we've tried to delete everything,
// just for the benefit of testing this function properly.
try {
const logPath = this.#getLogFilePath()
const patternFileName = basename(logPath)
// tell glob to only match digits
.replace(/\d/g, 'd')
// Handle the old (prior to 8.2.0) log file names which did not have a
// counter suffix
.replace('-.log', '')
let files = await fs.readdir(
dirname(logPath), {
withFileTypes: true,
encoding: 'utf-8',
})
files = files.sort((a, b) => basename(a.name).localeCompare(basename(b.name), 'en'))
const logFiles = []
for (const file of files) {
if (!file.isFile()) {
continue
}
const genericFileName = file.name.replace(/\d/g, 'd')
const filePath = join(dirname(logPath), basename(file.name))
// Always ignore the currently written files
if (
genericFileName.includes(patternFileName)
&& genericFileName.endsWith('.log')
&& !this.#files.includes(filePath)
) {
logFiles.push(filePath)
}
}
const toDelete = logFiles.length - this.#logsMax
if (toDelete <= 0) {
return
}
log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)
for (const file of logFiles.slice(0, toDelete)) {
try {
await fs.rm(file, { force: true })
} catch (e) {
log.silly('logfile', 'error removing log file', file, e)
}
}
} catch (e) {
// Disable cleanup failure warnings when log writing is disabled
if (this.#logsMax > 0) {
log.verbose('logfile', 'error cleaning log files', e)
}
} finally {
log.silly('logfile', 'done cleaning log files')
}
}
}
// expose the LogFiles class as this module's public API
module.exports = LogFiles

74
NodeJS/node_modules/npm/lib/utils/npm-usage.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
const { commands } = require('./cmd-list')
const COL_MAX = 60
const COL_MIN = 24
const COL_GUTTER = 16
const INDENT = 4

// Build a run of `repeat` spaces; defaults to the standard 4-space indent
const indent = (repeat = INDENT) => ' '.repeat(repeat)

// A newline followed by `repeat` spaces of indentation
const indentNewline = (repeat) => '\n' + indent(repeat)
// Render the top-level `npm` usage/help text. With `--long` each command's
// full usage is included (cmdUsages); otherwise a compact comma-separated
// command list is used (cmdNames).
module.exports = (npm) => {
  // note when help topics will open in a browser instead of a pager
  const browser = npm.config.get('viewer') === 'browser' ? ' (in a browser)' : ''
  const allCommands = npm.config.get('long') ? cmdUsages(npm.constructor) : cmdNames()
  return `npm <command>
Usage:
npm install install all the dependencies in your project
npm install <foo> add the <foo> dependency to your project
npm test run this project's tests
npm run <foo> run the script named <foo>
npm <command> -h quick help on <command>
npm -l display usage info for all commands
npm help <term> search for help on <term>${browser}
npm help npm more involved overview${browser}
All commands:
${allCommands}
Specify configs in the ini-formatted file:
${indent() + npm.config.get('userconfig')}
or on the command line via: npm <command> --key=value
More configuration info: npm help config
Configuration fields: npm help 7 config
npm@${npm.version} ${npm.npmRoot}`
}
// Word-wrap the full command list into comma-separated rows sized to the
// terminal width (clamped to COL_MIN..COL_MAX with COL_GUTTER of slack;
// COL_MAX when no tty width is available).
const cmdNames = () => {
  const width = process.stdout.columns
  const line = width
    ? Math.min(COL_MAX, Math.max(width - COL_GUTTER, COL_MIN))
    : COL_MAX
  const rows = ['']
  let row = 0
  for (const name of commands) {
    // +2 accounts for the ', ' separator
    if (rows[row].length + name.length + 2 < line) {
      rows[row] += ', ' + name
    } else {
      // close the current row with a trailing comma and start a new one
      rows[row] += ','
      row += 1
      rows[row] = name
    }
  }
  // slice(2) drops the ', ' that prefixes the very first command
  return indentNewline() + rows.join(indentNewline()).slice(2)
}
// Return one "<name><gutter><usage>" entry per command, with the usage
// text aligned past the longest command name.
const cmdUsages = (Npm) => {
  // pair each command with its (possibly multi-line) usage text
  const entries = commands.map((name) => [name, Npm.cmd(name).describeUsage.split('\n')])
  // width of the longest command name, used to size the alignment gutter
  const maxLen = entries.reduce((max, [name]) => Math.max(max, name.length), 0)
  return entries.map(([name, usageLines]) => {
    const gutter = indent(maxLen - name.length + 1)
    const usage = usageLines.join(indentNewline(INDENT + maxLen + 1))
    return indentNewline() + name + gutter + usage
  }).join('\n')
}

98
NodeJS/node_modules/npm/lib/utils/open-url.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
const { open } = require('@npmcli/promise-spawn')
const { output, input } = require('proc-log')
const { URL } = require('node:url')
const readline = require('node:readline/promises')
const { once } = require('node:events')
// Throw unless `url` parses as a URL using the http or https protocol.
const assertValidUrl = (url) => {
  let protocol
  try {
    protocol = new URL(url).protocol
  } catch {
    // unparseable input is rejected the same way as a bad protocol
    protocol = null
  }
  if (protocol !== 'http:' && protocol !== 'https:') {
    throw new Error('Invalid URL: ' + url)
  }
}
// Print a titled URL: a buffered record in --json mode, otherwise plain
// "<title>:\n<url>" text on standard output.
const outputMsg = (json, title, url) => {
  if (!json) {
    output.standard(`${title}:\n${url}`)
    return
  }
  output.buffer({ title, url })
}
// Attempt to open a URL in the user's web browser; when that is disabled
// or no opener command exists, print the address so it can be opened
// manually.
const openUrl = async (npm, url, title, isFile) => {
  const target = encodeURI(url)
  const browser = npm.config.get('browser')
  const json = npm.config.get('json')
  // --no-browser: never spawn anything, just print the address
  if (browser === false) {
    outputMsg(json, title, target)
    return
  }
  // the help command passes isFile=true for local doc files, so the
  // http(s) protocol check is skipped in that case
  if (!isFile) {
    assertValidUrl(target)
  }
  try {
    // browser === true means "use the platform default opener command"
    await input.start(() => open(target, {
      command: browser === true ? null : browser,
    }))
  } catch (err) {
    // exit code 127: opener command not found — fall back to printing
    if (err.code !== 127) {
      throw err
    }
    outputMsg(json, title, target)
  }
}
// Print the URL, then — when running interactively with a browser
// configured — prompt the user before attempting to open it.
const openUrlPrompt = async (npm, url, title, prompt, { signal }) => {
  const browser = npm.config.get('browser')
  const json = npm.config.get('json')
  assertValidUrl(url)
  outputMsg(json, title, url)
  // without a tty on both ends (or with --no-browser), printing the URL
  // is all we can do
  const interactive = process.stdin.isTTY && process.stdout.isTTY
  if (browser === false || !interactive) {
    return
  }
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  })
  try {
    // wait for ENTER, a readline error, or SIGINT (treated as cancelation)
    await input.read(() => Promise.race([
      rl.question(prompt, { signal }),
      once(rl, 'error'),
      once(rl, 'SIGINT').then(() => {
        throw new Error('canceled')
      }),
    ]))
    rl.close()
    await openUrl(npm, url, 'Browser unavailable. Please open the URL manually')
  } catch (err) {
    rl.close()
    // aborts (e.g. via the caller's signal) are expected; rethrow the rest
    if (err.name !== 'AbortError') {
      throw err
    }
  }
}
// Bind npm/title/prompt up front and return an opener callback with the
// (url, opts) shape expected by the npm-profile methods.
const createOpener = (npm, title, prompt = 'Press ENTER to open in the browser...') => {
  return (url, opts) => openUrlPrompt(npm, url, title, prompt, opts)
}
// public helpers for opening URLs from cli commands
module.exports = {
  openUrl,
  openUrlPrompt,
  createOpener,
}

29
NodeJS/node_modules/npm/lib/utils/output-error.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
const { log, output } = require('proc-log')
// Route a structured error report to the proc-log channels: `standard`
// lines go to stdout, `verbose` tuples to log.verbose, and the `error`,
// `summary`, and `detail` tuples all to log.error.
const outputError = ({ standard = [], verbose = [], error = [], summary = [], detail = [] }) => {
  // each standard entry is just a single plain string
  standard.forEach((line) => output.standard(line))
  // the remaining groups hold argument tuples for the log methods
  verbose.forEach((line) => log.verbose(...line))
  const errors = [...error, ...summary, ...detail]
  errors.forEach((line) => log.error(...line))
}
// Build the `error` value for --json output, or undefined when JSON
// output is not active (no error, npm not loaded, or --json unset).
const jsonError = (error, npm) => {
  if (!error || !npm?.loaded || !npm?.config.get('json')) {
    return undefined
  }
  // each entry is a [title, ...message] tuple; drop the title, join the
  // message words, and join the entries into one newline-separated string
  const flatten = (lines) => (lines || []).map(l => l.slice(1).join(' ')).join('\n').trim()
  return {
    code: error.code,
    summary: flatten(error.summary),
    detail: flatten(error.detail),
  }
}
// helpers for emitting structured npm errors, as log output or as the
// `error` property of --json output
module.exports = {
  outputError,
  jsonError,
}

7
NodeJS/node_modules/npm/lib/utils/ping.js generated vendored Normal file
View File

@@ -0,0 +1,7 @@
// ping the npm registry
// used by the ping and doctor commands
const fetch = require('npm-registry-fetch')
// GET the registry's /-/ping endpoint with caching disabled; resolves with
// the parsed JSON response body, or `{}` when the body is not valid JSON
module.exports = async (flatOptions) => {
  const res = await fetch('/-/ping', { ...flatOptions, cache: false })
  return res.json().catch(() => ({}))
}

Some files were not shown because too many files have changed in this diff Show More