Removed the Requirement to Install Python and NodeJS (Now Bundled with Borealis)

This commit is contained in:
2025-04-24 00:42:19 -06:00
parent 785265d3e7
commit 9c68cdea84
7786 changed files with 2386458 additions and 217 deletions

View File

@ -0,0 +1,222 @@
const libnpmaccess = require('libnpmaccess')
const npa = require('npm-package-arg')
const { output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { otplease } = require('../utils/auth.js')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
// Subcommands accepted by `npm access <cmd>`.
const commands = [
  'get',
  'grant',
  'list',
  'revoke',
  'set',
]

// Valid `<key>=<value>` arguments for `npm access set`.
// `2fa` is accepted as an alias for `mfa`.
const setCommands = [
  'status=public',
  'status=private',
  'mfa=none',
  'mfa=publish',
  'mfa=automation',
  '2fa=none',
  '2fa=publish',
  '2fa=automation',
]
class Access extends BaseCommand {
  static description = 'Set access level on published packages'
  static name = 'access'
  static params = [
    'json',
    'otp',
    'registry',
  ]

  static usage = [
    'list packages [<user>|<scope>|<scope:team>] [<package>]',
    'list collaborators [<package> [<user>]]',
    'get status [<package>]',
    'set status=public|private [<package>]',
    'set mfa=none|publish|automation [<package>]',
    'grant <read-only|read-write> <scope:team> [<package>]',
    'revoke <scope:team> [<package>]',
  ]

  // Tab-completion: first the subcommand list, then values specific to the
  // chosen subcommand.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return commands
    }

    if (argv.length === 3) {
      switch (argv[2]) {
        case 'grant':
          return ['read-only', 'read-write']
        case 'revoke':
          return []
        case 'list':
        case 'ls':
          return ['packages', 'collaborators']
        case 'get':
          return ['status']
        case 'set':
          return setCommands
        default:
          throw new Error(argv[2] + ' not recognized')
      }
    }
  }

  // Validate cmd/subcmd and dispatch to the matching private method.
  async exec ([cmd, subcmd, ...args]) {
    if (!cmd) {
      throw this.usageError()
    }
    if (!commands.includes(cmd)) {
      throw this.usageError(`${cmd} is not a valid access command`)
    }
    // All commands take at least one more parameter so we can do this check up front
    if (!subcmd) {
      throw this.usageError()
    }

    switch (cmd) {
      case 'grant':
        if (!['read-only', 'read-write'].includes(subcmd)) {
          throw this.usageError('grant must be either `read-only` or `read-write`')
        }
        if (!args[0]) {
          throw this.usageError('`<scope:team>` argument is required')
        }
        return this.#grant(subcmd, args[0], args[1])
      case 'revoke':
        return this.#revoke(subcmd, args[0])
      case 'list':
      case 'ls':
        if (subcmd === 'packages') {
          return this.#listPackages(args[0], args[1])
        }
        if (subcmd === 'collaborators') {
          return this.#listCollaborators(args[0], args[1])
        }
        throw this.usageError(`list ${subcmd} is not a valid access command`)
      case 'get':
        if (subcmd !== 'status') {
          throw this.usageError(`get ${subcmd} is not a valid access command`)
        }
        return this.#getStatus(args[0])
      case 'set':
        if (!setCommands.includes(subcmd)) {
          throw this.usageError(`set ${subcmd} is not a valid access command`)
        }
        return this.#set(subcmd, args[0])
    }
  }

  // Grant read-only/read-write on pkg to a scope:team.
  async #grant (permissions, scope, pkg) {
    await libnpmaccess.setPermissions(scope, pkg, permissions, this.npm.flatOptions)
  }

  // Remove a scope:team's permissions on pkg.
  async #revoke (scope, pkg) {
    await libnpmaccess.removePermissions(scope, pkg, this.npm.flatOptions)
  }

  // List packages an owner can access; owner defaults to the logged-in
  // identity, and `pkg` (if given) limits the printed output to one package.
  async #listPackages (owner, pkg) {
    if (!owner) {
      owner = await getIdentity(this.npm, this.npm.flatOptions)
    }
    const pkgs = await libnpmaccess.getPackages(owner, this.npm.flatOptions)
    this.#output(pkgs, pkg)
  }

  // List collaborators on a package; `user` (if given) limits the output.
  async #listCollaborators (pkg, user) {
    const pkgName = await this.#getPackage(pkg, false)
    const collabs = await libnpmaccess.getCollaborators(pkgName, this.npm.flatOptions)
    this.#output(collabs, user)
  }

  // Print whether a package is public or private.
  async #getStatus (pkg) {
    const pkgName = await this.#getPackage(pkg, false)
    const visibility = await libnpmaccess.getVisibility(pkgName, this.npm.flatOptions)
    this.#output({ [pkgName]: visibility.public ? 'public' : 'private' })
  }

  // Dispatch `set <key>=<value>`; `2fa` is an alias for `mfa`.
  async #set (subcmd, pkg) {
    const [subkey, subval] = subcmd.split('=')
    switch (subkey) {
      case 'mfa':
      case '2fa':
        return this.#setMfa(pkg, subval)
      case 'status':
        return this.#setStatus(pkg, subval)
    }
  }

  // Set the MFA requirement level, re-prompting for an OTP if required.
  async #setMfa (pkg, level) {
    const pkgName = await this.#getPackage(pkg, false)
    await otplease(this.npm, this.npm.flatOptions, (opts) => {
      return libnpmaccess.setMfa(pkgName, level, opts)
    })
  }

  // Set public/private status ("private" maps to the registry's
  // "restricted"), then print the resulting status.
  async #setStatus (pkg, status) {
    // only scoped packages can have their access changed
    const pkgName = await this.#getPackage(pkg, true)
    if (status === 'private') {
      status = 'restricted'
    }
    await otplease(this.npm, this.npm.flatOptions, (opts) => {
      return libnpmaccess.setAccess(pkgName, status, opts)
    })
    return this.#getStatus(pkgName)
  }

  // Resolve the package-name argument, falling back to the local
  // package.json; optionally require the name to be scoped.
  async #getPackage (name, requireScope) {
    if (!name) {
      try {
        const { content } = await pkgJson.normalize(this.npm.prefix)
        name = content.name
      } catch (err) {
        if (err.code === 'ENOENT') {
          throw Object.assign(new Error('no package name given and no package.json found'), {
            code: 'ENOENT',
          })
        } else {
          throw err
        }
      }
    }
    const spec = npa(name)
    if (requireScope && !spec.scope) {
      throw this.usageError('This command is only available for scoped packages.')
    }
    return name
  }

  // Print `items` (a { name: permission } map) either as buffered JSON or as
  // sorted `name: permission` lines, normalizing read/write to
  // read-only/read-write; `limiter` restricts line output to a single key.
  #output (items, limiter) {
    const outputs = {}
    const lookup = {
      __proto__: null,
      read: 'read-only',
      write: 'read-write',
    }
    for (const item in items) {
      const val = items[item]
      outputs[item] = lookup[val] || val
    }
    if (this.npm.config.get('json')) {
      output.buffer(outputs)
    } else {
      for (const item of Object.keys(outputs).sort(localeCompare)) {
        if (!limiter || limiter === item) {
          output.standard(`${item}: ${outputs[item]}`)
        }
      }
    }
  }
}
module.exports = Access

View File

@ -0,0 +1,50 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
class AddUser extends BaseCommand {
  static description = 'Add a registry user account'
  static name = 'adduser'
  static params = [
    'registry',
    'scope',
    'auth-type',
  ]

  // Resolve the registry to log in against: a scoped registry from config
  // wins over the default, unless --registry was given explicitly on the CLI.
  #resolveRegistry (scope) {
    const defaultRegistry = this.npm.config.get('registry')
    if (!scope) {
      return defaultRegistry
    }
    const fromScope = this.npm.config.get(`${scope}:registry`)
    const fromCli = this.npm.config.get('registry', 'cli')
    return (fromScope && !fromCli) ? fromScope : defaultRegistry
  }

  // Prompt for credentials, store them for the resolved registry, and
  // persist the user-level config.
  async exec () {
    const scope = this.npm.config.get('scope')
    const registry = this.#resolveRegistry(scope)
    const creds = this.npm.config.getCredentialsByURI(registry)

    log.notice('', `Log in on ${replaceInfo(registry)}`)

    const { message, newCreds } = await auth.adduser(this.npm, {
      ...this.npm.flatOptions,
      creds,
      registry,
    })

    // prevent legacy pollution
    this.npm.config.delete('_token', 'user')
    this.npm.config.setCredentialsByURI(registry, newCreds)

    if (scope) {
      this.npm.config.set(`${scope}:registry`, registry, 'user')
    }
    await this.npm.config.save('user')

    output.standard(message)
  }
}
module.exports = AddUser

View File

@ -0,0 +1,121 @@
const npmAuditReport = require('npm-audit-report')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const auditError = require('../utils/audit-error.js')
const { log, output } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
const VerifySignatures = require('../utils/verify-signatures.js')
class Audit extends ArboristWorkspaceCmd {
  static description = 'Run a security audit'
  static name = 'audit'
  static params = [
    'audit-level',
    'dry-run',
    'force',
    'json',
    'package-lock-only',
    'package-lock',
    'omit',
    'include',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  static usage = ['[fix|signatures]']

  // Tab-completion: `fix` and `signatures` are the only subcommands.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['fix', 'signatures']
    }

    switch (argv[2]) {
      case 'fix':
      case 'signatures':
        return []
      default:
        throw Object.assign(new Error(argv[2] + ' not recognized'), {
          code: 'EUSAGE',
        })
    }
  }

  // `npm audit signatures` verifies registry signatures; anything else
  // (including `fix`) runs the advisory audit.
  async exec (args) {
    if (args[0] === 'signatures') {
      await this.auditSignatures()
    } else {
      await this.auditAdvisories(args)
    }
  }

  // Run the advisory audit; for `npm audit fix`, reify the fixed tree,
  // otherwise print the report and propagate its exit code.
  async auditAdvisories (args) {
    const fix = args[0] === 'fix'
    if (this.npm.config.get('package-lock') === false && fix) {
      throw this.usageError('fix can not be used without a package-lock')
    }
    const reporter = this.npm.config.get('json') ? 'json' : 'detail'
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      audit: true,
      path: this.npm.prefix,
      reporter,
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    await arb.audit({ fix })
    if (fix) {
      await reifyFinish(this.npm, arb)
    } else {
      // will throw if there's an error, because this is an audit command
      auditError(this.npm, arb.auditReport)
      const result = npmAuditReport(arb.auditReport, {
        ...opts,
        chalk: this.npm.chalk,
      })
      // keep any non-zero exit code already set by an earlier failure
      process.exitCode = process.exitCode || result.exitCode
      output.standard(result.report)
    }
  }

  // Verify signatures for the actually-installed tree (not supported for
  // global installs).
  async auditSignatures () {
    if (this.npm.global) {
      throw Object.assign(
        new Error('`npm audit signatures` does not support global packages'), {
          code: 'EAUDITGLOBAL',
        }
      )
    }

    log.verbose('audit', 'loading installed dependencies')
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    const tree = await arb.loadActual()
    // restrict verification to selected workspaces, or exclude workspace
    // deps entirely when workspaces are disabled
    let filterSet = new Set()
    if (opts.workspaces && opts.workspaces.length) {
      filterSet =
        arb.workspaceDependencySet(
          tree,
          opts.workspaces,
          this.npm.flatOptions.includeWorkspaceRoot
        )
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      filterSet =
        arb.excludeWorkspacesDependencySet(tree)
    }

    const verify = new VerifySignatures(tree, filterSet, this.npm, { ...opts })
    await verify.run()
  }
}
module.exports = Audit

View File

@ -0,0 +1,34 @@
const PackageUrlCmd = require('../package-url-cmd.js')
class Bugs extends PackageUrlCmd {
  static description = 'Report bugs for a package in a web browser'
  static name = 'bugs'

  // Resolve the best bug-report URL for a manifest: the explicit bugs field
  // first, then the hosted repo's issue tracker, then the npmjs.com page.
  getUrl (spec, mani) {
    const { bugs } = mani
    if (bugs) {
      if (typeof bugs === 'string') {
        return bugs
      }
      if (typeof bugs === 'object') {
        if (bugs.url) {
          return bugs.url
        }
        if (bugs.email) {
          return `mailto:${bugs.email}`
        }
      }
    }

    // try to get it from the repo, if possible
    const hosted = this.hostedFromMani(mani)
    const trackerUrl = hosted?.bugs()
    if (trackerUrl) {
      return trackerUrl
    }

    // just send them to the website, hopefully that has some info!
    return `https://www.npmjs.com/package/${mani.name}`
  }
}
module.exports = Bugs

View File

@ -0,0 +1,218 @@
const cacache = require('cacache')
const pacote = require('pacote')
const fs = require('node:fs/promises')
const { join } = require('node:path')
const semver = require('semver')
const BaseCommand = require('../base-cmd.js')
const npa = require('npm-package-arg')
const jsonParse = require('json-parse-even-better-errors')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { log, output } = require('proc-log')
// Scan cacache index keys for entries belonging to the package described by
// `parsed` (an npm-package-arg result), treating `parsed.rawSpec` as a semver
// range. Returns a Set of matching keys: tarball entries matched directly by
// URL, plus packument entries and the tarball keys referenced by any
// satisfying version inside each cached packument.
const searchCachePackage = async (path, parsed, cacheKeys) => {
  // Registry tarball keys look like ...:<registry>/<name>/-/<name>-<ver>.tgz;
  // the lookbehind rejects a scope segment masquerading as the name.
  // Fix: escape the dot in ".tgz" so e.g. "foo-1.0.0xtgz" no longer matches.
  /* eslint-disable-next-line max-len */
  const searchMFH = new RegExp(`^make-fetch-happen:request-cache:.*(?<!/[@a-zA-Z]+)/${parsed.name}/-/(${parsed.name}[^/]+\\.tgz)$`)
  const searchPack = new RegExp(`^make-fetch-happen:request-cache:.*/${parsed.escapedName}$`)
  const results = new Set()
  cacheKeys = new Set(cacheKeys)
  for (const key of cacheKeys) {
    // match on the public key registry url format
    if (searchMFH.test(key)) {
      // extract the version from the filename
      const filename = key.match(searchMFH)[1]
      const noExt = filename.slice(0, -4) // drop the ".tgz" suffix
      const noScope = `${parsed.name.split('/').pop()}-`
      const ver = noExt.slice(noScope.length)
      if (semver.satisfies(ver, parsed.rawSpec)) {
        results.add(key)
      }
      continue
    }
    // is this key a packument?
    if (!searchPack.test(key)) {
      continue
    }
    results.add(key)
    let packument, details
    try {
      details = await cacache.get(path, key)
      packument = jsonParse(details.data)
    } catch (_) {
      // if we couldn't parse the packument, abort
      continue
    }
    if (!packument.versions || typeof packument.versions !== 'object') {
      continue
    }
    // for every satisfying version, also include its tarball's cache key if
    // that key is actually present in the index
    for (const ver of Object.keys(packument.versions)) {
      if (semver.satisfies(ver, parsed.rawSpec)) {
        if (packument.versions[ver].dist &&
          typeof packument.versions[ver].dist === 'object' &&
          packument.versions[ver].dist.tarball !== undefined &&
          cacheKeys.has(`make-fetch-happen:request-cache:${packument.versions[ver].dist.tarball}`)
        ) {
          results.add(`make-fetch-happen:request-cache:${packument.versions[ver].dist.tarball}`)
        }
      }
    }
  }
  return results
}
class Cache extends BaseCommand {
  static description = 'Manipulates packages cache'
  static name = 'cache'
  static params = ['cache']
  static usage = [
    'add <package-spec>',
    'clean [<key>]',
    'ls [<name>@<version>]',
    'verify',
  ]

  // Tab-completion: only the first-level subcommands complete.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['add', 'clean', 'verify', 'ls']
    }

    // TODO - eventually...
    switch (argv[2]) {
      case 'verify':
      case 'clean':
      case 'add':
      case 'ls':
        return []
    }
  }

  // Dispatch on subcommand; rm/clear alias clean, check aliases verify.
  async exec (args) {
    const cmd = args.shift()
    switch (cmd) {
      case 'rm': case 'clear': case 'clean':
        return await this.clean(args)
      case 'add':
        return await this.add(args)
      case 'verify': case 'check':
        return await this.verify()
      case 'ls':
        return await this.ls(args)
      default:
        throw this.usageError()
    }
  }

  // npm cache clean [pkg]*
  // With no args, wipes the entire cache (requires --force); with keys,
  // removes each named index entry and its content blob.
  async clean (args) {
    const cachePath = join(this.npm.cache, '_cacache')
    if (args.length === 0) {
      if (!this.npm.config.get('force')) {
        throw new Error(`As of npm@5, the npm cache self-heals from corruption issues
by treating integrity mismatches as cache misses. As a result,
data extracted from the cache is guaranteed to be valid. If you
want to make sure everything is consistent, use \`npm cache verify\`
instead. Deleting the cache can only make npm go slower, and is
not likely to correct any problems you may be encountering!
On the other hand, if you're debugging an issue with the installer,
or race conditions that depend on the timing of writing to an empty
cache, you can use \`npm install --cache /tmp/empty-cache\` to use a
temporary cache instead of nuking the actual one.
If you're sure you want to delete the entire cache, rerun this command
with --force.`)
      }
      return fs.rm(cachePath, { recursive: true, force: true })
    }
    for (const key of args) {
      let entry
      try {
        entry = await cacache.get(cachePath, key)
      } catch (err) {
        log.warn('cache', `Not Found: ${key}`)
        // NOTE(review): `break` abandons any remaining keys after the first
        // miss; `continue` may have been intended — confirm upstream intent.
        break
      }
      output.standard(`Deleted: ${key}`)
      await cacache.rm.entry(cachePath, key)
      // XXX this could leave other entries without content!
      await cacache.rm.content(cachePath, entry.integrity)
    }
  }

  // npm cache add <tarball-url>...
  // npm cache add <pkg> <ver>...
  // npm cache add <tarball>...
  // npm cache add <folder>...
  async add (args) {
    log.silly('cache add', 'args', args)
    if (args.length === 0) {
      throw this.usageError('First argument to `add` is required')
    }

    await Promise.all(args.map(async spec => {
      log.silly('cache add', 'spec', spec)
      // we ask pacote for the thing, and then just throw the data
      // away so that it tee-pipes it into the cache like it does
      // for a normal request.
      await pacote.tarball.stream(spec, stream => {
        stream.resume()
        return stream.promise()
      }, { ...this.npm.flatOptions })

      await pacote.manifest(spec, {
        ...this.npm.flatOptions,
        fullMetadata: true,
      })
    }))
  }

  // Run cacache's integrity verification / garbage collection and report
  // the resulting stats.
  async verify () {
    const cache = join(this.npm.cache, '_cacache')
    // shorten the displayed path when the cache lives under $HOME
    const prefix = cache.indexOf(process.env.HOME) === 0
      ? `~${cache.slice(process.env.HOME.length)}`
      : cache
    const stats = await cacache.verify(cache)
    output.standard(`Cache verified and compressed (${prefix})`)
    output.standard(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
    if (stats.badContentCount) {
      output.standard(`Corrupted content removed: ${stats.badContentCount}`)
    }
    if (stats.reclaimedCount) {
      /* eslint-disable-next-line max-len */
      output.standard(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
    }
    if (stats.missingContent) {
      output.standard(`Missing content: ${stats.missingContent}`)
    }
    output.standard(`Index entries: ${stats.totalEntries}`)
    output.standard(`Finished in ${stats.runTime.total / 1000}s`)
  }

  // npm cache ls [--package <spec> ...]
  // With specs, prints only the cache keys belonging to those packages.
  async ls (specs) {
    const cachePath = join(this.npm.cache, '_cacache')
    const cacheKeys = Object.keys(await cacache.ls(cachePath))
    if (specs.length > 0) {
      // get results for each package spec specified
      const results = new Set()
      for (const spec of specs) {
        const parsed = npa(spec)
        if (parsed.rawSpec !== '' && parsed.type === 'tag') {
          throw this.usageError('Cannot list cache keys for a tagged package.')
        }
        const keySet = await searchCachePackage(cachePath, parsed, cacheKeys)
        for (const key of keySet) {
          results.add(key)
        }
      }
      [...results].sort(localeCompare).forEach(key => output.standard(key))
      return
    }
    cacheKeys.sort(localeCompare).forEach(key => output.standard(key))
  }
}
module.exports = Cache

129
Dependencies/NodeJS/node_modules/npm/lib/commands/ci.js generated vendored Normal file
View File

@ -0,0 +1,129 @@
const reifyFinish = require('../utils/reify-finish.js')
const runScript = require('@npmcli/run-script')
const fs = require('node:fs/promises')
const path = require('node:path')
const { log, time } = require('proc-log')
const validateLockfile = require('../utils/validate-lockfile.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const getWorkspaces = require('../utils/get-workspaces.js')
class CI extends ArboristWorkspaceCmd {
  static description = 'Clean install a project'
  static name = 'ci'

  // These are in the order they will show up in when running "-h"
  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'omit',
    'include',
    'strict-peer-deps',
    'foreground-scripts',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  // Install exactly what the lockfile describes: load and validate the
  // lockfile against package.json, remove existing node_modules, reify,
  // then run the standard install lifecycle scripts.
  async exec () {
    if (this.npm.global) {
      throw Object.assign(new Error('`npm ci` does not work for global packages'), {
        code: 'ECIGLOBAL',
      })
    }

    const where = this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      packageLock: true, // npm ci should never skip lock files
      path: where,
      save: false, // npm ci should never modify the lockfile or package.json
      workspaces: this.workspaceNames,
    }

    const arb = new Arborist(opts)
    await arb.loadVirtual().catch(er => {
      log.verbose('loadVirtual', er.stack)
      const msg =
        'The `npm ci` command can only install with an existing package-lock.json or\n' +
        'npm-shrinkwrap.json with lockfileVersion >= 1. Run an install with npm@5 or\n' +
        'later to generate a package-lock.json file, then try again.'
      throw this.usageError(msg)
    })

    // retrieves inventory of packages from loaded virtual tree (lock file)
    const virtualInventory = new Map(arb.virtualTree.inventory)

    // build ideal tree step needs to come right after retrieving the virtual
    // inventory since it's going to erase the previous ref to virtualTree
    await arb.buildIdealTree()

    // verifies that the packages from the ideal tree will match
    // the same versions that are present in the virtual tree (lock file)
    // throws a validation error in case of mismatches
    const errors = validateLockfile(virtualInventory, arb.idealTree.inventory)
    if (errors.length) {
      throw this.usageError(
        '`npm ci` can only install packages when your package.json and ' +
        'package-lock.json or npm-shrinkwrap.json are in sync. Please ' +
        'update your lock file with `npm install` ' +
        'before continuing.\n\n' +
        errors.join('\n')
      )
    }

    const dryRun = this.npm.config.get('dry-run')
    if (!dryRun) {
      const workspacePaths = await getWorkspaces([], {
        path: this.npm.localPrefix,
        includeWorkspaceRoot: true,
      })

      // Only remove node_modules after we've successfully loaded the virtual
      // tree and validated the lockfile
      await time.start('npm-ci:rm', async () => {
        return await Promise.all([...workspacePaths.values()].map(async modulePath => {
          const fullPath = path.join(modulePath, 'node_modules')
          // get the list of entries so we can skip the glob for performance
          const entries = await fs.readdir(fullPath, null).catch(() => [])
          return Promise.all(entries.map(folder => {
            return fs.rm(path.join(fullPath, folder), { force: true, recursive: true })
          }))
        }))
      })
    }

    await arb.reify(opts)

    const ignoreScripts = this.npm.config.get('ignore-scripts')
    // run the same set of scripts that `npm install` runs.
    if (!ignoreScripts) {
      const scripts = [
        'preinstall',
        'install',
        'postinstall',
        'prepublish', // XXX should we remove this finally??
        'preprepare',
        'prepare',
        'postprepare',
      ]
      const scriptShell = this.npm.config.get('script-shell') || undefined
      for (const event of scripts) {
        await runScript({
          path: where,
          args: [],
          scriptShell,
          stdio: 'inherit',
          event,
        })
      }
    }
    await reifyFinish(this.npm, arb)
  }
}
module.exports = CI

View File

@ -0,0 +1,283 @@
// Each command has a completion function that takes an options object and a cb
// The callback gets called with an error and an array of possible completions.
// The options object is built up based on the environment variables set by
// zsh or bash when calling a function for completion, based on the cursor
// position and the command line thus far. These are:
// COMP_CWORD: the index of the "word" in the command line being completed
// COMP_LINE: the full command line thusfar as a string
// COMP_POINT: the cursor index at the point of triggering completion
//
// We parse the command line with nopt, like npm does, and then create an
// options object containing:
// words: array of words in the command line
// w: the index of the word being completed (ie, COMP_CWORD)
// word: the word being completed
// line: the COMP_LINE
// lineLength
// point: the COMP_POINT, usually equal to line length, but not always, eg if
// the user has pressed the left-arrow to complete an earlier word
// partialLine: the line up to the point
// partialWord: the word being completed (which might be ''), up to the point
// conf: a nopt parse of the command line
//
// When the implementation completion method returns its list of strings,
// and arrays of strings, we filter that by any that start with the
// partialWord, since only those can possibly be valid matches.
//
// Matches are wrapped with ' to escape them, if necessary, and then printed
// one per line for the shell completion method to consume in IFS=$'\n' mode
// as an array.
const fs = require('node:fs/promises')
const nopt = require('nopt')
const { resolve } = require('node:path')
const { output } = require('proc-log')
const Npm = require('../npm.js')
const { definitions, shorthands } = require('@npmcli/config/lib/definitions')
const { commands, aliases, deref } = require('../utils/cmd-list.js')
const { isWindowsShell } = require('../utils/is-windows.js')
const BaseCommand = require('../base-cmd.js')
// true iff `file` exists and is a regular file; any stat failure counts as no
const fileExists = async (file) => {
  try {
    const st = await fs.stat(file)
    return st.isFile()
  } catch {
    return false
  }
}
// All completable config keys: long-form definition names plus shorthands.
const configNames = Object.keys(definitions)
const shorthandNames = Object.keys(shorthands)
const allConfs = configNames.concat(shorthandNames)
class Completion extends BaseCommand {
  static description = 'Tab Completion for npm'
  static name = 'completion'

  // completion for the completion command: suggest appending the setup
  // snippet to whichever rc files exist
  static async completion (opts) {
    if (opts.w > 2) {
      return
    }

    const [bashExists, zshExists] = await Promise.all([
      fileExists(resolve(process.env.HOME, '.bashrc')),
      fileExists(resolve(process.env.HOME, '.zshrc')),
    ])
    const out = []
    if (zshExists) {
      out.push(['>>', '~/.zshrc'])
    }
    if (bashExists) {
      out.push(['>>', '~/.bashrc'])
    }
    return out
  }

  // Either dump the shell completion script (no COMP_* env present) or parse
  // the COMP_* variables and print suggestions for the word at the cursor.
  async exec (args) {
    if (isWindowsShell) {
      const msg = 'npm completion supported only in MINGW / Git bash on Windows'
      throw Object.assign(new Error(msg), {
        code: 'ENOTSUP',
      })
    }

    const { COMP_CWORD, COMP_LINE, COMP_POINT, COMP_FISH } = process.env

    // if the COMP_* isn't in the env, then just dump the script.
    if (COMP_CWORD === undefined || COMP_LINE === undefined || COMP_POINT === undefined) {
      return dumpScript(resolve(this.npm.npmRoot, 'lib', 'utils', 'completion.sh'))
    }

    // ok we're actually looking at the envs and outputting the suggestions
    // get the partial line and partial word,
    // if the point isn't at the end.
    // ie, tabbing at: npm foo b|ar
    const w = +COMP_CWORD
    const words = args.map(unescape)
    const word = words[w]
    const line = COMP_LINE
    const point = +COMP_POINT
    const partialLine = line.slice(0, point)
    const partialWords = words.slice(0, w)

    // figure out where in that last word the point is.
    const partialWordRaw = args[w]
    let i = partialWordRaw.length
    while (partialWordRaw.slice(0, i) !== partialLine.slice(-1 * i) && i > 0) {
      i--
    }
    const partialWord = unescape(partialWordRaw.slice(0, i))
    partialWords.push(partialWord)

    const opts = {
      isFish: COMP_FISH === 'true',
      words,
      w,
      word,
      line,
      lineLength: line.length,
      point,
      partialLine,
      partialWords,
      partialWord,
      raw: args,
    }

    // before a literal '--', the word under the cursor may be a flag or a
    // value for a preceding flag
    if (partialWords.slice(0, -1).indexOf('--') === -1) {
      if (word.charAt(0) === '-') {
        return this.wrap(opts, configCompl(opts))
      }

      if (words[w - 1] &&
        words[w - 1].charAt(0) === '-' &&
        !isFlag(words[w - 1])) {
        // awaiting a value for a non-bool config.
        // don't even try to do this for now
        return this.wrap(opts, configValueCompl(opts))
      }
    }

    // try to find the npm command.
    // it's the first thing after all the configs.
    // take a little shortcut and use npm's arg parsing logic.
    // don't have to worry about the last arg being implicitly
    // boolean'ed, since the last block will catch that.
    const types = Object.entries(definitions).reduce((acc, [key, def]) => {
      acc[key] = def.type
      return acc
    }, {})
    const parsed = opts.conf =
      nopt(types, shorthands, partialWords.slice(0, -1), 0)
    // check if there's a command already.
    const cmd = parsed.argv.remain[1]
    if (!cmd) {
      // no command yet: complete command names (cmdCompl only uses opts;
      // the second argument is ignored)
      return this.wrap(opts, cmdCompl(opts, this.npm))
    }

    Object.keys(parsed).forEach(k => this.npm.config.set(k, parsed[k]))

    // at this point, if words[1] is some kind of npm command,
    // then complete on it.
    // otherwise, do nothing
    try {
      const { completion } = Npm.cmd(cmd)
      if (completion) {
        const comps = await completion(opts, this.npm)
        return this.wrap(opts, comps)
      }
    } catch {
      // it wasnt a valid command, so do nothing
    }
  }

  // The command should respond with an array. Loop over that,
  // wrapping quotes around any that have spaces, and writing
  // them to stdout.
  // If any of the items are arrays, then join them with a space.
  // Ie, returning ['a', 'b c', ['d', 'e']] would allow it to expand
  // to: 'a', 'b c', or 'd' 'e'
  wrap (opts, compls) {
    // TODO this was dead code, leaving it in case we find some command we
    // forgot that requires this. if so *that command should fix its
    // completions*
    // compls = compls.map(w => !/\s+/.test(w) ? w : '\'' + w + '\'')

    if (opts.partialWord) {
      compls = compls.filter(c => c.startsWith(opts.partialWord))
    }

    if (compls.length > 0) {
      output.standard(compls.join('\n'))
    }
  }
}
// Print the shell completion script at `p`, stripping its shebang line.
// stdout errors are settled specially: EPIPE is swallowed because on OS X
// the "source"/"." builtin closes its file argument after a single read, so
// `. <(npm completion)` always raises EPIPE even when the write succeeded.
const dumpScript = async (p) => {
  const raw = await fs.readFile(p, 'utf8')
  const script = raw.replace(/^#!.*?\n/, '')

  await new Promise((res, rej) => {
    let settled = false
    // settle exactly once: no error or EPIPE resolves, anything else rejects
    const finish = (er) => {
      if (settled) {
        return
      }
      settled = true
      if (er && er.errno !== 'EPIPE') {
        rej(er)
      } else {
        res()
      }
    }
    process.stdout.on('error', er => finish(er))
    process.stdout.write(script, () => finish())
  })
}
// Undo shell escaping on a completion word: a single-quoted word has its
// surrounding quotes stripped; otherwise backslash-escaped spaces are undone.
const unescape = (w) => {
  if (w.charAt(0) === '\'') {
    return w.replace(/^'|'$/g, '')
  }
  return w.replace(/\\ /g, ' ')
}
// the current word has a dash. Return the config names,
// with the same number of dashes as the current word has.
const configCompl = opts => {
const word = opts.word
const split = word.match(/^(-+)((?:no-)*)(.*)$/)
const dashes = split[1]
const no = split[2]
const flags = configNames.filter(isFlag)
return allConfs.map(c => dashes + c)
.concat(flags.map(f => dashes + (no || 'no-') + f))
}
// expand with the valid values of various config values.
// not yet implemented.
const configValueCompl = () => {
  return []
}
// check if the thing is a flag or not: negated forms, Boolean-typed configs,
// and shorthands all behave as flags (shorthands never take args).
const isFlag = (word) => {
  const parts = word.match(/^(-*)((?:no-)+)?(.*)$/)
  const negation = parts[2]
  const name = parts[3]
  if (negation) {
    return negation
  }
  const { type } = definitions[name]
  if (type === Boolean) {
    return true
  }
  if (Array.isArray(type) && type.includes(Boolean)) {
    return true
  }
  return shorthands[name]
}
// complete against the npm commands
// if they all resolve to the same thing, just return the thing it already is
const cmdCompl = (opts) => {
  const all = commands.concat(Object.keys(aliases))
  const matched = all.filter(c => c.startsWith(opts.partialWord))
  if (matched.length === 0) {
    return matched
  }
  const targets = new Set(matched.map(c => deref(c)))
  if (targets.size === 1) {
    return [...targets]
  }
  return all
}
module.exports = Completion

View File

@ -0,0 +1,410 @@
const { mkdir, readFile, writeFile } = require('node:fs/promises')
const { dirname, resolve } = require('node:path')
const { spawn } = require('node:child_process')
const { EOL } = require('node:os')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const pkgJson = require('@npmcli/package-json')
const { defaults, definitions } = require('@npmcli/config/lib/definitions')
const { log, output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
const { redact } = require('@npmcli/redact')
// These are the configs that we can nerf-dart. Not all of them currently even
// *have* config definitions so we have to explicitly validate them here.
// This is used to validate during "npm config set"
// Credential-ish keys that may be nerf-darted (scoped to a registry URL).
const nerfDarts = [
  '_auth',
  '_authToken',
  '_password',
  'certfile',
  'email',
  'keyfile',
  'username',
]

// These are the config values to swap with "protected". It does not catch
// every single sensitive thing a user may put in the npmrc file but it gets
// the common ones. This is distinct from nerfDarts because that is used to
// validate valid configs during "npm config set", and folks may have old
// invalid entries lying around in a config file that we still want to protect
// when running "npm config list"
// This is a more general list of values to consider protected. You can not
// "npm config get" them, and they will not display during "npm config list"
// NOTE: `protected` is a reserved word only in strict mode; this CommonJS
// module is non-strict, so the binding is legal here.
const protected = [
  'auth',
  'authToken',
  'certfile',
  'email',
  'keyfile',
  'password',
  'username',
]
// take an array of `[key, value, k2=v2, k3, v3, ...]` and turn into
// { key: value, k2: v2, k3: v3 } — a bare key consumes the next argument as
// its value, or becomes '' when it is the last argument.
const keyValues = (args) => {
  const result = {}
  let i = 0
  while (i < args.length) {
    const [rawKey, ...valParts] = args[i].split('=')
    let value
    if (valParts.length > 0) {
      value = valParts.join('=')
    } else if (i < args.length - 1) {
      i++
      value = args[i]
    } else {
      value = ''
    }
    result[rawKey.trim()] = value.trim()
    i++
  }
  return result
}
const isProtected = (k) => {
// _password
if (k.startsWith('_')) {
return true
}
if (protected.includes(k)) {
return true
}
// //localhost:8080/:_password
if (k.startsWith('//')) {
if (k.includes(':_')) {
return true
}
// //registry:_authToken or //registry:authToken
for (const p of protected) {
if (k.endsWith(`:${p}`) || k.endsWith(`:_${p}`)) {
return true
}
}
}
return false
}
// Private fields are either protected by name, or their value would be
// altered by redaction (ie, it contains something secret-looking).
const isPrivate = (k, v) => {
  if (isProtected(k)) {
    return true
  }
  return redact(v) !== v
}
// Render `key = value` for display, masking protected keys and redacting
// secret-looking values. (isProtected takes only the key.)
const displayVar = (k, v) => {
  const shown = isProtected(k) ? '(protected)' : JSON.stringify(redact(v))
  return `${k} = ${shown}`
}
// `npm config` — read, write, list, edit, and repair the npm config files.
class Config extends BaseCommand {
  static description = 'Manage the npm configuration files'
  static name = 'config'
  static usage = [
    'set <key>=<value> [<key>=<value> ...]',
    'get [<key> [<key> ...]]',
    'delete <key> [<key> ...]',
    'list [--json]',
    'edit',
    'fix',
  ]

  static params = [
    'json',
    'global',
    'editor',
    'location',
    'long',
  ]

  static ignoreImplicitWorkspace = false
  static skipConfigValidation = true

  // Shell completion: offer subcommands first, then config keys for the
  // actions that take them (get/set/delete/rm).
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv[1] !== 'config') {
      argv.unshift('config')
    }
    if (argv.length === 2) {
      const cmds = ['get', 'set', 'delete', 'ls', 'rm', 'edit', 'fix']
      // avoid offering both `ls` and `list` for a bare `l` prefix
      if (opts.partialWord !== 'l') {
        cmds.push('list')
      }
      return cmds
    }

    const action = argv[2]
    switch (action) {
      case 'set':
        // todo: complete with valid values, if possible.
        if (argv.length > 3) {
          return []
        }
        // fallthrough
        /* eslint no-fallthrough:0 */
      case 'get':
      case 'delete':
      case 'rm':
        return Object.keys(definitions)
      case 'edit':
      case 'list':
      case 'ls':
      case 'fix':
      default:
        return []
    }
  }

  // Dispatch to the subcommand implementation; unknown actions show usage.
  async exec ([action, ...args]) {
    switch (action) {
      case 'set':
        await this.set(args)
        break
      case 'get':
        await this.get(args)
        break
      case 'delete':
      case 'rm':
      case 'del':
        await this.del(args)
        break
      case 'list':
      case 'ls':
        await (this.npm.flatOptions.json ? this.listJson() : this.list())
        break
      case 'edit':
        await this.edit()
        break
      case 'fix':
        await this.fix()
        break
      default:
        throw this.usageError()
    }
  }

  // `npm config set k=v ...` — validate each key, write it to the config
  // file selected by --location, then save. An empty value deletes the key.
  async set (args) {
    if (!args.length) {
      throw this.usageError()
    }

    const where = this.npm.flatOptions.location
    for (const [key, val] of Object.entries(keyValues(args))) {
      log.info('config', 'set %j %j', key, val)
      // strip any nerf-dart/registry prefix (e.g. `//host/:_authToken`)
      // down to the bare option name before validating it
      const baseKey = key.split(':').pop()
      if (!this.npm.config.definitions[baseKey] && !nerfDarts.includes(baseKey)) {
        throw new Error(`\`${baseKey}\` is not a valid npm option`)
      }
      const deprecated = this.npm.config.definitions[baseKey]?.deprecated
      if (deprecated) {
        throw new Error(
          `The \`${baseKey}\` option is deprecated, and can not be set in this way${deprecated}`
        )
      }

      // setting a key to the empty string removes it
      if (val === '') {
        this.npm.config.delete(key, where)
      } else {
        this.npm.config.set(key, val, where)
      }

      if (!this.npm.config.validate(where)) {
        log.warn('config', 'omitting invalid config values')
      }
    }

    await this.npm.config.save(where)
  }

  // `npm config get [keys...]` — print values; with no keys, same as `list`.
  // Protected/private keys are refused rather than printed.
  async get (keys) {
    if (!keys.length) {
      return this.list()
    }

    const out = []
    for (const key of keys) {
      const val = this.npm.config.get(key)
      if (isPrivate(key, val)) {
        throw new Error(`The ${key} option is protected, and can not be retrieved in this way`)
      }
      // prefix with `key=` only when printing more than one value
      const pref = keys.length > 1 ? `${key}=` : ''
      out.push(pref + val)
    }
    output.standard(out.join('\n'))
  }

  // `npm config delete key ...` — remove keys from the selected location.
  async del (keys) {
    if (!keys.length) {
      throw this.usageError()
    }

    const where = this.npm.flatOptions.location
    for (const key of keys) {
      this.npm.config.delete(key, where)
    }
    await this.npm.config.save(where)
  }

  // `npm config edit` — write an annotated copy of the config file
  // (current values plus commented-out defaults) and open it in $EDITOR.
  async edit () {
    const ini = require('ini')
    const e = this.npm.flatOptions.editor
    const where = this.npm.flatOptions.location
    const file = this.npm.config.data.get(where).source

    // save first, just to make sure it's synced up
    // this also removes all the comments from the last time we edited it.
    await this.npm.config.save(where)

    // missing file is fine — start from an empty config
    const data = (
      await readFile(file, 'utf8').catch(() => '')
    ).replace(/\r\n/g, '\n')
    const entries = Object.entries(defaults)
    // render every default as a `; key=value` comment block
    const defData = entries.reduce((str, [key, val]) => {
      const obj = { [key]: val }
      const i = ini.stringify(obj)
        .replace(/\r\n/g, '\n') // normalizes output from ini.stringify
        .replace(/\n$/m, '')
        .replace(/^/g, '; ')
        .replace(/\n/g, '\n; ')
        .split('\n')
      // NOTE(review): `i` is an array here, so `str + '\n' + i` relies on the
      // implicit Array#toString comma-join for multi-line output — confirm
      // this is intentional
      return str + '\n' + i
    }, '')
    const tmpData = `;;;;
; npm ${where}config file: ${file}
; this is a simple ini-formatted file
; lines that start with semi-colons are comments
; run \`npm help 7 config\` for documentation of the various options
;
; Configs like \`@scope:registry\` map a scope to a given registry url.
;
; Configs like \`//<hostname>/:_authToken\` are auth that is restricted
; to the registry host specified.
${data.split('\n').sort(localeCompare).join('\n').trim()}
;;;;
; all available options shown below with default values
;;;;
${defData}
`.split('\n').join(EOL)
    await mkdir(dirname(file), { recursive: true })
    await writeFile(file, tmpData, 'utf8')
    // launch the editor and wait for it to exit; non-zero exit rejects
    await new Promise((res, rej) => {
      const [bin, ...args] = e.split(/\s+/)
      const editor = spawn(bin, [...args, file], { stdio: 'inherit' })
      editor.on('exit', (code) => {
        if (code) {
          return rej(new Error(`editor process exited with code: ${code}`))
        }
        return res()
      })
    })
  }

  // `npm config fix` — repair invalid auth config reported by validate(),
  // then report what was renamed/deleted and save the affected files.
  async fix () {
    let problems

    try {
      this.npm.config.validate()
      return // if validate doesn't throw we have nothing to do
    } catch (err) {
      // coverage skipped because we don't need to test rethrowing errors
      // istanbul ignore next
      if (err.code !== 'ERR_INVALID_AUTH') {
        throw err
      }

      problems = err.problems
    }

    // when --location was given explicitly, only fix that config file
    if (!this.npm.config.isDefault('location')) {
      problems = problems.filter((problem) => {
        return problem.where === this.npm.config.get('location')
      })
    }

    this.npm.config.repair(problems)
    const locations = []

    output.standard('The following configuration problems have been repaired:\n')
    const summary = problems.map(({ action, from, to, key, where }) => {
      // coverage disabled for else branch because it is intentionally omitted
      // istanbul ignore else
      if (action === 'rename') {
        // we keep track of which configs were modified here so we know what to save later
        locations.push(where)
        return `~ \`${from}\` renamed to \`${to}\` in ${where} config`
      } else if (action === 'delete') {
        locations.push(where)
        return `- \`${key}\` deleted from ${where} config`
      }
    }).join('\n')
    output.standard(summary)

    return await Promise.all(locations.map((location) => this.npm.config.save(location)))
  }

  // `npm config list` — human-readable dump of every config layer, plus the
  // project's publishConfig. With --long, defaults are included too.
  async list () {
    const msg = []
    // long does not have a flattener
    const long = this.npm.config.get('long')
    for (const [where, { data, source }] of this.npm.config.data.entries()) {
      if (where === 'default' && !long) {
        continue
      }

      const entries = Object.entries(data).sort(([a], [b]) => localeCompare(a, b))
      if (!entries.length) {
        continue
      }
      msg.push(`; "${where}" config from ${source}`, '')
      for (const [k, v] of entries) {
        const display = displayVar(k, v)
        const src = this.npm.config.find(k)
        // annotate values that are shadowed by a higher-priority layer
        msg.push(src === where ? display : `; ${display} ; overridden by ${src}`)
        // NOTE(review): push() with no args is a no-op — possibly meant
        // to be msg.push('') for spacing; confirm
        msg.push()
      }
      msg.push('')
    }

    if (!long) {
      msg.push(
        `; node bin location = ${process.execPath}`,
        `; node version = ${process.version}`,
        `; npm local prefix = ${this.npm.localPrefix}`,
        `; npm version = ${this.npm.version}`,
        `; cwd = ${process.cwd()}`,
        `; HOME = ${process.env.HOME}`,
        '; Run `npm config ls -l` to show all defaults.'
      )
      msg.push('')
    }

    if (!this.npm.global) {
      const { content } = await pkgJson.normalize(this.npm.prefix).catch(() => ({ content: {} }))

      if (content.publishConfig) {
        const pkgPath = resolve(this.npm.prefix, 'package.json')
        msg.push(`; "publishConfig" from ${pkgPath}`)
        msg.push('; This set of config values will be used at publish-time.', '')
        const entries = Object.entries(content.publishConfig)
          .sort(([a], [b]) => localeCompare(a, b))
        for (const [k, value] of entries) {
          msg.push(displayVar(k, value))
        }
        msg.push('')
      }
    }

    output.standard(msg.join('\n').trim())
  }

  // `npm config list --json` — JSON dump of the resolved config, with
  // private/protected values omitted entirely.
  async listJson () {
    const publicConf = {}
    for (const key in this.npm.config.list[0]) {
      const value = this.npm.config.get(key)
      if (isPrivate(key, value)) {
        continue
      }
      publicConf[key] = value
    }
    output.buffer(publicConf)
  }
}

module.exports = Config

View File

@ -0,0 +1,51 @@
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm dedupe` — reduce duplication in the package tree via Arborist.
class Dedupe extends ArboristWorkspaceCmd {
  static description = 'Reduce duplication in the package tree'
  static name = 'dedupe'
  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'strict-peer-deps',
    'package-lock',
    'omit',
    'include',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  // Load an Arborist rooted at the local prefix, dedupe it, and finish the
  // reify (audit report, funding message, etc).
  async exec () {
    // deduping a global install space is not supported
    if (this.npm.global) {
      const err = new Error('`npm dedupe` does not work in global mode.')
      err.code = 'EDEDUPEGLOBAL'
      throw err
    }

    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      dryRun: this.npm.config.get('dry-run'),
      // Saving during dedupe would only update if one of your direct
      // dependencies was also duplicated somewhere in your tree. It would be
      // confusing if running this were to also update your package.json. In
      // order to reduce potential confusion we set this to false.
      save: false,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.dedupe(opts)
    await reifyFinish(this.npm, arb)
  }
}

module.exports = Dedupe

View File

@ -0,0 +1,76 @@
const fetch = require('npm-registry-fetch')
const { otplease } = require('../utils/auth.js')
const npa = require('npm-package-arg')
const { log } = require('proc-log')
const semver = require('semver')
const getIdentity = require('../utils/get-identity.js')
const libaccess = require('libnpmaccess')
const BaseCommand = require('../base-cmd.js')
// `npm deprecate <spec> <message>` — mark matching published versions as
// deprecated (an empty message un-deprecates).
class Deprecate extends BaseCommand {
  static description = 'Deprecate a version of a package'
  static name = 'deprecate'
  static usage = ['<package-spec> <message>']
  static params = [
    'registry',
    'otp',
  ]

  static ignoreImplicitWorkspace = true

  // Completion: suggest packages the logged-in user can write to.
  static async completion (opts, npm) {
    if (opts.conf.argv.remain.length > 1) {
      return []
    }

    const username = await getIdentity(npm, npm.flatOptions)
    const packages = await libaccess.getPackages(username, npm.flatOptions)
    const prefix = opts.conf.argv.remain[0]
    return Object.keys(packages)
      .filter((name) =>
        packages[name] === 'write' &&
        (opts.conf.argv.remain.length === 0 || name.startsWith(prefix)))
  }

  async exec ([pkg, msg]) {
    // msg == null because '' is a valid value, it indicates undeprecate
    if (!pkg || msg == null) {
      throw this.usageError()
    }

    // parse the spec and make sure the range part is a valid semver range
    const p = npa(pkg)
    const spec = p.rawSpec === '*' ? '*' : p.fetchSpec

    if (semver.validRange(spec, true) === null) {
      throw new Error(`invalid version range: ${spec}`)
    }

    // fetch the full packument (write view) so we can mutate and re-PUT it
    const uri = '/' + p.escapedName
    const packument = await fetch.json(uri, {
      ...this.npm.flatOptions,
      spec: p,
      query: { write: true },
    })

    const matching = Object.keys(packument.versions)
      .filter(v => semver.satisfies(v, spec, { includePrerelease: true }))

    if (!matching.length) {
      log.warn('deprecate', 'No version found for', p.rawSpec)
      return
    }

    for (const v of matching) {
      packument.versions[v].deprecated = msg
    }

    return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, {
      ...opts,
      spec: p,
      method: 'PUT',
      body: packument,
      ignoreBody: true,
    }))
  }
}

module.exports = Deprecate

View File

@ -0,0 +1,291 @@
const { resolve } = require('node:path')
const semver = require('semver')
const libnpmdiff = require('libnpmdiff')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const pickManifest = require('npm-pick-manifest')
const { log, output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// `npm diff` — compare two package specs (from --diff args or the current
// project) and print a unified diff of their contents.
class Diff extends BaseCommand {
  static description = 'The registry diff command'
  static name = 'diff'
  static usage = [
    '[...<paths>]',
  ]

  static params = [
    'diff',
    'diff-name-only',
    'diff-unified',
    'diff-ignore-all-space',
    'diff-no-prefix',
    'diff-src-prefix',
    'diff-dst-prefix',
    'diff-text',
    'global',
    'tag',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Resolve the two specs to compare, then hand off to libnpmdiff.
  // Positional args (if any) limit the diff to those file paths.
  async exec (args) {
    // drop empty --diff values
    const specs = this.npm.config.get('diff').filter(d => d)
    if (specs.length > 2) {
      throw this.usageError(`Can't use more than two --diff arguments.`)
    }

    // execWorkspaces may have set this already
    if (!this.prefix) {
      this.prefix = this.npm.prefix
    }

    // this is the "top" directory, one up from node_modules
    // in global mode we have to walk one up from globalDir because our
    // node_modules is sometimes under ./lib, and in global mode we're only ever
    // walking through node_modules (because we will have been given a package
    // name already)
    if (this.npm.global) {
      this.top = resolve(this.npm.globalDir, '..')
    } else {
      this.top = this.prefix
    }

    const [a, b] = await this.retrieveSpecs(specs)
    log.info('diff', { src: a, dst: b })

    const res = await libnpmdiff([a, b], {
      ...this.npm.flatOptions,
      diffFiles: args,
      where: this.top,
    })
    return output.standard(res)
  }

  // Run the diff once per selected workspace, rooting prefix/top at each.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const workspacePath of this.workspacePaths) {
      this.top = workspacePath
      this.prefix = workspacePath
      await this.exec(args)
    }
  }

  // get the package name from the packument at `path`
  // throws if no packument is present OR if it does not have `name` attribute
  async packageName () {
    let name
    try {
      const { content: pkg } = await pkgJson.normalize(this.prefix)
      name = pkg.name
    } catch (e) {
      log.verbose('diff', 'could not read project dir package.json')
    }

    if (!name) {
      throw this.usageError('Needs multiple arguments to compare or run from a project dir.')
    }

    return name
  }

  // Turn 0, 1, or 2 user-provided specs into exactly two concrete specs
  // suitable for libnpmdiff. The `file:` specs percent-encode `#` because
  // npa treats it as a committish separator.
  async retrieveSpecs ([a, b]) {
    if (a && b) {
      const specs = await this.convertVersionsToSpecs([a, b])
      return this.findVersionsByPackageName(specs)
    }

    // no arguments, defaults to comparing cwd
    // to its latest published registry version
    if (!a) {
      const pkgName = await this.packageName()
      return [
        `${pkgName}@${this.npm.config.get('tag')}`,
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    }

    // single argument, used to compare wanted versions of an
    // installed dependency or to compare the cwd to a published version
    let noPackageJson
    let pkgName
    try {
      const { content: pkg } = await pkgJson.normalize(this.prefix)
      pkgName = pkg.name
    } catch (e) {
      log.verbose('diff', 'could not read project dir package.json')
      noPackageJson = true
    }

    const missingPackageJson =
      this.usageError('Needs multiple arguments to compare or run from a project dir.')

    // using a valid semver range, that means it should just diff
    // the cwd against a published version to the registry using the
    // same project name and the provided semver range
    if (semver.validRange(a)) {
      if (!pkgName) {
        throw missingPackageJson
      }
      return [
        `${pkgName}@${a}`,
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    }

    // when using a single package name as arg and it's part of the current
    // install tree, then retrieve the current installed version and compare
    // it against the same value `npm outdated` would suggest you to update to
    const spec = npa(a)
    if (spec.registry) {
      let actualTree
      let node
      const Arborist = require('@npmcli/arborist')
      try {
        const opts = {
          ...this.npm.flatOptions,
          path: this.top,
        }
        const arb = new Arborist(opts)
        actualTree = await arb.loadActual(opts)
        // first installed node matching the spec name, if any
        node = actualTree &&
          actualTree.inventory.query('name', spec.name)
            .values().next().value
      } catch (e) {
        log.verbose('diff', 'failed to load actual install tree')
      }

      // not installed locally: fall back to comparing the registry version
      // against the cwd project
      if (!node || !node.name || !node.package || !node.package.version) {
        if (noPackageJson) {
          throw missingPackageJson
        }
        return [
          `${spec.name}@${spec.fetchSpec}`,
          `file:${this.prefix.replace(/#/g, '%23')}`,
        ]
      }

      const tryRootNodeSpec = () =>
        (actualTree && actualTree.edgesOut.get(spec.name) || {}).spec

      const tryAnySpec = () => {
        for (const edge of node.edgesIn) {
          return edge.spec
        }
      }

      const aSpec = `file:${node.realpath.replace(/#/g, '%23')}`

      // finds what version of the package to compare against, if a exact
      // version or tag was passed than it should use that, otherwise
      // work from the top of the arborist tree to find the original semver
      // range declared in the package that depends on the package.
      let bSpec
      if (spec.rawSpec !== '*') {
        bSpec = spec.rawSpec
      } else {
        const bTargetVersion =
          tryRootNodeSpec()
          || tryAnySpec()

        // figure out what to compare against,
        // follows same logic to npm outdated "Wanted" results
        const packument = await pacote.packument(spec, {
          ...this.npm.flatOptions,
          preferOnline: true,
        })
        bSpec = pickManifest(
          packument,
          bTargetVersion,
          { ...this.npm.flatOptions }
        ).version
      }

      return [
        `${spec.name}@${aSpec}`,
        `${spec.name}@${bSpec}`,
      ]
    } else if (spec.type === 'directory') {
      return [
        `file:${spec.fetchSpec.replace(/#/g, '%23')}`,
        `file:${this.prefix.replace(/#/g, '%23')}`,
      ]
    } else {
      throw this.usageError(`Spec type ${spec.type} not supported.`)
    }
  }

  // Expand bare semver ranges into full `name@range` specs, inferring the
  // name from the other argument or from the cwd project.
  async convertVersionsToSpecs ([a, b]) {
    const semverA = semver.validRange(a)
    const semverB = semver.validRange(b)

    // both specs are semver versions, assume current project dir name
    if (semverA && semverB) {
      let pkgName
      try {
        const { content: pkg } = await pkgJson.normalize(this.prefix)
        pkgName = pkg.name
      } catch (e) {
        log.verbose('diff', 'could not read project dir package.json')
      }

      if (!pkgName) {
        throw this.usageError('Needs to be run from a project dir in order to diff two versions.')
      }

      return [`${pkgName}@${a}`, `${pkgName}@${b}`]
    }

    // otherwise uses the name from the other arg to
    // figure out the spec.name of what to compare
    if (!semverA && semverB) {
      return [a, `${npa(a).name}@${b}`]
    }

    if (semverA && !semverB) {
      return [`${npa(b).name}@${a}`, b]
    }

    // no valid semver ranges used
    return [a, b]
  }

  // For `name` specs with no range (`*`), prefer the locally installed copy
  // (as a `file:` spec) over the registry version when one exists.
  async findVersionsByPackageName (specs) {
    let actualTree
    const Arborist = require('@npmcli/arborist')
    try {
      const opts = {
        ...this.npm.flatOptions,
        path: this.top,
      }
      const arb = new Arborist(opts)
      actualTree = await arb.loadActual(opts)
    } catch (e) {
      log.verbose('diff', 'failed to load actual install tree')
    }

    return specs.map(i => {
      const spec = npa(i)
      if (spec.rawSpec !== '*') {
        return i
      }

      const node = actualTree
        && actualTree.inventory.query('name', spec.name)
          .values().next().value

      const res = !node || !node.package || !node.package.version
        ? spec.fetchSpec
        : `file:${node.realpath.replace(/#/g, '%23')}`

      return `${spec.name}@${res}`
    })
  }
}

module.exports = Diff

View File

@ -0,0 +1,209 @@
const npa = require('npm-package-arg')
const regFetch = require('npm-registry-fetch')
const semver = require('semver')
const { log, output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// `npm dist-tag` — add, remove, and list distribution tags on a package.
class DistTag extends BaseCommand {
  static description = 'Modify package distribution tags'
  static params = ['workspace', 'workspaces', 'include-workspace-root']
  static name = 'dist-tag'
  static usage = [
    'add <package-spec (with version)> [<tag>]',
    'rm <package-spec> <tag>',
    'ls [<package-spec>]',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Completion only offers the three subcommand names.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['add', 'rm', 'ls']
    }

    switch (argv[2]) {
      default:
        return []
    }
  }

  // Dispatch on the (aliased) subcommand; a bare package name lists tags.
  async exec ([cmdName, pkg, tag]) {
    const opts = {
      ...this.npm.flatOptions,
    }

    if (['add', 'a', 'set', 's'].includes(cmdName)) {
      return this.add(pkg, tag, opts)
    }

    if (['rm', 'r', 'del', 'd', 'remove'].includes(cmdName)) {
      return this.remove(pkg, tag, opts)
    }

    if (['ls', 'l', 'sl', 'list'].includes(cmdName)) {
      return this.list(pkg, opts)
    }

    if (!pkg) {
      // when only using the pkg name the default behavior
      // should be listing the existing tags
      return this.list(cmdName, opts)
    } else {
      throw this.usageError()
    }
  }

  // In workspace mode, list-style invocations fan out to every workspace;
  // anything else falls back to the regular implementation.
  async execWorkspaces ([cmdName, pkg, tag]) {
    // cmdName is some form of list
    // pkg is one of:
    // - unset
    // - .
    // - .@version
    if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) {
      return this.listWorkspaces()
    }

    // pkg is unset
    // cmdName is one of:
    // - unset
    // - .
    // - .@version
    if (!pkg && (!cmdName || cmdName === '.' || /^\.@/.test(cmdName))) {
      return this.listWorkspaces()
    }

    // anything else is just a regular dist-tag command
    // so we fallback to the non-workspaces implementation
    log.warn('dist-tag', 'Ignoring workspaces for specified package')
    return this.exec([cmdName, pkg, tag])
  }

  // PUT a tag -> version mapping to the registry for the given spec.
  // Tag defaults to the configured --tag when not given explicitly.
  async add (spec, tag, opts) {
    spec = npa(spec || '')
    const version = spec.rawSpec
    const defaultTag = tag || this.npm.config.get('tag')

    log.verbose('dist-tag add', defaultTag, 'to', spec.name + '@' + version)

    // make sure new spec with tag is valid, this will throw if invalid
    npa(`${spec.name}@${defaultTag}`)

    if (!spec.name || !version || !defaultTag) {
      throw this.usageError('must provide a spec with a name and version, and a tag to add')
    }

    const t = defaultTag.trim()

    // a tag that parses as a semver range would be ambiguous with versions
    if (semver.validRange(t)) {
      throw new Error('Tag name must not be a valid SemVer range: ' + t)
    }

    const tags = await this.fetchTags(spec, opts)
    if (tags[t] === version) {
      log.warn('dist-tag add', t, 'is already set to version', version)
      return
    }
    tags[t] = version
    const url =
      `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(t)}`
    const reqOpts = {
      ...opts,
      method: 'PUT',
      body: JSON.stringify(version),
      headers: {
        'content-type': 'application/json',
      },
      spec,
    }
    // otplease retries the request with an OTP prompt if 2FA is required
    await otplease(this.npm, reqOpts, o => regFetch(url, o))
    output.standard(`+${t}: ${spec.name}@${version}`)
  }

  // DELETE a tag from the registry for the given spec.
  async remove (spec, tag, opts) {
    spec = npa(spec || '')
    log.verbose('dist-tag del', tag, 'from', spec.name)

    if (!spec.name) {
      throw this.usageError()
    }

    const tags = await this.fetchTags(spec, opts)
    if (!tags[tag]) {
      log.info('dist-tag del', tag, 'is not a dist-tag on', spec.name)
      throw new Error(tag + ' is not a dist-tag on ' + spec.name)
    }
    const version = tags[tag]
    delete tags[tag]
    const url =
      `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(tag)}`
    const reqOpts = {
      ...opts,
      method: 'DELETE',
      spec,
    }
    await otplease(this.npm, reqOpts, o => regFetch(url, o))
    output.standard(`-${tag}: ${spec.name}@${version}`)
  }

  // Print `tag: version` lines, sorted; with no spec, use the cwd project.
  // Returns the tags object (used by listWorkspaces).
  async list (spec, opts) {
    if (!spec) {
      if (this.npm.global) {
        throw this.usageError()
      }
      const { content: { name } } = await pkgJson.normalize(this.npm.prefix)
      if (!name) {
        throw this.usageError()
      }

      return this.list(name, opts)
    }

    spec = npa(spec)

    try {
      const tags = await this.fetchTags(spec, opts)
      const msg =
        Object.keys(tags).map(k => `${k}: ${tags[k]}`).sort().join('\n')
      output.standard(msg)
      return tags
    } catch (err) {
      log.error('dist-tag ls', "Couldn't get dist-tag data for", spec)
      throw err
    }
  }

  // List tags for every configured workspace, one section per workspace.
  async listWorkspaces () {
    await this.setWorkspaces()

    for (const name of this.workspaceNames) {
      try {
        output.standard(`${name}:`)
        await this.list(npa(name), this.npm.flatOptions)
      } catch (err) {
        // set the exitCode directly, but ignore the error
        // since it will have already been logged by this.list()
        process.exitCode = 1
      }
    }
  }

  // GET the current dist-tags for a spec; throws when none exist.
  async fetchTags (spec, opts) {
    const data = await regFetch.json(
      `/-/package/${spec.escapedName}/dist-tags`,
      { ...opts, 'prefer-online': true, spec }
    )
    if (data && typeof data === 'object') {
      // the registry's cache validator field is not a tag
      delete data._etag
    }
    if (!data || !Object.keys(data).length) {
      throw new Error('No dist-tags found for ' + spec.name)
    }

    return data
  }
}

module.exports = DistTag

View File

@ -0,0 +1,21 @@
const PackageUrlCmd = require('../package-url-cmd.js')
// `npm docs` — open a package's documentation page in the browser.
class Docs extends PackageUrlCmd {
  static description = 'Open documentation for a package in a web browser'
  static name = 'docs'

  // Pick the docs URL for a manifest: its declared homepage first, then the
  // hosted-git provider's docs page, and finally the npmjs.com package page.
  getUrl (spec, mani) {
    if (mani.homepage) {
      return mani.homepage
    }

    const hosted = this.hostedFromMani(mani)
    if (hosted) {
      return hosted.docs()
    }

    return `https://www.npmjs.com/package/${mani.name}`
  }
}

module.exports = Docs

View File

@ -0,0 +1,347 @@
const cacache = require('cacache')
const { access, lstat, readdir, constants: { R_OK, W_OK, X_OK } } = require('node:fs/promises')
const fetch = require('make-fetch-happen')
const which = require('which')
const pacote = require('pacote')
const { resolve } = require('node:path')
const semver = require('semver')
const { log, output } = require('proc-log')
const ping = require('../utils/ping.js')
const { defaults } = require('@npmcli/config/lib/definitions')
const BaseCommand = require('../base-cmd.js')
// Describe an fs access mask (R_OK/W_OK/X_OK bits) as a human-readable,
// comma-separated list, e.g. R_OK | W_OK -> "readable, writable".
const maskLabel = mask => [
  [R_OK, 'readable'],
  [W_OK, 'writable'],
  [X_OK, 'executable'],
].filter(([bit]) => mask & bit)
  .map(([, label]) => label)
  .join(', ')
// Ordered table of doctor checks. `groups` are the CLI filter names users can
// pass, `title` is printed before the check runs, `cmd` is the Doctor method
// to invoke, and `windows: false` skips the check on win32.
const subcommands = [
  {
    // Ping is left in as a legacy command but is listed as "connection" to
    // make more sense to more people
    groups: ['connection', 'ping', 'registry'],
    title: 'Connecting to the registry',
    cmd: 'checkPing',
  }, {
    groups: ['versions'],
    title: 'Checking npm version',
    cmd: 'getLatestNpmVersion',
  }, {
    groups: ['versions'],
    title: 'Checking node version',
    cmd: 'getLatestNodejsVersion',
  }, {
    groups: ['registry'],
    title: 'Checking configured npm registry',
    cmd: 'checkNpmRegistry',
  }, {
    groups: ['environment'],
    title: 'Checking for git executable in PATH',
    cmd: 'getGitPath',
  }, {
    groups: ['environment'],
    title: 'Checking for global bin folder in PATH',
    cmd: 'getBinPath',
  }, {
    groups: ['permissions', 'cache'],
    title: 'Checking permissions on cached files (this may take awhile)',
    cmd: 'checkCachePermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on local node_modules (this may take awhile)',
    cmd: 'checkLocalModulesPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on global node_modules (this may take awhile)',
    cmd: 'checkGlobalModulesPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on local bin folder',
    cmd: 'checkLocalBinPermission',
    windows: false,
  }, {
    groups: ['permissions'],
    title: 'Checking permissions on global bin folder',
    cmd: 'checkGlobalBinPermission',
    windows: false,
  }, {
    groups: ['cache'],
    title: 'Verifying cache contents (this may take awhile)',
    cmd: 'verifyCachedFiles',
    windows: false,
  },
  // TODO:
  // group === 'dependencies'?
  //   - ensure arborist.loadActual() runs without errors and no invalid edges
  //   - ensure package-lock.json matches loadActual()
  //   - verify loadActual without hidden lock file matches hidden lockfile
  // group === '???'
  //   - verify all local packages have bins linked
  // What is the fix for these?
]
// `npm doctor` — run the environment health checks from `subcommands`,
// printing Ok / Not ok per check and failing if any check failed.
class Doctor extends BaseCommand {
  static description = 'Check the health of your npm environment'
  static name = 'doctor'
  static params = ['registry']
  static ignoreImplicitWorkspace = false
  // usage lists each unique group name (minus the legacy `ping` alias)
  static usage = [`[${subcommands.flatMap(s => s.groups)
    .filter((value, index, self) => self.indexOf(value) === index && value !== 'ping')
    .join('] [')}]`]

  static subcommands = subcommands

  // Run the selected checks sequentially; a check "fails" by throwing
  // (several checks throw plain strings rather than Errors — see below).
  async exec (args) {
    log.info('doctor', 'Running checkup')
    let allOk = true

    const actions = this.actions(args)

    const chalk = this.npm.chalk
    for (const { title, cmd } of actions) {
      this.output(title)
      // TODO when we have an in progress indicator that could go here
      let result
      try {
        result = await this[cmd]()
        this.output(`${chalk.green('Ok')}${result ? `\n${result}` : ''}\n`)
      } catch (err) {
        allOk = false
        this.output(`${chalk.red('Not ok')}\n${chalk.cyan(err)}\n`)
      }
    }

    if (!allOk) {
      if (this.npm.silent) {
        /* eslint-disable-next-line max-len */
        throw new Error('Some problems found. Check logs or disable silent mode for recommendations.')
      } else {
        throw new Error('Some problems found. See above for recommendations.')
      }
    }
  }

  // Ping the configured registry; rethrows as a plain string message.
  async checkPing () {
    log.info('doctor', 'Pinging registry')
    try {
      await ping({ ...this.npm.flatOptions, retry: false })
      return ''
    } catch (er) {
      // E<3-digits> codes (e.g. E404) get folded into the message
      if (/^E\d{3}$/.test(er.code || '')) {
        throw er.code.slice(1) + ' ' + er.message
      } else {
        throw er.message
      }
    }
  }

  // Compare the running npm against `npm@latest` on the registry.
  // NOTE(review): throws a bare string on failure, not an Error — exec()
  // relies on that for display, so keep it if refactoring.
  async getLatestNpmVersion () {
    log.info('doctor', 'Getting npm package information')
    const latest = (await pacote.manifest('npm@latest', this.npm.flatOptions)).version
    if (semver.gte(this.npm.version, latest)) {
      return `current: v${this.npm.version}, latest: v${latest}`
    } else {
      throw `Use npm v${latest}`
    }
  }

  // Compare the running node against the newest LTS (or newer current-line
  // release) from nodejs.org's release index.
  async getLatestNodejsVersion () {
    // XXX get the latest in the current major as well
    const current = process.version
    const currentRange = `^${current}`
    const url = 'https://nodejs.org/dist/index.json'
    log.info('doctor', 'Getting Node.js release information')
    const res = await fetch(url, { method: 'GET', ...this.npm.flatOptions })
    const data = await res.json()
    let maxCurrent = '0.0.0'
    let maxLTS = '0.0.0'
    for (const { lts, version } of data) {
      if (lts && semver.gt(version, maxLTS)) {
        maxLTS = version
      }

      if (semver.satisfies(version, currentRange) && semver.gt(version, maxCurrent)) {
        maxCurrent = version
      }
    }
    const recommended = semver.gt(maxCurrent, maxLTS) ? maxCurrent : maxLTS
    if (semver.gte(process.version, recommended)) {
      return `current: ${current}, recommended: ${recommended}`
    } else {
      throw `Use node ${recommended} (current: ${current})`
    }
  }

  // Verify the npm global bin dir is on $PATH.
  async getBinPath () {
    log.info('doctor', 'getBinPath', 'Finding npm global bin in your PATH')
    if (!process.env.PATH.includes(this.npm.globalBin)) {
      throw new Error(`Add ${this.npm.globalBin} to your $PATH`)
    }
    return this.npm.globalBin
  }

  // The permission checks below are thin wrappers that pick the directory,
  // the required access mask, ownership expectation, and whether a missing
  // directory is acceptable.
  async checkCachePermission () {
    return this.checkFilesPermission(this.npm.cache, true, R_OK)
  }

  async checkLocalModulesPermission () {
    return this.checkFilesPermission(this.npm.localDir, true, R_OK | W_OK, true)
  }

  async checkGlobalModulesPermission () {
    return this.checkFilesPermission(this.npm.globalDir, false, R_OK)
  }

  async checkLocalBinPermission () {
    return this.checkFilesPermission(this.npm.localBin, false, R_OK | W_OK | X_OK, true)
  }

  async checkGlobalBinPermission () {
    return this.checkFilesPermission(this.npm.globalBin, false, X_OK)
  }

  // Walk `root` recursively checking ownership (optional) and access bits.
  // NOTE(review): the `finally` block deliberately throws/returns, overriding
  // any in-flight exception from the `try` body — confirm that is intended.
  async checkFilesPermission (root, shouldOwn, mask, missingOk) {
    let ok = true

    try {
      const uid = process.getuid()
      const gid = process.getgid()
      // a Set is used as a worklist: entries discovered while iterating are
      // appended and visited by the same for..of loop
      const files = new Set([root])
      for (const f of files) {
        const st = await lstat(f).catch(er => {
          // if it can't be missing, or if it can and the error wasn't that it was missing
          if (!missingOk || er.code !== 'ENOENT') {
            ok = false
            log.warn('doctor', 'checkFilesPermission', 'error getting info for ' + f)
          }
        })

        if (!st) {
          continue
        }

        if (shouldOwn && (uid !== st.uid || gid !== st.gid)) {
          log.warn('doctor', 'checkFilesPermission', 'should be owner of ' + f)
          ok = false
        }

        if (!st.isDirectory() && !st.isFile()) {
          continue
        }

        try {
          await access(f, mask)
        } catch (er) {
          ok = false
          const msg = `Missing permissions on ${f} (expect: ${maskLabel(mask)})`
          log.error('doctor', 'checkFilesPermission', msg)
          continue
        }

        if (st.isDirectory()) {
          const entries = await readdir(f).catch(() => {
            ok = false
            log.warn('doctor', 'checkFilesPermission', 'error reading directory ' + f)
            return []
          })
          for (const entry of entries) {
            files.add(resolve(f, entry))
          }
        }
      }
    } finally {
      if (!ok) {
        throw (
          `Check the permissions of files in ${root}` +
          (shouldOwn ? ' (should be owned by current user)' : '')
        )
      } else {
        return ''
      }
    }
  }

  // Locate git on $PATH; failure is a hard error for many npm operations.
  async getGitPath () {
    log.info('doctor', 'Finding git in your PATH')
    return await which('git').catch(er => {
      log.warn('doctor', 'getGitPath', er)
      throw new Error("Install git and ensure it's in your PATH.")
    })
  }

  // Run cacache's integrity verification on the npm cache; issues found are
  // repaired by cacache itself and reported as warnings.
  async verifyCachedFiles () {
    log.info('doctor', 'verifyCachedFiles', 'Verifying the npm cache')

    const stats = await cacache.verify(this.npm.flatOptions.cache)
    const { badContentCount, reclaimedCount, missingContent, reclaimedSize } = stats
    if (badContentCount || reclaimedCount || missingContent) {
      if (badContentCount) {
        log.warn('doctor', 'verifyCachedFiles', `Corrupted content removed: ${badContentCount}`)
      }

      if (reclaimedCount) {
        log.warn(
          'doctor',
          'verifyCachedFiles',
          `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)`
        )
      }

      if (missingContent) {
        log.warn('doctor', 'verifyCachedFiles', `Missing content: ${missingContent}`)
      }

      log.warn('doctor', 'verifyCachedFiles', 'Cache issues have been fixed')
    }
    log.info(
      'doctor',
      'verifyCachedFiles',
      `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`
    )
    return `verified ${stats.verifiedContent} tarballs`
  }

  // Warn when a non-default registry is configured.
  async checkNpmRegistry () {
    if (this.npm.flatOptions.registry !== defaults.registry) {
      throw `Try \`npm config set registry=${defaults.registry}\``
    } else {
      return `using default registry (${defaults.registry})`
    }
  }

  // NOTE(review): this instance method shadows the imported `output` inside
  // the class; the body still reaches the module-level import explicitly.
  output (...args) {
    // TODO display layer should do this
    if (!this.npm.silent) {
      output.standard(...args)
    }
  }

  // Select which checks to run: filter by requested group names (if any)
  // and drop non-Windows checks on win32.
  actions (params) {
    return this.constructor.subcommands.filter(subcmd => {
      if (process.platform === 'win32' && subcmd.windows === false) {
        return false
      }
      if (params.length) {
        return params.some(param => subcmd.groups.includes(param))
      }
      return true
    })
  }
}

module.exports = Doctor

View File

@ -0,0 +1,64 @@
const { resolve } = require('node:path')
const { lstat } = require('node:fs/promises')
const cp = require('node:child_process')
const completion = require('../utils/installed-shallow.js')
const BaseCommand = require('../base-cmd.js')
// Turn a slash-separated package path like `a/b` or `@scope/pkg/sub` into the
// node_modules-relative path of the innermost package, inserting
// `/node_modules/` between package names while keeping scoped names intact.
// A run of explicit `node_modules` segments in the input collapses to one.
const splitPackageNames = (path) => {
  const names = []
  for (const segment of path.split('/')) {
    const previous = names[names.length - 1]
    // a segment right after a bare scope ("@foo") is the scoped package name
    if (previous && previous.startsWith('@') && !previous.includes('/')) {
      names[names.length - 1] = `${previous}/${segment}`
    } else {
      names.push(segment)
    }
  }
  return names
    .join('/node_modules/')
    .replace(/(\/node_modules)+/, '/node_modules')
}
// npm edit <pkg>
// open the package folder in the $EDITOR
class Edit extends BaseCommand {
  static description = 'Edit an installed package'
  static name = 'edit'
  static usage = ['<pkg>[/<subpkg>...]']
  static params = ['editor']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }

  // Resolve the package's node_modules directory, open it in the configured
  // editor, and rebuild the package once the editor exits cleanly.
  async exec (args) {
    if (args.length !== 1) {
      throw this.usageError()
    }

    const dir = resolve(this.npm.dir, splitPackageNames(args[0]))
    // throws if the package is not actually installed
    await lstat(dir)
    await new Promise((resolvePromise, rejectPromise) => {
      const [command, ...editorArgs] = this.npm.config.get('editor').split(/\s+/)
      const child = cp.spawn(command, [...editorArgs, dir], { stdio: 'inherit' })
      child.on('exit', async (code) => {
        if (code) {
          return rejectPromise(new Error(`editor process exited with code: ${code}`))
        }
        await this.npm.exec('rebuild', [dir]).then(resolvePromise).catch(rejectPromise)
      })
    })
  }
}

module.exports = Edit

View File

@ -0,0 +1,110 @@
const { resolve } = require('node:path')
const libexec = require('libnpmexec')
const BaseCommand = require('../base-cmd.js')
// npm exec: run a binary from a local or remote package via libnpmexec,
// installing it on demand if it is not already present.
class Exec extends BaseCommand {
  static description = 'Run a command from a local or remote npm package'
  static params = [
    'package',
    'call',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]
  static name = 'exec'
  static usage = [
    '-- <pkg>[@<version>] [args...]',
    '--package=<pkg>[@<version>] -- <cmd> [args...]',
    '-c \'<cmd> [args...]\'',
    '--package=foo -c \'<cmd> [args...]\'',
  ]
  static workspaces = true
  static ignoreImplicitWorkspace = false
  static isShellout = true
  async exec (args) {
    return this.callExec(args)
  }
  // Run the command once per selected workspace, with each workspace's own
  // bin dir and package.json taking precedence over the project root's.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const [name, path] of this.workspaces) {
      const locationMsg =
        `in workspace ${this.npm.chalk.green(name)} at location:\n${this.npm.chalk.dim(path)}`
      await this.callExec(args, { name, locationMsg, runPath: path })
    }
  }
  // Gather all of the config/context libnpmexec needs and invoke it.
  // `name`/`locationMsg`/`runPath` are only set for workspace runs.
  async callExec (args, { name, locationMsg, runPath } = {}) {
    let localBin = this.npm.localBin
    let pkgPath = this.npm.localPrefix
    // This is where libnpmexec will actually run the scripts from
    if (!runPath) {
      runPath = process.cwd()
    } else {
      // We have to consider if the workspace has its own separate versions
      // libnpmexec will walk up to localDir after looking here
      localBin = resolve(this.npm.localDir, name, 'node_modules', '.bin')
      // We also need to look for `bin` entries in the workspace package.json
      // libnpmexec will NOT look in the project root for the bin entry
      pkgPath = runPath
    }
    const call = this.npm.config.get('call')
    let globalPath
    const {
      flatOptions,
      globalBin,
      globalDir,
      chalk,
    } = this.npm
    const scriptShell = this.npm.config.get('script-shell') || undefined
    const packages = this.npm.config.get('package')
    const yes = this.npm.config.get('yes')
    // --prefix sets both of these to the same thing, meaning the global prefix
    // is invalid (i.e. no lib/node_modules). This is not a trivial thing to
    // untangle and fix so we work around it here.
    if (this.npm.localPrefix !== this.npm.globalPrefix) {
      globalPath = resolve(globalDir, '..')
    }
    // `-c '<cmd>'` and positional args are mutually exclusive
    if (call && args.length) {
      throw this.usageError()
    }
    return libexec({
      ...flatOptions,
      // we explicitly set packageLockOnly to false because if it's true
      // when we try to install a missing package, we won't actually install it
      packageLockOnly: false,
      // what the user asked to run args[0] is run by default
      args: [...args], // copy args so they dont get mutated
      // specify a custom command to be run instead of args[0]
      call,
      chalk,
      // where to look for bins globally, if a file matches call or args[0] it is called
      globalBin,
      // where to look for packages globally, if a package matches call or args[0] it is called
      globalPath,
      // where to look for bins locally, if a file matches call or args[0] it is called
      localBin,
      locationMsg,
      // packages that need to be installed
      packages,
      // path where node_modules is
      path: this.npm.localPrefix,
      // where to look for package.json#bin entries first
      pkgPath,
      // cwd to run from
      runPath,
      scriptShell,
      yes,
    })
  }
}
module.exports = Exec

View File

@ -0,0 +1,130 @@
const { explainNode } = require('../utils/explain-dep.js')
const npa = require('npm-package-arg')
const semver = require('semver')
const { relative, resolve } = require('node:path')
const validName = require('validate-npm-package-name')
const { output } = require('proc-log')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// npm explain: show why a package is present in the installed tree by
// printing its dependency chains (or JSON) for each matching node.
class Explain extends ArboristWorkspaceCmd {
  static description = 'Explain installed packages'
  static name = 'explain'
  static usage = ['<package-spec>']
  static params = [
    'json',
    'workspace',
  ]
  static ignoreImplicitWorkspace = false
  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }
  // Load the actual on-disk tree, collect nodes matching each arg, filter
  // by workspace selection, and output each node's explanation.
  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({ path: this.npm.prefix, ...this.npm.flatOptions })
    const tree = await arb.loadActual()
    // restrict to deps of the chosen workspaces, or exclude workspace deps
    // entirely when workspaces are disabled
    if (this.npm.flatOptions.workspacesEnabled
      && this.workspaceNames
      && this.workspaceNames.length
    ) {
      this.filterSet = arb.workspaceDependencySet(tree, this.workspaceNames)
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      this.filterSet =
        arb.excludeWorkspacesDependencySet(tree)
    }
    const nodes = new Set()
    for (const arg of args) {
      for (const node of this.getNodes(tree, arg)) {
        const filteredOut = this.filterSet
          && this.filterSet.size > 0
          && !this.filterSet.has(node)
        if (!filteredOut) {
          nodes.add(node)
        }
      }
    }
    if (nodes.size === 0) {
      throw new Error(`No dependencies found matching ${args.join(', ')}`)
    }
    const expls = []
    for (const node of nodes) {
      const { extraneous, dev, optional, devOptional, peer, inBundle, overridden } = node
      const expl = node.explain()
      // extraneous nodes have no dependents, so the type flags are moot
      if (extraneous) {
        expl.extraneous = true
      } else {
        expl.dev = dev
        expl.optional = optional
        expl.devOptional = devOptional
        expl.peer = peer
        expl.bundled = inBundle
        expl.overridden = overridden
      }
      expls.push(expl)
    }
    if (this.npm.flatOptions.json) {
      output.buffer(expls)
    } else {
      output.standard(expls.map(expl => {
        return explainNode(expl, Infinity, this.npm.chalk)
      }).join('\n\n'))
    }
  }
  // Resolve a user-supplied arg to tree nodes, trying in order: bare package
  // name, inventory location, node_modules folder path, then name@range.
  getNodes (tree, arg) {
    // if it's just a name, return packages by that name
    const { validForOldPackages: valid } = validName(arg)
    if (valid) {
      return tree.inventory.query('packageName', arg)
    }
    // if it's a location, get that node
    const maybeLoc = arg.replace(/\\/g, '/').replace(/\/+$/, '')
    const nodeByLoc = tree.inventory.get(maybeLoc)
    if (nodeByLoc) {
      return [nodeByLoc]
    }
    // maybe a path to a node_modules folder
    const maybePath = relative(this.npm.prefix, resolve(maybeLoc))
      .replace(/\\/g, '/').replace(/\/+$/, '')
    const nodeByPath = tree.inventory.get(maybePath)
    if (nodeByPath) {
      return [nodeByPath]
    }
    // otherwise, try to select all matching nodes
    try {
      return this.getNodesByVersion(tree, arg)
    } catch (er) {
      // unparseable spec: treat as "no matches" rather than erroring
      return []
    }
  }
  // Select inventory nodes whose name matches the spec and whose installed
  // version satisfies the given version or range.
  getNodesByVersion (tree, arg) {
    const spec = npa(arg, this.npm.prefix)
    if (spec.type !== 'version' && spec.type !== 'range') {
      return []
    }
    return tree.inventory.filter(node => {
      return node.package.name === spec.name &&
        semver.satisfies(node.package.version, spec.rawSpec)
    })
  }
}
module.exports = Explain

View File

@ -0,0 +1,74 @@
const pkgJson = require('@npmcli/package-json')
const runScript = require('@npmcli/run-script')
const { join, relative } = require('node:path')
const { log, output } = require('proc-log')
const completion = require('../utils/installed-shallow.js')
const BaseCommand = require('../base-cmd.js')
// npm explore <pkg>[@<version>]
// open a subshell to the package folder.
class Explore extends BaseCommand {
  static description = 'Browse an installed package'
  static name = 'explore'
  static usage = ['<pkg> [ -- <command>]']
  static params = ['shell']
  static ignoreImplicitWorkspace = false
  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }
  // Open an interactive shell (or run the given command) inside the named
  // installed package's folder, via a synthetic `_explore` script.
  async exec (args) {
    if (args.length < 1 || !args[0]) {
      throw this.usageError()
    }
    const pkgname = args.shift()
    // detect and prevent any .. shenanigans
    const path = join(this.npm.dir, join('/', pkgname))
    if (relative(path, this.npm.dir) === '') {
      throw this.usageError()
    }
    // run as if running a script named '_explore', which we set to either
    // the set of arguments, or the shell config, and let @npmcli/run-script
    // handle all the escaping and PATH setup stuff.
    const { content: pkg } = await pkgJson.normalize(path).catch(er => {
      log.error('explore', `It doesn't look like ${pkgname} is installed.`)
      throw er
    })
    const { shell } = this.npm.flatOptions
    pkg.scripts = {
      ...(pkg.scripts || {}),
      _explore: args.join(' ').trim() || shell,
    }
    if (!args.length) {
      output.standard(`\nExploring ${path}\nType 'exit' or ^D when finished\n`)
    }
    return runScript({
      ...this.npm.flatOptions,
      pkg,
      path,
      event: '_explore',
      stdio: 'inherit',
    }).catch(er => {
      // propagate the subshell's exit code when it provides one
      process.exitCode = typeof er.code === 'number' && er.code !== 0 ? er.code
        : 1
      // if it's not an exit error, or non-interactive, throw it
      const isProcExit = er.message === 'command failed' &&
        (typeof er.code === 'number' || /^SIG/.test(er.signal || ''))
      if (args.length || !isProcExit) {
        throw er
      }
    })
  }
}
module.exports = Explore

View File

@ -0,0 +1,28 @@
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// dedupe duplicated packages, or find them in the tree
class FindDupes extends ArboristWorkspaceCmd {
  static description = 'Find duplication in the package tree'
  static name = 'find-dupes'
  static params = [
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'strict-peer-deps',
    'package-lock',
    'omit',
    'include',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    ...super.params,
  ]
  // Report what `npm dedupe` would do without touching the tree, by forcing
  // dry-run mode and delegating to the dedupe command.
  async exec () {
    this.npm.config.set('dry-run', true)
    return this.npm.exec('dedupe', [])
  }
}
module.exports = FindDupes

View File

@ -0,0 +1,221 @@
const archy = require('archy')
const pacote = require('pacote')
const semver = require('semver')
const { output } = require('proc-log')
const npa = require('npm-package-arg')
const { depth } = require('treeverse')
const { readTree: getFundingInfo, normalizeFunding, isValidFunding } = require('libnpmfund')
const { openUrl } = require('../utils/open-url.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Render a package as `name@version`, or just `name` when no version is known.
const getPrintableName = ({ name, version }) => {
  if (!version) {
    return name
  }
  return `${name}@${version}`
}
const errCode = (msg, code) => Object.assign(new Error(msg), { code })
// npm fund: list funding URLs for installed dependencies, or open the
// funding URL of a single package with `npm fund <spec>`.
class Fund extends ArboristWorkspaceCmd {
  static description = 'Retrieve funding information'
  static name = 'fund'
  static params = ['json', 'browser', 'unicode', 'workspace', 'which']
  static usage = ['[<package-spec>]']
  // Build a usage snippet like `npm fund [<package-spec>] --which=1` for
  // inclusion in error messages.
  // XXX: maybe worth making this generic for all commands?
  usageMessage (paramsObj = {}) {
    let msg = `\`npm ${this.constructor.name}`
    const params = Object.entries(paramsObj)
    if (params.length) {
      msg += ` ${this.constructor.usage}`
    }
    for (const [key, value] of params) {
      msg += ` --${key}=${value}`
    }
    return `${msg}\``
  }
  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }
  // With a spec: open that package's funding URL. Without: print the
  // funding info of the whole actual tree (JSON buffer or archy listing).
  async exec (args) {
    const spec = args[0]
    // --which selects one of several funding URLs; must be a 1-based index
    let fundingSourceNumber = this.npm.config.get('which')
    if (fundingSourceNumber != null) {
      fundingSourceNumber = parseInt(fundingSourceNumber, 10)
      if (isNaN(fundingSourceNumber) || fundingSourceNumber < 1) {
        throw errCode(
          `${this.usageMessage({ which: 'fundingSourceNumber' })} must be given a positive integer`,
          'EFUNDNUMBER'
        )
      }
    }
    if (this.npm.global) {
      throw errCode(
        `${this.usageMessage()} does not support global packages`,
        'EFUNDGLOBAL'
      )
    }
    const where = this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({ ...this.npm.flatOptions, path: where })
    const tree = await arb.loadActual()
    if (spec) {
      await this.openFundingUrl({
        path: where,
        tree,
        spec,
        fundingSourceNumber,
      })
      return
    }
    // TODO: add !workspacesEnabled option handling to libnpmfund
    const fundingInfo = getFundingInfo(tree, {
      // NOTE(review): `this.flatOptions` is not defined on command
      // instances (options live at `this.npm.flatOptions`), so this spread
      // appears to be a no-op — confirm whether npm.flatOptions was meant.
      ...this.flatOptions,
      Arborist,
      workspaces: this.workspaceNames,
    })
    if (this.npm.config.get('json')) {
      output.buffer(fundingInfo)
    } else {
      output.standard(this.printHuman(fundingInfo))
    }
  }
  // Render the libnpmfund tree as human-readable archy output, grouping
  // packages that share the same funding URL under a single entry.
  printHuman (fundingInfo) {
    const unicode = this.npm.config.get('unicode')
    const seenUrls = new Map()
    const tree = obj => archy(obj, '', { unicode })
    const result = depth({
      tree: fundingInfo,
      // composes human readable package name
      // and creates a new archy item for readable output
      visit: ({ name, version, funding }) => {
        const [fundingSource] = [].concat(normalizeFunding(funding)).filter(isValidFunding)
        const { url } = fundingSource || {}
        const pkgRef = getPrintableName({ name, version })
        if (!url) {
          return { label: pkgRef }
        }
        let item
        if (seenUrls.has(url)) {
          item = seenUrls.get(url)
          item.label += `${this.npm.chalk.dim(',')} ${pkgRef}`
          return null
        }
        item = {
          label: tree({
            label: this.npm.chalk.blue(url),
            nodes: [pkgRef],
          }).trim(),
        }
        // stacks all packages together under the same item
        seenUrls.set(url, item)
        return item
      },
      // puts child nodes back into returned archy
      // output while also filtering out missing items
      leave: (item, children) => {
        if (item) {
          item.nodes = children.filter(Boolean)
        }
        return item
      },
      // turns tree-like object return by libnpmfund
      // into children to be properly read by treeverse
      getChildren: node =>
        Object.keys(node.dependencies || {}).map(key => ({
          name: key,
          ...node.dependencies[key],
        })),
    })
    const res = tree(result)
    return res
  }
  // Resolve the spec to a package (from the tree, or the registry as a
  // fallback), then open its funding URL — or list the choices when the
  // package declares several and no --which index was given.
  async openFundingUrl ({ path, tree, spec, fundingSourceNumber }) {
    const arg = npa(spec, path)
    const retrievePackageMetadata = () => {
      if (arg.type === 'directory') {
        if (tree.path === arg.fetchSpec) {
          // matches cwd, e.g: npm fund .
          return tree.package
        } else {
          // matches any file path within current arborist inventory
          for (const item of tree.inventory.values()) {
            if (item.path === arg.fetchSpec) {
              return item.package
            }
          }
        }
      } else {
        // tries to retrieve a package from arborist inventory
        // by matching resulted package name from the provided spec
        const [item] = [...tree.inventory.query('name', arg.name)]
          .filter(i => semver.valid(i.package.version))
          .sort((a, b) => semver.rcompare(a.package.version, b.package.version))
        if (item) {
          return item.package
        }
      }
    }
    const { funding } =
      retrievePackageMetadata() ||
      (await pacote.manifest(arg, this.npm.flatOptions).catch(() => ({})))
    const validSources = [].concat(normalizeFunding(funding)).filter(isValidFunding)
    if (!validSources.length) {
      throw errCode(`No valid funding method available for: ${spec}`, 'ENOFUND')
    }
    // unambiguous when --which was given or there is exactly one source
    const fundSource = fundingSourceNumber
      ? validSources[fundingSourceNumber - 1]
      : validSources.length === 1 ? validSources[0]
      : null
    if (fundSource) {
      return openUrl(this.npm, ...this.urlMessage(fundSource))
    }
    const ambiguousUrlMsg = [
      ...validSources.map((s, i) => `${i + 1}: ${this.urlMessage(s).reverse().join(': ')}`),
      `Run ${this.usageMessage({ which: '1' })}` +
      ', for example, to open the first funding URL listed in that package',
    ]
    if (fundingSourceNumber) {
      ambiguousUrlMsg.unshift(`--which=${fundingSourceNumber} is not a valid index`)
    }
    output.standard(ambiguousUrlMsg.join('\n'))
  }
  // Return [url, message] describing a funding source, for openUrl.
  urlMessage (source) {
    const { type, url } = source
    const typePrefix = type ? `${type} funding` : 'Funding'
    const message = `${typePrefix} available at the following URL`
    return [url, message]
  }
}
module.exports = Fund

View File

@ -0,0 +1,23 @@
const Npm = require('../npm.js')
const BaseCommand = require('../base-cmd.js')
// npm get: thin alias for `npm config get`.
class Get extends BaseCommand {
  static description = 'Get a value from the npm configuration'
  static name = 'get'
  static usage = ['[<key> ...] (See `npm config`)']
  static params = ['long']
  static ignoreImplicitWorkspace = false
  // TODO
  /* istanbul ignore next */
  static async completion (opts) {
    // delegate completion to the config command's key completion
    const Config = Npm.cmd('config')
    return Config.completion(opts)
  }
  // Forward the requested keys to `npm config get`.
  async exec (args) {
    return this.npm.exec('config', ['get'].concat(args))
  }
}
module.exports = Get

View File

@ -0,0 +1,195 @@
const { readFile } = require('node:fs/promises')
const path = require('node:path')
const { glob } = require('glob')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
// npm help-search: grep the bundled markdown docs for the given terms and
// print the best-matching `npm help` topics.
class HelpSearch extends BaseCommand {
  static description = 'Search npm help documentation'
  static name = 'help-search'
  static usage = ['<text>']
  static params = ['long']
  // Collect all docs, search them for the args, and print formatted results.
  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }
    const docPath = path.resolve(this.npm.npmRoot, 'docs/content')
    let files = await glob(`${globify(docPath)}/*/*.md`)
    // preserve glob@8 behavior
    files = files.sort((a, b) => a.localeCompare(b, 'en'))
    const data = await this.readFiles(files)
    const results = await this.searchFiles(args, data)
    const formatted = this.formatResults(args, results)
    if (!formatted.trim()) {
      output.standard(`No matches in help for: ${args.join(' ')}\n`)
    } else {
      output.standard(formatted)
    }
  }
  // Read each doc file into a map of file → content, stripping the leading
  // YAML frontmatter block.
  async readFiles (files) {
    const res = {}
    await Promise.all(files.map(async file => {
      res[file] = (await readFile(file, 'utf8'))
        .replace(/^---\n(.*\n)*?---\n/, '').trim()
    }))
    return res
  }
  // Scan each file's lines for the search terms, keep matching lines plus
  // their surrounding context, and rank files by matches and hit counts.
  async searchFiles (args, data) {
    const results = []
    for (const [file, content] of Object.entries(data)) {
      const lowerCase = content.toLowerCase()
      // skip if no matches at all
      if (!args.some(a => lowerCase.includes(a.toLowerCase()))) {
        continue
      }
      const lines = content.split(/\n+/)
      // if a line has a search term, then skip it and the next line.
      // if the next line has a search term, then skip all 3
      // otherwise, set the line to null. then remove the nulls.
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i]
        const nextLine = lines[i + 1]
        let match = false
        if (nextLine) {
          match = args.some(a =>
            nextLine.toLowerCase().includes(a.toLowerCase()))
          if (match) {
            // skip over the next line, and the line after it.
            i += 2
            continue
          }
        }
        match = args.some(a => line.toLowerCase().includes(a.toLowerCase()))
        if (match) {
          // skip over the next line
          i++
          continue
        }
        lines[i] = null
      }
      // now squish any string of nulls into a single null
      const pruned = lines.reduce((l, r) => {
        if (!(r === null && l[l.length - 1] === null)) {
          l.push(r)
        }
        return l
      }, [])
      // trim leading/trailing null separators
      if (pruned[pruned.length - 1] === null) {
        pruned.pop()
      }
      if (pruned[0] === null) {
        pruned.shift()
      }
      // now count how many args were found
      const found = {}
      let totalHits = 0
      for (const line of pruned) {
        for (const arg of args) {
          // number of occurrences of arg on this line
          const hit = (line || '').toLowerCase()
            .split(arg.toLowerCase()).length - 1
          if (hit > 0) {
            found[arg] = (found[arg] || 0) + hit
            totalHits += hit
          }
        }
      }
      const cmd = 'npm help ' +
        path.basename(file, '.md').replace(/^npm-/, '')
      results.push({
        file,
        cmd,
        lines: pruned,
        found: Object.keys(found),
        hits: found,
        totalHits,
      })
    }
    // sort results by number of results found, then by number of hits
    // then by number of matching lines
    // coverage is ignored here because the contents of results are
    // nondeterministic due to either glob or readFiles or Object.entries
    return results.sort(/* istanbul ignore next */ (a, b) =>
      a.found.length > b.found.length ? -1
      : a.found.length < b.found.length ? 1
      : a.totalHits > b.totalHits ? -1
      : a.totalHits < b.totalHits ? 1
      : a.lines.length > b.lines.length ? -1
      : a.lines.length < b.lines.length ? 1
      : 0).slice(0, 10)
  }
  // Render ranked results: one summary line per topic, plus (with --long)
  // up to four context lines with the matched terms highlighted.
  formatResults (args, results) {
    // cap the layout width at 80 columns
    const cols = Math.min(process.stdout.columns || Infinity, 80) + 1
    const formattedOutput = results.map(res => {
      const out = [res.cmd]
      const r = Object.keys(res.hits)
        .map(k => `${k}:${res.hits[k]}`)
        .sort((a, b) => a > b ? 1 : -1)
        .join(' ')
      // right-align the hit counts by padding the gap
      out.push(' '.repeat((Math.max(1, cols - out.join(' ').length - r.length - 1))))
      out.push(r)
      if (!this.npm.config.get('long')) {
        return out.join('')
      }
      out.unshift('\n\n')
      out.push('\n')
      out.push('-'.repeat(cols - 1) + '\n')
      res.lines.forEach((line, i) => {
        if (line === null || i > 3) {
          return
        }
        // re-split the line on each term and colorize the matched words
        const hilitLine = []
        for (const arg of args) {
          const finder = line.toLowerCase().split(arg.toLowerCase())
          let p = 0
          for (const f of finder) {
            hilitLine.push(line.slice(p, p + f.length))
            const word = line.slice(p + f.length, p + f.length + arg.length)
            hilitLine.push(this.npm.chalk.blue(word))
            p += f.length + arg.length
          }
        }
        out.push(hilitLine.join('') + '\n')
      })
      return out.join('')
    }).join('\n')
    const finalOut = results.length && !this.npm.config.get('long')
      ? 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' +
        '—'.repeat(cols - 1) + '\n' +
        formattedOutput + '\n' +
        '—'.repeat(cols - 1) + '\n' +
        '(run with -l or --long to see more context)'
      : formattedOutput
    return finalOut.trim()
  }
}
module.exports = HelpSearch

View File

@ -0,0 +1,117 @@
const spawn = require('@npmcli/promise-spawn')
const path = require('node:path')
const { openUrl } = require('../utils/open-url.js')
const { glob } = require('glob')
const { output, input } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { deref } = require('../utils/cmd-list.js')
const BaseCommand = require('../base-cmd.js')
const globify = pattern => pattern.split('\\').join('/')
// Strips out the number from foo.7 or foo.7. or foo.7.tgz
// We don't currently compress our man pages but if we ever did this would
// seamlessly continue supporting it
const manNumberRegex = /\.(\d+)(\.[^/\\]*)?$/
// hardcoded names for mansections
// XXX: these are used in the docs workspace and should be exported
// from npm so section names can be changed more easily
const manSectionNames = {
  1: 'commands',
  5: 'configuring-npm',
  7: 'using-npm',
}
// npm help: locate the best-matching man page for a topic and display it in
// the configured viewer (man, woman, or a browser for the HTML docs).
class Help extends BaseCommand {
  static description = 'Get help on npm'
  static name = 'help'
  static usage = ['<term> [<terms..>]']
  static params = ['viewer']
  // Complete topic names from the bundled man page filenames.
  static async completion (opts, npm) {
    if (opts.conf.argv.remain.length > 2) {
      return []
    }
    const g = path.resolve(npm.npmRoot, 'man/man[0-9]/*.[0-9]')
    let files = await glob(globify(g))
    // preserve glob@8 behavior
    files = files.sort((a, b) => a.localeCompare(b, 'en'))
    return Object.keys(files.reduce(function (acc, file) {
      file = path.basename(file).replace(/\.[0-9]+$/, '')
      file = file.replace(/^npm-/, '')
      acc[file] = true
      return acc
    }, { help: true }))
  }
  // Show the man page for a single topic, or fall back to help-search for
  // multiple terms or when no man page is found.
  async exec (args) {
    // By default we search all of our man subdirectories, but if the user has
    // asked for a specific one we limit the search to just there
    // (note: a purely-numeric first arg is consumed from `args` here)
    const manSearch = /^\d+$/.test(args[0]) ? `man${args.shift()}` : 'man*'
    if (!args.length) {
      return output.standard(this.npm.usage)
    }
    // npm help foo bar baz: search topics
    if (args.length > 1) {
      return this.helpSearch(args)
    }
    // `npm help package.json`
    const arg = (deref(args[0]) || args[0]).replace('.json', '-json')
    // find either section.n or npm-section.n
    const f = globify(path.resolve(this.npm.npmRoot, `man/${manSearch}/?(npm-)${arg}.[0-9]*`))
    // prefer the lowest man section number, then locale ordering
    const [man] = await glob(f).then(r => r.sort((a, b) => {
      // Because the glob is (subtly) different from manNumberRegex,
      // we can't rely on it passing.
      const aManNumberMatch = a.match(manNumberRegex)?.[1] || 999
      const bManNumberMatch = b.match(manNumberRegex)?.[1] || 999
      if (aManNumberMatch !== bManNumberMatch) {
        return aManNumberMatch - bManNumberMatch
      }
      return localeCompare(a, b)
    }))
    return man ? this.viewMan(man) : this.helpSearch(args)
  }
  // Delegate to `npm help-search` for topic searching.
  helpSearch (args) {
    return this.npm.exec('help-search', args)
  }
  // Display a man page in the configured viewer (browser/woman/man).
  async viewMan (man) {
    const viewer = this.npm.config.get('viewer')
    if (viewer === 'browser') {
      return openUrl(this.npm, this.htmlMan(man), 'help available at the following URL', true)
    }
    let args = ['man', [man]]
    if (viewer === 'woman') {
      args = ['emacsclient', ['-e', `(woman-find-file '${man}')`]]
    }
    try {
      // input.start pauses npm's own stdin handling while the pager runs
      await input.start(() => spawn(...args, { stdio: 'inherit' }))
    } catch (err) {
      if (err.code) {
        throw new Error(`help process exited with code: ${err.code}`)
      } else {
        throw err
      }
    }
  }
  // Returns the path to the html version of the man page
  htmlMan (man) {
    const sect = manSectionNames[man.match(manNumberRegex)[1]]
    const f = path.basename(man).replace(manNumberRegex, '')
    return 'file:///' + path.resolve(this.npm.npmRoot, `docs/output/${sect}/${f}.html`)
  }
}
module.exports = Help

View File

@ -0,0 +1,109 @@
const hookApi = require('libnpmhook')
const { otplease } = require('../utils/auth.js')
const relativeDate = require('tiny-relative-date')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// npm hook: manage registry webhooks (add/ls/rm/update) via libnpmhook,
// prompting for a one-time password when the registry requires it.
class Hook extends BaseCommand {
  static description = 'Manage registry hooks'
  static name = 'hook'
  static params = [
    'registry',
    'otp',
  ]
  static usage = [
    'add <pkg> <url> <secret> [--type=<type>]',
    'ls [pkg]',
    'rm <id>',
    'update <id> <url> <secret>',
  ]

  // Dispatch to the requested subcommand; otplease retries the operation
  // with an OTP prompt if the registry rejects it with an OTP challenge.
  async exec (args) {
    return otplease(this.npm, { ...this.npm.flatOptions }, (opts) => {
      switch (args[0]) {
        case 'add':
          return this.add(args[1], args[2], args[3], opts)
        case 'ls':
          return this.ls(args[1], opts)
        case 'rm':
          return this.rm(args[1], opts)
        case 'update':
        case 'up':
          return this.update(args[1], args[2], args[3], opts)
        default:
          throw this.usageError()
      }
    })
  }

  // Register a new hook and report it (json / tab-separated / human).
  async add (pkg, uri, secret, opts) {
    const hook = await hookApi.add(pkg, uri, secret, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
    }
  }

  // List configured hooks, optionally filtered to a single package.
  async ls (pkg, opts) {
    const hooks = await hookApi.ls({ ...opts, package: pkg })
    if (opts.json) {
      output.buffer(hooks)
    } else if (!hooks.length) {
      // check for the empty list before the parseable branch: the previous
      // ordering crashed on `Object.keys(hooks[0])` when --parseable was
      // set and no hooks were configured
      output.standard("You don't have any hooks configured yet.")
    } else if (opts.parseable) {
      // header row from the first hook's keys, then one row per hook
      output.standard(Object.keys(hooks[0]).join('\t'))
      hooks.forEach(hook => {
        output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
      })
    } else if (!this.npm.silent) {
      output.standard(`You have ${hooks.length} hook${hooks.length !== 1 ? 's' : ''} configured.`)
      for (const hook of hooks) {
        output.standard(`Hook ${hook.id}: ${this.hookName(hook)}`)
        output.standard(`Endpoint: ${hook.endpoint}`)
        if (hook.last_delivery) {
          /* eslint-disable-next-line max-len */
          output.standard(`Triggered ${relativeDate(hook.last_delivery)}, response code was "${hook.response_code}"\n`)
        } else {
          output.standard('Never triggered\n')
        }
      }
    }
  }

  // Delete a hook by id and report what was removed.
  async rm (id, opts) {
    const hook = await hookApi.rm(id, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`- ${this.hookName(hook)} ${opts.unicode ? ' ✘ ' : ' X '} ${hook.endpoint}`)
    }
  }

  // Change a hook's endpoint URL and/or secret, then report the result.
  async update (id, uri, secret, opts) {
    const hook = await hookApi.update(id, uri, secret, opts)
    if (opts.json) {
      output.buffer(hook)
    } else if (opts.parseable) {
      output.standard(Object.keys(hook).join('\t'))
      output.standard(Object.keys(hook).map(k => hook[k]).join('\t'))
    } else if (!this.npm.silent) {
      output.standard(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`)
    }
  }

  // Owner hooks are displayed with a `~` prefix (e.g. `~user`).
  hookName (hook) {
    return `${hook.type === 'owner' ? '~' : ''}${hook.name}`
  }
}
module.exports = Hook

View File

@ -0,0 +1,238 @@
const { statSync } = require('node:fs')
const { relative, resolve } = require('node:path')
const { mkdir } = require('node:fs/promises')
const initJson = require('init-package-json')
const npa = require('npm-package-arg')
const libexec = require('libnpmexec')
const mapWorkspaces = require('@npmcli/map-workspaces')
const PackageJson = require('@npmcli/package-json')
const { log, output, input } = require('proc-log')
const updateWorkspaces = require('../utils/update-workspaces.js')
const BaseCommand = require('../base-cmd.js')
const posixPath = p => p.split('\\').join('/')
// npm init: create a package.json, either interactively via
// init-package-json, or by running an npm-exec-style `create-*` initializer.
class Init extends BaseCommand {
  static description = 'Create a package.json file'
  static params = [
    'init-author-name',
    'init-author-url',
    'init-license',
    'init-module',
    'init-version',
    'yes',
    'force',
    'scope',
    'workspace',
    'workspaces',
    'workspaces-update',
    'include-workspace-root',
  ]
  static name = 'init'
  static usage = [
    '<package-spec> (same as `npx create-<package-spec>`)',
    '<@scope> (same as `npx <@scope>/create`)',
  ]
  static workspaces = true
  static ignoreImplicitWorkspace = false
  // With args, behave like `npm exec create-<arg>`; otherwise run the
  // classic interactive init-package-json walkthrough.
  async exec (args) {
    // npm exec style
    if (args.length) {
      return await this.execCreate(args)
    }
    // no args, uses classic init-package-json boilerplate
    await this.template()
  }
  // Initialize each --workspace path (creating folders as needed), register
  // them in the root package.json, and reify once at the end.
  async execWorkspaces (args) {
    // if the root package is uninitiated, take care of it first
    if (this.npm.flatOptions.includeWorkspaceRoot) {
      await this.exec(args)
    }
    // reads package.json for the top-level folder first, by doing this we
    // ensure the command throw if no package.json is found before trying
    // to create a workspace package.json file or its folders
    const { content: pkg } = await PackageJson.normalize(this.npm.localPrefix).catch(err => {
      if (err.code === 'ENOENT') {
        log.warn('init', 'Missing package.json. Try with `--include-workspace-root`.')
      }
      throw err
    })
    // these are workspaces that are being created, so we cant use
    // this.setWorkspaces()
    const filters = this.npm.config.get('workspace')
    const wPath = filterArg => resolve(this.npm.localPrefix, filterArg)
    const workspacesPaths = []
    // npm-exec style, runs in the context of each workspace filter
    if (args.length) {
      for (const filterArg of filters) {
        const path = wPath(filterArg)
        await mkdir(path, { recursive: true })
        workspacesPaths.push(path)
        await this.execCreate(args, path)
        await this.setWorkspace(pkg, path)
      }
      return
    }
    // no args, uses classic init-package-json boilerplate
    for (const filterArg of filters) {
      const path = wPath(filterArg)
      await mkdir(path, { recursive: true })
      workspacesPaths.push(path)
      await this.template(path)
      await this.setWorkspace(pkg, path)
    }
    // reify packages once all workspaces have been initialized
    await this.update(workspacesPaths)
  }
  // Map the initializer spec to its `create-*` package name and run it with
  // libnpmexec in the given directory.
  async execCreate (args, runPath = process.cwd()) {
    const [initerName, ...otherArgs] = args
    let packageName = initerName
    // Only a scope, possibly with a version
    if (/^@[^/]+$/.test(initerName)) {
      // `@scope` → `@scope/create`, `@scope@ver` → `@scope/create@ver`
      const [, scope, version] = initerName.split('@')
      packageName = `@${scope}/create`
      if (version) {
        packageName = `${packageName}@${version}`
      }
    } else {
      const req = npa(initerName)
      if (req.type === 'git' && req.hosted) {
        // hosted git: `user/project` → `user/create-project`
        const { user, project } = req.hosted
        packageName = initerName.replace(`${user}/${project}`, `${user}/create-${project}`)
      } else if (req.registry) {
        // registry spec: prefix the package name with `create-`
        packageName = `${req.name.replace(/^(@[^/]+\/)?/, '$1create-')}@${req.rawSpec}`
      } else {
        throw Object.assign(new Error(
          'Unrecognized initializer: ' + initerName +
          '\nFor more package binary executing power check out `npx`:' +
          '\nhttps://docs.npmjs.com/cli/commands/npx'
        ), { code: 'EUNSUPPORTED' })
      }
    }
    const newArgs = [packageName, ...otherArgs]
    const {
      flatOptions,
      localBin,
      globalBin,
      chalk,
    } = this.npm
    const scriptShell = this.npm.config.get('script-shell') || undefined
    const yes = this.npm.config.get('yes')
    await libexec({
      ...flatOptions,
      args: newArgs,
      localBin,
      globalBin,
      output,
      chalk,
      path: this.npm.localPrefix,
      runPath,
      scriptShell,
      yes,
    })
  }
  // Run the interactive init-package-json questionnaire in `path`, printing
  // the intro banner unless --yes/--force was given. Returns the resulting
  // package data, or swallows a user cancel (^C) with a warning.
  async template (path = process.cwd()) {
    const initFile = this.npm.config.get('init-module')
    if (!this.npm.config.get('yes') && !this.npm.config.get('force')) {
      output.standard([
        'This utility will walk you through creating a package.json file.',
        'It only covers the most common items, and tries to guess sensible defaults.',
        '',
        'See `npm help init` for definitive documentation on these fields',
        'and exactly what they do.',
        '',
        'Use `npm install <pkg>` afterwards to install a package and',
        'save it as a dependency in the package.json file.',
        '',
        'Press ^C at any time to quit.',
      ].join('\n'))
    }
    try {
      const data = await input.read(() => initJson(path, initFile, this.npm.config))
      log.silly('package data', data)
      return data
    } catch (er) {
      if (er.message === 'canceled') {
        log.warn('init', 'canceled')
      } else {
        throw er
      }
    }
  }
  // Add the new workspace path to the root package.json `workspaces` list,
  // unless an existing glob already matches it.
  async setWorkspace (pkg, workspacePath) {
    const workspaces = await mapWorkspaces({ cwd: this.npm.localPrefix, pkg })
    // skip setting workspace if current package.json glob already satisfies it
    for (const wPath of workspaces.values()) {
      if (wPath === workspacePath) {
        return
      }
    }
    // if a create-pkg didn't generate a package.json at the workspace
    // folder level, it might not be recognized as a workspace by
    // mapWorkspaces, so we're just going to avoid touching the
    // top-level package.json
    try {
      statSync(resolve(workspacePath, 'package.json'))
    } catch (err) {
      return
    }
    const pkgJson = await PackageJson.load(this.npm.localPrefix)
    pkgJson.update({
      workspaces: [
        ...(pkgJson.content.workspaces || []),
        posixPath(relative(this.npm.localPrefix, workspacePath)),
      ],
    })
    await pkgJson.save()
  }
  // Reify the tree so the newly created workspaces get installed/linked
  // (unless --no-workspaces-update); takes the new workspace folder paths.
  async update (workspacesPaths) {
    // translate workspaces paths into an array containing workspaces names
    const workspaces = []
    for (const path of workspacesPaths) {
      const { content: { name } } = await PackageJson.normalize(path).catch(() => ({ content: {} }))
      if (name) {
        workspaces.push(name)
      }
    }
    const {
      config,
      flatOptions,
      localPrefix,
    } = this.npm
    await updateWorkspaces({
      config,
      flatOptions,
      localPrefix,
      npm: this.npm,
      workspaces,
    })
  }
}
module.exports = Init

View File

@ -0,0 +1,15 @@
const CI = require('./ci.js')
// npm install-ci-test
// Runs `npm ci` and then runs `npm test`
// `npm install-ci-test`: a clean-slate `npm ci` followed by `npm test`.
class InstallCITest extends CI {
  static description = 'Install a project with a clean slate and run tests'
  static name = 'install-ci-test'

  // Run `ci` with the user's args, then run the test script.
  async exec (args) {
    await this.npm.exec('ci', args)
    const testResult = await this.npm.exec('test', [])
    return testResult
  }
}
module.exports = InstallCITest

View File

@ -0,0 +1,15 @@
const Install = require('./install.js')
// npm install-test
// Runs `npm install` and then runs `npm test`
// `npm install-test`: an `npm install` followed by `npm test`.
class InstallTest extends Install {
  static description = 'Install package(s) and run tests'
  static name = 'install-test'

  // Run `install` with the user's args, then run the test script.
  async exec (args) {
    await this.npm.exec('install', args)
    const testResult = await this.npm.exec('test', [])
    return testResult
  }
}
module.exports = InstallTest

View File

@ -0,0 +1,176 @@
const { readdir } = require('node:fs/promises')
const { resolve, join } = require('node:path')
const { log } = require('proc-log')
const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const checks = require('npm-install-checks')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm install`: resolve and reify dependencies into node_modules via
// Arborist, with tab-completion support for local folder specs.
class Install extends ArboristWorkspaceCmd {
  static description = 'Install a package'
  static name = 'install'

  // These are in the order they will show up in when running "-h"
  // If adding to this list, consider adding also to ci.js
  static params = [
    'save',
    'save-exact',
    'global',
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'omit',
    'include',
    'strict-peer-deps',
    'prefer-dedupe',
    'package-lock',
    'package-lock-only',
    'foreground-scripts',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    'cpu',
    'os',
    'libc',
    ...super.params,
  ]

  static usage = ['[<package-spec> ...]']

  // Shell completion: offer local folders containing a package.json.
  static async completion (opts) {
    const { partialWord } = opts
    // install can complete to a folder with a package.json, or any package.
    // if it has a slash, then it's gotta be a folder
    // if it starts with https?://, then just give up, because it's a url
    if (/^https?:\/\//.test(partialWord)) {
      // do not complete to URLs
      return []
    }

    if (/\//.test(partialWord)) {
      // Complete fully to folder if there is exactly one match and it
      // is a folder containing a package.json file. If that is not the
      // case we return 0 matches, which will trigger the default bash
      // complete.
      const lastSlashIdx = partialWord.lastIndexOf('/')
      const partialName = partialWord.slice(lastSlashIdx + 1)
      const partialPath = partialWord.slice(0, lastSlashIdx) || '/'

      // true if `sibling` extends the typed prefix and holds a package.json
      const isDirMatch = async sibling => {
        if (sibling.slice(0, partialName.length) !== partialName) {
          return false
        }

        try {
          const contents = await readdir(join(partialPath, sibling))
          const result = (contents.indexOf('package.json') !== -1)
          return result
        } catch (er) {
          // unreadable / not a directory: not a match
          return false
        }
      }

      try {
        const siblings = await readdir(partialPath)
        const matches = []
        for (const sibling of siblings) {
          if (await isDirMatch(sibling)) {
            matches.push(sibling)
          }
        }
        if (matches.length === 1) {
          return [join(partialPath, matches[0])]
        }
        // no matches
        return []
      } catch (er) {
        return [] // invalid dir: no matching
      }
    }

    // Note: there used to be registry completion here,
    // but it stopped making sense somewhere around
    // 50,000 packages on the registry
  }

  // Reify the requested specs (or the current project) into node_modules.
  async exec (args) {
    // the /path/to/node_modules/..
    const globalTop = resolve(this.npm.globalDir, '..')
    const ignoreScripts = this.npm.config.get('ignore-scripts')
    const isGlobalInstall = this.npm.global
    const where = isGlobalInstall ? globalTop : this.npm.prefix
    const forced = this.npm.config.get('force')
    const scriptShell = this.npm.config.get('script-shell') || undefined

    // be very strict about engines when trying to update npm itself
    const npmInstall = args.find(arg => arg.startsWith('npm@') || arg === 'npm')
    if (isGlobalInstall && npmInstall) {
      const npmOptions = this.npm.flatOptions
      const npmManifest = await pacote.manifest(npmInstall, npmOptions)
      try {
        // throws if the target npm's "engines" field rejects this node
        checks.checkEngine(npmManifest, npmManifest.version, process.version)
      } catch (e) {
        if (forced) {
          log.warn(
            'install',
            /* eslint-disable-next-line max-len */
            `Forcing global npm install with incompatible version ${npmManifest.version} into node ${process.version}`
          )
        } else {
          throw e
        }
      }
    }

    // don't try to install the prefix into itself
    args = args.filter(a => resolve(a) !== this.npm.prefix)

    // `npm i -g` => "install this package globally"
    if (where === globalTop && !args.length) {
      args = ['.']
    }

    // throw usage error if trying to install empty package
    // name to global space, e.g: `npm i -g ""`
    if (where === globalTop && !args.every(Boolean)) {
      throw this.usageError()
    }

    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      auditLevel: null,
      path: where,
      add: args,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.reify(opts)

    // a plain local `npm install` runs the project's own lifecycle scripts
    if (!args.length && !isGlobalInstall && !ignoreScripts) {
      const scripts = [
        'preinstall',
        'install',
        'postinstall',
        'prepublish', // XXX(npm9) should we remove this finally??
        'preprepare',
        'prepare',
        'postprepare',
      ]
      for (const event of scripts) {
        await runScript({
          path: where,
          args: [],
          scriptShell,
          stdio: 'inherit',
          event,
        })
      }
    }
    // print audit/funding summaries and save the lockfile
    await reifyFinish(this.npm, arb)
  }
}
module.exports = Install

View File

@ -0,0 +1,189 @@
const { readdir } = require('node:fs/promises')
const { resolve } = require('node:path')
const npa = require('npm-package-arg')
const pkgJson = require('@npmcli/package-json')
const semver = require('semver')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm link`: with no args, symlink the current folder into the global
// node_modules; with args, symlink globally-installed packages into the
// local project.
class Link extends ArboristWorkspaceCmd {
  static description = 'Symlink a package folder'
  static name = 'link'
  static usage = [
    '[<package-spec>]',
  ]

  static params = [
    'save',
    'save-exact',
    'global',
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'strict-peer-deps',
    'package-lock',
    'omit',
    'include',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]

  // Shell completion: offer package folders present in the global
  // node_modules (hidden/scope-placeholder dirs filtered out).
  static async completion (opts, npm) {
    const dir = npm.globalDir
    const files = await readdir(dir)
    return files.filter(f => !/^[._-]/.test(f))
  }

  async exec (args) {
    if (this.npm.global) {
      throw Object.assign(
        new Error(
          'link should never be --global.\n' +
          'Please re-run this command with --local'
        ),
        { code: 'ELINKGLOBAL' }
      )
    }
    // install-links is implicitly false when running `npm link`
    this.npm.config.set('install-links', false)

    // link with no args: symlink the folder to the global location
    // link with package arg: symlink the global to the local
    args = args.filter(a => resolve(a) !== this.npm.prefix)
    return args.length
      ? this.linkInstall(args)
      : this.linkPkg()
  }

  // Symlink the named global packages into the local prefix, installing
  // them globally first if they aren't already there.
  async linkInstall (args) {
    // load current packages from the global space,
    // and then add symlinks installs locally
    const globalTop = resolve(this.npm.globalDir, '..')
    const Arborist = require('@npmcli/arborist')
    const globalOpts = {
      ...this.npm.flatOptions,
      Arborist,
      path: globalTop,
      global: true,
      prune: false,
    }
    const globalArb = new Arborist(globalOpts)

    // get only current top-level packages from the global space
    const globals = await globalArb.loadActual({
      filter: (node, kid) =>
        !node.isRoot || args.some(a => npa(a).name === kid),
    })

    // any extra arg that is missing from the current
    // global space should be reified there first
    const missing = this.missingArgsFromTree(globals, args)
    if (missing.length) {
      await globalArb.reify({
        ...globalOpts,
        add: missing,
      })
    }

    // get a list of module names that should be linked in the local prefix
    const names = []
    for (const a of args) {
      const arg = npa(a)
      if (arg.type === 'directory') {
        // a folder arg is named by its package.json, not its path
        const { content } = await pkgJson.normalize(arg.fetchSpec)
        names.push(content.name)
      } else {
        names.push(arg.name)
      }
    }

    // npm link should not save=true by default unless you're
    // using any of --save-dev or other types
    const save =
      Boolean(
        (this.npm.config.find('save') !== 'default' &&
        this.npm.config.get('save')) ||
        this.npm.config.get('save-optional') ||
        this.npm.config.get('save-peer') ||
        this.npm.config.get('save-dev') ||
        this.npm.config.get('save-prod')
      )

    // create a new arborist instance for the local prefix and
    // reify all the pending names as symlinks there
    const localArb = new Arborist({
      ...this.npm.flatOptions,
      prune: false,
      path: this.npm.prefix,
      save,
    })
    await localArb.reify({
      ...this.npm.flatOptions,
      prune: false,
      path: this.npm.prefix,
      // `#` must be percent-encoded or npa treats it as a fragment marker
      add: names.map(l => `file:${resolve(globalTop, 'node_modules', l).replace(/#/g, '%23')}`),
      save,
      workspaces: this.workspaceNames,
    })

    await reifyFinish(this.npm, localArb)
  }

  // Symlink the current project (or its selected workspaces) into the
  // global node_modules.
  async linkPkg () {
    const wsp = this.workspacePaths
    const paths = wsp && wsp.length ? wsp : [this.npm.prefix]
    const add = paths.map(path => `file:${path.replace(/#/g, '%23')}`)
    const globalTop = resolve(this.npm.globalDir, '..')
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      Arborist,
      path: globalTop,
      global: true,
    })
    await arb.reify({
      add,
    })
    await reifyFinish(this.npm, arb)
  }

  // Returns a list of items that can't be fulfilled by
  // things found in the current arborist inventory
  missingArgsFromTree (tree, args) {
    if (tree.isLink) {
      return this.missingArgsFromTree(tree.target, args)
    }

    const foundNodes = []
    const missing = args.filter(a => {
      const arg = npa(a)
      const nodes = tree.children.values()
      // "every node fails to satisfy the arg" => the arg is missing
      const argFound = [...nodes].every(node => {
        // TODO: write tests for unmatching version specs, this is hard to test
        // atm but should be simple once we have a mocked registry again
        if (arg.name !== node.name /* istanbul ignore next */ || (
          arg.version &&
          /* istanbul ignore next */
          !semver.satisfies(node.version, arg.version)
        )) {
          foundNodes.push(node)
          return true
        }
        // explicit: a node satisfying this arg exists, so it is not missing
        return false
      })
      return argFound
    })

    // remove nodes from the loaded tree in order
    // to avoid dropping them later when reifying
    for (const node of foundNodes) {
      node.parent = null
    }

    return missing
  }
}
module.exports = Link

View File

@ -0,0 +1,13 @@
const LS = require('./ls.js')
// `npm ll` is exactly `npm ls --long`.
class LL extends LS {
  static name = 'll'
  static usage = ['[[<@scope>/]<pkg> ...]']

  // Force the long-output flag, then defer entirely to `ls`.
  async exec (args) {
    this.npm.config.set('long', true)
    const result = await super.exec(args)
    return result
  }
}
module.exports = LL

View File

@ -0,0 +1,50 @@
const { log, output } = require('proc-log')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const auth = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
// `npm login`: authenticate against a registry and persist the resulting
// credentials (and any scope->registry mapping) to the user config.
class Login extends BaseCommand {
  static description = 'Login to a registry user account'
  static name = 'login'
  static params = [
    'registry',
    'scope',
    'auth-type',
  ]

  async exec () {
    const scope = this.npm.config.get('scope')
    let registry = this.npm.config.get('registry')

    // a scoped registry (e.g. @myorg:registry) wins over the default
    // registry unless --registry was given explicitly on the command line
    if (scope) {
      const scopedRegistry = this.npm.config.get(`${scope}:registry`)
      const cliRegistry = this.npm.config.get('registry', 'cli')
      if (scopedRegistry && !cliRegistry) {
        registry = scopedRegistry
      }
    }

    const creds = this.npm.config.getCredentialsByURI(registry)

    // redact anything sensitive before it hits the log
    log.notice('', `Log in on ${replaceInfo(registry)}`)

    const { message, newCreds } = await auth.login(this.npm, {
      ...this.npm.flatOptions,
      creds,
      registry,
    })

    this.npm.config.delete('_token', 'user') // prevent legacy pollution
    this.npm.config.setCredentialsByURI(registry, newCreds)

    if (scope) {
      // template literal for consistency with the `${scope}:registry` read above
      this.npm.config.set(`${scope}:registry`, registry, 'user')
    }
    await this.npm.config.save('user')

    output.standard(message)
  }
}
module.exports = Login

View File

@ -0,0 +1,50 @@
const npmFetch = require('npm-registry-fetch')
const { getAuth } = npmFetch
const { log } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// `npm logout`: invalidate the registry token (or clear basic-auth creds)
// and remove them from whichever config level they were stored in.
class Logout extends BaseCommand {
  static description = 'Log out of the registry'
  static name = 'logout'
  static params = [
    'registry',
    'scope',
  ]

  async exec () {
    const registry = this.npm.config.get('registry')
    const scope = this.npm.config.get('scope')
    // a configured scope logs out of that scope's registry, not the default
    const regRef = scope ? `${scope}:registry` : 'registry'
    const reg = this.npm.config.get(regRef) || registry

    const auth = getAuth(reg, this.npm.flatOptions)

    const level = this.npm.config.find(`${auth.regKey}:${auth.authKey}`)

    // find the config level and only delete from there
    if (auth.token) {
      log.verbose('logout', `clearing token for ${reg}`)
      // ask the registry to revoke the token server-side as well
      await npmFetch(`/-/user/token/${encodeURIComponent(auth.token)}`, {
        ...this.npm.flatOptions,
        registry: reg,
        method: 'DELETE',
        ignoreBody: true,
      })
    } else if (auth.isBasicAuth) {
      // basic auth has no server-side token to revoke; just clear locally
      log.verbose('logout', `clearing user credentials for ${reg}`)
    } else {
      const msg = `not logged in to ${reg}, so can't log out!`
      throw Object.assign(new Error(msg), { code: 'ENEEDAUTH' })
    }

    if (scope) {
      this.npm.config.delete(regRef, level)
    }

    this.npm.config.clearCredentialsByURI(reg, level)

    await this.npm.config.save(level)
  }
}
module.exports = Logout

579
Dependencies/NodeJS/node_modules/npm/lib/commands/ls.js generated vendored Normal file
View File

@ -0,0 +1,579 @@
const { resolve, relative, sep } = require('node:path')
const archy = require('archy')
const { breadth } = require('treeverse')
const npa = require('npm-package-arg')
const { output } = require('proc-log')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const localeCompare = require('@isaacs/string-locale-compare')('en')
// prefix used when printing relative symlink targets, e.g. `./packages/foo`
const relativePrefix = `.${sep}`

// Symbols used to stash ls-specific metadata on tree nodes and output items
// without colliding with real node/package properties.
const _depth = Symbol('depth')
const _dedupe = Symbol('dedupe')
const _filteredBy = Symbol('filteredBy')
const _include = Symbol('include')
const _invalid = Symbol('invalid')
const _name = Symbol('name')
const _missing = Symbol('missing')
const _parent = Symbol('parent')
const _problems = Symbol('problems')
const _required = Symbol('required')
const _type = Symbol('type')
// `npm ls`: load the installed (or lockfile) tree via Arborist, traverse it
// breadth-first, and render it as human (archy), JSON, or parseable output.
class LS extends ArboristWorkspaceCmd {
  static description = 'List installed packages'
  static name = 'ls'
  static usage = ['<package-spec>']
  static params = [
    'all',
    'json',
    'long',
    'parseable',
    'global',
    'depth',
    'omit',
    'include',
    'link',
    'package-lock-only',
    'unicode',
    ...super.params,
  ]

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  async exec (args) {
    const all = this.npm.config.get('all')
    const chalk = this.npm.chalk
    const depth = this.npm.config.get('depth')
    const global = this.npm.global
    const json = this.npm.config.get('json')
    const link = this.npm.config.get('link')
    const long = this.npm.config.get('long')
    const omit = this.npm.flatOptions.omit
    const parseable = this.npm.config.get('parseable')
    const unicode = this.npm.config.get('unicode')
    const packageLockOnly = this.npm.config.get('package-lock-only')
    const workspacesEnabled = this.npm.flatOptions.workspacesEnabled

    const path = global ? resolve(this.npm.globalDir, '..') : this.npm.prefix

    const Arborist = require('@npmcli/arborist')

    const arb = new Arborist({
      global,
      ...this.npm.flatOptions,
      legacyPeerDeps: false,
      path,
    })
    const tree = await this.initTree({ arb, args, packageLockOnly })

    // filters by workspaces nodes when using -w <workspace-name>
    // We only have to filter the first layer of edges, so we don't
    // explore anything that isn't part of the selected workspace set.
    let wsNodes
    if (this.workspaceNames && this.workspaceNames.length) {
      wsNodes = arb.workspaceNodes(tree, this.workspaceNames)
    }
    const filterBySelectedWorkspaces = edge => {
      // with --no-workspaces, hide workspace children of the root entirely
      if (!workspacesEnabled
        && edge.from.isProjectRoot
        && edge.to.isWorkspace
      ) {
        return false
      }

      if (!wsNodes || !wsNodes.length) {
        return true
      }

      if (this.npm.flatOptions.includeWorkspaceRoot
        && edge.to && !edge.to.isWorkspace) {
        return true
      }

      if (edge.from.isProjectRoot) {
        return (edge.to
          && edge.to.isWorkspace
          && wsNodes.includes(edge.to.target))
      }

      return true
    }

    const seenItems = new Set()
    const seenNodes = new Map()
    const problems = new Set()

    // defines special handling of printed depth when filtering with args
    const filterDefaultDepth = depth === null ? Infinity : depth
    const depthToPrint = (all || args.length)
      ? filterDefaultDepth
      : (depth || 0)

    // add root node of tree to list of seenNodes
    seenNodes.set(tree.path, tree)

    // tree traversal happens here, using treeverse.breadth
    const result = await breadth({
      tree,
      // recursive method, `node` is going to be the current elem (starting from
      // the `tree` obj) that was just visited in the `visit` method below
      // `nodeResult` is going to be the returned `item` from `visit`
      getChildren (node, nodeResult) {
        const seenPaths = new Set()
        const workspace = node.isWorkspace
        const currentDepth = workspace ? 0 : node[_depth]
        const shouldSkipChildren =
          !(node instanceof Arborist.Node) || (currentDepth > depthToPrint)
        return (shouldSkipChildren)
          ? []
          : [...(node.target).edgesOut.values()]
            .filter(filterBySelectedWorkspaces)
            .filter(currentDepth === 0 ? filterByEdgesTypes({
              link,
              omit,
            }) : () => true)
            .map(mapEdgesToNodes({ seenPaths }))
            .concat(appendExtraneousChildren({ node, seenPaths }))
            .sort(sortAlphabetically)
            .map(augmentNodesWithMetadata({
              args,
              currentDepth,
              nodeResult,
              seenNodes,
            }))
      },
      // visit each `node` of the `tree`, returning an `item` - these are
      // the elements that will be used to build the final output
      visit (node) {
        node[_problems] = getProblems(node, { global })

        const item = json
          ? getJsonOutputItem(node, { global, long })
          : parseable
            ? null
            : getHumanOutputItem(node, { args, chalk, global, long })

        // loop through list of node problems to add them to global list
        if (node[_include]) {
          for (const problem of node[_problems]) {
            problems.add(problem)
          }
        }

        seenItems.add(item)

        // return a promise so we don't blow the stack
        return Promise.resolve(item)
      },
    })

    // handle the special case of a broken package.json in the root folder
    const [rootError] = tree.errors.filter(e =>
      e.code === 'EJSONPARSE' && e.path === resolve(path, 'package.json'))

    if (json) {
      output.buffer(jsonOutput({ path, problems, result, rootError, seenItems }))
    } else {
      output.standard(parseable
        ? parseableOutput({ seenNodes, global, long })
        : humanOutput({ chalk, result, seenItems, unicode })
      )
    }

    // if filtering items, should exit with error code on no results
    if (result && !result[_include] && args.length) {
      process.exitCode = 1
    }

    if (rootError) {
      throw Object.assign(
        new Error('Failed to parse root package.json'),
        { code: 'EJSONPARSE' }
      )
    }

    // extraneous-only problems are reported but are not fatal
    const shouldThrow = problems.size &&
      ![...problems].every(problem => problem.startsWith('extraneous:'))
    if (shouldThrow) {
      throw Object.assign(
        new Error([...problems].join('\n')),
        { code: 'ELSPROBLEMS' }
      )
    }
  }

  // Load the tree from the lockfile (--package-lock-only) or from disk,
  // and seed the root with the traversal metadata symbols.
  async initTree ({ arb, args, packageLockOnly }) {
    const tree = await (
      packageLockOnly
        ? arb.loadVirtual()
        : arb.loadActual()
    )

    tree[_include] = args.length === 0
    tree[_depth] = 0

    return tree
  }
}
module.exports = LS
// True if the node was resolved from a git or hosted-git (e.g. GitHub)
// source. Always returns a boolean; the original returned `undefined`
// for nodes with no resolved URL.
const isGitNode = (node) => {
  if (!node.resolved) {
    return false
  }

  try {
    const { type } = npa(node.resolved)
    return type === 'git' || type === 'hosted'
  } catch (err) {
    // unparseable resolved value: treat as not-git rather than crashing
    return false
  }
}
// True when the node reached the tree through an optional or peerOptional
// edge (missing optional deps are warnings, not errors).
const isOptional = (node) => {
  const edgeType = node[_type]
  return edgeType === 'optional' || edgeType === 'peerOptional'
}
// "Extraneous" only makes sense for local trees: global installs routinely
// hold top-level packages with no dependent, so they are never flagged.
const isExtraneous = (node, { global }) => {
  return global ? false : node.extraneous
}
// Collect the problem strings for a single node: missing (non-optional)
// deps, invalid version matches, and extraneous packages. The exact string
// prefixes ("missing:", "invalid:", "extraneous:") are matched elsewhere
// when deciding whether to throw ELSPROBLEMS.
const getProblems = (node, { global }) => {
  const problems = new Set()

  if (node[_missing] && !isOptional(node)) {
    problems.add(`missing: ${node.pkgid}, required by ${node[_missing]}`)
  }

  if (node[_invalid]) {
    problems.add(`invalid: ${node.pkgid} ${node.path}`)
  }

  if (isExtraneous(node, { global })) {
    problems.add(`extraneous: ${node.pkgid} ${node.path}`)
  }

  return problems
}
// annotates _parent and _include metadata into the resulting
// item obj allowing for filtering out results during output
const augmentItemWithIncludeMetadata = (node, item) => {
  item[_parent] = node[_parent]
  item[_include] = node[_include]

  // an included node forces every one of its ancestors to be included too,
  // so the path to it survives output filtering (archy/json tree building)
  if (node[_include]) {
    for (let ancestor = node[_parent]; ancestor; ancestor = ancestor[_parent]) {
      ancestor[_include] = true
    }
  }
  return item
}
// Build the archy item ({ label, nodes }) for one node in human output:
// colorized pkgid plus UNMET/invalid/extraneous/deduped/overridden markers,
// git source URL, symlink target, and (with --long) the description.
const getHumanOutputItem = (node, { args, chalk, global, long }) => {
  const { pkgid, path } = node
  const workspacePkgId = chalk.blueBright(pkgid)
  let printable = node.isWorkspace ? workspacePkgId : pkgid

  // special formatting for top-level package name
  if (node.isRoot) {
    const hasNoPackageJson = !Object.keys(node.package).length
    if (hasNoPackageJson || global) {
      printable = path
    } else {
      printable += `${long ? '\n' : ' '}${path}`
    }
  }

  // TODO there is a LOT of overlap with lib/utils/explain-dep.js here

  // when filtering by args, the matched dep names get highlighted
  const highlightDepName = args.length && node[_filteredBy]
  // missing optional deps are only a warning (yellow), not an error (red)
  const missingColor = isOptional(node)
    ? chalk.yellow
    : chalk.red
  const missingMsg = `UNMET ${isOptional(node) ? 'OPTIONAL ' : ''}DEPENDENCY`
  const targetLocation = node.root
    ? relative(node.root.realpath, node.realpath)
    : node.targetLocation
  const invalid = node[_invalid]
    ? `invalid: ${node[_invalid]}`
    : ''
  // assemble the label: [UNMET] name [deduped] [invalid] [extraneous]
  // [overridden] [(git url)] [-> link target] [\ndescription]
  const label =
    (
      node[_missing]
        ? missingColor(missingMsg) + ' '
        : ''
    ) +
    `${highlightDepName ? chalk.yellow(printable) : printable}` +
    (
      node[_dedupe]
        ? ' ' + chalk.dim('deduped')
        : ''
    ) +
    (
      invalid
        ? ' ' + chalk.red(invalid)
        : ''
    ) +
    (
      isExtraneous(node, { global })
        ? ' ' + chalk.red('extraneous')
        : ''
    ) +
    (
      node.overridden
        ? ' ' + chalk.dim('overridden')
        : ''
    ) +
    (isGitNode(node) ? ` (${node.resolved})` : '') +
    (node.isLink ? ` -> ${relativePrefix}${targetLocation}` : '') +
    (long ? `\n${node.package.description || ''}` : '')

  return augmentItemWithIncludeMetadata(node, { label, nodes: [] })
}
// Build the plain-object item for one node in --json output, including
// version/resolved, --long package details, and problem/invalid/missing
// annotations derived from the traversal metadata symbols.
const getJsonOutputItem = (node, { global, long }) => {
  const item = {}

  if (node.version) {
    item.version = node.version
  }

  if (node.resolved) {
    item.resolved = node.resolved
  }

  // if the node is the project root, do not add the overridden flag. the project root can't be
  // overridden anyway, and if we add the flag it causes undesirable behavior when `npm ls --json`
  // is ran in an empty directory since we end up printing an object with only an overridden prop
  if (!node.isProjectRoot) {
    item.overridden = node.overridden
  }

  // _name is used later when nesting items under parent.dependencies
  item[_name] = node.name

  // special formatting for top-level package name
  const hasPackageJson =
    node && node.package && Object.keys(node.package).length
  if (node.isRoot && hasPackageJson) {
    item.name = node.package.name || node.name
  }

  if (long && !node[_missing]) {
    item.name = item[_name]
    const { dependencies, ...packageInfo } = node.package
    Object.assign(item, packageInfo)
    item.extraneous = false
    item.path = node.path
    item._dependencies = {
      ...node.package.dependencies,
      ...node.package.optionalDependencies,
    }
    item.devDependencies = node.package.devDependencies || {}
    item.peerDependencies = node.package.peerDependencies || {}
  }

  // augment json output items with extra metadata
  if (isExtraneous(node, { global })) {
    item.extraneous = true
  }

  if (node[_invalid]) {
    item.invalid = node[_invalid]
  }

  if (node[_missing] && !isOptional(node)) {
    item.required = node[_required]
    item.missing = true
  }
  if (node[_include] && node[_problems] && node[_problems].size) {
    item.problems = [...node[_problems]]
  }

  return augmentItemWithIncludeMetadata(node, item)
}
// Edge predicate factory for the top level of the tree: drop edges whose
// type is in the --omit list, and with --link keep only symlinked deps.
const filterByEdgesTypes = ({ link, omit }) => (edge) => {
  for (const omitType of omit) {
    if (edge[omitType]) {
      return false
    }
  }
  if (!link) {
    return true
  }
  return edge.to && edge.to.isLink
}
// extraneous children are not represented
// in edges out, so here we add them to the list:
const appendExtraneousChildren = ({ node, seenPaths }) => {
  const extras = []
  for (const child of node.children.values()) {
    if (child.extraneous && !seenPaths.has(child.path)) {
      extras.push(child)
    }
  }
  return extras
}
// Map an edge to the node it points at, synthesizing a placeholder node for
// missing deps and annotating the node with the edge's spec/type/invalid
// metadata used later when rendering.
const mapEdgesToNodes = ({ seenPaths }) => (edge) => {
  let node = edge.to

  // if the edge is linking to a missing node, we go ahead
  // and create a new obj that will represent the missing node
  if (edge.missing || (edge.optional && !node)) {
    const { name, spec } = edge
    const pkgid = `${name}@${spec}`
    node = { name, pkgid, [_missing]: edge.from.pkgid }
  }

  // keeps track of a set of seen paths to avoid the edge case in which a tree
  // item would appear twice given that it's a children of an extraneous item,
  // so it's marked extraneous but it will ALSO show up in edgesOuts of
  // its parent so it ends up as two diff nodes if we don't track it
  if (node.path) {
    seenPaths.add(node.path)
  }

  node[_required] = edge.spec || '*'
  node[_type] = edge.type

  // accumulate invalid-spec messages (a node can be invalid for several deps)
  if (edge.invalid) {
    const spec = JSON.stringify(node[_required])
    const from = edge.from.location || 'the root project'
    node[_invalid] = (node[_invalid] ? node[_invalid] + ', ' : '') +
      (`${spec} from ${from}`)
  }

  return node
}
// With no positional args everything matches; otherwise the node must
// satisfy at least one of the requested specs (e.g. `npm ls foo@2`).
const filterByPositionalArgs = (args, { node }) => {
  if (args.length === 0) {
    return true
  }
  return args.some((spec) => node.satisfies?.(spec))
}
// Per-child hook run during traversal: dedupe-detect already-seen nodes
// (cloning them into plain objects marked _dedupe), then stamp the node
// with _parent/_include/_filteredBy/_depth used by the output builders.
const augmentNodesWithMetadata = ({
  args,
  currentDepth,
  nodeResult,
  seenNodes,
}) => (node) => {
  // if the original edge was a deduped dep, treeverse will fail to
  // revisit that node in tree traversal logic, so we make it so that
  // we have a diff obj for deduped nodes:
  if (seenNodes.has(node.path)) {
    const { realpath, root } = node
    const targetLocation = root ? relative(root.realpath, realpath)
      : node.targetLocation
    node = {
      name: node.name,
      version: node.version,
      pkgid: node.pkgid,
      package: node.package,
      path: node.path,
      isLink: node.isLink,
      realpath: node.realpath,
      targetLocation,
      [_type]: node[_type],
      [_invalid]: node[_invalid],
      [_missing]: node[_missing],
      // if it's missing, it's not deduped, it's just missing
      [_dedupe]: !node[_missing],
    }
  } else {
    // keeps track of already seen nodes in order to check for dedupes
    seenNodes.set(node.path, node)
  }

  // _parent is going to be a ref to a treeverse-visited node (returned from
  // getHumanOutputItem, getJsonOutputItem, etc) so that we have an easy
  // shortcut to place new nodes in their right place during tree traversal
  node[_parent] = nodeResult
  // _include is the property that allow us to filter based on position args
  // e.g: `npm ls foo`, `npm ls simple-output@2`
  // _filteredBy is used to apply extra color info to the item that
  // was used in args in order to filter
  node[_filteredBy] = node[_include] =
    filterByPositionalArgs(args, { node: seenNodes.get(node.path) })
  // _depth keeps track of how many levels deep tree traversal currently is
  // so that we can `npm ls --depth=1`
  node[_depth] = currentDepth + 1

  return node
}
// Comparator for sibling nodes: locale-aware ordering by pkgid (name@version).
const sortAlphabetically = (left, right) => localeCompare(left.pkgid, right.pkgid)
// Assemble the archy tree from the visited items and render it as text.
const humanOutput = ({ chalk, result, seenItems, unicode }) => {
  // we need to traverse the entire tree in order to determine which items
  // should be included (since a nested transitive included dep will make it
  // so that all its ancestors should be displayed)
  // here is where we put items in their expected place for archy output
  for (const item of seenItems) {
    if (item[_include] && item[_parent]) {
      item[_parent].nodes.push(item)
    }
  }

  if (!result.nodes.length) {
    result.nodes = ['(empty)']
  }

  const archyOutput = archy(result, '', { unicode })
  // chalk.reset clears any colors leaking in from the labels
  return chalk.reset(archyOutput)
}
// Assemble the nested --json result: attach the collected problem strings
// (plus a root package.json parse error, if any) and nest each included
// item under its parent's `dependencies` object.
const jsonOutput = ({ path, problems, result, rootError, seenItems }) => {
  if (problems.size) {
    result.problems = [...problems]
  }

  if (rootError) {
    // a broken root package.json invalidates the whole report
    result.problems = [
      ...(result.problems || []),
      `error in ${path}: Failed to parse root package.json`,
    ]
    result.invalid = true
  }

  // we need to traverse the entire tree in order to determine which items
  // should be included (since a nested transitive included dep will make it
  // so that all its ancestors should be displayed)
  // here is where we put items in their expected place for json output
  for (const item of seenItems) {
    // append current item to its parent item.dependencies obj in order
    // to provide a json object structure that represents the installed tree
    if (item[_include] && item[_parent]) {
      if (!item[_parent].dependencies) {
        item[_parent].dependencies = {}
      }

      item[_parent].dependencies[item[_name]] = item
    }
  }

  return result
}
// Render one line per included node for --parseable output: the node path,
// plus (with --long) `:pkgid[:realpath][:EXTRANEOUS][:INVALID][:OVERRIDDEN]`.
const parseableOutput = ({ global, long, seenNodes }) => {
  let out = ''
  for (const node of seenNodes.values()) {
    if (node.path && node[_include]) {
      out += node.path
      if (long) {
        out += `:${node.pkgid}`
        // realpath differs from path only for symlinked packages
        out += node.path !== node.realpath ? `:${node.realpath}` : ''
        out += isExtraneous(node, { global }) ? ':EXTRANEOUS' : ''
        out += node[_invalid] ? ':INVALID' : ''
        out += node.overridden ? ':OVERRIDDEN' : ''
      }
      out += '\n'
    }
  }
  return out.trim()
}

View File

@ -0,0 +1,151 @@
const liborg = require('libnpmorg')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
const { output } = require('proc-log')
// `npm org`: manage org membership via libnpmorg (set/rm/ls), with json,
// parseable, and human output modes. All subcommands run under otplease so
// a 2FA prompt can retry the request with an OTP.
class Org extends BaseCommand {
  static description = 'Manage orgs'
  static name = 'org'
  static usage = [
    'set orgname username [developer | admin | owner]',
    'rm orgname username',
    'ls orgname [<username>]',
  ]

  static params = ['registry', 'otp', 'json', 'parseable']

  static async completion (opts) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      return ['set', 'rm', 'ls']
    }

    switch (argv[2]) {
      case 'ls':
      case 'add':
      case 'rm':
      case 'set':
        return []
      default:
        throw new Error(argv[2] + ' not recognized')
    }
  }

  async exec ([cmd, orgname, username, role]) {
    return otplease(this.npm, {
      ...this.npm.flatOptions,
    }, opts => {
      switch (cmd) {
        // `add` is an undocumented alias for `set`
        case 'add':
        case 'set':
          return this.set(orgname, username, role, opts)
        case 'rm':
          return this.rm(orgname, username, opts)
        case 'ls':
          return this.ls(orgname, username, opts)
        default:
          throw this.usageError()
      }
    })
  }

  // Add `user` to `org` with the given role (default: developer).
  async set (org, user, role, opts) {
    role = role || 'developer'
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }

    if (!user) {
      throw new Error('Second argument `username` is required.')
    }

    if (!['owner', 'admin', 'developer'].find(x => x === role)) {
      throw new Error(
        /* eslint-disable-next-line max-len */
        'Third argument `role` must be one of `owner`, `admin`, or `developer`, with `developer` being the default value if omitted.'
      )
    }

    const memDeets = await liborg.set(org, user, role, opts)
    if (opts.json) {
      output.standard(JSON.stringify(memDeets, null, 2))
    } else if (opts.parseable) {
      output.standard(['org', 'orgsize', 'user', 'role'].join('\t'))
      output.standard(
        [memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t')
      )
    } else if (!this.npm.silent) {
      output.standard(
        `Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${
          memDeets.org.size
        } member${memDeets.org.size === 1 ? '' : 's'} in this org.`
      )
    }

    return memDeets
  }

  // Remove `user` from `org`, then report the remaining member count.
  async rm (org, user, opts) {
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }

    if (!user) {
      throw new Error('Second argument `username` is required.')
    }

    await liborg.rm(org, user, opts)
    const roster = await liborg.ls(org, opts)
    // strip any leading ~ or @ sigils before displaying
    user = user.replace(/^[~@]?/, '')
    org = org.replace(/^[~@]?/, '')

    const userCount = Object.keys(roster).length

    if (opts.json) {
      output.buffer({
        user,
        org,
        userCount,
        deleted: true,
      })
    } else if (opts.parseable) {
      output.standard(['user', 'org', 'userCount', 'deleted'].join('\t'))
      output.standard([user, org, userCount, true].join('\t'))
    } else if (!this.npm.silent) {
      output.standard(
        `Successfully removed ${user} from ${org}. You now have ${userCount} member${
          userCount === 1 ? '' : 's'
        } in this org.`
      )
    }
  }

  // List the org roster, optionally narrowed to a single user.
  async ls (org, user, opts) {
    if (!org) {
      throw new Error('First argument `orgname` is required.')
    }

    let roster = await liborg.ls(org, opts)
    if (user) {
      const newRoster = {}
      if (roster[user]) {
        newRoster[user] = roster[user]
      }

      roster = newRoster
    }

    if (opts.json) {
      output.buffer(roster)
    } else if (opts.parseable) {
      output.standard(['user', 'role'].join('\t'))
      Object.keys(roster).forEach(u => {
        output.standard([u, roster[u]].join('\t'))
      })
    } else if (!this.npm.silent) {
      const chalk = this.npm.chalk
      for (const u of Object.keys(roster).sort()) {
        output.standard(`${u} - ${chalk.cyan(roster[u])}`)
      }
    }
  }
}
module.exports = Org

View File

@ -0,0 +1,282 @@
const { resolve } = require('node:path')
const { stripVTControlCharacters } = require('node:util')
const pacote = require('pacote')
const table = require('text-table')
const npa = require('npm-package-arg')
const pickManifest = require('npm-pick-manifest')
const { output } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// Parse a package spec with npa, yielding null instead of throwing when the
// spec is malformed.
const safeNpa = (spec) => {
  let parsed = null
  try {
    parsed = npa(spec)
  } catch {
    // an unparseable spec is treated as "no result", not as a fatal error
  }
  return parsed
}
// This string is load bearing and is shared with Arborist
// (edge.error === MISSING marks a dependency that is expected but not
// currently installed on disk — see #getOutdatedInfo below)
const MISSING = 'MISSING'
// npm outdated — compare installed dependencies against the registry and
// report any that are behind their wanted or latest versions.
class Outdated extends ArboristWorkspaceCmd {
  static description = 'Check for outdated packages'
  static name = 'outdated'
  static usage = ['[<package-spec> ...]']
  static params = [
    'all',
    'json',
    'long',
    'parseable',
    'global',
    'workspace',
  ]

  // actual (on-disk) tree loaded for the current project or global dir
  #tree
  // accumulated records for every outdated dependency found
  #list = []
  // dependency edges selected for inspection
  #edges = new Set()
  // when set, only edges whose dependent node is in this set are considered
  // (scopes the check to the selected workspaces)
  #filterSet

  async exec (args) {
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: this.npm.global ? resolve(this.npm.globalDir, '..') : this.npm.prefix,
    })
    this.#tree = await arb.loadActual()

    if (this.workspaceNames?.length) {
      this.#filterSet = arb.workspaceDependencySet(
        this.#tree,
        this.workspaceNames,
        this.npm.flatOptions.includeWorkspaceRoot
      )
    } else if (!this.npm.flatOptions.workspacesEnabled) {
      this.#filterSet = arb.excludeWorkspacesDependencySet(this.#tree)
    }

    if (args.length) {
      for (const arg of args) {
        // specific deps
        this.#getEdges(this.#tree.inventory.query('name', arg), 'edgesIn')
      }
    } else {
      if (this.npm.config.get('all')) {
        // all deps in tree
        this.#getEdges(this.#tree.inventory.values(), 'edgesOut')
      }
      // top-level deps
      this.#getEdges()
    }

    await Promise.all([...this.#edges].map((e) => this.#getOutdatedInfo(e)))

    // sorts list alphabetically by name and then dependent
    const outdated = this.#list
      .sort((a, b) => localeCompare(a.name, b.name) || localeCompare(a.dependent, b.dependent))

    // any outdated dependency is also reported via a non-zero exit code
    if (outdated.length) {
      process.exitCode = 1
    }

    if (this.npm.config.get('json')) {
      output.buffer(this.#json(outdated))
      return
    }
    const res = this.npm.config.get('parseable')
      ? this.#parseable(outdated)
      : this.#pretty(outdated)
    if (res) {
      output.standard(res)
    }
  }

  // Collect the edges to inspect into this.#edges.
  #getEdges (nodes, type) {
    // when no nodes are provided then it should only read direct deps
    // from the root node and its workspaces direct dependencies
    if (!nodes) {
      this.#getEdgesOut(this.#tree)
      this.#getWorkspacesEdges()
      return
    }

    for (const node of nodes) {
      if (type === 'edgesOut') {
        this.#getEdgesOut(node)
      } else {
        this.#getEdgesIn(node)
      }
    }
  }

  #getEdgesIn (node) {
    for (const edge of node.edgesIn) {
      this.#trackEdge(edge)
    }
  }

  #getEdgesOut (node) {
    // TODO: normalize usage of edges and avoid looping through nodes here
    const edges = this.npm.global ? node.children.values() : node.edgesOut.values()
    for (const edge of edges) {
      this.#trackEdge(edge)
    }
  }

  // Record an edge for inspection unless the workspace filter excludes its
  // dependent node.
  #trackEdge (edge) {
    if (edge.from && this.#filterSet?.size > 0 && !this.#filterSet.has(edge.from.target)) {
      return
    }
    this.#edges.add(edge)
  }

  #getWorkspacesEdges () {
    if (this.npm.global) {
      return
    }

    for (const edge of this.#tree.edgesOut.values()) {
      if (edge?.to?.target?.isWorkspace) {
        this.#getEdgesOut(edge.to.target)
      }
    }
  }

  async #getPackument (spec) {
    return pacote.packument(spec, {
      ...this.npm.flatOptions,
      fullMetadata: this.npm.config.get('long'),
      preferOnline: true,
    })
  }

  // Resolve the wanted/latest versions for a single edge and, if the
  // installed version is behind either of them, push a record onto #list.
  async #getOutdatedInfo (edge) {
    const alias = safeNpa(edge.spec)?.subSpec
    const spec = npa(alias ? alias.name : edge.name)
    const node = edge.to || edge
    const { path, location, package: { version: current } = {} } = node

    const type = edge.optional ? 'optionalDependencies'
      : edge.peer ? 'peerDependencies'
      : edge.dev ? 'devDependencies'
      : 'dependencies'

    // dependencies of an omitted type are not reported at all
    for (const omitType of this.npm.flatOptions.omit) {
      if (node[omitType]) {
        return
      }
    }

    // deps different from prod not currently
    // on disk are not included in the output
    if (edge.error === MISSING && type !== 'dependencies') {
      return
    }

    // if it's not a range, version, or tag, skip it
    // (normalized to a bare `return` for consistency with the other exits)
    if (!safeNpa(`${edge.name}@${edge.spec}`)?.registry) {
      return
    }

    try {
      const packument = await this.#getPackument(spec)
      const expected = alias ? alias.fetchSpec : edge.spec
      const wanted = pickManifest(packument, expected, this.npm.flatOptions)
      const latest = pickManifest(packument, '*', this.npm.flatOptions)

      if (!current || current !== wanted.version || wanted.version !== latest.version) {
        this.#list.push({
          name: alias ? edge.spec.replace('npm', edge.name) : edge.name,
          path,
          type,
          current,
          location,
          wanted: wanted.version,
          latest: latest.version,
          workspaceDependent: edge.from?.isWorkspace ? edge.from.pkgid : null,
          dependent: edge.from?.name ?? 'global',
          homepage: packument.homepage,
        })
      }
    } catch (err) {
      // silently catch and ignore ETARGET, E403 &
      // E404 errors, deps are just skipped
      // BUGFIX: the list previously contained 'E404' twice and never
      // matched E403, so forbidden packages crashed the command instead of
      // being skipped as the comment above intends.
      if (!['ETARGET', 'E403', 'E404'].includes(err.code)) {
        throw err
      }
    }
  }

  // formatting functions

  // Human-readable table output (default mode).
  #pretty (list) {
    if (!list.length) {
      return
    }

    const long = this.npm.config.get('long')
    const { bold, yellow, red, cyan, blue } = this.npm.chalk

    return table([
      [
        'Package',
        'Current',
        'Wanted',
        'Latest',
        'Location',
        'Depended by',
        ...long ? ['Package Type', 'Homepage'] : [],
      ].map(h => bold.underline(h)),
      ...list.map((d) => [
        // red means a major update is wanted; yellow means within range
        d.current === d.wanted ? yellow(d.name) : red(d.name),
        d.current ?? 'MISSING',
        cyan(d.wanted),
        blue(d.latest),
        d.location ?? '-',
        d.workspaceDependent ? blue(d.workspaceDependent) : d.dependent,
        ...long ? [d.type, blue(d.homepage ?? '')] : [],
      ]),
    ], {
      align: ['l', 'r', 'r', 'r', 'l'],
      // measure visible width, not ANSI-colored string length
      stringLength: s => stripVTControlCharacters(s).length,
    })
  }

  // --parseable creates output like this:
  // <fullpath>:<name@wanted>:<name@installed>:<name@latest>:<dependedby>
  #parseable (list) {
    return list.map(d => [
      d.path,
      `${d.name}@${d.wanted}`,
      d.current ? `${d.name}@${d.current}` : 'MISSING',
      `${d.name}@${d.latest}`,
      d.dependent,
      ...this.npm.config.get('long') ? [d.type, d.homepage] : [],
    ].join(':')).join('\n')
  }

  #json (list) {
    // TODO(BREAKING_CHANGE): this should just return an array. It's a list and
    // turning it into an object with keys is lossy since multiple items in the
    // list could have the same key. For now we hack that by only changing
    // top level values into arrays if they have multiple outdated items
    return list.reduce((acc, d) => {
      const dep = {
        current: d.current,
        wanted: d.wanted,
        latest: d.latest,
        dependent: d.dependent,
        location: d.path,
        ...this.npm.config.get('long') ? { type: d.type, homepage: d.homepage } : {},
      }
      acc[d.name] = acc[d.name]
        // If this item already has an outdated dep then we turn it into an array
        ? (Array.isArray(acc[d.name]) ? acc[d.name] : [acc[d.name]]).concat(dep)
        : dep
      return acc
    }, {})
  }
}

module.exports = Outdated

View File

@ -0,0 +1,233 @@
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const pacote = require('pacote')
const { log, output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const { redact } = require('@npmcli/redact')
// Read and normalize the package.json at `path`.  A missing or invalid
// manifest yields an empty object rather than throwing.
const readJson = async (path) => {
  let content = {}
  try {
    ({ content } = await pkgJson.normalize(path))
  } catch {
    // best-effort read: callers treat an unreadable manifest as "no name"
  }
  return content
}
// npm owner — view and mutate the maintainer (owner) list of a published
// package on the registry.
class Owner extends BaseCommand {
  static description = 'Manage package owners'
  static name = 'owner'
  static params = [
    'registry',
    'otp',
    'workspace',
    'workspaces',
  ]

  static usage = [
    'add <user> <package-spec>',
    'rm <user> <package-spec>',
    'ls <package-spec>',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Shell completion: offers the subcommands and, for `owner rm`, queries
  // the registry for the current maintainers of the local package.
  static async completion (opts, npm) {
    const argv = opts.conf.argv.remain
    if (argv.length > 3) {
      return []
    }

    if (argv[1] !== 'owner') {
      argv.unshift('owner')
    }

    if (argv.length === 2) {
      return ['add', 'rm', 'ls']
    }

    // reaches registry in order to autocomplete rm
    if (argv[2] === 'rm') {
      if (npm.global) {
        // no local package.json to read a name from in global mode
        return []
      }
      const { name } = await readJson(npm.prefix)
      if (!name) {
        return []
      }

      const spec = npa(name)
      const data = await pacote.packument(spec, {
        ...npm.flatOptions,
        fullMetadata: true,
      })
      if (data && data.maintainers && data.maintainers.length) {
        return data.maintainers.map(m => m.name)
      }
    }
    return []
  }

  // Dispatch ls/add/rm (and their aliases); anything else is a usage error.
  async exec ([action, ...args]) {
    if (action === 'ls' || action === 'list') {
      await this.ls(args[0])
    } else if (action === 'add') {
      await this.changeOwners(args[0], args[1], 'add')
    } else if (action === 'rm' || action === 'remove') {
      await this.changeOwners(args[0], args[1], 'rm')
    } else {
      throw this.usageError()
    }
  }

  // Workspace mode: run the action once per configured workspace, unless a
  // package spec was given explicitly, in which case workspaces are ignored.
  async execWorkspaces ([action, ...args]) {
    await this.setWorkspaces()
    // ls pkg or owner add/rm package
    if ((action === 'ls' && args.length > 0) || args.length > 1) {
      const implicitWorkspaces = this.npm.config.get('workspace', 'default')
      if (implicitWorkspaces.length === 0) {
        log.warn(`Ignoring specified workspace(s)`)
      }
      return this.exec([action, ...args])
    }

    for (const [name] of this.workspaces) {
      if (action === 'ls' || action === 'list') {
        await this.ls(name)
      } else if (action === 'add') {
        await this.changeOwners(args[0], name, 'add')
      } else if (action === 'rm' || action === 'remove') {
        await this.changeOwners(args[0], name, 'rm')
      } else {
        throw this.usageError()
      }
    }
  }

  // Print the maintainers of `pkg` (or of the local package when omitted).
  async ls (pkg) {
    pkg = await this.getPkg(this.npm.prefix, pkg)
    const spec = npa(pkg)

    try {
      const packumentOpts = { ...this.npm.flatOptions, fullMetadata: true, preferOnline: true }
      const { maintainers } = await pacote.packument(spec, packumentOpts)
      if (!maintainers || !maintainers.length) {
        output.standard('no admin found')
      } else {
        output.standard(maintainers.map(m => `${m.name} <${m.email}>`).join('\n'))
      }
    } catch (err) {
      // redact the spec in case it carries auth/token material
      log.error('owner ls', "Couldn't get owner data", redact(pkg))
      throw err
    }
  }

  // Resolve the package to operate on: the given spec, or the local
  // package.json name (not available in global mode).
  async getPkg (prefix, pkg) {
    if (!pkg) {
      if (this.npm.global) {
        throw this.usageError()
      }
      const { name } = await readJson(prefix)
      if (!name) {
        throw this.usageError()
      }

      return name
    }
    return pkg
  }

  // Add or remove `user` from the maintainer list of `pkg`, PUTting the
  // updated packument back to the registry (with OTP retry via otplease).
  async changeOwners (user, pkg, addOrRm) {
    if (!user) {
      throw this.usageError()
    }

    pkg = await this.getPkg(this.npm.prefix, pkg)
    log.verbose(`owner ${addOrRm}`, '%s to %s', user, pkg)

    const spec = npa(pkg)
    // look the user up first so we have a canonical name/email to store
    const uri = `/-/user/org.couchdb.user:${encodeURIComponent(user)}`
    let u

    try {
      u = await npmFetch.json(uri, this.npm.flatOptions)
    } catch (err) {
      log.error('owner mutate', `Error getting user data for ${user}`)
      throw err
    }

    // normalize user data
    u = { name: u.name, email: u.email }

    const data = await pacote.packument(spec, {
      ...this.npm.flatOptions,
      fullMetadata: true,
      preferOnline: true,
    })

    const owners = data.maintainers || []
    let maintainers
    if (addOrRm === 'add') {
      const existing = owners.find(o => o.name === u.name)
      if (existing) {
        // nothing to do: already an owner
        log.info(
          'owner add',
          `Already a package owner: ${existing.name} <${existing.email}>`
        )
        return
      }
      maintainers = [
        ...owners,
        u,
      ]
    } else {
      maintainers = owners.filter(o => o.name !== u.name)
      if (maintainers.length === owners.length) {
        log.info('owner rm', 'Not a package owner: ' + u.name)
        return false
      }
      if (!maintainers.length) {
        // a package must always keep at least one owner
        throw Object.assign(
          new Error(
            'Cannot remove all owners of a package. Add someone else first.'
          ),
          { code: 'EOWNERRM' }
        )
      }
    }

    // the current _rev is required for the registry to accept the update
    const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}`
    try {
      const res = await otplease(this.npm, this.npm.flatOptions, opts => {
        return npmFetch.json(dataPath, {
          ...opts,
          method: 'PUT',
          body: {
            _id: data._id,
            _rev: data._rev,
            maintainers,
          },
          spec,
        })
      })
      if (addOrRm === 'add') {
        output.standard(`+ ${user} (${spec.name})`)
      } else {
        output.standard(`- ${user} (${spec.name})`)
      }
      return res
    } catch (err) {
      throw Object.assign(
        new Error('Failed to update package: ' + JSON.stringify(err.message)),
        { code: 'EOWNERMUTATE' }
      )
    }
  }
}

module.exports = Owner

View File

@ -0,0 +1,85 @@
const pacote = require('pacote')
const libpack = require('libnpmpack')
const npa = require('npm-package-arg')
const { log, output } = require('proc-log')
const { getContents, logTar } = require('../utils/tar.js')
const BaseCommand = require('../base-cmd.js')
// npm pack — create tarball(s) from the given package spec(s) and print
// the resulting filenames.
class Pack extends BaseCommand {
  static description = 'Create a tarball from a package'
  static name = 'pack'
  static params = [
    'dry-run',
    'json',
    'pack-destination',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static usage = ['<package-spec>']
  static workspaces = true
  static ignoreImplicitWorkspace = false

  async exec (args) {
    const specs = args.length === 0 ? ['.'] : args
    const unicode = this.npm.config.get('unicode')
    const json = this.npm.config.get('json')

    // Resolve every manifest up front so a bad spec fails before any
    // tarball has been produced.
    const manifests = []
    for (const arg of specs) {
      const manifest = await pacote.manifest(npa(arg), this.npm.flatOptions)
      if (!manifest._id) {
        throw new Error('Invalid package, must have name and version')
      }

      manifests.push({ arg, manifest })
    }

    // Pack everything first, collecting contents for printing afterward so
    // the report is not interleaved with packing noise.
    const tarballs = []
    for (const { arg, manifest } of manifests) {
      const tarballData = await libpack(arg, {
        ...this.npm.flatOptions,
        foregroundScripts: this.npm.config.isDefault('foreground-scripts')
          ? true
          : this.npm.config.get('foreground-scripts'),
        prefix: this.npm.localPrefix,
        workspaces: this.workspacePaths,
      })
      tarballs.push(await getContents(manifest, tarballData))
    }

    // XXX(BREAKING_CHANGE): publish outputs a json object with package
    // names as keys. Pack should do the same here instead of an array.
    // Note: Object.entries on an array yields *string* indices, and those
    // strings are used as the json output keys.
    for (const [index, tar] of Object.entries(tarballs)) {
      logTar(tar, { unicode, json, key: index })
      if (!json) {
        output.standard(tar.filename.replace(/^@/, '').replace(/\//, '-'))
      }
    }
  }

  async execWorkspaces (args) {
    // Packing nothing, or explicitly '.', in workspace mode means
    // "pack each configured workspace" instead.
    if (args.length !== 0 && !args.includes('.')) {
      log.warn('Ignoring workspaces for specified package(s)')
      return this.exec(args)
    }

    await this.setWorkspaces()
    const rest = args.filter(a => a !== '.')
    return this.exec([...this.workspacePaths, ...rest])
  }
}

module.exports = Pack

View File

@ -0,0 +1,30 @@
const { redact } = require('@npmcli/redact')
const { log, output } = require('proc-log')
const pingUtil = require('../utils/ping.js')
const BaseCommand = require('../base-cmd.js')
// npm ping — check connectivity to the configured registry and report the
// round-trip time.
class Ping extends BaseCommand {
  static description = 'Ping npm registry'
  static params = ['registry']
  static name = 'ping'

  async exec () {
    // never print a registry URL that may embed credentials
    const registry = redact(this.npm.config.get('registry'))
    log.notice('PING', registry)

    const started = Date.now()
    const details = await pingUtil({ ...this.npm.flatOptions })
    const time = Date.now() - started

    log.notice('PONG', `${time}ms`)
    if (this.npm.config.get('json')) {
      output.buffer({ registry, time, details })
    } else if (Object.keys(details).length) {
      log.notice('PONG', JSON.stringify(details, null, 2))
    }
  }
}

module.exports = Ping

View File

@ -0,0 +1,129 @@
const { output } = require('proc-log')
const PackageJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const Queryable = require('../utils/queryable.js')
// npm pkg — read and modify fields of the local package.json using
// queryable key paths (e.g. `scripts.test`, `contributors[0].name`).
class Pkg extends BaseCommand {
  static description = 'Manages your package.json'
  static name = 'pkg'
  static usage = [
    'set <key>=<value> [<key>=<value> ...]',
    'get [<key> [<key> ...]]',
    'delete <key> [<key> ...]',
    'set [<array>[<index>].<key>=<value> ...]',
    'set [<array>[].<key>=<value> ...]',
    'fix',
  ]

  static params = [
    'force',
    'json',
    'workspace',
    'workspaces',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false

  // Dispatch get/set/delete/fix against the package.json at `path`
  // (defaults to the local prefix; `workspace` names keys the output).
  async exec (args, { path = this.npm.localPrefix, workspace } = {}) {
    // managing package.json only makes sense for a local project
    if (this.npm.global) {
      throw Object.assign(
        new Error(`There's no package.json file to manage on global mode`),
        { code: 'EPKGGLOBAL' }
      )
    }

    const [cmd, ..._args] = args
    switch (cmd) {
      case 'get':
        return this.get(_args, { path, workspace })
      case 'set':
        return this.set(_args, { path, workspace }).then(p => p.save())
      case 'delete':
        return this.delete(_args, { path, workspace }).then(p => p.save())
      case 'fix':
        return PackageJson.fix(path).then(p => p.save())
      default:
        throw this.usageError()
    }
  }

  // Run the command once for each configured workspace.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const [workspace, path] of this.workspaces.entries()) {
      await this.exec(args, { path, workspace })
    }
  }

  // Print the requested keys (or the whole package.json) as JSON.
  async get (args, { path, workspace }) {
    this.npm.config.set('json', true)
    const pkgJson = await PackageJson.load(path)

    let result = pkgJson.content

    if (args.length) {
      result = new Queryable(result).query(args)
      // in case there's only a single result from the query
      // just prints that one element to stdout
      // TODO(BREAKING_CHANGE): much like other places where we unwrap single
      // item arrays this should go away. it makes the behavior unknown for users
      // who don't already know the shape of the data.
      if (Object.keys(result).length === 1) {
        // NOTE(review): `args` (an array) is used as a property key here and
        // is coerced to its comma-joined string form — this lines up with
        // the query result key only in the single-key case; confirm before
        // restructuring.
        result = result[args]
      }
    }

    // The display layer is responsible for calling JSON.stringify on the result
    // TODO: https://github.com/npm/cli/issues/5508 a raw mode has been requested similar
    // to jq -r. If that was added then this method should no longer set `json:true` all the time
    output.buffer(workspace ? { [workspace]: result } : result)
  }

  // Apply `key=value` assignments; values are parsed as JSON when --json
  // is set.  Returns the updated (unsaved) PackageJson instance.
  async set (args, { path }) {
    const setError = () =>
      this.usageError('npm pkg set expects a key=value pair of args.')

    if (!args.length) {
      throw setError()
    }

    const force = this.npm.config.get('force')
    const json = this.npm.config.get('json')
    const pkgJson = await PackageJson.load(path)
    const q = new Queryable(pkgJson.content)
    for (const arg of args) {
      // split on the first `=` only — values may themselves contain `=`
      const [key, ...rest] = arg.split('=')
      const value = rest.join('=')
      if (!key || !value) {
        throw setError()
      }

      q.set(key, json ? JSON.parse(value) : value, { force })
    }

    return pkgJson.update(q.toJSON())
  }

  // Remove the given keys.  Returns the updated (unsaved) PackageJson.
  async delete (args, { path }) {
    const setError = () =>
      this.usageError('npm pkg delete expects key args.')

    if (!args.length) {
      throw setError()
    }

    const pkgJson = await PackageJson.load(path)
    const q = new Queryable(pkgJson.content)
    for (const key of args) {
      if (!key) {
        throw setError()
      }

      q.delete(key)
    }

    return pkgJson.update(q.toJSON())
  }
}

module.exports = Pkg

View File

@ -0,0 +1,15 @@
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// npm prefix — print the effective prefix directory (global or local,
// depending on configuration).
class Prefix extends BaseCommand {
  static description = 'Display prefix'
  static name = 'prefix'
  static params = ['global']
  static usage = ['[-g]']

  async exec () {
    const { prefix } = this.npm
    return output.standard(prefix)
  }
}

module.exports = Prefix

View File

@ -0,0 +1,390 @@
const { inspect } = require('node:util')
const { URL } = require('node:url')
const { log, output } = require('proc-log')
const { get, set, createToken } = require('npm-profile')
const qrcodeTerminal = require('qrcode-terminal')
const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
const BaseCommand = require('../base-cmd.js')
// Render the given otpauth URL as a terminal QR code string.
const qrcode = url =>
  new Promise((resolve) => qrcodeTerminal.generate(url, resolve))

// Profile fields printed first, in this order, by `npm profile get`.
const knownProfileKeys = [
  'name',
  'email',
  'two-factor auth',
  'fullname',
  'homepage',
  'freenode',
  'twitter',
  'github',
  'created',
  'updated',
]

// Profile fields that `npm profile set` is allowed to modify.
const writableProfileKeys = [
  'email',
  'password',
  'fullname',
  'homepage',
  'freenode',
  'twitter',
  'github',
]
// npm profile — inspect and change settings on the user's registry
// profile, including enabling/disabling two-factor authentication.
class Profile extends BaseCommand {
  static description = 'Change settings on your registry profile'
  static name = 'profile'
  static usage = [
    'enable-2fa [auth-only|auth-and-writes]',
    'disable-2fa',
    'get [<key>]',
    'set <key> <value>',
  ]

  static params = [
    'registry',
    'json',
    'parseable',
    'otp',
  ]

  // Shell completion for the profile subcommands.
  static async completion (opts) {
    var argv = opts.conf.argv.remain

    if (!argv[2]) {
      return ['enable-2fa', 'disable-2fa', 'get', 'set']
    }

    switch (argv[2]) {
      case 'enable-2fa':
      case 'enable-tfa':
        return ['auth-and-writes', 'auth-only']

      case 'disable-2fa':
      case 'disable-tfa':
      case 'get':
      case 'set':
        return []
      default:
        throw new Error(argv[2] + ' not recognized')
    }
  }

  // Dispatch to the matching subcommand (several spellings are accepted).
  async exec (args) {
    if (args.length === 0) {
      throw this.usageError()
    }

    const [subcmd, ...opts] = args

    switch (subcmd) {
      case 'enable-2fa':
      case 'enable-tfa':
      case 'enable2fa':
      case 'enabletfa':
        return this.enable2fa(opts)
      case 'disable-2fa':
      case 'disable-tfa':
      case 'disable2fa':
      case 'disabletfa':
        return this.disable2fa()
      case 'get':
        return this.get(opts)
      case 'set':
        return this.set(opts)
      default:
        throw new Error('Unknown profile command: ' + subcmd)
    }
  }

  // Fetch the profile and print it (whole, or just the requested keys) in
  // json, parseable, or human-readable form.
  async get (args) {
    const tfa = 'two-factor auth'
    const info = await get({ ...this.npm.flatOptions })

    if (!info.cidr_whitelist) {
      delete info.cidr_whitelist
    }

    if (this.npm.config.get('json')) {
      output.buffer(info)
      return
    }

    // clean up and format key/values for output
    const cleaned = {}
    for (const key of knownProfileKeys) {
      cleaned[key] = info[key] || ''
    }

    // append any server-provided keys we don't know about, in server order
    const unknownProfileKeys = Object.keys(info).filter((k) => !(k in cleaned))
    for (const key of unknownProfileKeys) {
      cleaned[key] = info[key] || ''
    }

    // tfa and email_verified are folded into friendlier fields below
    delete cleaned.tfa
    delete cleaned.email_verified
    cleaned.email += info.email_verified ? ' (verified)' : '(unverified)'

    if (info.tfa && !info.tfa.pending) {
      cleaned[tfa] = info.tfa.mode
    } else {
      cleaned[tfa] = 'disabled'
    }

    if (args.length) {
      const values = args // comma or space separated
        .join(',')
        .split(/,/)
        .filter((arg) => arg.trim() !== '')
        .map((arg) => cleaned[arg])
        .join('\t')
      output.standard(values)
    } else {
      if (this.npm.config.get('parseable')) {
        for (const key of Object.keys(info)) {
          if (key === 'tfa') {
            output.standard(`${key}\t${cleaned[tfa]}`)
          } else {
            output.standard(`${key}\t${info[key]}`)
          }
        }
      } else {
        for (const [key, value] of Object.entries(cleaned)) {
          output.standard(`${key}: ${value}`)
        }
      }
    }
  }

  // Update a single writable profile property.  Passwords are prompted for
  // interactively and are never accepted on the command line.
  async set (args) {
    const conf = { ...this.npm.flatOptions }
    const prop = (args[0] || '').toLowerCase().trim()

    let value = args.length > 1 ? args.slice(1).join(' ') : null

    // prompt twice for a new password, retrying until both entries match
    const readPasswords = async () => {
      const newpassword = await readUserInfo.password('New password: ')
      const confirmedpassword = await readUserInfo.password(' Again: ')

      if (newpassword !== confirmedpassword) {
        log.warn('profile', 'Passwords do not match, please try again.')
        return readPasswords()
      }

      return newpassword
    }

    if (prop !== 'password' && value === null) {
      throw new Error('npm profile set <prop> <value>')
    }

    if (prop === 'password' && value !== null) {
      throw new Error(
        'npm profile set password\n' +
        'Do not include your current or new passwords on the command line.')
    }

    if (writableProfileKeys.indexOf(prop) === -1) {
      throw new Error(`"${prop}" is not a property we can set. ` +
        `Valid properties are: ` + writableProfileKeys.join(', '))
    }

    if (prop === 'password') {
      const current = await readUserInfo.password('Current password: ')
      const newpassword = await readPasswords()

      value = { old: current, new: newpassword }
    }

    // FIXME: Work around to not clear everything other than what we're setting
    const user = await get(conf)
    const newUser = {}

    for (const key of writableProfileKeys) {
      newUser[key] = user[key]
    }

    newUser[prop] = value

    const result = await otplease(this.npm, conf, c => set(newUser, c))

    if (this.npm.config.get('json')) {
      output.buffer({ [prop]: result[prop] })
    } else if (this.npm.config.get('parseable')) {
      output.standard(prop + '\t' + result[prop])
    } else if (result[prop] != null) {
      output.standard('Set', prop, 'to', result[prop])
    } else {
      output.standard('Set', prop)
    }
  }

  // Enable two-factor auth in the given mode.  Upgrades legacy basic auth
  // to a bearer token first if needed, then walks the interactive
  // otpauth/QR-code challenge flow against the registry.
  async enable2fa (args) {
    if (args.length > 1) {
      throw new Error('npm profile enable-2fa [auth-and-writes|auth-only]')
    }

    const mode = args[0] || 'auth-and-writes'
    if (mode !== 'auth-only' && mode !== 'auth-and-writes') {
      throw new Error(
        `Invalid two-factor authentication mode "${mode}".\n` +
        'Valid modes are:\n' +
        ' auth-only - Require two-factor authentication only when logging in\n' +
        ' auth-and-writes - Require two-factor authentication when logging in ' +
        'AND when publishing'
      )
    }

    // the flow below requires interactive prompts, so structured output
    // modes cannot be supported
    if (this.npm.config.get('json') || this.npm.config.get('parseable')) {
      throw new Error(
        'Enabling two-factor authentication is an interactive operation and ' +
        (this.npm.config.get('json') ? 'JSON' : 'parseable') + ' output mode is not available'
      )
    }

    const info = {
      tfa: {
        mode: mode,
      },
    }

    // if they're using legacy auth currently then we have to
    // update them to a bearer token before continuing.
    const creds = this.npm.config.getCredentialsByURI(this.npm.config.get('registry'))
    const auth = {}

    if (creds.token) {
      auth.token = creds.token
    } else if (creds.username) {
      auth.basic = { username: creds.username, password: creds.password }
    } else if (creds.auth) {
      // legacy _auth: base64 "user:pass"
      const basic = Buffer.from(creds.auth, 'base64').toString().split(':', 2)
      auth.basic = { username: basic[0], password: basic[1] }
    }

    if (!auth.basic && !auth.token) {
      throw new Error(
        'You need to be logged in to registry ' +
        `${this.npm.config.get('registry')} in order to enable 2fa`
      )
    }

    if (auth.basic) {
      log.info('profile', 'Updating authentication to bearer token')
      const result = await createToken(
        auth.basic.password, false, [], { ...this.npm.flatOptions }
      )

      if (!result.token) {
        throw new Error(
          `Your registry ${this.npm.config.get('registry')} does not seem to ` +
          'support bearer tokens. Bearer tokens are required for ' +
          'two-factor authentication'
        )
      }

      this.npm.config.setCredentialsByURI(
        this.npm.config.get('registry'),
        { token: result.token }
      )
      await this.npm.config.save('user')
    }

    log.notice('profile', 'Enabling two factor authentication for ' + mode)
    const password = await readUserInfo.password()
    info.tfa.password = password

    log.info('profile', 'Determine if tfa is pending')
    const userInfo = await get({ ...this.npm.flatOptions })

    const conf = { ...this.npm.flatOptions }
    if (userInfo && userInfo.tfa && userInfo.tfa.pending) {
      // a previous, unfinished enable attempt must be reset first
      log.info('profile', 'Resetting two-factor authentication')
      await set({ tfa: { password, mode: 'disable' } }, conf)
    } else if (userInfo && userInfo.tfa) {
      if (!conf.otp) {
        conf.otp = await readUserInfo.otp(
          'Enter one-time password: '
        )
      }
    }

    log.info('profile', 'Setting two-factor authentication to ' + mode)
    const challenge = await set(info, conf)

    if (challenge.tfa === null) {
      // the mode changed without requiring a new enrollment challenge
      output.standard('Two factor authentication mode changed to: ' + mode)
      return
    }

    const badResponse = typeof challenge.tfa !== 'string'
      || !/^otpauth:[/][/]/.test(challenge.tfa)
    if (badResponse) {
      throw new Error(
        'Unknown error enabling two-factor authentication. Expected otpauth URL' +
        ', got: ' + inspect(challenge.tfa)
      )
    }

    const otpauth = new URL(challenge.tfa)
    const secret = otpauth.searchParams.get('secret')
    const code = await qrcode(challenge.tfa)

    output.standard(
      'Scan into your authenticator app:\n' + code + '\n Or enter code:', secret
    )

    const interactiveOTP =
      await readUserInfo.otp('And an OTP code from your authenticator: ')

    log.info('profile', 'Finalizing two-factor authentication')

    const result = await set({ tfa: [interactiveOTP] }, conf)

    output.standard(
      '2FA successfully enabled. Below are your recovery codes, ' +
      'please print these out.'
    )
    output.standard(
      'You will need these to recover access to your account ' +
      'if you lose your authentication device.'
    )

    for (const tfaCode of result.tfa) {
      output.standard('\t' + tfaCode)
    }
  }

  // Disable two-factor auth; requires the account password and (when not
  // already supplied via --otp) a one-time password.
  async disable2fa () {
    const conf = { ...this.npm.flatOptions }
    const info = await get(conf)

    if (!info.tfa || info.tfa.pending) {
      output.standard('Two factor authentication not enabled.')
      return
    }

    const password = await readUserInfo.password()

    if (!conf.otp) {
      const msg = 'Enter one-time password: '
      conf.otp = await readUserInfo.otp(msg)
    }

    log.info('profile', 'disabling tfa')

    await set({ tfa: { password: password, mode: 'disable' } }, conf)

    if (this.npm.config.get('json')) {
      output.buffer({ tfa: false })
    } else if (this.npm.config.get('parseable')) {
      output.standard('tfa\tfalse')
    } else {
      output.standard('Two factor authentication disabled.')
    }
  }
}

module.exports = Profile

View File

@ -0,0 +1,34 @@
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// prune extraneous packages
// npm prune — remove extraneous packages from node_modules via an
// Arborist prune + reify pass.
class Prune extends ArboristWorkspaceCmd {
  static description = 'Remove extraneous packages'
  static name = 'prune'
  static params = [
    'omit',
    'include',
    'dry-run',
    'json',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  static usage = ['[[<@scope>/]<pkg>...]']

  async exec () {
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.prune(opts)
    await reifyFinish(this.npm, arb)
  }
}

module.exports = Prune

View File

@ -0,0 +1,227 @@
const { log, output } = require('proc-log')
const semver = require('semver')
const pack = require('libnpmpack')
const libpub = require('libnpmpublish').publish
const runScript = require('@npmcli/run-script')
const pacote = require('pacote')
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const { redactLog: replaceInfo } = require('@npmcli/redact')
const { otplease } = require('../utils/auth.js')
const { getContents, logTar } = require('../utils/tar.js')
// for historical reasons, publishConfig in package.json can contain ANY config
// keys that npm supports in .npmrc files and elsewhere. We *may* want to
// revisit this at some point, and have a minimal set that's a SemVer-major
// change that ought to get a RFC written on it.
const { flatten } = require('@npmcli/config/lib/definitions')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
// npm publish — pack the given spec (defaulting to the current directory)
// and upload the resulting tarball to the configured registry.
class Publish extends BaseCommand {
  static description = 'Publish a package'
  static name = 'publish'
  static params = [
    'tag',
    'access',
    'dry-run',
    'otp',
    'workspace',
    'workspaces',
    'include-workspace-root',
    'provenance',
  ]

  static usage = ['<package-spec>']
  static workspaces = true
  static ignoreImplicitWorkspace = false
async exec (args) {
if (args.length === 0) {
args = ['.']
}
if (args.length !== 1) {
throw this.usageError()
}
await this.#publish(args)
}
async execWorkspaces (args) {
const useWorkspaces = args.length === 0 || args.includes('.')
if (!useWorkspaces) {
log.warn('Ignoring workspaces for specified package(s)')
return this.exec(args)
}
await this.setWorkspaces()
for (const [name, workspace] of this.workspaces.entries()) {
try {
await this.#publish([workspace], { workspace: name })
} catch (err) {
if (err.code !== 'EPRIVATE') {
throw err
}
// eslint-disable-next-line max-len
log.warn('publish', `Skipping workspace ${this.npm.chalk.cyan(name)}, marked as ${this.npm.chalk.bold('private')}`)
}
}
}
  // Pack `args[0]` and (unless --dry-run) upload the tarball to the
  // registry picked for the resolved name.  `workspace` is set when called
  // from execWorkspaces; it keys json output and causes private workspace
  // packages to be rejected with EPRIVATE.
  async #publish (args, { workspace } = {}) {
    log.verbose('publish', replaceInfo(args))

    const unicode = this.npm.config.get('unicode')
    const dryRun = this.npm.config.get('dry-run')
    const json = this.npm.config.get('json')
    const defaultTag = this.npm.config.get('tag')
    const ignoreScripts = this.npm.config.get('ignore-scripts')
    const { silent } = this.npm

    // a dist-tag that parses as a semver range (e.g. "1.x") would be
    // ambiguous with version specs, so it is rejected outright
    if (semver.validRange(defaultTag)) {
      throw new Error('Tag name must not be a valid SemVer range: ' + defaultTag.trim())
    }

    const opts = { ...this.npm.flatOptions, progress: false }

    // you can publish name@version, ./foo.tgz, etc.
    // even though the default is the 'file:.' cwd.
    const spec = npa(args[0])
    let manifest = await this.#getManifest(spec, opts)

    // only run scripts for directory type publishes
    if (spec.type === 'directory' && !ignoreScripts) {
      await runScript({
        event: 'prepublishOnly',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })
    }

    // we pass dryRun: true to libnpmpack so it doesn't write the file to disk
    const tarballData = await pack(spec, {
      ...opts,
      foregroundScripts: this.npm.config.isDefault('foreground-scripts')
        ? true
        : this.npm.config.get('foreground-scripts'),
      dryRun: true,
      prefix: this.npm.localPrefix,
      workspaces: this.workspacePaths,
    })
    const pkgContents = await getContents(manifest, tarballData)
    const logPkg = () => logTar(pkgContents, { unicode, json, key: workspace })

    // The purpose of re-reading the manifest is in case it changed,
    // so that we send the latest and greatest thing to the registry
    // note that publishConfig might have changed as well!
    manifest = await this.#getManifest(spec, opts, true)

    // If we are not in JSON mode then we show the user the contents of the tarball
    // before it is published so they can see it while their otp is pending
    if (!json) {
      logPkg()
    }

    const resolved = npa.resolve(manifest.name, manifest.version)

    // make sure tag is valid, this will throw if invalid
    npa(`${manifest.name}@${defaultTag}`)

    const registry = npmFetch.pickRegistry(resolved, opts)
    const creds = this.npm.config.getCredentialsByURI(registry)
    const noCreds = !(creds.token || creds.username || creds.certfile && creds.keyfile)
    const outputRegistry = replaceInfo(registry)

    // if a workspace package is marked private then we skip it
    if (workspace && manifest.private) {
      throw Object.assign(
        new Error(`This package has been marked as private
Remove the 'private' field from the package.json to publish it.`),
        { code: 'EPRIVATE' }
      )
    }

    // missing credentials only warn under --dry-run, since nothing will
    // actually be uploaded; otherwise they are a hard error
    if (noCreds) {
      const msg = `This command requires you to be logged in to ${outputRegistry}`
      if (dryRun) {
        log.warn('', `${msg} (dry-run)`)
      } else {
        throw Object.assign(new Error(msg), { code: 'ENEEDAUTH' })
      }
    }

    const access = opts.access === null ? 'default' : opts.access
    let msg = `Publishing to ${outputRegistry} with tag ${defaultTag} and ${access} access`
    if (dryRun) {
      msg = `${msg} (dry-run)`
    }

    log.notice('', msg)

    if (!dryRun) {
      // otplease retries the publish with a prompted OTP on EOTP errors
      await otplease(this.npm, opts, o => libpub(manifest, tarballData, o))
    }

    // In json mode we dont log until the publish has completed as this will
    // add it to the output only if completes successfully
    if (json) {
      logPkg()
    }

    if (spec.type === 'directory' && !ignoreScripts) {
      await runScript({
        event: 'publish',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })

      await runScript({
        event: 'postpublish',
        path: spec.fetchSpec,
        stdio: 'inherit',
        pkg: manifest,
      })
    }

    if (!json && !silent) {
      output.standard(`+ ${pkgContents.id}`)
    }
  }
// if it's a directory, read it from the file system
// otherwise, get the full metadata from whatever it is
// XXX can't pacote read the manifest from a directory?
async #getManifest (spec, opts, logWarnings = false) {
let manifest
if (spec.type === 'directory') {
const changes = []
const pkg = await pkgJson.fix(spec.fetchSpec, { changes })
if (changes.length && logWarnings) {
/* eslint-disable-next-line max-len */
log.warn('publish', 'npm auto-corrected some errors in your package.json when publishing. Please run "npm pkg fix" to address these errors.')
log.warn('publish', `errors corrected:\n${changes.join('\n')}`)
}
// Prepare is the special function for publishing, different than normalize
const { content } = await pkg.prepare()
manifest = content
} else {
manifest = await pacote.manifest(spec, {
...opts,
fullmetadata: true,
fullReadJson: true,
})
}
if (manifest.publishConfig) {
const cliFlags = this.npm.config.data.get('cli').raw
// Filter out properties set in CLI flags to prioritize them over
// corresponding `publishConfig` settings
const filteredPublishConfig = Object.fromEntries(
Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
flatten(filteredPublishConfig, opts)
}
return manifest
}
}
module.exports = Publish

View File

@ -0,0 +1,126 @@
const { resolve } = require('node:path')
const BaseCommand = require('../base-cmd.js')
const { log, output } = require('proc-log')
// Flattened, JSON-serializable view of one arborist query result node:
// the target's package.json fields plus location/edge metadata.
class QuerySelectorItem {
  constructor (node) {
    // all enumerable properties from the target
    Object.assign(this, node.target.package)
    // append extra info
    this.pkgid = node.target.pkgid
    this.location = node.target.location
    this.path = node.target.path
    this.realpath = node.target.realpath
    this.resolved = node.target.resolved
    this.from = []
    this.to = []
    this.dev = node.target.dev
    this.inBundle = node.target.inBundle
    this.overridden = node.overridden
    this.queryContext = node.queryContext
    for (const edge of node.target.edgesIn) {
      this.from.push(edge.from.location)
    }
    for (const [, edge] of node.target.edgesOut) {
      if (edge.to) {
        this.to.push(edge.to.location)
      }
    }
    // BUG FIX: this must run AFTER the edgesIn loop above. Previously it was
    // evaluated while `from` was still empty, so `deduped` was always false.
    this.deduped = this.from.length > 1
  }
}
class Query extends BaseCommand {
  // Accumulated QuerySelectorItem results, in discovery order
  #response = [] // response is the query response
  // Locations already emitted, so repeated tree queries stay deduped
  #seen = new Set() // paths we've seen so we can keep response deduped

  static description = 'Retrieve a filtered list of packages'
  static name = 'query'
  static usage = ['<selector>']

  static workspaces = true
  static ignoreImplicitWorkspace = false

  static params = [
    'global',
    'workspace',
    'workspaces',
    'include-workspace-root',
    'package-lock-only',
    'expect-results',
  ]

  constructor (...args) {
    super(...args)
    // `npm query` output is always JSON, regardless of user config
    this.npm.config.set('json', true)
  }

  // Run the CSS-like selector (args[0]) against the project tree: a virtual
  // (lockfile) tree in --package-lock-only mode, the actual node_modules
  // tree otherwise.
  async exec (args) {
    const packageLock = this.npm.config.get('package-lock-only')
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      // one dir up from wherever node_modules lives
      path: resolve(this.npm.dir, '..'),
      forceActual: !packageLock,
    })
    let tree
    if (packageLock) {
      try {
        tree = await arb.loadVirtual()
      } catch (err) {
        log.verbose('loadVirtual', err.stack)
        throw this.usageError(
          'A package lock or shrinkwrap file is required in package-lock-only mode'
        )
      }
    } else {
      tree = await arb.loadActual()
    }
    await this.#queryTree(tree, args[0])
    this.#output()
  }

  // Run the selector once per selected workspace; #seen keeps results deduped
  // across the per-workspace queries.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: this.npm.prefix,
    })
    // FIXME: Workspace support in query does not work as expected so this does not
    // do the same package-lock-only check as this.exec().
    // https://github.com/npm/cli/pull/6732#issuecomment-1708804921
    const tree = await arb.loadActual()
    for (const path of this.workspacePaths) {
      const wsTree = path === tree.root.path
        ? tree // --includes-workspace-root
        : await tree.querySelectorAll(`.workspace:path(${path})`).then(r => r[0].target)
      await this.#queryTree(wsTree, args[0])
    }
    this.#output()
  }

  // Validate --expect-results against the count, then emit the JSON payload
  #output () {
    this.checkExpected(this.#response.length)
    output.buffer(this.#response)
  }

  // builds a normalized inventory
  async #queryTree (tree, arg) {
    const items = await tree.querySelectorAll(arg, this.npm.flatOptions)
    for (const node of items) {
      const { location } = node.target
      // nodes with no location can never be deduped; always include them
      if (!location || !this.#seen.has(location)) {
        const item = new QuerySelectorItem(node)
        this.#response.push(item)
        if (location) {
          this.#seen.add(item.location)
        }
      }
    }
  }
}
module.exports = Query

View File

@ -0,0 +1,84 @@
const { resolve } = require('node:path')
const { output } = require('proc-log')
const npa = require('npm-package-arg')
const semver = require('semver')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
class Rebuild extends ArboristWorkspaceCmd {
  static description = 'Rebuild a package'
  static name = 'rebuild'
  static params = [
    'global',
    'bin-links',
    'foreground-scripts',
    'ignore-scripts',
    ...super.params,
  ]

  // FIX: was '[<package-spec>] ...]' — the trailing ']' was unbalanced and
  // rendered a stray bracket in `npm rebuild --help` / the docs.
  static usage = ['[<package-spec>] ...']

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  // Rebuild every installed package, or only the nodes matching the given
  // specs (name, name@version/range, or a local directory path).
  async exec (args) {
    const globalTop = resolve(this.npm.globalDir, '..')
    const where = this.npm.global ? globalTop : this.npm.prefix
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist({
      ...this.npm.flatOptions,
      path: where,
      // TODO when extending ReifyCmd
      // workspaces: this.workspaceNames,
    })

    if (args.length) {
      // get the set of nodes matching the name that we want rebuilt
      const tree = await arb.loadActual()
      const specs = args.map(arg => {
        const spec = npa(arg)
        // a bare '*' matches any version, so it is always acceptable
        if (spec.rawSpec === '*') {
          return spec
        }

        if (spec.type !== 'range' && spec.type !== 'version' && spec.type !== 'directory') {
          throw new Error('`npm rebuild` only supports SemVer version/range specifiers')
        }

        return spec
      })

      const nodes = tree.inventory.filter(node => this.isNode(specs, node))

      await arb.rebuild({ nodes })
    } else {
      await arb.rebuild()
    }

    output.standard('rebuilt dependencies successfully')
  }

  // True when `node` matches at least one spec: by exact path for directory
  // specs, otherwise by name plus semver satisfaction ('' / '*' accept any
  // version of that name).
  isNode (specs, node) {
    return specs.some(spec => {
      if (spec.type === 'directory') {
        return node.path === spec.fetchSpec
      }

      if (spec.name !== node.name) {
        return false
      }

      if (spec.rawSpec === '' || spec.rawSpec === '*') {
        return true
      }

      const { version } = node.package
      // TODO: add tests for a package with missing version
      return semver.satisfies(version, spec.fetchSpec)
    })
  }
}
module.exports = Rebuild

View File

@ -0,0 +1,55 @@
const { URL } = require('node:url')
const PackageUrlCmd = require('../package-url-cmd.js')
class Repo extends PackageUrlCmd {
  static description = 'Open package repository page in the browser'
  static name = 'repo'

  // Resolve a browsable URL from the manifest's `repository` field, which
  // may be a string shorthand or a { url, directory } object. Throws (with
  // `pkgid` attached) when no repository or no usable URL can be derived.
  getUrl (spec, mani) {
    const repo = mani.repository
    let rurl = null
    if (typeof repo === 'string') {
      rurl = repo
    } else if (repo && typeof repo === 'object' && typeof repo.url === 'string') {
      rurl = repo.url
    }

    if (!rurl) {
      throw Object.assign(new Error('no repository'), { pkgid: spec })
    }

    // known hosts (github, gitlab, ...) get a proper browse URL; anything
    // else falls back to a best-effort normalization of the raw URL
    const info = this.hostedFromMani(mani)
    const url = info
      ? info.browse(mani.repository.directory)
      : unknownHostedUrl(rurl)

    if (!url) {
      throw Object.assign(new Error('no repository: could not get url'), { pkgid: spec })
    }

    return url
  }
}
module.exports = Repo
// Best-effort browse URL for repositories on unknown hosts: strip a trailing
// `.git` from the path and force https (unless the scheme was git+http, which
// keeps plain http). Returns null for unparseable or host-less URLs.
const unknownHostedUrl = url => {
  let parsed
  try {
    parsed = new URL(url)
  } catch (e) {
    return null
  }
  const { protocol, hostname, pathname } = parsed
  /* istanbul ignore next - URL ctor should prevent this */
  if (!protocol || !hostname) {
    return null
  }
  const keepHttp = /(git\+)http:$/.test(protocol)
  const path = pathname.replace(/\.git$/, '')
  return `${keepHttp ? 'http:' : 'https:'}//${hostname}${path}`
}

View File

@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['restart', ...args])
// Declarative lifecycle alias: LifecycleCmd supplies exec, which runs the
// 'restart' script, so `npm restart` behaves like `npm run restart`.
class Restart extends LifecycleCmd {
  static description = 'Restart a package'
  static name = 'restart'

  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Restart

View File

@ -0,0 +1,14 @@
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
class Root extends BaseCommand {
  static description = 'Display npm root'
  static name = 'root'
  static params = ['global']

  // Print the effective node_modules directory (global with --global)
  async exec () {
    output.standard(this.npm.dir)
  }
}
module.exports = Root

View File

@ -0,0 +1,218 @@
const { output } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-cmd.js')
const { getError } = require('../utils/error-message.js')
const { outputError } = require('../utils/output-error.js')
class RunScript extends BaseCommand {
  static description = 'Run arbitrary package scripts'

  static params = [
    'workspace',
    'workspaces',
    'include-workspace-root',
    'if-present',
    'ignore-scripts',
    'foreground-scripts',
    'script-shell',
  ]

  static name = 'run-script'
  static usage = ['<command> [-- <args>]']
  static workspaces = true
  static ignoreImplicitWorkspace = false
  static isShellout = true
  static checkDevEngines = true

  // Tab-completion: offer the script names from the local package.json;
  // fish shells get "name<TAB>truncated command" pairs.
  static async completion (opts, npm) {
    const argv = opts.conf.argv.remain
    if (argv.length === 2) {
      const { content: { scripts = {} } } = await pkgJson.normalize(npm.localPrefix)
        .catch(() => ({ content: {} }))
      if (opts.isFish) {
        return Object.keys(scripts).map(s => `${s}\t${scripts[s].slice(0, 30)}`)
      }
      return Object.keys(scripts)
    }
  }

  // With args: run the named script. Without: list available scripts.
  async exec (args) {
    if (args.length) {
      await this.#run(args, { path: this.npm.localPrefix })
    } else {
      await this.#list(this.npm.localPrefix)
    }
  }

  // Run (or list) the script in every selected workspace. Per-workspace
  // failures are reported but do not stop the remaining workspaces; the
  // process exit code reflects the last failure.
  async execWorkspaces (args) {
    await this.setWorkspaces()

    const ws = [...this.workspaces.entries()]
    for (const [workspace, path] of ws) {
      // used to decide whether a blank separator line is needed after output
      const last = path === ws.at(-1)[1]
      if (!args.length) {
        const newline = await this.#list(path, { workspace })
        if (newline && !last) {
          output.standard('')
        }
        continue
      }

      const pkg = await pkgJson.normalize(path).then(p => p.content)
      try {
        await this.#run(args, { path, pkg, workspace })
      } catch (e) {
        // annotate the error with which workspace failed, keep going
        const err = getError(e, { npm: this.npm, command: null })
        outputError({
          ...err,
          error: [
            ['', `Lifecycle script \`${args[0]}\` failed with error:`],
            ...err.error,
            ['workspace', pkg._id || pkg.name],
            ['location', path],
          ],
        })
        process.exitCode = err.exitCode
        if (!last) {
          output.error('')
        }
      }
    }
  }

  // Run `event` (plus its pre/post hooks) in `path`. `pkg` is read from disk
  // when not supplied; `workspace` only affects the missing-script message.
  async #run ([event, ...args], { path, pkg, workspace }) {
    const runScript = require('@npmcli/run-script')
    pkg ??= await pkgJson.normalize(path).then(p => p.content)
    const { scripts = {} } = pkg

    // built-in fallbacks: `restart` and `env` work even when not defined
    if (event === 'restart' && !scripts.restart) {
      scripts.restart = 'npm stop --if-present && npm start'
    } else if (event === 'env' && !scripts.env) {
      const { isWindowsShell } = require('../utils/is-windows.js')
      scripts.env = isWindowsShell ? 'SET' : 'env'
    }

    pkg.scripts = scripts

    // `start` is also allowed with no script if a server.js entry exists
    if (
      !Object.prototype.hasOwnProperty.call(scripts, event) &&
      !(event === 'start' && (await runScript.isServerPackage(path)))
    ) {
      if (this.npm.config.get('if-present')) {
        return
      }

      const suggestions = require('../utils/did-you-mean.js')(pkg, event)
      const wsArg = workspace && path !== this.npm.localPrefix
        ? ` --workspace=${pkg._id || pkg.name}`
        : ''
      throw new Error([
        `Missing script: "${event}"${suggestions}\n`,
        'To see a list of scripts, run:',
        `  npm run${wsArg}`,
      ].join('\n'))
    }

    // positional args only added to the main event, not pre/post
    const events = [[event, args]]
    if (!this.npm.config.get('ignore-scripts')) {
      if (scripts[`pre${event}`]) {
        events.unshift([`pre${event}`, []])
      }

      if (scripts[`post${event}`]) {
        events.push([`post${event}`, []])
      }
    }

    for (const [ev, evArgs] of events) {
      await runScript({
        path,
        // this || undefined is because runScript will be unhappy with the
        // default null value
        scriptShell: this.npm.config.get('script-shell') || undefined,
        stdio: 'inherit',
        pkg,
        event: ev,
        args: evArgs,
      })
    }
  }

  // List the scripts defined at `path` in json, parseable, or human format.
  // Returns true when human-readable output was printed (callers use this to
  // decide whether a separator line is needed).
  async #list (path, { workspace } = {}) {
    const { scripts = {}, name, _id } = await pkgJson.normalize(path).then(p => p.content)
    const scriptEntries = Object.entries(scripts)

    if (this.npm.silent) {
      return
    }

    if (this.npm.config.get('json')) {
      output.buffer(workspace ? { [workspace]: scripts } : scripts)
      return
    }

    if (!scriptEntries.length) {
      return
    }

    if (this.npm.config.get('parseable')) {
      output.standard(scriptEntries
        .map((s) => (workspace ? [workspace, ...s] : s).join(':'))
        .join('\n')
        .trim())
      return
    }

    // scripts in this list are shown under "Lifecycle scripts"; the rest
    // under "available via npm run-script"
    const cmdList = [
      'prepare', 'prepublishOnly',
      'prepack', 'postpack',
      'dependencies',
      'preinstall', 'install', 'postinstall',
      'prepublish', 'publish', 'postpublish',
      'prerestart', 'restart', 'postrestart',
      'prestart', 'start', 'poststart',
      'prestop', 'stop', 'poststop',
      'pretest', 'test', 'posttest',
      'preuninstall', 'uninstall', 'postuninstall',
      'preversion', 'version', 'postversion',
    ]
    const [cmds, runScripts] = scriptEntries.reduce((acc, s) => {
      acc[cmdList.includes(s[0]) ? 0 : 1].push(s)
      return acc
    }, [[], []])
    const { reset, bold, cyan, dim, blue } = this.npm.chalk
    const pkgId = `in ${cyan(_id || name)}`
    const title = (t) => reset(bold(t))

    if (cmds.length) {
      output.standard(`${title('Lifecycle scripts')} included ${pkgId}:`)
      for (const [k, v] of cmds) {
        output.standard(`  ${k}`)
        output.standard(`    ${dim(v)}`)
      }
    }

    if (runScripts.length) {
      const via = `via \`${blue('npm run-script')}\`:`
      if (!cmds.length) {
        output.standard(`${title('Scripts')} available ${pkgId} ${via}`)
      } else {
        output.standard(`available ${via}`)
      }
      for (const [k, v] of runScripts) {
        output.standard(`  ${k}`)
        output.standard(`    ${dim(v)}`)
      }
    }

    // Return true to indicate that something was output for this path
    // that should be separated from others
    return true
  }
}
module.exports = RunScript

View File

@ -0,0 +1,134 @@
const localeCompare = require('@isaacs/string-locale-compare')('en')
const BaseCommand = require('../base-cmd.js')
const { log, output } = require('proc-log')
const { cyclonedxOutput } = require('../utils/sbom-cyclonedx.js')
const { spdxOutput } = require('../utils/sbom-spdx.js')
const SBOM_FORMATS = ['cyclonedx', 'spdx']
class SBOM extends BaseCommand {
  #response = {} // response is the sbom response

  static description = 'Generate a Software Bill of Materials (SBOM)'
  static name = 'sbom'
  static workspaces = true

  static params = [
    'omit',
    'package-lock-only',
    'sbom-format',
    'sbom-type',
    'workspace',
    'workspaces',
  ]

  // Produce the SBOM for the project tree. --sbom-format is mandatory;
  // the tree comes from the lockfile in --package-lock-only mode, otherwise
  // from the actual node_modules contents.
  async exec () {
    const sbomFormat = this.npm.config.get('sbom-format')
    const packageLockOnly = this.npm.config.get('package-lock-only')

    if (!sbomFormat) {
      /* eslint-disable-next-line max-len */
      throw this.usageError(`Must specify --sbom-format flag with one of: ${SBOM_FORMATS.join(', ')}.`)
    }

    const opts = {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
      forceActual: true,
    }
    const Arborist = require('@npmcli/arborist')
    const arb = new Arborist(opts)

    const tree = packageLockOnly ? await arb.loadVirtual(opts).catch(() => {
      /* eslint-disable-next-line max-len */
      throw this.usageError('A package lock or shrinkwrap file is required in package-lock-only mode')
    }) : await arb.loadActual(opts)

    // Collect the list of selected workspaces in the project
    const wsNodes = this.workspaceNames?.length
      ? arb.workspaceNodes(tree, this.workspaceNames)
      : null

    // Build the selector and query the tree for the list of nodes
    const selector = this.#buildSelector({ wsNodes })
    log.info('sbom', `Using dependency selector: ${selector}`)
    const items = await tree.querySelectorAll(selector)

    // refuse to emit an SBOM for a broken tree (missing/invalid deps)
    const errors = items.flatMap(node => detectErrors(node))
    if (errors.length) {
      throw Object.assign(new Error([...new Set(errors)].join('\n')), {
        code: 'ESBOMPROBLEMS',
      })
    }

    // Populate the response with the list of unique nodes (sorted by location)
    this.#buildResponse(items.sort((a, b) => localeCompare(a.location, b.location)))

    // TODO(BREAKING_CHANGE): all sbom output is in json mode but setting it before
    // any of the errors will cause those to be thrown in json mode.
    this.npm.config.set('json', true)
    output.buffer(this.#response)
  }

  async execWorkspaces (args) {
    await this.setWorkspaces()
    return this.exec(args)
  }

  // Build the selector from all of the specified filter options
  #buildSelector ({ wsNodes }) {
    let selector
    const omit = this.npm.flatOptions.omit
    const workspacesEnabled = this.npm.flatOptions.workspacesEnabled

    // If omit is specified, omit all nodes and their children which match the
    // specified selectors
    const omits = omit.reduce((acc, o) => `${acc}:not(.${o})`, '')

    if (!workspacesEnabled) {
      // If workspaces are disabled, omit all workspace nodes and their children
      selector = `:root > :not(.workspace)${omits},:root > :not(.workspace) *${omits},:extraneous`
    } else if (wsNodes && wsNodes.length > 0) {
      // If one or more workspaces are selected, select only those workspaces and their children
      selector = wsNodes.map(ws => `#${ws.name},#${ws.name} *${omits}`).join(',')
    } else {
      selector = `:root *${omits},:extraneous`
    }

    // Always include the root node
    return `:root,${selector}`
  }

  // builds a normalized inventory
  #buildResponse (items) {
    const sbomFormat = this.npm.config.get('sbom-format')
    const packageType = this.npm.config.get('sbom-type')
    const packageLockOnly = this.npm.config.get('package-lock-only')
    this.#response = sbomFormat === 'cyclonedx'
      ? cyclonedxOutput({ npm: this.npm, nodes: items, packageType, packageLockOnly })
      : spdxOutput({ npm: this.npm, nodes: items, packageType })
  }
}
// Scan a node's outgoing edges for dependency problems and return an array
// of human-readable error strings (possibly empty): one per non-optional
// missing dependency and one per invalid (unsatisfied) dependency.
const detectErrors = (node) => {
  const problems = []
  for (const edge of node.edgesOut.values()) {
    const isOptional = edge.type === 'optional' || edge.type === 'peerOptional'
    // Look for missing dependencies (that are NOT optional), or invalid dependencies
    if (edge.missing && !isOptional) {
      problems.push(`missing: ${edge.name}@${edge.spec}, required by ${edge.from.pkgid}`)
    }
    if (edge.invalid) {
      /* istanbul ignore next */
      const spec = edge.spec || '*'
      problems.push(`invalid: ${edge.to.pkgid}, ${spec} required by ${edge.from.pkgid}`)
    }
  }
  return problems
}
module.exports = SBOM

View File

@ -0,0 +1,70 @@
const Pipeline = require('minipass-pipeline')
const libSearch = require('libnpmsearch')
const { log, output } = require('proc-log')
const formatSearchStream = require('../utils/format-search-stream.js')
const BaseCommand = require('../base-cmd.js')
class Search extends BaseCommand {
  static description = 'Search for packages'
  static name = 'search'

  static params = [
    'json',
    'color',
    'parseable',
    'description',
    'searchlimit',
    'searchopts',
    'searchexclude',
    'registry',
    'prefer-online',
    'prefer-offline',
    'offline',
  ]

  static usage = ['<search term> [<search term> ...]']

  // Stream registry search results, formatted per the json/parseable/color
  // config, printing a "No matches" line when nothing came out.
  async exec (args) {
    const opts = {
      ...this.npm.flatOptions,
      ...this.npm.flatOptions.search,
      include: args.map(s => s.toLowerCase()).filter(Boolean),
      // --searchexclude is a whitespace-separated list
      exclude: this.npm.flatOptions.search.exclude.split(/\s+/),
    }

    if (opts.include.length === 0) {
      throw new Error('search must be called with arguments')
    }

    // Used later to figure out whether we had any packages go out
    let anyOutput = false

    // Grab a configured output stream that will spit out packages in the desired format.
    const outputStream = formatSearchStream({
      args, // --searchinclude options are not highlighted
      ...opts,
      npm: this.npm,
    })

    log.silly('search', 'searching packages')
    const p = new Pipeline(
      libSearch.stream(opts.include, opts),
      outputStream
    )

    p.on('data', chunk => {
      if (!anyOutput) {
        anyOutput = true
      }
      output.standard(chunk.toString('utf8'))
    })

    await p.promise()
    // json/parseable consumers get machine-readable output only, so skip
    // the human-facing "no matches" message in those modes
    if (!anyOutput && !this.npm.config.get('json') && !this.npm.config.get('parseable')) {
      output.standard('No matches found for ' + (args.map(JSON.stringify).join(' ')))
    }

    log.silly('search', 'search completed')
  }
}
module.exports = Search

View File

@ -0,0 +1,26 @@
const Npm = require('../npm.js')
const BaseCommand = require('../base-cmd.js')
class Set extends BaseCommand {
  static description = 'Set a value in the npm configuration'
  static name = 'set'
  static usage = ['<key>=<value> [<key>=<value> ...] (See `npm config`)']
  static params = ['global', 'location']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts) {
    // delegate completion to the `config` command this aliases
    const Config = Npm.cmd('config')
    return Config.completion(opts)
  }

  // Thin alias: `npm set k=v` delegates to `npm config set k=v`.
  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }
    return this.npm.exec('config', ['set'].concat(args))
  }
}
module.exports = Set

View File

@ -0,0 +1,73 @@
const { resolve, basename } = require('node:path')
const { unlink } = require('node:fs/promises')
const { log } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
class Shrinkwrap extends BaseCommand {
  static description = 'Lock down dependency versions for publication'
  static name = 'shrinkwrap'
  static ignoreImplicitWorkspace = false

  // Convert the project lockfile into npm-shrinkwrap.json. Not supported
  // for global installs (ESHRINKWRAPGLOBAL).
  async exec () {
    // if has a npm-shrinkwrap.json, nothing to do
    // if has a package-lock.json, rename to npm-shrinkwrap.json
    // if has neither, load the actual tree and save that as npm-shrinkwrap.json
    //
    // loadVirtual, fall back to loadActual
    // rename shrinkwrap file type, and tree.meta.save()
    if (this.npm.global) {
      const er = new Error('`npm shrinkwrap` does not work for global packages')
      er.code = 'ESHRINKWRAPGLOBAL'
      throw er
    }

    const Arborist = require('@npmcli/arborist')
    const path = this.npm.prefix
    const sw = resolve(path, 'npm-shrinkwrap.json')
    const arb = new Arborist({ ...this.npm.flatOptions, path })
    const tree = await arb.loadVirtual().catch(() => arb.loadActual())
    const { meta } = tree
    // newFile: no visible lockfile existed before (only a hidden one, or none)
    const newFile = meta.hiddenLockfile || !meta.loadedFromDisk
    const oldFilename = meta.filename
    // notSW: an on-disk lockfile exists but is not already npm-shrinkwrap.json
    const notSW = !newFile && basename(oldFilename) !== 'npm-shrinkwrap.json'

    // The computed lockfile version of a hidden lockfile is always 3
    // even if the actual value of the property is different.
    // When shrinkwrap is run with only a hidden lockfile we want to
    // set the shrinkwrap lockfile version as whatever was explicitly
    // requested with a fallback to the actual value from the hidden
    // lockfile.
    if (meta.hiddenLockfile) {
      meta.lockfileVersion = arb.options.lockfileVersion ||
        meta.originalLockfileVersion
    }
    meta.hiddenLockfile = false
    meta.filename = sw
    await meta.save()

    // non-null only when saving changed the lockfile version
    const updatedVersion = meta.originalLockfileVersion !== meta.lockfileVersion
      ? meta.lockfileVersion
      : null

    if (newFile) {
      let message = 'created a lockfile as npm-shrinkwrap.json'
      if (updatedVersion) {
        message += ` with version ${updatedVersion}`
      }
      log.notice('', message)
    } else if (notSW) {
      // the old package-lock.json has been superseded; remove it
      await unlink(oldFilename)
      let message = 'package-lock.json has been renamed to npm-shrinkwrap.json'
      if (updatedVersion) {
        message += ` and updated to version ${updatedVersion}`
      }
      log.notice('', message)
    } else if (updatedVersion) {
      log.notice('', `npm-shrinkwrap.json updated to version ${updatedVersion}`)
    } else {
      log.notice('', 'npm-shrinkwrap.json up to date')
    }
  }
}
module.exports = Shrinkwrap

View File

@ -0,0 +1,72 @@
const fetch = require('npm-registry-fetch')
const npa = require('npm-package-arg')
const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity')
const BaseCommand = require('../base-cmd.js')
class Star extends BaseCommand {
  static description = 'Mark your favorite packages'
  static name = 'star'
  static usage = ['[<package-spec>...]']
  static params = [
    'registry',
    'unicode',
    'otp',
  ]

  static ignoreImplicitWorkspace = false

  // Star — or unstar, when invoked via the `unstar` command name — each
  // package given in `args`. Returns the registry response for the last
  // package processed (unchanged from before for the single-package case).
  async exec (args) {
    if (!args.length) {
      throw this.usageError()
    }

    // if we're unstarring, then show an empty star image
    // otherwise, show the full star image
    const unicode = this.npm.config.get('unicode')
    const full = unicode ? '\u2605 ' : '(*)'
    const empty = unicode ? '\u2606 ' : '( )'
    const show = this.name === 'star' ? full : empty

    const pkgs = args.map(npa)
    const username = await getIdentity(this.npm, this.npm.flatOptions)

    // BUG FIX: the previous implementation `return`ed from inside this loop
    // after the first iteration, so `npm star a b` silently ignored every
    // package after the first. All packages are now processed.
    let data
    for (const pkg of pkgs) {
      // fetch the current document (with write access) to get _rev and users
      const fullData = await fetch.json(pkg.escapedName, {
        ...this.npm.flatOptions,
        spec: pkg,
        query: { write: true },
        preferOnline: true,
      })

      const body = {
        _id: fullData._id,
        _rev: fullData._rev,
        users: fullData.users || {},
      }

      if (this.name === 'star') {
        log.info('star', 'starring', body._id)
        body.users[username] = true
        log.verbose('star', 'starring', body)
      } else {
        delete body.users[username]
        log.info('unstar', 'unstarring', body._id)
        log.verbose('unstar', 'unstarring', body)
      }

      data = await fetch.json(pkg.escapedName, {
        ...this.npm.flatOptions,
        spec: pkg,
        method: 'PUT',
        body,
      })
      output.standard(show + ' ' + pkg.name)
      log.verbose('star', data)
    }
    return data
  }
}
module.exports = Star

View File

@ -0,0 +1,39 @@
const fetch = require('npm-registry-fetch')
const { log, output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
class Stars extends BaseCommand {
  static description = 'View packages marked as favorites'
  static name = 'stars'
  static usage = ['[<user>]']
  static params = ['registry']

  static ignoreImplicitWorkspace = false

  // Print the packages starred by `user`; defaults to the logged-in identity
  // when no user is given (which requires auth).
  async exec ([user]) {
    try {
      if (!user) {
        user = await getIdentity(this.npm, this.npm.flatOptions)
      }

      const { rows } = await fetch.json('/-/_view/starredByUser', {
        ...this.npm.flatOptions,
        query: { key: `"${user}"` },
      })

      if (!rows.length) {
        log.warn('stars', 'user has not starred any packages')
      }

      rows.forEach(row => output.standard(row.value))
    } catch (err) {
      // looking up "your" stars without a login fails with ENEEDAUTH;
      // add a hint before rethrowing
      if (err.code === 'ENEEDAUTH') {
        log.warn('stars', 'auth is required to look up your username')
      }
      throw err
    }
  }
}
module.exports = Stars

View File

@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['start', ...args])
// Declarative lifecycle alias: LifecycleCmd supplies exec, which runs the
// 'start' script, so `npm start` behaves like `npm run start`.
class Start extends LifecycleCmd {
  static description = 'Start a package'
  static name = 'start'

  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Start

View File

@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['stop', ...args])
// Declarative lifecycle alias: LifecycleCmd supplies exec, which runs the
// 'stop' script, so `npm stop` behaves like `npm run stop`.
class Stop extends LifecycleCmd {
  static description = 'Stop a package'
  static name = 'stop'

  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Stop

View File

@ -0,0 +1,155 @@
const columns = require('cli-columns')
const libteam = require('libnpmteam')
const { output } = require('proc-log')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
class Team extends BaseCommand {
  static description = 'Manage organization teams and team memberships'
  static name = 'team'
  static usage = [
    'create <scope:team> [--otp <otpcode>]',
    'destroy <scope:team> [--otp <otpcode>]',
    'add <scope:team> <user> [--otp <otpcode>]',
    'rm <scope:team> <user> [--otp <otpcode>]',
    'ls <scope>|<scope:team>',
  ]

  static params = [
    'registry',
    'otp',
    'parseable',
    'json',
  ]

  static ignoreImplicitWorkspace = false

  // Tab-completion: subcommand names at position 2, nothing deeper.
  static async completion (opts) {
    const { conf: { argv: { remain: argv } } } = opts
    const subcommands = ['create', 'destroy', 'add', 'rm', 'ls']

    if (argv.length === 2) {
      return subcommands
    }

    if (subcommands.includes(argv[2])) {
      return []
    }

    throw new Error(argv[2] + ' not recognized')
  }

  // Dispatch to the subcommand; all registry calls go through otplease so a
  // one-time password can be prompted for and retried.
  async exec ([cmd, entity = '', user = '']) {
    // Entities are in the format <scope>:<team>
    // XXX: "description" option to libnpmteam is used as a description of the
    // team, but in npm's options, this is a boolean meaning "show the
    // description in npm search output". Hence its being set to null here.
    await otplease(this.npm, { ...this.npm.flatOptions }, opts => {
      // a leading @ on the scope is accepted and stripped
      entity = entity.replace(/^@/, '')
      switch (cmd) {
        case 'create': return this.create(entity, opts)
        case 'destroy': return this.destroy(entity, opts)
        case 'add': return this.add(entity, user, opts)
        case 'rm': return this.rm(entity, user, opts)
        case 'ls': {
          // "scope:team" lists that team's users; bare "scope" lists teams
          const match = entity.match(/[^:]+:.+/)
          if (match) {
            return this.listUsers(entity, opts)
          } else {
            return this.listTeams(entity, opts)
          }
        }
        default:
          throw this.usageError()
      }
    })
  }

  // Create a team; report in json / parseable / human format.
  async create (entity, opts) {
    await libteam.create(entity, opts)
    if (opts.json) {
      output.buffer({
        created: true,
        team: entity,
      })
    } else if (opts.parseable) {
      output.standard(`${entity}\tcreated`)
    } else if (!this.npm.silent) {
      output.standard(`+@${entity}`)
    }
  }

  // Delete a team; report in json / parseable / human format.
  async destroy (entity, opts) {
    await libteam.destroy(entity, opts)
    if (opts.json) {
      output.buffer({
        deleted: true,
        team: entity,
      })
    } else if (opts.parseable) {
      output.standard(`${entity}\tdeleted`)
    } else if (!this.npm.silent) {
      output.standard(`-@${entity}`)
    }
  }

  // Add a user to a team; report in json / parseable / human format.
  async add (entity, user, opts) {
    await libteam.add(user, entity, opts)
    if (opts.json) {
      output.buffer({
        added: true,
        team: entity,
        user,
      })
    } else if (opts.parseable) {
      output.standard(`${user}\t${entity}\tadded`)
    } else if (!this.npm.silent) {
      output.standard(`${user} added to @${entity}`)
    }
  }

  // Remove a user from a team; report in json / parseable / human format.
  async rm (entity, user, opts) {
    await libteam.rm(user, entity, opts)
    if (opts.json) {
      output.buffer({
        removed: true,
        team: entity,
        user,
      })
    } else if (opts.parseable) {
      output.standard(`${user}\t${entity}\tremoved`)
    } else if (!this.npm.silent) {
      output.standard(`${user} removed from @${entity}`)
    }
  }

  // List the members of a <scope>:<team>, sorted, in columns for humans.
  async listUsers (entity, opts) {
    const users = (await libteam.lsUsers(entity, opts)).sort()
    if (opts.json) {
      output.buffer(users)
    } else if (opts.parseable) {
      output.standard(users.join('\n'))
    } else if (!this.npm.silent) {
      const plural = users.length === 1 ? '' : 's'
      const more = users.length === 0 ? '' : ':\n'
      output.standard(`\n@${entity} has ${users.length} user${plural}${more}`)
      output.standard(columns(users, { padding: 1 }))
    }
  }

  // List the teams of a scope, sorted, in columns for humans.
  async listTeams (entity, opts) {
    const teams = (await libteam.lsTeams(entity, opts)).sort()
    if (opts.json) {
      output.buffer(teams)
    } else if (opts.parseable) {
      output.standard(teams.join('\n'))
    } else if (!this.npm.silent) {
      const plural = teams.length === 1 ? '' : 's'
      const more = teams.length === 0 ? '' : ':\n'
      output.standard(`\n@${entity} has ${teams.length} team${plural}${more}`)
      output.standard(columns(teams.map(t => `@${t}`), { padding: 1 }))
    }
  }
}
module.exports = Team

View File

@ -0,0 +1,13 @@
const LifecycleCmd = require('../lifecycle-cmd.js')
// This ends up calling run-script(['test', ...args])
// Declarative lifecycle alias: LifecycleCmd supplies exec, which runs the
// 'test' script, so `npm test` behaves like `npm run test`.
class Test extends LifecycleCmd {
  static description = 'Test a package'
  static name = 'test'

  static params = [
    'ignore-scripts',
    'script-shell',
  ]
}
module.exports = Test

View File

@ -0,0 +1,197 @@
const { log, output } = require('proc-log')
const { listTokens, createToken, removeToken } = require('npm-profile')
const { otplease } = require('../utils/auth.js')
const readUserInfo = require('../utils/read-user-info.js')
const BaseCommand = require('../base-cmd.js')
class Token extends BaseCommand {
static description = 'Manage your authentication tokens'
static name = 'token'
static usage = ['list', 'revoke <id|token>', 'create [--read-only] [--cidr=list]']
static params = ['read-only', 'cidr', 'registry', 'otp']
  // Tab-completion: offer subcommand names at position 2, nothing deeper.
  static async completion (opts) {
    const argv = opts.conf.argv.remain
    const subcommands = ['list', 'revoke', 'create']
    if (argv.length === 2) {
      return subcommands
    }

    if (subcommands.includes(argv[2])) {
      return []
    }

    throw new Error(argv[2] + ' not recognized')
  }
async exec (args) {
if (args.length === 0) {
return this.list()
}
switch (args[0]) {
case 'list':
case 'ls':
return this.list()
case 'rm':
case 'delete':
case 'revoke':
case 'remove':
return this.rm(args.slice(1))
case 'create':
return this.create(args.slice(1))
default:
throw this.usageError(`${args[0]} is not a recognized subcommand.`)
}
}
async list () {
const json = this.npm.config.get('json')
const parseable = this.npm.config.get('parseable')
log.info('token', 'getting list')
const tokens = await listTokens(this.npm.flatOptions)
if (json) {
output.buffer(tokens)
return
}
if (parseable) {
output.standard(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'))
tokens.forEach(token => {
output.standard(
[
token.key,
token.token,
token.created,
token.readonly ? 'true' : 'false',
token.cidr_whitelist ? token.cidr_whitelist.join(',') : '',
].join('\t')
)
})
return
}
this.generateTokenIds(tokens, 6)
const chalk = this.npm.chalk
for (const token of tokens) {
const level = token.readonly ? 'Read only token' : 'Publish token'
const created = String(token.created).slice(0, 10)
/* eslint-disable-next-line max-len */
output.standard(`${chalk.blue(level)} ${token.token}… with id ${chalk.cyan(token.id)} created ${created}`)
if (token.cidr_whitelist) {
output.standard(`with IP whitelist: ${chalk.green(token.cidr_whitelist.join(','))}`)
}
output.standard()
}
}
async rm (args) {
if (args.length === 0) {
throw this.usageError('`<tokenKey>` argument is required.')
}
const json = this.npm.config.get('json')
const parseable = this.npm.config.get('parseable')
const toRemove = []
const opts = { ...this.npm.flatOptions }
log.info('token', `removing ${toRemove.length} tokens`)
const tokens = await listTokens(opts)
args.forEach(id => {
const matches = tokens.filter(token => token.key.indexOf(id) === 0)
if (matches.length === 1) {
toRemove.push(matches[0].key)
} else if (matches.length > 1) {
throw new Error(
/* eslint-disable-next-line max-len */
`Token ID "${id}" was ambiguous, a new token may have been created since you last ran \`npm token list\`.`
)
} else {
const tokenMatches = tokens.some(t => id.indexOf(t.token) === 0)
if (!tokenMatches) {
throw new Error(`Unknown token id or value "${id}".`)
}
toRemove.push(id)
}
})
await Promise.all(
toRemove.map(key => {
return otplease(this.npm, opts, c => removeToken(key, c))
})
)
if (json) {
output.buffer(toRemove)
} else if (parseable) {
output.standard(toRemove.join('\t'))
} else {
output.standard('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : ''))
}
}
async create () {
const json = this.npm.config.get('json')
const parseable = this.npm.config.get('parseable')
const cidr = this.npm.config.get('cidr')
const readonly = this.npm.config.get('read-only')
const validCIDR = await this.validateCIDRList(cidr)
const password = await readUserInfo.password()
log.info('token', 'creating')
const result = await otplease(
this.npm,
{ ...this.npm.flatOptions },
c => createToken(password, readonly, validCIDR, c)
)
delete result.key
delete result.updated
if (json) {
output.buffer(result)
} else if (parseable) {
Object.keys(result).forEach(k => output.standard(k + '\t' + result[k]))
} else {
const chalk = this.npm.chalk
// Identical to list
const level = result.readonly ? 'read only' : 'publish'
output.standard(`Created ${chalk.blue(level)} token ${result.token}`)
if (result.cidr_whitelist?.length) {
output.standard(`with IP whitelist: ${chalk.green(result.cidr_whitelist.join(','))}`)
}
}
}
invalidCIDRError (msg) {
return Object.assign(new Error(msg), { code: 'EINVALIDCIDR' })
}
generateTokenIds (tokens, minLength) {
for (const token of tokens) {
token.id = token.key
for (let ii = minLength; ii < token.key.length; ++ii) {
const match = tokens.some(
ot => ot !== token && ot.key.slice(0, ii) === token.key.slice(0, ii)
)
if (!match) {
token.id = token.key.slice(0, ii)
break
}
}
}
}
async validateCIDRList (cidrs) {
const { v4: isCidrV4, v6: isCidrV6 } = await import('is-cidr')
const maybeList = [].concat(cidrs).filter(Boolean)
const list = maybeList.length === 1 ? maybeList[0].split(/,\s*/) : maybeList
for (const cidr of list) {
if (isCidrV6(cidr)) {
throw this.invalidCIDRError(
`CIDR whitelist can only contain IPv4 addresses${cidr} is IPv6`
)
}
if (!isCidrV4(cidr)) {
throw this.invalidCIDRError(`CIDR whitelist contains invalid CIDR entry: ${cidr}`)
}
}
return list
}
}
module.exports = Token

View File

@ -0,0 +1,56 @@
const { resolve } = require('node:path')
const pkgJson = require('@npmcli/package-json')
const reifyFinish = require('../utils/reify-finish.js')
const completion = require('../utils/installed-shallow.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm uninstall` - remove packages by reifying the tree with `rm` set.
class Uninstall extends ArboristWorkspaceCmd {
  static description = 'Remove a package'
  static name = 'uninstall'
  static params = ['save', 'global', ...super.params]
  static usage = ['[<@scope>/]<pkg>...']
  static ignoreImplicitWorkspace = false

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    return completion(npm, opts)
  }

  // Remove the named packages.  In global mode with no args, fall back to
  // removing the package named by the package.json in the local prefix.
  async exec (args) {
    if (!args.length) {
      if (!this.npm.global) {
        throw new Error('Must provide a package name to remove')
      } else {
        // `npm -g uninstall` with no args: infer the name from cwd's pkg
        try {
          const { content: pkg } = await pkgJson.normalize(this.npm.localPrefix)
          args.push(pkg.name)
        } catch (er) {
          if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') {
            throw er
          } else {
            // no package.json here, so there is nothing to infer
            throw this.usageError()
          }
        }
      }
    }
    // the /path/to/node_modules/..
    const path = this.npm.global
      ? resolve(this.npm.globalDir, '..')
      : this.npm.localPrefix
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path,
      rm: args,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.reify(opts)
    await reifyFinish(this.npm, arb)
  }
}
module.exports = Uninstall

View File

@ -0,0 +1,176 @@
const libaccess = require('libnpmaccess')
const libunpub = require('libnpmpublish').unpublish
const npa = require('npm-package-arg')
const pacote = require('pacote')
const { output, log } = require('proc-log')
const pkgJson = require('@npmcli/package-json')
const { flatten } = require('@npmcli/config/lib/definitions')
const getIdentity = require('../utils/get-identity.js')
const { otplease } = require('../utils/auth.js')
const BaseCommand = require('../base-cmd.js')
const LAST_REMAINING_VERSION_ERROR = 'Refusing to delete the last version of the package. ' +
  'It will block from republishing a new version for 24 hours.\n' +
  'Run with --force to do this.'

// `npm unpublish` - remove a single version or a whole package from the
// registry, with several guard rails that --force overrides.
class Unpublish extends BaseCommand {
  static description = 'Remove a package from the registry'
  static name = 'unpublish'
  static params = ['dry-run', 'force', 'workspace', 'workspaces']
  static usage = ['[<package-spec>]']
  static workspaces = true
  static ignoreImplicitWorkspace = false

  // List the published version numbers for `name`.  The `write: true`
  // query asks the registry for its write-view of the packument
  // (presumably uncached/authoritative - confirm against registry docs).
  static async getKeysOfVersions (name, opts) {
    const packument = await pacote.packument(name, {
      ...opts,
      spec: name,
      query: { write: true },
    })
    return Object.keys(packument.versions)
  }

  // Completion: offer the current user's packages, then (once narrowed to
  // a single package) its individual versions.
  static async completion (args, npm) {
    const { partialWord, conf } = args
    if (conf.argv.remain.length >= 3) {
      return []
    }
    const opts = { ...npm.flatOptions }
    const username = await getIdentity(npm, { ...opts }).catch(() => null)
    if (!username) {
      return []
    }
    const access = await libaccess.getPackages(username, opts)
    // do a bit of filtering at this point, so that we don't need
    // to fetch versions for more than one thing, but also don't
    // accidentally unpublish a whole project
    let pkgs = Object.keys(access)
    if (!partialWord || !pkgs.length) {
      return pkgs
    }
    const pp = npa(partialWord).name
    pkgs = pkgs.filter(p => !p.indexOf(pp))
    if (pkgs.length > 1) {
      return pkgs
    }
    const versions = await Unpublish.getKeysOfVersions(pkgs[0], opts)
    if (!versions.length) {
      return pkgs
    } else {
      return versions.map(v => `${pkgs[0]}@${v}`)
    }
  }

  // Unpublish `args[0]` (a name@version or whole package), or the package
  // described by the local package.json when no spec is given.
  async exec (args, { localPrefix } = {}) {
    if (args.length > 1) {
      throw this.usageError()
    }
    // workspace mode
    if (!localPrefix) {
      localPrefix = this.npm.localPrefix
    }
    const force = this.npm.config.get('force')
    const { silent } = this.npm
    const dryRun = this.npm.config.get('dry-run')

    // only exact versions or the whole project ('*') may be unpublished
    let spec
    if (args.length) {
      spec = npa(args[0])
      if (spec.type !== 'version' && spec.rawSpec !== '*') {
        throw this.usageError(
          'Can only unpublish a single version, or the entire project.\n' +
          'Tags and ranges are not supported.'
        )
      }
    }
    log.silly('unpublish', 'args[0]', args[0])
    log.silly('unpublish', 'spec', spec)
    if (spec?.rawSpec === '*' && !force) {
      throw this.usageError(
        'Refusing to delete entire project.\n' +
        'Run with --force to do this.'
      )
    }
    const opts = { ...this.npm.flatOptions }

    // read the local package.json (if any) - used both to resolve a missing
    // spec and to pick up publishConfig overrides
    let manifest
    try {
      const { content } = await pkgJson.prepare(localPrefix)
      manifest = content
    } catch (err) {
      if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
        if (!spec) {
          // We needed a local package.json to figure out what package to
          // unpublish
          throw this.usageError()
        }
      } else {
        // folks should know if ANY local package.json had a parsing error.
        // They may be relying on `publishConfig` to be loading and we don't
        // want to ignore errors in that case.
        throw err
      }
    }

    let pkgVersion // for cli output
    if (spec) {
      pkgVersion = spec.type === 'version' ? `@${spec.rawSpec}` : ''
    } else {
      // no spec given: unpublish the exact version in the local manifest
      spec = npa.resolve(manifest.name, manifest.version)
      log.verbose('unpublish', manifest)
      pkgVersion = manifest.version ? `@${manifest.version}` : ''
      if (!manifest.version && !force) {
        throw this.usageError(
          'Refusing to delete entire project.\n' +
          'Run with --force to do this.'
        )
      }
    }

    // If localPrefix has a package.json with a name that matches the package
    // being unpublished, load up the publishConfig
    if (manifest?.name === spec.name && manifest.publishConfig) {
      const cliFlags = this.npm.config.data.get('cli').raw
      // Filter out properties set in CLI flags to prioritize them over
      // corresponding `publishConfig` settings
      const filteredPublishConfig = Object.fromEntries(
        Object.entries(manifest.publishConfig).filter(([key]) => !(key in cliFlags)))
      flatten(filteredPublishConfig, opts)
    }

    // refuse to remove the last remaining version without --force
    const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
    if (versions.length === 1 && spec.rawSpec === versions[0] && !force) {
      throw this.usageError(LAST_REMAINING_VERSION_ERROR)
    }
    if (versions.length === 1) {
      pkgVersion = ''
    }

    if (!dryRun) {
      await otplease(this.npm, opts, o => libunpub(spec, o))
    }
    if (!silent) {
      output.standard(`- ${spec.name}${pkgVersion}`)
    }
  }

  // Run exec() once per configured workspace, using each workspace dir as
  // the local prefix.
  async execWorkspaces (args) {
    await this.setWorkspaces()
    for (const path of this.workspacePaths) {
      await this.exec(args, { localPrefix: path })
    }
  }
}
module.exports = Unpublish

View File

@ -0,0 +1,8 @@
const Star = require('./star.js')
// Unstar reuses Star's exec() wholesale; only the name and description
// differ (presumably star.js branches on the command name to decide
// add-vs-remove - that logic is not visible from this file).
class Unstar extends Star {
  static description = 'Remove an item from your favorite packages'
  static name = 'unstar'
}
module.exports = Unstar

View File

@ -0,0 +1,68 @@
const path = require('node:path')
const { log } = require('proc-log')
const reifyFinish = require('../utils/reify-finish.js')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
// `npm update` - bring installed packages up to date via Arborist reify.
class Update extends ArboristWorkspaceCmd {
  static description = 'Update packages'
  static name = 'update'
  static params = [
    'save',
    'global',
    'install-strategy',
    'legacy-bundling',
    'global-style',
    'omit',
    'include',
    'strict-peer-deps',
    'package-lock',
    'foreground-scripts',
    'ignore-scripts',
    'audit',
    'bin-links',
    'fund',
    'dry-run',
    ...super.params,
  ]
  static usage = ['[<pkg>...]']

  // TODO
  /* istanbul ignore next */
  static async completion (opts, npm) {
    const completion = require('../utils/installed-deep.js')
    return completion(npm, opts)
  }

  // Update the named packages, or everything when no args are given.
  async exec (args) {
    // `update: true` means "update all"; otherwise only the listed packages
    const update = args.length === 0 ? true : args
    const global = path.resolve(this.npm.globalDir, '..')
    const where = this.npm.global ? global : this.npm.prefix
    // In the context of `npm update` the save
    // config value should default to `false`
    const save = this.npm.config.isDefault('save')
      ? false
      : this.npm.config.get('save')
    if (this.npm.config.get('depth')) {
      log.warn('update', 'The --depth option no longer has any effect. See RFC0019.\n' +
        'https://github.com/npm/rfcs/blob/latest/implemented/0019-remove-update-depth-option.md')
    }
    const Arborist = require('@npmcli/arborist')
    const opts = {
      ...this.npm.flatOptions,
      path: where,
      save,
      workspaces: this.workspaceNames,
    }
    const arb = new Arborist(opts)
    await arb.reify({ ...opts, update })
    await reifyFinish(this.npm, arb)
  }
}
module.exports = Update

View File

@ -0,0 +1,151 @@
const { resolve } = require('node:path')
const { readFile } = require('node:fs/promises')
const { output } = require('proc-log')
const BaseCommand = require('../base-cmd.js')
// `npm version` - print version info (no args) or bump the package version
// (one arg) via libnpmversion, with workspace support.
class Version extends BaseCommand {
  static description = 'Bump a package version'
  static name = 'version'
  static params = [
    'allow-same-version',
    'commit-hooks',
    'git-tag-version',
    'json',
    'preid',
    'sign-git-tag',
    'workspace',
    'workspaces',
    'workspaces-update',
    'include-workspace-root',
  ]
  static workspaces = true
  static ignoreImplicitWorkspace = false

  /* eslint-disable-next-line max-len */
  static usage = ['[<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]']

  // Completion: offer the bump keywords for the first argument only.
  static async completion (opts) {
    const {
      conf: {
        argv: { remain },
      },
    } = opts
    if (remain.length > 2) {
      return []
    }
    return [
      'major',
      'minor',
      'patch',
      'premajor',
      'preminor',
      'prepatch',
      'prerelease',
      'from-git',
    ]
  }

  // 0 args: print versions; 1 arg: bump; more: usage error.
  async exec (args) {
    switch (args.length) {
      case 0:
        return this.list()
      case 1:
        return this.change(args)
      default:
        throw this.usageError()
    }
  }

  // Same dispatch as exec(), but against the configured workspaces.
  async execWorkspaces (args) {
    switch (args.length) {
      case 0:
        return this.listWorkspaces()
      case 1:
        return this.changeWorkspaces(args)
      default:
        throw this.usageError()
    }
  }

  // Bump the root package and print the new version with its tag prefix.
  async change (args) {
    const libnpmversion = require('libnpmversion')
    const prefix = this.npm.config.get('tag-version-prefix')
    const version = await libnpmversion(args[0], {
      ...this.npm.flatOptions,
      path: this.npm.prefix,
    })
    return output.standard(`${prefix}${version}`)
  }

  // Bump each workspace (git tagging disabled per workspace), then let
  // update-workspaces refresh the install tree.
  async changeWorkspaces (args) {
    const updateWorkspaces = require('../utils/update-workspaces.js')
    const libnpmversion = require('libnpmversion')
    const prefix = this.npm.config.get('tag-version-prefix')
    const {
      config,
      flatOptions,
      localPrefix,
    } = this.npm
    await this.setWorkspaces()
    const updatedWorkspaces = []
    for (const [name, path] of this.workspaces) {
      output.standard(name)
      const version = await libnpmversion(args[0], {
        ...flatOptions,
        // individual workspace bumps must not create a git tag each
        'git-tag-version': false,
        path,
      })
      updatedWorkspaces.push(name)
      output.standard(`${prefix}${version}`)
    }
    return updateWorkspaces({
      config,
      flatOptions,
      localPrefix,
      npm: this.npm,
      workspaces: updatedWorkspaces,
    })
  }

  // Print { <pkg>: <version>, npm: ..., ...process.versions } as JSON or
  // via the standard object inspector.
  async list (results = {}) {
    const pj = resolve(this.npm.prefix, 'package.json')
    const pkg = await readFile(pj, 'utf8')
      .then(data => JSON.parse(data))
      .catch(() => ({}))
    if (pkg.name && pkg.version) {
      results[pkg.name] = pkg.version
    }
    results.npm = this.npm.version
    for (const [key, version] of Object.entries(process.versions)) {
      results[key] = version
    }
    if (this.npm.config.get('json')) {
      output.buffer(results)
    } else {
      output.standard(results)
    }
  }

  // Collect name/version from every workspace, then append the usual
  // npm/node version info via list().
  async listWorkspaces () {
    const results = {}
    await this.setWorkspaces()
    for (const path of this.workspacePaths) {
      const pj = resolve(path, 'package.json')
      // setWorkspaces has already parsed package.json so we know it won't error
      const pkg = await readFile(pj, 'utf8').then(data => JSON.parse(data))
      if (pkg.name && pkg.version) {
        results[pkg.name] = pkg.version
      }
    }
    return this.list(results)
  }
}
module.exports = Version

View File

@ -0,0 +1,472 @@
const columns = require('cli-columns')
const { readFile } = require('node:fs/promises')
const jsonParse = require('json-parse-even-better-errors')
const { log, output, META } = require('proc-log')
const npa = require('npm-package-arg')
const { resolve } = require('node:path')
const formatBytes = require('../utils/format-bytes.js')
const relativeDate = require('tiny-relative-date')
const semver = require('semver')
const { inspect } = require('node:util')
const { packument } = require('pacote')
const Queryable = require('../utils/queryable.js')
const BaseCommand = require('../base-cmd.js')
const { getError } = require('../utils/error-message.js')
const { jsonError, outputError } = require('../utils/output-error.js')
const readJson = file => readFile(file, 'utf8').then(jsonParse)
// `npm view` - fetch a package's packument and print either the whole
// document (pretty view) or selected fields, with workspace support.
class View extends BaseCommand {
  static description = 'View registry info'
  static name = 'view'
  static params = [
    'json',
    'workspace',
    'workspaces',
    'include-workspace-root',
  ]

  static workspaces = true
  static ignoreImplicitWorkspace = false
  static usage = ['[<package-spec>] [<field>[.subfield]...]']

  // Completion: once a package is typed, offer its packument / dist-tag
  // manifest field paths.
  static async completion (opts, npm) {
    if (opts.conf.argv.remain.length <= 2) {
      // There used to be registry completion here, but it stopped
      // making sense somewhere around 50,000 packages on the registry
      return
    }
    // have the package, get the fields
    const config = {
      ...npm.flatOptions,
      fullMetadata: true,
      preferOnline: true,
    }
    const spec = npa(opts.conf.argv.remain[2])
    const pckmnt = await packument(spec, config)
    const defaultTag = npm.config.get('tag')
    const dv = pckmnt.versions[pckmnt['dist-tags'][defaultTag]]
    pckmnt.versions = Object.keys(pckmnt.versions).sort(semver.compareLoose)
    return getCompletionFields(pckmnt).concat(getCompletionFields(dv))
  }

  // View one package.  A spec of '.' (or '.@<version>') means the package
  // in the current prefix, resolved from its package.json name.
  async exec (args) {
    let { pkg, local, rest } = parseArgs(args)
    if (local) {
      if (this.npm.global) {
        throw new Error('Cannot use view command in global mode.')
      }
      const dir = this.npm.prefix
      const manifest = await readJson(resolve(dir, 'package.json'))
      if (!manifest.name) {
        throw new Error('Invalid package.json, no "name" field')
      }
      // put the version back if it existed
      pkg = `${manifest.name}${pkg.slice(1)}`
    }
    await this.#viewPackage(pkg, rest)
  }

  // In workspace mode, run the view per workspace; E404s are reported but
  // don't stop the remaining workspaces.
  async execWorkspaces (args) {
    const { pkg, local, rest } = parseArgs(args)
    if (!local) {
      log.warn('Ignoring workspaces for specified package(s)')
      return this.exec([pkg, ...rest])
    }
    const json = this.npm.config.get('json')
    await this.setWorkspaces()
    for (const name of this.workspaceNames) {
      try {
        await this.#viewPackage(`${name}${pkg.slice(1)}`, rest, { workspace: true })
      } catch (e) {
        const err = getError(e, { npm: this.npm, command: this })
        if (err.code !== 'E404') {
          throw e
        }
        // buffer the per-workspace 404 as structured json, or print it
        if (json) {
          output.buffer({ [META]: true, jsonError: { [name]: jsonError(err, this.npm) } })
        } else {
          outputError(err)
        }
        process.exitCode = err.exitCode
      }
    }
  }

  // Fetch + format one package, honoring json vs pretty vs field output.
  async #viewPackage (name, args, { workspace } = {}) {
    const wholePackument = !args.length
    const json = this.npm.config.get('json')
    // If we are viewing many packages and outputting individual fields then
    // output the name before doing any async activity
    if (!json && !wholePackument && workspace) {
      output.standard(`${name}:`)
    }
    const [pckmnt, data] = await this.#getData(name, args, wholePackument)
    if (!json && wholePackument) {
      // pretty view (entire packument)
      for (const v of data) {
        output.standard(this.#prettyView(pckmnt, Object.values(v)[0][Queryable.ALL]))
      }
      return
    }
    const res = this.#packageOutput(cleanData(data, wholePackument), pckmnt._id)
    if (res) {
      if (json) {
        output.buffer(workspace ? { [name]: res } : res)
      } else {
        output.standard(res)
      }
    }
  }

  // Fetch the packument and extract the requested fields for every version
  // matching the spec.  Returns [packument, per-version-field-data].
  // NOTE(review): callers pass a third `wholePackument` argument that this
  // signature ignores.
  async #getData (pkg, args) {
    const spec = npa(pkg)
    const pckmnt = await packument(spec, {
      ...this.npm.flatOptions,
      preferOnline: true,
      fullMetadata: true,
    })
    // get the data about this package
    let version = this.npm.config.get('tag')
    // rawSpec is the git url if this is from git
    if (spec.type !== 'git' && spec.type !== 'directory' && spec.rawSpec !== '*') {
      version = spec.rawSpec
    }
    // a dist-tag name resolves to its tagged version
    if (pckmnt['dist-tags']?.[version]) {
      version = pckmnt['dist-tags'][version]
    }
    if (pckmnt.time?.unpublished) {
      const u = pckmnt.time.unpublished
      throw Object.assign(new Error(`Unpublished on ${u.time}`), {
        statusCode: 404,
        code: 'E404',
        pkgid: pckmnt._id,
      })
    }
    const versions = pckmnt.versions || {}
    // drop non-semver version keys, keep a sorted list of valid ones
    pckmnt.versions = Object.keys(versions).filter(v => {
      if (semver.valid(v)) {
        return true
      }
      log.info('view', `Ignoring invalid version: ${v}`)
      return false
    }).sort(semver.compareLoose)
    // remove readme unless we asked for it
    if (args.indexOf('readme') === -1) {
      delete pckmnt.readme
    }
    const data = Object.entries(versions)
      .filter(([v]) => semver.satisfies(v, version, true))
      .flatMap(([, v]) => {
        // remove readme unless we asked for it
        if (args.indexOf('readme') !== -1) {
          delete v.readme
        }
        return showFields({
          data: pckmnt,
          version: v,
          fields: args,
          json: this.npm.config.get('json'),
        })
      })
    // No data has been pushed because no data is matching the specified version
    if (!data.length && version !== 'latest') {
      throw Object.assign(new Error(`No match found for version ${version}`), {
        statusCode: 404,
        code: 'E404',
        pkgid: `${pckmnt._id}@${version}`,
      })
    }
    return [pckmnt, data]
  }

  // Render the { version: { field: value } } map as a json value or lines
  // of "name@version field = value" text.
  #packageOutput (data, name) {
    const json = this.npm.config.get('json')
    const versions = Object.keys(data)
    // prefix lines with name@version / field only when they disambiguate
    const includeVersions = versions.length > 1
    let includeFields
    const res = versions.flatMap((v) => {
      const fields = Object.entries(data[v])
      includeFields ||= (fields.length > 1)
      const msg = json ? {} : []
      for (let [f, d] of fields) {
        d = cleanup(d)
        if (json) {
          msg[f] = d
          continue
        }
        if (includeVersions || includeFields || typeof d !== 'string') {
          d = inspect(d, {
            showHidden: false,
            depth: 5,
            colors: this.npm.color,
            maxArrayLength: null,
          })
        }
        if (f && includeFields) {
          f += ' = '
        }
        msg.push(`${includeVersions ? `${name}@${v} ` : ''}${includeFields ? f : ''}${d}`)
      }
      return msg
    })
    if (json) {
      // TODO(BREAKING_CHANGE): all unwrapping should be removed. Users should know
      // based on their arguments if they can expect an array or an object. And this
      // unwrapping can break that assumption. Eg `npm view abbrev@^2` should always
      // return an array, but currently since there is only one version matching `^2`
      // this will return a single object instead.
      const first = Object.keys(res[0] || {})
      const jsonRes = first.length === 1 ? res.map(m => m[first[0]]) : res
      if (jsonRes.length === 0) {
        return
      }
      if (jsonRes.length === 1) {
        return jsonRes[0]
      }
      return jsonRes
    }
    return res.join('\n').trim()
  }

  // Human-friendly rendering of a whole packument + chosen manifest.
  #prettyView (packu, manifest) {
    // More modern, pretty printing of default view
    const unicode = this.npm.config.get('unicode')
    const chalk = this.npm.chalk
    const deps = Object.entries(manifest.dependencies || {}).map(([k, dep]) =>
      `${chalk.blue(k)}: ${dep}`
    )
    const site = manifest.homepage?.url || manifest.homepage
    const bins = Object.keys(manifest.bin || {})
    const licenseField = manifest.license || 'Proprietary'
    const license = typeof licenseField === 'string'
      ? licenseField
      : (licenseField.type || 'Proprietary')
    const res = []
    res.push('')
    res.push([
      chalk.underline.cyan(`${manifest.name}@${manifest.version}`),
      license.toLowerCase().trim() === 'proprietary'
        ? chalk.red(license)
        : chalk.green(license),
      `deps: ${deps.length ? chalk.cyan(deps.length) : chalk.cyan('none')}`,
      `versions: ${chalk.cyan(packu.versions.length + '')}`,
    ].join(' | '))
    manifest.description && res.push(manifest.description)
    if (site) {
      res.push(chalk.blue(site))
    }
    manifest.deprecated && res.push(
      `\n${chalk.redBright('DEPRECATED')}${unicode ? ' ⚠️ ' : '!!'} - ${manifest.deprecated}`
    )
    if (packu.keywords?.length) {
      res.push(`\nkeywords: ${
        packu.keywords.map(k => chalk.cyan(k)).join(', ')
      }`)
    }
    if (bins.length) {
      res.push(`\nbin: ${chalk.cyan(bins.join(', '))}`)
    }
    res.push('\ndist')
    res.push(`.tarball: ${chalk.blue(manifest.dist.tarball)}`)
    res.push(`.shasum: ${chalk.green(manifest.dist.shasum)}`)
    if (manifest.dist.integrity) {
      res.push(`.integrity: ${chalk.green(manifest.dist.integrity)}`)
    }
    if (manifest.dist.unpackedSize) {
      res.push(`.unpackedSize: ${chalk.blue(formatBytes(manifest.dist.unpackedSize, true))}`)
    }
    if (deps.length) {
      // only show the first 24 dependencies, then a "...and N more" line
      const maxDeps = 24
      res.push('\ndependencies:')
      res.push(columns(deps.slice(0, maxDeps), { padding: 1 }))
      if (deps.length > maxDeps) {
        res.push(chalk.dim(`(...and ${deps.length - maxDeps} more.)`))
      }
    }
    if (packu.maintainers?.length) {
      res.push('\nmaintainers:')
      packu.maintainers.forEach(u =>
        res.push(`- ${unparsePerson({
          name: chalk.blue(u.name),
          email: chalk.dim(u.email) })}`)
      )
    }
    res.push('\ndist-tags:')
    res.push(columns(Object.entries(packu['dist-tags']).map(([k, t]) =>
      `${chalk.blue(k)}: ${t}`
    )))
    const publisher = manifest._npmUser && unparsePerson({
      name: chalk.blue(manifest._npmUser.name),
      email: chalk.dim(manifest._npmUser.email),
    })
    if (publisher || packu.time) {
      let publishInfo = 'published'
      if (packu.time) {
        publishInfo += ` ${chalk.cyan(relativeDate(packu.time[manifest.version]))}`
      }
      if (publisher) {
        publishInfo += ` by ${publisher}`
      }
      res.push('')
      res.push(publishInfo)
    }
    return res.join('\n')
  }
}
module.exports = View
// Split argv into the package spec (first arg, defaulting to '.') and the
// remaining field selectors.  `local` is true when the spec refers to the
// package in the current directory ('.' or '.@<version>').
function parseArgs (args) {
  const argv = args.length ? args : ['.']
  const pkg = argv.shift()
  const isLocal = pkg === '.' || /^\.@/.test(pkg)
  return { pkg, local: isLocal, rest: argv }
}
// Merge the per-version query results into one { field: { version: value } }
// map.  When the whole packument was requested, unwrap each field's
// Queryable.ALL entry and log the result.
function cleanData (obj, wholePackument) {
  const data = {}
  for (const cur of obj) {
    if (!cur) {
      continue
    }
    for (const [field, byVersion] of Object.entries(cur)) {
      data[field] ||= {}
      for (const versionKey of Object.keys(byVersion)) {
        data[field][versionKey] = byVersion[versionKey]
      }
    }
  }
  if (!wholePackument) {
    return data
  }
  const cleaned = {}
  for (const [field, byVersion] of Object.entries(data)) {
    cleaned[field] = byVersion[Queryable.ALL]
  }
  log.silly('view', cleaned)
  return cleaned
}
// Query the merged packument+manifest for each requested field; with no
// fields, return the whole merged document keyed by version.
function showFields ({ data, version, fields, json }) {
  // manifest (version) entries take precedence over packument (data) entries
  const merged = { ...data, ...version }
  const queryable = new Queryable(merged)
  if (!fields.length) {
    return { [version.version]: queryable.query(Queryable.ALL) }
  }
  return fields.map((field) => {
    const result = queryable.query(field, { unwrapSingleItemArrays: !json })
    return result ? { [version.version]: result } : undefined
  })
}
// Recursively normalize registry data for display: an object shaped like a
// person ({ name [, email][, url] } and nothing else) collapses to the
// "name <email> (url)" string form; everything else passes through as-is.
function cleanup (data) {
  if (Array.isArray(data)) {
    return data.map((item) => cleanup(item))
  }
  if (!data || typeof data !== 'object') {
    return data
  }
  const keys = Object.keys(data)
  const looksLikePerson = keys.length <= 3 && data.name && (
    keys.length === 1 ||
    (keys.length === 3 && data.email && data.url) ||
    (keys.length === 2 && (data.email || data.url))
  )
  return looksLikePerson ? unparsePerson(data) : data
}
// Render a person object as "name <email> (url)", omitting missing parts.
const unparsePerson = (person) => {
  const parts = [person.name]
  if (person.email) {
    parts.push(`<${person.email}>`)
  }
  if (person.url) {
    parts.push(`(${person.url})`)
  }
  return parts.join(' ')
}
// Walk a packument/manifest and collect dotted field paths (plus "[i]" for
// scalar array entries) for shell completion.  Keys starting with '_' or
// containing '.' are skipped.  Mutates and returns `f`.
function getCompletionFields (d, f = [], pref = []) {
  Object.entries(d).forEach(([k, v]) => {
    if (k.charAt(0) === '_' || k.indexOf('.') !== -1) {
      return
    }
    const p = pref.concat(k).join('.')
    f.push(p)
    if (Array.isArray(v)) {
      v.forEach((val, i) => {
        const pi = p + '[' + i + ']'
        if (val && typeof val === 'object') {
          getCompletionFields(val, f, [p])
        } else {
          f.push(pi)
        }
      })
      return
    }
    // Guard against null: `typeof null === 'object'`, so without this check
    // a null field value would reach Object.entries() and throw.
    if (v !== null && typeof v === 'object') {
      getCompletionFields(v, f, [p])
    }
  })
  return f
}

View File

@ -0,0 +1,20 @@
const { output } = require('proc-log')
const getIdentity = require('../utils/get-identity.js')
const BaseCommand = require('../base-cmd.js')
// `npm whoami` - resolve and print the username for the active registry.
class Whoami extends BaseCommand {
  static description = 'Display npm username'
  static name = 'whoami'
  static params = ['registry']

  async exec () {
    const opts = { ...this.npm.flatOptions }
    const username = await getIdentity(this.npm, opts)
    const asJson = this.npm.config.get('json')
    if (asJson) {
      output.buffer(username)
      return
    }
    output.standard(username)
  }
}
module.exports = Whoami